file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
urls.py | import logging
from django.conf.urls import include, url
from django.core.exceptions import ImproperlyConfigured
from corehq.apps.reports.standard.forms.reports import ReprocessXFormErrorView
from corehq.apps.userreports.reports.view import (
ConfigurableReportView,
CustomConfigurableReportDispatcher,
)
from corehq.apps.userreports.views import (
ConfigureReport,
EditReportInBuilder,
ReportBuilderDataSourceSelect,
ReportBuilderPaywallActivatingSubscription,
ReportBuilderPaywallPricing,
ReportPreview,
)
from .dispatcher import (
CustomProjectReportDispatcher,
ProjectReportDispatcher,
)
from .filters import urls as filter_urls
from .util import get_installed_custom_modules
from .views import (
AddSavedReportConfigView,
CaseAttachmentsView,
CaseDataView,
EditFormInstance,
FormDataView,
MySavedReportsView,
ScheduledReportsView,
archive_form,
case_form_data,
case_forms,
case_property_changes,
case_property_names,
case_xml,
close_case_view,
delete_config,
delete_scheduled_report,
download_case_history,
download_form,
edit_case_view,
edit_form,
email_report,
export_case_transactions,
export_report,
project_health_user_details,
rebuild_case_view,
resave_case_view,
resave_form_view,
restore_edit,
send_test_scheduled_report,
unarchive_form,
undo_close_case_view,
view_scheduled_report,
)
custom_report_urls = [
CustomProjectReportDispatcher.url_pattern(),
]
urlpatterns = [
ConfigurableReportView.url_pattern(),
CustomConfigurableReportDispatcher.url_pattern(),
# Report Builder
url(r'^builder/select_source/$', ReportBuilderDataSourceSelect.as_view(),
name=ReportBuilderDataSourceSelect.urlname),
url(r'^builder/configure/$', ConfigureReport.as_view(), name=ConfigureReport.urlname),
url(r'^builder/preview/(?P<data_source>[\w\-]+)/$', ReportPreview.as_view(), name=ReportPreview.urlname),
url(r'^builder/edit/(?P<report_id>[\w\-]+)/$', EditReportInBuilder.as_view(), name='edit_report_in_builder'),
url(r'builder/subscribe/pricing/$', ReportBuilderPaywallPricing.as_view(),
name=ReportBuilderPaywallPricing.urlname),
url(r'builder/subscribe/activating_subscription/$', ReportBuilderPaywallActivatingSubscription.as_view(),
name=ReportBuilderPaywallActivatingSubscription.urlname),
url(r'^$', MySavedReportsView.as_view(), name="reports_home"),
url(r'^saved/', MySavedReportsView.as_view(), name=MySavedReportsView.urlname),
url(r'^saved_reports', MySavedReportsView.as_view(), name="old_saved_reports"),
url(r'^case_data/(?P<case_id>[\w\-]+)/$', CaseDataView.as_view(), name=CaseDataView.urlname),
url(r'^case_data/(?P<case_id>[\w\-]+)/forms/$', case_forms, name="single_case_forms"),
url(r'^case_data/(?P<case_id>[\w\-]+)/attachments/$',
CaseAttachmentsView.as_view(), name=CaseAttachmentsView.urlname),
url(r'^case_data/(?P<case_id>[\w\-]+)/view/xml/$', case_xml, name="single_case_xml"),
url(r'^case_data/(?P<case_id>[\w\-]+)/properties/$', case_property_names, name="case_property_names"),
url(r'^case_data/(?P<case_id>[\w\-]+)/history/$', download_case_history, name="download_case_history"),
url(r'^case_data/(?P<case_id>[\w\-]+)/edit/$', edit_case_view, name="edit_case"),
url(r'^case_data/(?P<case_id>[\w\-]+)/rebuild/$', rebuild_case_view, name="rebuild_case"),
url(r'^case_data/(?P<case_id>[\w\-]+)/resave/$', resave_case_view, name="resave_case"),
url(r'^case_data/(?P<case_id>[\w\-]+)/close/$', close_case_view, name="close_case"),
url(r'^case_data/(?P<case_id>[\w\-]+)/undo-close/(?P<xform_id>[\w\-:]+)/$',
undo_close_case_view, name="undo_close_case"),
url(r'^case_data/(?P<case_id>[\w\-]+)/export_transactions/$',
export_case_transactions, name="export_case_transactions"),
url(r'^case_data/(?P<case_id>[\w\-]+)/(?P<xform_id>[\w\-:]+)/$', case_form_data, name="case_form_data"),
url(r'^case_data/(?P<case_id>[\w\-]+)/case_property/(?P<case_property_name>[\w_\-.]+)/$',
case_property_changes, name="case_property_changes"),
# Download and view form data
url(r'^form_data/(?P<instance_id>[\w\-:]+)/$', FormDataView.as_view(), name=FormDataView.urlname),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/download/$', download_form, name='download_form'),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/edit/$', EditFormInstance.as_view(), name='edit_form_instance'),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/restore_version/$', restore_edit, name='restore_edit'),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/correct_data/$', edit_form, name='edit_form'),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/archive/$', archive_form, name='archive_form'),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/unarchive/$', unarchive_form, name='unarchive_form'),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/rebuild/$', resave_form_view, name='resave_form'),
# project health ajax
url(r'^project_health/ajax/(?P<user_id>[\w\-]+)/$', project_health_user_details,
name='project_health_user_details'),
# Full Excel export
url(r'^full_excel_export/(?P<export_hash>[\w\-]+)/(?P<format>[\w\-]+)$', export_report, name="export_report"),
# once off email
url(r"^email_onceoff/(?P<report_slug>[\w_]+)/$", email_report, kwargs=dict(once=True), name='email_report'),
url(r"^custom/email_onceoff/(?P<report_slug>[\w_]+)/$", email_report,
kwargs=dict(report_type=CustomProjectReportDispatcher.prefix, once=True), name='email_onceoff'),
# Saved reports
url(r"^configs$", AddSavedReportConfigView.as_view(), name=AddSavedReportConfigView.name),
url(r"^configs/(?P<config_id>[\w-]+)$", delete_config,
name='delete_report_config'),
| delete_scheduled_report, name='delete_scheduled_report'),
url(r'^send_test_scheduled_report/(?P<scheduled_report_id>[\w-]+)/$',
send_test_scheduled_report, name='send_test_scheduled_report'),
url(r'^view_scheduled_report/(?P<scheduled_report_id>[\w_]+)/$',
view_scheduled_report, name='view_scheduled_report'),
# V2 Reports
url(r'^v2/', include('corehq.apps.reports.v2.urls')),
# Internal Use
url(r'^reprocess_error_form/$', ReprocessXFormErrorView.as_view(),
name=ReprocessXFormErrorView.urlname),
url(r'^custom/', include(custom_report_urls)),
url(r'^filters/', include(filter_urls)),
ProjectReportDispatcher.url_pattern(),
]
for module in get_installed_custom_modules():
module_name = module.__name__.split('.')[-1]
try:
custom_report_urls += [
url(r"^%s/" % module_name, include('{0}.urls'.format(module.__name__))),
]
except ImproperlyConfigured:
logging.info("Module %s does not provide urls" % module_name) | # Scheduled reports
url(r'^scheduled_reports/(?P<scheduled_report_id>[\w-]+)?$',
ScheduledReportsView.as_view(), name=ScheduledReportsView.urlname),
url(r'^scheduled_report/(?P<scheduled_report_id>[\w-]+)/delete$', |
chain_balance.rs | // Copyright 2020, The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use crate::{
blocks::BlockHeader,
chain_storage::{BlockchainBackend, BlockchainDatabase},
consensus::ConsensusManager,
transactions::{
tari_amount::MicroTari,
types::{Commitment, CryptoFactories, HashOutput, PrivateKey},
},
validation::{FinalHeaderStateValidation, ValidationError},
};
use log::*;
use tari_crypto::{commitment::HomomorphicCommitmentFactory, tari_utilities::hash::Hashable};
const LOG_TARGET: &str = "c::bn::states::horizon_state_sync::chain_balance";
/// Validate that the chain balances at a given height.
pub struct ChainBalanceValidator<B> {
rules: ConsensusManager,
db: BlockchainDatabase<B>,
factories: CryptoFactories,
}
impl<B: BlockchainBackend> ChainBalanceValidator<B> {
pub fn new(db: BlockchainDatabase<B>, rules: ConsensusManager, factories: CryptoFactories) -> Self {
Self { db, rules, factories }
}
}
impl<B: BlockchainBackend> FinalHeaderStateValidation for ChainBalanceValidator<B> {
fn validate(&self, horizon_header: &BlockHeader) -> Result<(), ValidationError> {
let hash = horizon_header.hash();
let emission_h = self.get_emission_commitment_at(horizon_header.height);
let kernel_excess = self.db.fetch_kernel_commitment_sum(&hash)?;
let output = self.db.fetch_utxo_commitment_sum(&hash)?;
let total_offset = self.fetch_total_offset_commitment(hash)?;
let input = &(&emission_h + &kernel_excess) + &total_offset;
if output != input {
return Err(ValidationError::ChainBalanceValidationFailed(horizon_header.height));
}
Ok(())
}
}
impl<B: BlockchainBackend> ChainBalanceValidator<B> {
fn fetch_total_offset_commitment(&self, hash: HashOutput) -> Result<Commitment, ValidationError> {
let offset = self
.db
.fetch_header_accumulated_data(hash)?
.ok_or_else(|| ValidationError::CustomError("Could not find header accumulated data".to_string()))?
.total_kernel_offset;
Ok(self.factories.commitment.commit(&offset, &0u64.into()))
}
fn get_emission_commitment_at(&self, height: u64) -> Commitment {
let total_supply =
self.rules.get_total_emission_at(height) + self.rules.consensus_constants(height).faucet_value();
trace!(
target: LOG_TARGET,
"Expected emission at height {} is {}",
height,
total_supply
);
self.commit_value(total_supply)
}
#[inline]
fn commit_value(&self, v: MicroTari) -> Commitment |
}
| {
self.factories.commitment.commit_value(&PrivateKey::default(), v.into())
} |
dns.go | /*
Copyright AppsCode Inc. and Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by lister-gen. DO NOT EDIT.
package v1alpha1
import (
v1alpha1 "kubeform.dev/provider-alicloud-api/apis/dns/v1alpha1"
"k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/client-go/tools/cache"
)
// DnsLister helps list Dnses.
// All objects returned here must be treated as read-only.
type DnsLister interface {
// List lists all Dnses in the indexer.
// Objects returned here must be treated as read-only.
List(selector labels.Selector) (ret []*v1alpha1.Dns, err error)
// Dnses returns an object that can list and get Dnses.
Dnses(namespace string) DnsNamespaceLister
DnsListerExpansion
}
// dnsLister implements the DnsLister interface.
type dnsLister struct {
indexer cache.Indexer
}
// NewDnsLister returns a new DnsLister.
func NewDnsLister(indexer cache.Indexer) DnsLister {
return &dnsLister{indexer: indexer}
}
// List lists all Dnses in the indexer.
func (s *dnsLister) List(selector labels.Selector) (ret []*v1alpha1.Dns, err error) {
err = cache.ListAll(s.indexer, selector, func(m interface{}) {
ret = append(ret, m.(*v1alpha1.Dns))
})
return ret, err
}
// Dnses returns an object that can list and get Dnses.
func (s *dnsLister) Dnses(namespace string) DnsNamespaceLister {
return dnsNamespaceLister{indexer: s.indexer, namespace: namespace}
}
// DnsNamespaceLister helps list and get Dnses.
// All objects returned here must be treated as read-only.
type DnsNamespaceLister interface {
// List lists all Dnses in the indexer for a given namespace.
// Objects returned here must be treated as read-only.
List(selector labels.Selector) (ret []*v1alpha1.Dns, err error)
// Get retrieves the Dns from the indexer for a given namespace and name.
// Objects returned here must be treated as read-only.
Get(name string) (*v1alpha1.Dns, error)
DnsNamespaceListerExpansion
}
// dnsNamespaceLister implements the DnsNamespaceLister
// interface.
type dnsNamespaceLister struct {
indexer cache.Indexer
namespace string
}
// List lists all Dnses in the indexer for a given namespace.
func (s dnsNamespaceLister) List(selector labels.Selector) (ret []*v1alpha1.Dns, err error) {
err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) {
ret = append(ret, m.(*v1alpha1.Dns))
})
return ret, err
}
// Get retrieves the Dns from the indexer for a given namespace and name.
func (s dnsNamespaceLister) Get(name string) (*v1alpha1.Dns, error) {
obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name)
if err != nil {
return nil, err
}
if !exists |
return obj.(*v1alpha1.Dns), nil
}
| {
return nil, errors.NewNotFound(v1alpha1.Resource("dns"), name)
} |
webhook_test.go | /*
Copyright 2018 Bitnami
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package webhook
import (
"fmt"
"reflect"
"testing"
"github.com/tghaas/kubewatch/config"
)
func TestWebhookInit(t *testing.T) | {
s := &Webhook{}
expectedError := fmt.Errorf(webhookErrMsg, "Missing Webhook url")
var Tests = []struct {
webhook config.Webhook
err error
}{
{config.Webhook{Url: "foo"}, nil},
{config.Webhook{}, expectedError},
}
for _, tt := range Tests {
c := &config.Config{}
c.Handler.Webhook = tt.webhook
if err := s.Init(c); !reflect.DeepEqual(err, tt.err) {
t.Fatalf("Init(): %v", err)
}
}
} |
|
legacy_registry.go | /*
Copyright 2019 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package plugins
import (
"encoding/json"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/sets"
utilfeature "k8s.io/apiserver/pkg/util/feature"
"k8s.io/klog"
"k8s.io/kubernetes/pkg/features"
"k8s.io/kubernetes/pkg/scheduler/apis/config"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/defaultpodtopologyspread"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/imagelocality"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/interpodaffinity"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/nodeaffinity"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/nodelabel"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/nodename"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/nodeports"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/nodepreferavoidpods"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/noderesources"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/nodeunschedulable"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/nodevolumelimits"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/podtopologyspread"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/serviceaffinity"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/tainttoleration"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/volumebinding"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/volumerestrictions"
"k8s.io/kubernetes/pkg/scheduler/framework/plugins/volumezone"
)
const (
// EqualPriority defines the name of prioritizer function that gives an equal weight of one to all nodes.
EqualPriority = "EqualPriority"
// MostRequestedPriority defines the name of prioritizer function that gives used nodes higher priority.
MostRequestedPriority = "MostRequestedPriority"
// RequestedToCapacityRatioPriority defines the name of RequestedToCapacityRatioPriority.
RequestedToCapacityRatioPriority = "RequestedToCapacityRatioPriority"
// SelectorSpreadPriority defines the name of prioritizer function that spreads pods by minimizing
// the number of pods (belonging to the same service or replication controller) on the same node.
SelectorSpreadPriority = "SelectorSpreadPriority"
// ServiceSpreadingPriority is largely replaced by "SelectorSpreadPriority".
ServiceSpreadingPriority = "ServiceSpreadingPriority"
// InterPodAffinityPriority defines the name of prioritizer function that decides which pods should or
// should not be placed in the same topological domain as some other pods.
InterPodAffinityPriority = "InterPodAffinityPriority"
// LeastRequestedPriority defines the name of prioritizer function that prioritize nodes by least
// requested utilization.
LeastRequestedPriority = "LeastRequestedPriority"
// BalancedResourceAllocation defines the name of prioritizer function that prioritizes nodes
// to help achieve balanced resource usage.
BalancedResourceAllocation = "BalancedResourceAllocation"
// NodePreferAvoidPodsPriority defines the name of prioritizer function that priorities nodes according to
// the node annotation "scheduler.alpha.kubernetes.io/preferAvoidPods".
NodePreferAvoidPodsPriority = "NodePreferAvoidPodsPriority"
// NodeAffinityPriority defines the name of prioritizer function that prioritizes nodes which have labels
// matching NodeAffinity.
NodeAffinityPriority = "NodeAffinityPriority"
// TaintTolerationPriority defines the name of prioritizer function that prioritizes nodes that marked
// with taint which pod can tolerate.
TaintTolerationPriority = "TaintTolerationPriority"
// ImageLocalityPriority defines the name of prioritizer function that prioritizes nodes that have images
// requested by the pod present.
ImageLocalityPriority = "ImageLocalityPriority"
// ResourceLimitsPriority defines the nodes of prioritizer function ResourceLimitsPriority.
ResourceLimitsPriority = "ResourceLimitsPriority"
// EvenPodsSpreadPriority defines the name of prioritizer function that prioritizes nodes
// which have pods and labels matching the incoming pod's topologySpreadConstraints.
EvenPodsSpreadPriority = "EvenPodsSpreadPriority"
)
const (
// MatchInterPodAffinityPred defines the name of predicate MatchInterPodAffinity.
MatchInterPodAffinityPred = "MatchInterPodAffinity"
// CheckVolumeBindingPred defines the name of predicate CheckVolumeBinding.
CheckVolumeBindingPred = "CheckVolumeBinding"
// GeneralPred defines the name of predicate GeneralPredicates.
GeneralPred = "GeneralPredicates"
// HostNamePred defines the name of predicate HostName.
HostNamePred = "HostName"
// PodFitsHostPortsPred defines the name of predicate PodFitsHostPorts.
PodFitsHostPortsPred = "PodFitsHostPorts"
// MatchNodeSelectorPred defines the name of predicate MatchNodeSelector.
MatchNodeSelectorPred = "MatchNodeSelector"
// PodFitsResourcesPred defines the name of predicate PodFitsResources.
PodFitsResourcesPred = "PodFitsResources"
// NoDiskConflictPred defines the name of predicate NoDiskConflict.
NoDiskConflictPred = "NoDiskConflict"
// PodToleratesNodeTaintsPred defines the name of predicate PodToleratesNodeTaints.
PodToleratesNodeTaintsPred = "PodToleratesNodeTaints"
// CheckNodeUnschedulablePred defines the name of predicate CheckNodeUnschedulablePredicate.
CheckNodeUnschedulablePred = "CheckNodeUnschedulable"
// CheckNodeLabelPresencePred defines the name of predicate CheckNodeLabelPresence.
CheckNodeLabelPresencePred = "CheckNodeLabelPresence"
// CheckServiceAffinityPred defines the name of predicate checkServiceAffinity.
CheckServiceAffinityPred = "CheckServiceAffinity"
// MaxEBSVolumeCountPred defines the name of predicate MaxEBSVolumeCount.
// DEPRECATED
// All cloudprovider specific predicates are deprecated in favour of MaxCSIVolumeCountPred.
MaxEBSVolumeCountPred = "MaxEBSVolumeCount"
// MaxGCEPDVolumeCountPred defines the name of predicate MaxGCEPDVolumeCount.
// DEPRECATED
// All cloudprovider specific predicates are deprecated in favour of MaxCSIVolumeCountPred.
MaxGCEPDVolumeCountPred = "MaxGCEPDVolumeCount"
// MaxAzureDiskVolumeCountPred defines the name of predicate MaxAzureDiskVolumeCount.
// DEPRECATED
// All cloudprovider specific predicates are deprecated in favour of MaxCSIVolumeCountPred.
MaxAzureDiskVolumeCountPred = "MaxAzureDiskVolumeCount"
// MaxCinderVolumeCountPred defines the name of predicate MaxCinderDiskVolumeCount.
// DEPRECATED
// All cloudprovider specific predicates are deprecated in favour of MaxCSIVolumeCountPred.
MaxCinderVolumeCountPred = "MaxCinderVolumeCount"
// MaxCSIVolumeCountPred defines the predicate that decides how many CSI volumes should be attached.
MaxCSIVolumeCountPred = "MaxCSIVolumeCountPred"
// NoVolumeZoneConflictPred defines the name of predicate NoVolumeZoneConflict.
NoVolumeZoneConflictPred = "NoVolumeZoneConflict"
// EvenPodsSpreadPred defines the name of predicate EvenPodsSpread.
EvenPodsSpreadPred = "EvenPodsSpread"
)
// PredicateOrdering returns the ordering of predicate execution.
func PredicateOrdering() []string {
return []string{CheckNodeUnschedulablePred,
GeneralPred, HostNamePred, PodFitsHostPortsPred,
MatchNodeSelectorPred, PodFitsResourcesPred, NoDiskConflictPred,
PodToleratesNodeTaintsPred, CheckNodeLabelPresencePred,
CheckServiceAffinityPred, MaxEBSVolumeCountPred, MaxGCEPDVolumeCountPred, MaxCSIVolumeCountPred,
MaxAzureDiskVolumeCountPred, MaxCinderVolumeCountPred, CheckVolumeBindingPred, NoVolumeZoneConflictPred,
EvenPodsSpreadPred, MatchInterPodAffinityPred}
}
// LegacyRegistry is used to store current state of registered predicates and priorities.
type LegacyRegistry struct {
// maps that associate predicates/priorities with framework plugin configurations.
PredicateToConfigProducer map[string]ConfigProducer
PriorityToConfigProducer map[string]ConfigProducer
// predicates that will always be configured.
MandatoryPredicates sets.String
// predicates and priorities that will be used if either was set to nil in a
// given v1.Policy configuration.
DefaultPredicates sets.String
DefaultPriorities map[string]int64
}
// ConfigProducerArgs contains arguments that are passed to the producer.
// As we add more predicates/priorities to framework plugins mappings, more arguments
// may be added here.
type ConfigProducerArgs struct {
// Weight used for priority functions.
Weight int32
// NodeLabelArgs is the args for the NodeLabel plugin.
NodeLabelArgs *nodelabel.Args
// RequestedToCapacityRatioArgs is the args for the RequestedToCapacityRatio plugin.
RequestedToCapacityRatioArgs *noderesources.RequestedToCapacityRatioArgs
// ServiceAffinityArgs is the args for the ServiceAffinity plugin.
ServiceAffinityArgs *serviceaffinity.Args
// NodeResourcesFitArgs is the args for the NodeResources fit filter.
NodeResourcesFitArgs *noderesources.FitArgs
// InterPodAffinityArgs is the args for InterPodAffinity plugin
InterPodAffinityArgs *interpodaffinity.Args
}
// ConfigProducer returns the set of plugins and their configuration for a
// predicate/priority given the args.
type ConfigProducer func(args ConfigProducerArgs) (config.Plugins, []config.PluginConfig)
// NewLegacyRegistry returns a legacy algorithm registry of predicates and priorities.
func NewLegacyRegistry() *LegacyRegistry {
registry := &LegacyRegistry{
// MandatoryPredicates the set of keys for predicates that the scheduler will
// be configured with all the time.
MandatoryPredicates: sets.NewString(
PodToleratesNodeTaintsPred,
CheckNodeUnschedulablePred,
),
// Used as the default set of predicates if Policy was specified, but predicates was nil.
DefaultPredicates: sets.NewString(
NoVolumeZoneConflictPred,
MaxEBSVolumeCountPred,
MaxGCEPDVolumeCountPred,
MaxAzureDiskVolumeCountPred,
MaxCSIVolumeCountPred,
MatchInterPodAffinityPred,
NoDiskConflictPred,
GeneralPred,
PodToleratesNodeTaintsPred,
CheckVolumeBindingPred,
CheckNodeUnschedulablePred,
),
// Used as the default set of predicates if Policy was specified, but priorities was nil.
DefaultPriorities: map[string]int64{
SelectorSpreadPriority: 1,
InterPodAffinityPriority: 1,
LeastRequestedPriority: 1,
BalancedResourceAllocation: 1,
NodePreferAvoidPodsPriority: 10000,
NodeAffinityPriority: 1,
TaintTolerationPriority: 1,
ImageLocalityPriority: 1,
},
PredicateToConfigProducer: make(map[string]ConfigProducer),
PriorityToConfigProducer: make(map[string]ConfigProducer),
}
registry.registerPredicateConfigProducer(GeneralPred,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
// GeneralPredicate is a combination of predicates.
plugins.Filter = appendToPluginSet(plugins.Filter, noderesources.FitName, nil)
plugins.PreFilter = appendToPluginSet(plugins.PreFilter, noderesources.FitName, nil)
if args.NodeResourcesFitArgs != nil {
pluginConfig = append(pluginConfig, NewPluginConfig(noderesources.FitName, args.NodeResourcesFitArgs))
}
plugins.Filter = appendToPluginSet(plugins.Filter, nodename.Name, nil)
plugins.Filter = appendToPluginSet(plugins.Filter, nodeports.Name, nil)
plugins.PreFilter = appendToPluginSet(plugins.PreFilter, nodeports.Name, nil)
plugins.Filter = appendToPluginSet(plugins.Filter, nodeaffinity.Name, nil)
return
})
registry.registerPredicateConfigProducer(PodToleratesNodeTaintsPred,
func(_ ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, tainttoleration.Name, nil)
return
})
registry.registerPredicateConfigProducer(PodFitsResourcesPred,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, noderesources.FitName, nil)
plugins.PreFilter = appendToPluginSet(plugins.PreFilter, noderesources.FitName, nil)
if args.NodeResourcesFitArgs != nil {
pluginConfig = append(pluginConfig, NewPluginConfig(noderesources.FitName, args.NodeResourcesFitArgs))
}
return
})
registry.registerPredicateConfigProducer(HostNamePred,
func(_ ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, nodename.Name, nil)
return
})
registry.registerPredicateConfigProducer(PodFitsHostPortsPred,
func(_ ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, nodeports.Name, nil)
plugins.PreFilter = appendToPluginSet(plugins.PreFilter, nodeports.Name, nil)
return
})
registry.registerPredicateConfigProducer(MatchNodeSelectorPred,
func(_ ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, nodeaffinity.Name, nil)
return
})
registry.registerPredicateConfigProducer(CheckNodeUnschedulablePred,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, nodeunschedulable.Name, nil)
return
})
registry.registerPredicateConfigProducer(CheckVolumeBindingPred,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, volumebinding.Name, nil)
return
})
registry.registerPredicateConfigProducer(NoDiskConflictPred,
func(_ ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, volumerestrictions.Name, nil)
return
})
registry.registerPredicateConfigProducer(NoVolumeZoneConflictPred,
func(_ ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, volumezone.Name, nil)
return
})
registry.registerPredicateConfigProducer(MaxCSIVolumeCountPred,
func(_ ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, nodevolumelimits.CSIName, nil)
return
})
registry.registerPredicateConfigProducer(MaxEBSVolumeCountPred,
func(_ ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, nodevolumelimits.EBSName, nil)
return
})
registry.registerPredicateConfigProducer(MaxGCEPDVolumeCountPred,
func(_ ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, nodevolumelimits.GCEPDName, nil)
return
})
registry.registerPredicateConfigProducer(MaxAzureDiskVolumeCountPred,
func(_ ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, nodevolumelimits.AzureDiskName, nil)
return
})
registry.registerPredicateConfigProducer(MaxCinderVolumeCountPred,
func(_ ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, nodevolumelimits.CinderName, nil)
return
})
registry.registerPredicateConfigProducer(MatchInterPodAffinityPred,
func(_ ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, interpodaffinity.Name, nil)
plugins.PreFilter = appendToPluginSet(plugins.PreFilter, interpodaffinity.Name, nil)
return
})
registry.registerPredicateConfigProducer(CheckNodeLabelPresencePred,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, nodelabel.Name, nil)
if args.NodeLabelArgs != nil {
pluginConfig = append(pluginConfig, NewPluginConfig(nodelabel.Name, args.NodeLabelArgs))
}
return
})
registry.registerPredicateConfigProducer(CheckServiceAffinityPred,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Filter = appendToPluginSet(plugins.Filter, serviceaffinity.Name, nil)
if args.ServiceAffinityArgs != nil {
pluginConfig = append(pluginConfig, NewPluginConfig(serviceaffinity.Name, args.ServiceAffinityArgs))
}
plugins.PreFilter = appendToPluginSet(plugins.PreFilter, serviceaffinity.Name, nil)
return
})
// Register Priorities.
registry.registerPriorityConfigProducer(SelectorSpreadPriority,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Score = appendToPluginSet(plugins.Score, defaultpodtopologyspread.Name, &args.Weight)
plugins.PreScore = appendToPluginSet(plugins.PreScore, defaultpodtopologyspread.Name, nil)
return
})
registry.registerPriorityConfigProducer(TaintTolerationPriority,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.PreScore = appendToPluginSet(plugins.PreScore, tainttoleration.Name, nil)
plugins.Score = appendToPluginSet(plugins.Score, tainttoleration.Name, &args.Weight)
return
})
registry.registerPriorityConfigProducer(NodeAffinityPriority,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Score = appendToPluginSet(plugins.Score, nodeaffinity.Name, &args.Weight)
return
})
registry.registerPriorityConfigProducer(ImageLocalityPriority,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Score = appendToPluginSet(plugins.Score, imagelocality.Name, &args.Weight)
return
})
registry.registerPriorityConfigProducer(InterPodAffinityPriority,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.PreScore = appendToPluginSet(plugins.PreScore, interpodaffinity.Name, nil)
plugins.Score = appendToPluginSet(plugins.Score, interpodaffinity.Name, &args.Weight)
if args.InterPodAffinityArgs != nil {
pluginConfig = append(pluginConfig, NewPluginConfig(interpodaffinity.Name, args.InterPodAffinityArgs))
}
return
})
registry.registerPriorityConfigProducer(NodePreferAvoidPodsPriority,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Score = appendToPluginSet(plugins.Score, nodepreferavoidpods.Name, &args.Weight)
return
})
registry.registerPriorityConfigProducer(MostRequestedPriority,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Score = appendToPluginSet(plugins.Score, noderesources.MostAllocatedName, &args.Weight)
return
})
registry.registerPriorityConfigProducer(BalancedResourceAllocation,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Score = appendToPluginSet(plugins.Score, noderesources.BalancedAllocationName, &args.Weight)
return
})
registry.registerPriorityConfigProducer(LeastRequestedPriority,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Score = appendToPluginSet(plugins.Score, noderesources.LeastAllocatedName, &args.Weight)
return
})
registry.registerPriorityConfigProducer(noderesources.RequestedToCapacityRatioName,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.Score = appendToPluginSet(plugins.Score, noderesources.RequestedToCapacityRatioName, &args.Weight)
if args.RequestedToCapacityRatioArgs != nil {
pluginConfig = append(pluginConfig, NewPluginConfig(noderesources.RequestedToCapacityRatioName, args.RequestedToCapacityRatioArgs))
}
return
})
registry.registerPriorityConfigProducer(nodelabel.Name,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
// If there are n LabelPreference priorities in the policy, the weight for the corresponding
// score plugin is n*weight (note that the validation logic verifies that all LabelPreference
// priorities specified in Policy have the same weight).
weight := args.Weight * int32(len(args.NodeLabelArgs.PresentLabelsPreference)+len(args.NodeLabelArgs.AbsentLabelsPreference))
plugins.Score = appendToPluginSet(plugins.Score, nodelabel.Name, &weight)
if args.NodeLabelArgs != nil {
pluginConfig = append(pluginConfig, NewPluginConfig(nodelabel.Name, args.NodeLabelArgs))
}
return
})
registry.registerPriorityConfigProducer(serviceaffinity.Name,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
// If there are n ServiceAffinity priorities in the policy, the weight for the corresponding
// score plugin is n*weight (note that the validation logic verifies that all ServiceAffinity
// priorities specified in Policy have the same weight).
weight := args.Weight * int32(len(args.ServiceAffinityArgs.AntiAffinityLabelsPreference))
plugins.Score = appendToPluginSet(plugins.Score, serviceaffinity.Name, &weight)
if args.ServiceAffinityArgs != nil {
pluginConfig = append(pluginConfig, NewPluginConfig(serviceaffinity.Name, args.ServiceAffinityArgs))
}
return
})
// The following two features are the last ones to be supported as predicate/priority.
// Once they graduate to GA, there will be no more checking for feature gates here.
// Only register EvenPodsSpread predicate & priority if the feature is enabled
if utilfeature.DefaultFeatureGate.Enabled(features.EvenPodsSpread) {
klog.Infof("Registering EvenPodsSpread predicate and priority function")
registry.registerPredicateConfigProducer(EvenPodsSpreadPred,
func(_ ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.PreFilter = appendToPluginSet(plugins.PreFilter, podtopologyspread.Name, nil)
plugins.Filter = appendToPluginSet(plugins.Filter, podtopologyspread.Name, nil)
return
})
registry.DefaultPredicates.Insert(EvenPodsSpreadPred)
registry.registerPriorityConfigProducer(EvenPodsSpreadPriority,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.PreScore = appendToPluginSet(plugins.PreScore, podtopologyspread.Name, nil)
plugins.Score = appendToPluginSet(plugins.Score, podtopologyspread.Name, &args.Weight)
return
})
registry.DefaultPriorities[EvenPodsSpreadPriority] = 1
}
// Prioritizes nodes that satisfy pod's resource limits
if utilfeature.DefaultFeatureGate.Enabled(features.ResourceLimitsPriorityFunction) {
klog.Infof("Registering resourcelimits priority function")
registry.registerPriorityConfigProducer(ResourceLimitsPriority,
func(args ConfigProducerArgs) (plugins config.Plugins, pluginConfig []config.PluginConfig) {
plugins.PreScore = appendToPluginSet(plugins.PreScore, noderesources.ResourceLimitsName, nil)
plugins.Score = appendToPluginSet(plugins.Score, noderesources.ResourceLimitsName, &args.Weight)
return
})
registry.DefaultPriorities[ResourceLimitsPriority] = 1
}
return registry
}
// registers a config producer for a predicate.
func (lr *LegacyRegistry) registerPredicateConfigProducer(name string, producer ConfigProducer) {
if _, exist := lr.PredicateToConfigProducer[name]; exist {
klog.Fatalf("already registered %q", name)
}
lr.PredicateToConfigProducer[name] = producer
}
// registers a framework config producer for a priority.
func (lr *LegacyRegistry) registerPriorityConfigProducer(name string, producer ConfigProducer) {
if _, exist := lr.PriorityToConfigProducer[name]; exist {
klog.Fatalf("already registered %q", name)
}
lr.PriorityToConfigProducer[name] = producer
}
func appendToPluginSet(set *config.PluginSet, name string, weight *int32) *config.PluginSet {
if set == nil {
set = &config.PluginSet{}
}
cfg := config.Plugin{Name: name}
if weight != nil {
cfg.Weight = *weight
}
set.Enabled = append(set.Enabled, cfg)
return set
}
// NewPluginConfig builds a PluginConfig with the struct of args marshaled.
// It panics if it fails to marshal.
func NewPluginConfig(pluginName string, args interface{}) config.PluginConfig {
encoding, err := json.Marshal(args)
if err != nil {
klog.Fatalf("failed to marshal %+v: %v", args, err)
}
return config.PluginConfig{
Name: pluginName,
Args: runtime.Unknown{Raw: encoding},
}
}
// ProcessPredicatePolicy given a PredicatePolicy, return the plugin name implementing the predicate and update
// the ConfigProducerArgs if necessary.
func (lr *LegacyRegistry) ProcessPredicatePolicy(policy config.PredicatePolicy, pluginArgs *ConfigProducerArgs) string {
validatePredicateOrDie(policy)
predicateName := policy.Name
if policy.Name == "PodFitsPorts" {
// For compatibility reasons, "PodFitsPorts" as a key is still supported.
predicateName = PodFitsHostPortsPred
}
if _, ok := lr.PredicateToConfigProducer[predicateName]; ok {
// checking to see if a pre-defined predicate is requested
klog.V(2).Infof("Predicate type %s already registered, reusing.", policy.Name)
return predicateName
}
if policy.Argument == nil || (policy.Argument.ServiceAffinity == nil &&
policy.Argument.LabelsPresence == nil) {
klog.Fatalf("Invalid configuration: Predicate type not found for %q", policy.Name)
}
// generate the predicate function, if a custom type is requested
if policy.Argument.ServiceAffinity != nil {
// map LabelsPresence policy to ConfigProducerArgs that's used to configure the ServiceAffinity plugin.
if pluginArgs.ServiceAffinityArgs == nil {
pluginArgs.ServiceAffinityArgs = &serviceaffinity.Args{}
}
pluginArgs.ServiceAffinityArgs.AffinityLabels = append(pluginArgs.ServiceAffinityArgs.AffinityLabels, policy.Argument.ServiceAffinity.Labels...)
// We use the ServiceAffinity predicate name for all ServiceAffinity custom predicates.
// It may get called multiple times but we essentially only register one instance of ServiceAffinity predicate.
// This name is then used to find the registered plugin and run the plugin instead of the predicate.
predicateName = CheckServiceAffinityPred
}
if policy.Argument.LabelsPresence != nil {
// Map LabelPresence policy to ConfigProducerArgs that's used to configure the NodeLabel plugin.
if pluginArgs.NodeLabelArgs == nil {
pluginArgs.NodeLabelArgs = &nodelabel.Args{}
}
if policy.Argument.LabelsPresence.Presence {
pluginArgs.NodeLabelArgs.PresentLabels = append(pluginArgs.NodeLabelArgs.PresentLabels, policy.Argument.LabelsPresence.Labels...)
} else {
pluginArgs.NodeLabelArgs.AbsentLabels = append(pluginArgs.NodeLabelArgs.AbsentLabels, policy.Argument.LabelsPresence.Labels...)
}
// We use the CheckNodeLabelPresencePred predicate name for all kNodeLabel custom predicates.
// It may get called multiple times but we essentially only register one instance of NodeLabel predicate.
// This name is then used to find the registered plugin and run the plugin instead of the predicate.
predicateName = CheckNodeLabelPresencePred
}
return predicateName
}
// ProcessPriorityPolicy given a PriorityPolicy, return the plugin name implementing the priority and update
// the ConfigProducerArgs if necessary.
func (lr *LegacyRegistry) ProcessPriorityPolicy(policy config.PriorityPolicy, configProducerArgs *ConfigProducerArgs) string {
validatePriorityOrDie(policy)
priorityName := policy.Name
if policy.Name == ServiceSpreadingPriority {
// For compatibility reasons, "ServiceSpreadingPriority" as a key is still supported.
priorityName = SelectorSpreadPriority
}
if _, ok := lr.PriorityToConfigProducer[priorityName]; ok {
klog.V(2).Infof("Priority type %s already registered, reusing.", priorityName)
return priorityName
}
// generate the priority function, if a custom priority is requested
if policy.Argument == nil ||
(policy.Argument.ServiceAntiAffinity == nil &&
policy.Argument.RequestedToCapacityRatioArguments == nil &&
policy.Argument.LabelPreference == nil) {
klog.Fatalf("Invalid configuration: Priority type not found for %q", priorityName)
}
if policy.Argument.ServiceAntiAffinity != nil |
if policy.Argument.LabelPreference != nil {
// We use the NodeLabel plugin name for all NodeLabel custom priorities.
// It may get called multiple times but we essentially only register one instance of NodeLabel priority.
// This name is then used to find the registered plugin and run the plugin instead of the priority.
priorityName = nodelabel.Name
if configProducerArgs.NodeLabelArgs == nil {
configProducerArgs.NodeLabelArgs = &nodelabel.Args{}
}
if policy.Argument.LabelPreference.Presence {
configProducerArgs.NodeLabelArgs.PresentLabelsPreference = append(
configProducerArgs.NodeLabelArgs.PresentLabelsPreference,
policy.Argument.LabelPreference.Label,
)
} else {
configProducerArgs.NodeLabelArgs.AbsentLabelsPreference = append(
configProducerArgs.NodeLabelArgs.AbsentLabelsPreference,
policy.Argument.LabelPreference.Label,
)
}
}
if policy.Argument.RequestedToCapacityRatioArguments != nil {
configProducerArgs.RequestedToCapacityRatioArgs = &noderesources.RequestedToCapacityRatioArgs{
RequestedToCapacityRatioArguments: *policy.Argument.RequestedToCapacityRatioArguments,
}
// We do not allow specifying the name for custom plugins, see #83472
priorityName = noderesources.RequestedToCapacityRatioName
}
return priorityName
}
func validatePredicateOrDie(predicate config.PredicatePolicy) {
if predicate.Argument != nil {
numArgs := 0
if predicate.Argument.ServiceAffinity != nil {
numArgs++
}
if predicate.Argument.LabelsPresence != nil {
numArgs++
}
if numArgs != 1 {
klog.Fatalf("Exactly 1 predicate argument is required, numArgs: %v, Predicate: %s", numArgs, predicate.Name)
}
}
}
func validatePriorityOrDie(priority config.PriorityPolicy) {
if priority.Argument != nil {
numArgs := 0
if priority.Argument.ServiceAntiAffinity != nil {
numArgs++
}
if priority.Argument.LabelPreference != nil {
numArgs++
}
if priority.Argument.RequestedToCapacityRatioArguments != nil {
numArgs++
}
if numArgs != 1 {
klog.Fatalf("Exactly 1 priority argument is required, numArgs: %v, Priority: %s", numArgs, priority.Name)
}
}
}
| {
// We use the ServiceAffinity plugin name for all ServiceAffinity custom priorities.
// It may get called multiple times but we essentially only register one instance of
// ServiceAffinity priority.
// This name is then used to find the registered plugin and run the plugin instead of the priority.
priorityName = serviceaffinity.Name
if configProducerArgs.ServiceAffinityArgs == nil {
configProducerArgs.ServiceAffinityArgs = &serviceaffinity.Args{}
}
configProducerArgs.ServiceAffinityArgs.AntiAffinityLabelsPreference = append(
configProducerArgs.ServiceAffinityArgs.AntiAffinityLabelsPreference,
policy.Argument.ServiceAntiAffinity.Label,
)
} |
iareadurl.py | # -*- encoding: utf-8 -*-
# Module iareadurl
def | (url):
from StringIO import StringIO
import urllib
import PIL
import adpil
file = StringIO(urllib.urlopen(url).read())
img = PIL.Image.open(file)
return adpil.pil2array(img)
| iareadurl |
boxes.py | import math
| items_box = int(input("Enter the number of items per box: "))
boxes = math.ceil(items / items_box)
print(f"For {items} items, packing {items_box} items in each box, you will need {boxes} boxes.") | items = int(input("Enter the number of items: ")) |
DefaultSearchResultTab.tsx | import * as React from 'react';
import { Spinner } from 'core/widgets';
import { Tooltip } from 'core/presentation';
import { SearchService } from '../search.service';
import { SearchStatus } from './SearchResults';
import { ISearchResultTabProps } from './searchResultType';
export class DefaultSearchResultTab extends React.Component<ISearchResultTabProps<any>> {
public render() {
const { isActive, resultSet } = this.props;
const { type, results, status, error } = resultSet;
const iconClass = type.iconClass;
const resultsCount = results.length;
const countLabel = resultsCount < SearchService.DEFAULT_PAGE_SIZE ? `${resultsCount}` : `${resultsCount}+`;
const Badge = () => {
switch (status) {
case SearchStatus.SEARCHING:
return <Spinner size="small" />;
case SearchStatus.ERROR:
return ( | <i className="fa fa-exclamation-triangle" />
</Tooltip>
);
default:
if (results.length) {
return <div className="badge">{countLabel}</div>;
}
return <div className="badge faded">{countLabel}</div>;
}
};
const focusOrBlurClass = isActive ? 'search-group--focus' : 'search-group--blur';
return (
<div className={`flex-container-h baseline search-group ${focusOrBlurClass}`}>
<span className={`flex-nogrow search-group-icon ${iconClass}`} />
<div className="flex-grow search-group-name">{type.displayName}</div>
<div className="flex-nogrow">
<Badge />
</div>
</div>
);
}
} | <Tooltip value={error && error.toString()}> |
users.py | from flask import Blueprint, request, session, render_template
from models.user import requires_login
user_blueprint = Blueprint('users', __name__)
@user_blueprint.route('/login')
def login_user():
|
@user_blueprint.route('/register')
def register_user():
is_logged_in = False if not session.get('email') else True
return render_template("users/register.html", is_logged_in=is_logged_in)
@user_blueprint.route('/profile', methods=['GET', 'POST'])
@requires_login
def profile():
is_logged_in = False if not session.get('email') else True
if request.method == 'POST':
uname = request.form['uname']
api_key = request.form['key']
return render_template("users/profile.html", uname=uname, api_key=api_key, is_logged_in=is_logged_in)
return render_template("users/login.html", is_logged_in=is_logged_in)
@user_blueprint.route('/logout')
@requires_login
def logout():
session.pop('email')
return render_template("home.html", is_logged_in=False)
| is_logged_in = False if not session.get('email') else True
return render_template("users/login.html", is_logged_in=is_logged_in) |
wait_for_k8s_services.go | package misc
import (
"path"
bashscript "github.com/platform9/nodelet/nodelet/pkg/phases/bash_script_based_phases"
"github.com/platform9/nodelet/nodelet/pkg/utils/constants"
sunpikev1alpha1 "github.com/platform9/pf9-qbert/sunpike/apiserver/pkg/apis/sunpike/v1alpha1"
)
func NewWaitForK8sSvcPhase(baseDir string) *bashscript.Phase | {
k8sServicePhase := &bashscript.Phase{
Filename: path.Join(baseDir, "wait_for_k8s_services.sh"),
HostPhase: &sunpikev1alpha1.HostPhase{
Name: "Wait for k8s services and network to be up",
Order: int32(constants.WaitForK8sSvcPhaseOrder),
},
}
return k8sServicePhase
} |
|
progress_reset.rs | use crate::server::utils::RequestUnpack;
use crate::server::{AppData, Authentication, CommonResponse};
use anyhow::Result;
use serde::Deserialize;
use sql_client::internal::progress_reset_manager::ProgressResetManager;
use tide::{Request, Response};
pub(crate) async fn get_progress_reset_list<A>(request: Request<AppData<A>>) -> Result<Response>
where
A: Authentication + Clone + Send + Sync + 'static,
|
pub(crate) async fn add_progress_reset_item<A>(request: Request<AppData<A>>) -> Result<Response>
where
A: Authentication + Clone + Send + Sync + 'static,
{
#[derive(Deserialize)]
struct Query {
problem_id: String,
reset_epoch_second: i64,
}
let internal_user_id = request.get_authorized_id().await?;
let pool = request.state().pg_pool.clone();
let query = request.parse_body::<Query>().await?;
pool.add_item(
&internal_user_id,
&query.problem_id,
query.reset_epoch_second,
)
.await?;
Ok(Response::ok())
}
pub(crate) async fn delete_progress_reset_item<A>(request: Request<AppData<A>>) -> Result<Response>
where
A: Authentication + Clone + Send + Sync + 'static,
{
#[derive(Deserialize)]
struct Query {
problem_id: String,
}
let internal_user_id = request.get_authorized_id().await?;
let pool = request.state().pg_pool.clone();
let query = request.parse_body::<Query>().await?;
pool.remove_item(&internal_user_id, &query.problem_id)
.await?;
Ok(Response::ok())
}
| {
let user_id = request.get_authorized_id().await?;
let pool = request.state().pg_pool.clone();
let list = pool.get_progress_reset_list(&user_id).await?;
let response = Response::json(&list)?;
Ok(response)
} |
sink.expanded.rs | use futures_enum::*;
enum Enum<A, B> {
A(A),
B(B),
}
#[allow(unsafe_code)]
impl<A, B, __Item> ::futures::sink::Sink<__Item> for Enum<A, B>
where
A: ::futures::sink::Sink<__Item>,
B: ::futures::sink::Sink<__Item, Error = <A as ::futures::sink::Sink<__Item>>::Error>,
{
type Error = <A as ::futures::sink::Sink<__Item>>::Error;
#[inline]
fn poll_ready(
self: ::core::pin::Pin<&mut Self>,
cx: &mut ::core::task::Context<'_>,
) -> ::core::task::Poll<::core::result::Result<(), Self::Error>> {
unsafe {
match self.get_unchecked_mut() {
Enum::A(x) => {
::futures::sink::Sink::poll_ready(::core::pin::Pin::new_unchecked(x), cx)
}
Enum::B(x) => {
::futures::sink::Sink::poll_ready(::core::pin::Pin::new_unchecked(x), cx)
}
}
}
}
#[inline]
fn start_send(
self: ::core::pin::Pin<&mut Self>,
item: __Item,
) -> ::core::result::Result<(), Self::Error> {
unsafe {
match self.get_unchecked_mut() {
Enum::A(x) => {
::futures::sink::Sink::start_send(::core::pin::Pin::new_unchecked(x), item)
}
Enum::B(x) => {
::futures::sink::Sink::start_send(::core::pin::Pin::new_unchecked(x), item)
}
}
}
}
#[inline]
fn poll_flush(
self: ::core::pin::Pin<&mut Self>,
cx: &mut ::core::task::Context<'_>,
) -> ::core::task::Poll<::core::result::Result<(), Self::Error>> {
unsafe {
match self.get_unchecked_mut() {
Enum::A(x) => {
::futures::sink::Sink::poll_flush(::core::pin::Pin::new_unchecked(x), cx)
}
Enum::B(x) => {
::futures::sink::Sink::poll_flush(::core::pin::Pin::new_unchecked(x), cx)
}
}
}
}
#[inline]
fn poll_close(
self: ::core::pin::Pin<&mut Self>,
cx: &mut ::core::task::Context<'_>,
) -> ::core::task::Poll<::core::result::Result<(), Self::Error>> {
unsafe {
match self.get_unchecked_mut() {
Enum::A(x) => {
::futures::sink::Sink::poll_close(::core::pin::Pin::new_unchecked(x), cx)
}
Enum::B(x) => {
::futures::sink::Sink::poll_close(::core::pin::Pin::new_unchecked(x), cx)
}
}
}
}
}
fn | () {}
| main |
lib.rs | //! Rome's official formatter.
//!
//! The crate exposes some API and utilities to implement the formatting logic.
//!
//! The formatter relies on an [IR], which allows to format any kind of data structure.
//!
//! In order to implement the formatting logic, you need to implement the trait [FormatValue] for
//! the data structure you want to format.
//!
//! Let's say, for example that you have a small data structure that represents a key/value data:
//!
//! ```rust,no_test
//! struct KeyValue {
//! key: &'static str,
//! value: &'static str
//! }
//! ```
//!
//! Now, we do want to create this IR for the data structure:
//! ```rust
//! use rome_formatter::{format_elements, format_element, Formatter, ToFormatElement, FormatElement, FormatResult, FormatOptions, space_token, token };
//!
//! struct KeyValue {
//! key: &'static str,
//! value: &'static str
//! }
//!
//! impl ToFormatElement for KeyValue {
//! fn to_format_element(&self, formatter: &Formatter)-> FormatResult<FormatElement> {
//! Ok(format_elements![
//! token(self.key),
//! space_token(),
//! token("=>"),
//! space_token(),
//! token(self.value)
//! ])
//! }
//! }
//!
//! fn my_function() {
//! let key_value = KeyValue { key: "lorem", value: "ipsum" };
//! let element = key_value.to_format_element(&Formatter::default()).unwrap();
//! let result = format_element(&element, FormatOptions::default());
//! assert_eq!(result.as_code(), "lorem => ipsum");
//! }
//!
//! ```
//! [IR]: https://en.wikipedia.org/wiki/Intermediate_representation
mod cst;
mod format_element;
mod format_elements;
mod formatter;
pub mod formatter_traits;
mod intersperse;
mod js;
mod printer;
mod ts;
mod utils;
pub use formatter::Formatter;
use rome_js_syntax::{SyntaxError, SyntaxNode};
use rome_rowan::TextRange;
use rome_rowan::TextSize;
use rome_rowan::TokenAtOffset;
use std::fmt::Display;
pub use format_element::{
block_indent, comment, concat_elements, empty_element, empty_line, fill_elements,
group_elements, hard_group_elements, hard_line_break, if_group_breaks,
if_group_fits_on_single_line, indent, join_elements, join_elements_hard_line, line_suffix,
soft_block_indent, soft_line_break, soft_line_break_or_space, soft_line_indent_or_space,
space_token, token, FormatElement, Token,
};
pub use printer::Printer;
pub use printer::PrinterOptions;
use std::str::FromStr;
use thiserror::Error;
/// This trait should be implemented on each node/value that should have a formatted representation
pub trait ToFormatElement {
fn to_format_element(&self, formatter: &Formatter) -> FormatResult<FormatElement>;
}
/// Public return type of the formatter
pub type FormatResult<F> = Result<F, FormatError>;
#[derive(Debug, PartialEq, Error)]
/// Series of errors encountered during formatting
pub enum FormatError {
/// Node is missing and it should be required for a correct formatting
#[error("missing required child")]
MissingRequiredChild,
/// In case our formatter doesn't know how to format a certain language
#[error("language is not supported")]
UnsupportedLanguage,
/// When the ability to format the current file has been turned off on purpose
#[error("formatting capability is disabled")]
CapabilityDisabled,
}
impl From<SyntaxError> for FormatError {
fn from(syntax_error: SyntaxError) -> Self {
match syntax_error {
SyntaxError::MissingRequiredChild(_node) => FormatError::MissingRequiredChild,
}
}
}
impl From<&SyntaxError> for FormatError {
fn from(syntax_error: &SyntaxError) -> Self {
match syntax_error {
SyntaxError::MissingRequiredChild(_node) => FormatError::MissingRequiredChild,
}
}
}
#[derive(Debug, Eq, PartialEq, Clone, Copy)]
pub enum IndentStyle {
/// Tab
Tab,
/// Space, with its quantity
Space(u8),
}
impl Default for IndentStyle {
fn default() -> Self {
Self::Tab
}
}
impl FromStr for IndentStyle {
type Err = &'static str;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"tab" => Ok(Self::Tab),
"space" => Ok(Self::Space(2)),
// TODO: replace this error with a diagnostic
_ => Err("Value not supported for IndentStyle"),
}
}
}
impl Display for IndentStyle {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
IndentStyle::Tab => write!(f, "Tab"),
IndentStyle::Space(size) => write!(f, "Spaces, size: {}", size),
}
}
}
#[derive(Debug, Clone, Copy)]
pub struct FormatOptions {
/// The indent style
pub indent_style: IndentStyle,
/// What's the max width of a line. Defaults to 80
pub line_width: u16,
}
impl FormatOptions {
pub fn new(indent_style: IndentStyle) -> Self {
Self {
indent_style,
..Self::default()
}
}
}
impl Default for FormatOptions {
fn | () -> Self {
Self {
indent_style: IndentStyle::default(),
line_width: 80,
}
}
}
impl Display for FormatOptions {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
writeln!(f, "Indent style: {}", self.indent_style)?;
writeln!(f, "Line width: {}", self.line_width)?;
Ok(())
}
}
/// Lightweight sourcemap marker between source and output tokens
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct SourceMarker {
/// Position of the marker in the original source
pub source: TextSize,
/// Position of the marker in the output code
pub dest: TextSize,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Formatted {
code: String,
range: Option<TextRange>,
sourcemap: Vec<SourceMarker>,
verbatim_source: Vec<(String, TextRange)>,
}
impl Formatted {
fn new(
code: String,
range: Option<TextRange>,
sourcemap: Vec<SourceMarker>,
verbatim_source: Vec<(String, TextRange)>,
) -> Self {
Self {
code,
range,
sourcemap,
verbatim_source,
}
}
/// Construct an empty formatter result
fn new_empty() -> Self {
Self {
code: String::new(),
range: None,
sourcemap: Vec::new(),
verbatim_source: Vec::new(),
}
}
/// Range of the input source file covered by this formatted code,
/// or None if the entire file is covered in this instance
pub fn range(&self) -> Option<TextRange> {
self.range
}
/// Returns a list of [SourceMarker] mapping byte positions
/// in the output string to the input source code
pub fn sourcemap(&self) -> &[SourceMarker] {
&self.sourcemap
}
/// Access the resulting code, borrowing the result
pub fn as_code(&self) -> &str {
&self.code
}
/// Access the resulting code, consuming the result
pub fn into_code(self) -> String {
self.code
}
pub fn verbatim(&self) -> &[(String, TextRange)] {
&self.verbatim_source
}
}
/// Formats a JavaScript (and its super languages) file based on its features.
///
/// It returns a [Formatted] result, which the user can use to override a file.
pub fn format(options: FormatOptions, syntax: &SyntaxNode) -> FormatResult<Formatted> {
let element = Formatter::new(options).format_root(syntax)?;
Ok(Printer::new(options).print(&element))
}
/// Outputs formatter IR for a JavaScript (and its super languages) file
///
/// It returns a [FormatElement] result. Mostly for debugging purposes.
pub fn to_format_element(
options: FormatOptions,
syntax: &SyntaxNode,
) -> FormatResult<FormatElement> {
Formatter::new(options).format_root(syntax)
}
/// Formats a range within a file, supported by Rome
///
/// This runs a simple heuristic to determine the initial indentation
/// level of the node based on the provided [FormatOptions], which
/// must match currently the current initial of the file. Additionally,
/// because the reformatting happens only locally the resulting code
/// will be indented with the same level as the original selection,
/// even if it's a mismatch from the rest of the block the selection is in
///
/// It returns a [Formatted] result with a range corresponding to the
/// range of the input that was effectively overwritten by the formatter
pub fn format_range(
options: FormatOptions,
root: &SyntaxNode,
range: TextRange,
) -> FormatResult<Formatted> {
// Find the tokens corresponding to the start and end of the range
let start_token = root.token_at_offset(range.start());
let end_token = root.token_at_offset(range.end());
// If these tokens were not found this means either:
// 1. The input [SyntaxNode] was empty
// 2. The input node was not the root [SyntaxNode] of the file
// In the first case we can return an empty result immediately,
// otherwise default to the first and last tokens in the root node
let start_token = match start_token {
// If the start of the range lies between two tokens,
// start at the rightmost one
TokenAtOffset::Between(_, token) => token,
TokenAtOffset::Single(token) => token,
TokenAtOffset::None => match root.first_token() {
Some(token) => token,
// root node is empty
None => return Ok(Formatted::new_empty()),
},
};
let end_token = match end_token {
// If the end of the range lies between two tokens,
// end at the leftmost one
TokenAtOffset::Between(token, _) => token,
TokenAtOffset::Single(token) => token,
TokenAtOffset::None => match root.last_token() {
Some(token) => token,
// root node is empty
None => return Ok(Formatted::new_empty()),
},
};
// Find the lowest common ancestor node for the start and end token
// by building the path to the root node from both tokens and
// iterating along the two paths at once to find the first divergence
#[allow(clippy::needless_collect)]
let start_to_root: Vec<_> = start_token.ancestors().collect();
#[allow(clippy::needless_collect)]
let end_to_root: Vec<_> = end_token.ancestors().collect();
let common_root = start_to_root
.into_iter()
.rev()
.zip(end_to_root.into_iter().rev())
.map_while(|(lhs, rhs)| if lhs == rhs { Some(lhs) } else { None })
.last();
// Logically this should always return at least the root node,
// fallback to said node just in case
let common_root = common_root.as_ref().unwrap_or(root);
// Perform the actual formatting of the root node with
// an appropriate indentation level
let formatted = format_node(options, common_root)?;
// This finds the closest marker to the beginning of the source
// starting before or at said starting point, and the closest
// marker to the end of the source range starting after or at
// said ending point respectively
let mut range_start = None;
let mut range_end = None;
for marker in &formatted.sourcemap {
if let Some(start_dist) = marker.source.checked_sub(range.start()) {
range_start = match range_start {
Some((prev_marker, prev_dist)) => {
if start_dist < prev_dist {
Some((marker, start_dist))
} else {
Some((prev_marker, prev_dist))
}
}
None => Some((marker, start_dist)),
}
}
if let Some(end_dist) = range.end().checked_sub(marker.source) {
range_end = match range_end {
Some((prev_marker, prev_dist)) => {
if end_dist < prev_dist {
Some((marker, end_dist))
} else {
Some((prev_marker, prev_dist))
}
}
None => Some((marker, end_dist)),
}
}
}
// If no start or end were found, this means that the edge of the formatting
// range was near the edge of the input, and no marker were emitted before
// the start (or after the end) of the formatting range: in this case
// the start/end marker default to the start/end of the input
let (start_source, start_dest) = match range_start {
Some((start_marker, _)) => (start_marker.source, start_marker.dest),
None => (common_root.text_range().start(), TextSize::from(0)),
};
let (end_source, end_dest) = match range_end {
Some((end_marker, _)) => (end_marker.source, end_marker.dest),
None => (
common_root.text_range().end(),
TextSize::try_from(formatted.code.len()).expect("code length out of bounds"),
),
};
let input_range = TextRange::new(start_source, end_source);
let output_range = TextRange::new(start_dest, end_dest);
let code = &formatted.code[output_range];
Ok(Formatted::new(
code.into(),
Some(input_range),
formatted.sourcemap,
formatted.verbatim_source,
))
}
/// Formats a single node within a file, supported by Rome
///
/// This runs a simple heuristic to determine the initial indentation
/// level of the node based on the provided [FormatOptions], which
/// must match currently the current initial of the file. Additionally,
/// because the reformatting happens only locally the resulting code
/// will be indented with the same level as the original selection,
/// even if it's a mismatch from the rest of the block the selection is in
///
/// It returns a [Formatted] result
pub fn format_node(options: FormatOptions, root: &SyntaxNode) -> FormatResult<Formatted> {
// Determine the initial indentation level for the printer by inspecting the trivias
// of each token from the first token of the common root towards the start of the file
let mut tokens = std::iter::successors(root.first_token(), |token| token.prev_token());
// From the iterator of tokens, build an iterator of trivia pieces (once again the iterator is
// reversed, starting from the last trailing trivia towards the first leading trivia).
// The first token is handled specially as we only wan to consider its leading trivias
let first_token = tokens.next();
let first_token_trivias = first_token
.into_iter()
.flat_map(|token| token.leading_trivia().pieces().rev());
let next_tokens_trivias = tokens.flat_map(|token| {
token
.trailing_trivia()
.pieces()
.rev()
.chain(token.leading_trivia().pieces().rev())
});
let trivias = first_token_trivias
.chain(next_tokens_trivias)
.filter(|piece| {
// We're only interested in newline and whitespace trivias, skip over comments
let is_newline = piece.is_newline();
let is_whitespace = piece.is_whitespace();
is_newline || is_whitespace
});
// Finally run the iterator until a newline trivia is found, and get the last whitespace trivia before it
let last_whitespace = trivias.map_while(|piece| piece.as_whitespace()).last();
let initial_indent = match last_whitespace {
Some(trivia) => {
// This logic is based on the formatting options passed in
// the be user (or the editor) as we do not have any kind
// of indentation type detection yet. Unfortunately this
// may not actually match the current content of the file
let length = trivia.text().len() as u16;
match options.indent_style {
IndentStyle::Tab => length,
IndentStyle::Space(width) => length / u16::from(width),
}
}
// No whitespace was found between the start of the range
// and the start of the file
None => 0,
};
let element = Formatter::new(options).format_root(root)?;
let formatted = Printer::new(options).print_with_indent(&element, initial_indent);
Ok(Formatted::new(
formatted.code,
Some(root.text_range()),
formatted.sourcemap,
formatted.verbatim_source,
))
}
pub fn format_element(element: &FormatElement, options: FormatOptions) -> Formatted {
let printer = Printer::new(options);
printer.print(element)
}
#[cfg(test)]
mod tests {
use super::{format_range, FormatOptions};
use crate::IndentStyle;
use rome_rowan::{TextRange, TextSize};
use rslint_parser::parse_script;
#[test]
fn test_range_formatting() {
let input = "
while(
true
) {
function func() {
func( /* comment */
);
let array =
[ 1
, 2];
}
function func2()
{
const no_format = () => {};
}
}
";
// Start the formatting range two characters before the "let" keywords,
// in the middle of the indentation whitespace for the line
let range_start = TextSize::try_from(input.find("let").unwrap() - 2).unwrap();
let range_end = TextSize::try_from(input.find("const").unwrap()).unwrap();
let tree = parse_script(input, 0);
let result = format_range(
FormatOptions {
indent_style: IndentStyle::Space(4),
..FormatOptions::default()
},
&tree.syntax(),
TextRange::new(range_start, range_end),
);
let result = result.expect("range formatting failed");
assert_eq!(
result.range(),
Some(TextRange::new(range_start + TextSize::from(2), range_end))
);
assert_eq!(
result.as_code(),
"let array = [1, 2];\n }\n\n function func2() {\n "
);
}
#[test]
fn test_range_formatting_indentation() {
let input = "
function() {
const veryLongIdentifierToCauseALineBreak = { veryLongKeyToCauseALineBreak: 'veryLongValueToCauseALineBreak' }
}
";
let range_start = TextSize::try_from(input.find("const").unwrap()).unwrap();
let range_end = TextSize::try_from(input.find('}').unwrap()).unwrap();
let tree = parse_script(input, 0);
let result = format_range(
FormatOptions {
indent_style: IndentStyle::Space(4),
..FormatOptions::default()
},
&tree.syntax(),
TextRange::new(range_start, range_end),
);
let result = result.expect("range formatting failed");
assert_eq!(result.range(), Some(TextRange::new(range_start, range_end)));
// As a result of the indentation normalization, the number of spaces within
// the object expression is currently rounded down from an odd indentation level
assert_eq!(
result.as_code(),
"const veryLongIdentifierToCauseALineBreak = {\n veryLongKeyToCauseALineBreak: \"veryLongValueToCauseALineBreak\",\n "
);
}
}
#[cfg(test)]
mod test {
use crate::format;
use crate::FormatOptions;
use rslint_parser::{parse, SourceType};
#[test]
#[ignore]
// use this test check if your snippet prints as you wish, without using a snapshot
fn quick_test() {
let src = r#"
`something ${ () => { var hey; const looooooooooong_expression = "loooooooooong_expression" }} something else ${ ehy }`;
"#;
let syntax = SourceType::ts();
let tree = parse(src, 0, syntax);
let result = format(FormatOptions::default(), &tree.syntax()).unwrap();
assert_eq!(
result.as_code(),
r#"let g = [[], [0, 1], [0, 1]];
"#
);
}
}
| default |
test_atlas_apimixin.py | import unittest
import pprint
from atlascli.atlasapi import AtlasAPI
class TestAPIMixin(unittest.TestCase):
def setUp(self):
self._api= AtlasAPI()
self._api.authenticate()
def tearDown(self):
pass
def test_get(self):
r = self._api.get("https://httpbin.org/get")
#pprint.pprint(r)
self.assertEqual(r["args"], {'itemsPerPage': '100', 'pageNum': '1'})
org=self._api.atlas_get("/orgs/599eeced9f78f769464d175c")
#pprint.pprint(org)
self.assertEqual(org["name"], "Open Data at MongoDB")
def test_post(self):
r = self._api.post("https://httpbin.org/post", {"Hello":"World"})
self.assertEqual(r["data"], '{"Hello": "World"}')
def test_patch(self):
r = self._api.patch("https://httpbin.org/patch", {"Hello":"World"})
self.assertEqual(r["data"], '{"Hello": "World"}')
def test_delete(self):
|
if __name__ == '__main__':
unittest.main() | r = self._api.delete("https://httpbin.org/delete")
self.assertEqual(r["data"], '') |
builder.rs | use super::header::*;
use super::question::*;
use super::resource::*;
use super::*;
use crate::error::*;
use std::collections::HashMap;
// A Builder allows incrementally packing a DNS message.
//
// Example usage:
// b := NewBuilder(Header{...})
// b.enable_compression()
// // Optionally start a section and add things to that section.
// // Repeat adding sections as necessary.
// buf, err := b.Finish()
// // If err is nil, buf[2:] will contain the built bytes.
#[derive(Default)]
pub struct Builder {
// msg is the storage for the message being built.
pub msg: Option<Vec<u8>>,
// section keeps track of the current section being built.
pub section: Section,
// header keeps track of what should go in the header when Finish is
// called.
pub header: HeaderInternal,
// start is the starting index of the bytes allocated in msg for header.
pub start: usize,
// compression is a mapping from name suffixes to their starting index
// in msg.
pub compression: Option<HashMap<String, usize>>,
}
impl Builder {
// NewBuilder creates a new builder with compression disabled.
//
// Note: Most users will want to immediately enable compression with the
// enable_compression method. See that method's comment for why you may or may
// not want to enable compression.
//
// The DNS message is appended to the provided initial buffer buf (which may be
// nil) as it is built. The final message is returned by the (*Builder).Finish
// method, which may return the same underlying array if there was sufficient
// capacity in the slice.
pub fn new(h: &Header) -> Self {
let (id, bits) = h.pack();
Builder {
msg: Some(vec![0; HEADER_LEN]),
start: 0,
section: Section::Header,
header: HeaderInternal {
id,
bits,
..Default::default()
},
compression: None,
}
//var hb [HEADER_LEN]byte
//b.msg = append(b.msg, hb[:]...)
//return b
}
// enable_compression enables compression in the Builder.
//
// Leaving compression disabled avoids compression related allocations, but can
// result in larger message sizes. Be careful with this mode as it can cause
// messages to exceed the UDP size limit.
//
// According to RFC 1035, section 4.1.4, the use of compression is optional, but
// all implementations must accept both compressed and uncompressed DNS
// messages.
//
// Compression should be enabled before any sections are added for best results.
pub fn enable_compression(&mut self) {
self.compression = Some(HashMap::new());
}
fn start_check(&self, section: Section) -> Result<()> {
if self.section <= Section::NotStarted {
return Err(Error::ErrNotStarted);
}
if self.section > section {
return Err(Error::ErrSectionDone);
}
Ok(())
}
// start_questions prepares the builder for packing Questions.
pub fn start_questions(&mut self) -> Result<()> {
self.start_check(Section::Questions)?;
self.section = Section::Questions;
Ok(())
}
// start_answers prepares the builder for packing Answers.
pub fn start_answers(&mut self) -> Result<()> |
// start_authorities prepares the builder for packing Authorities.
pub fn start_authorities(&mut self) -> Result<()> {
self.start_check(Section::Authorities)?;
self.section = Section::Authorities;
Ok(())
}
// start_additionals prepares the builder for packing Additionals.
pub fn start_additionals(&mut self) -> Result<()> {
self.start_check(Section::Additionals)?;
self.section = Section::Additionals;
Ok(())
}
fn increment_section_count(&mut self) -> Result<()> {
let section = self.section;
let (count, err) = match section {
Section::Questions => (&mut self.header.questions, Error::ErrTooManyQuestions),
Section::Answers => (&mut self.header.answers, Error::ErrTooManyAnswers),
Section::Authorities => (&mut self.header.authorities, Error::ErrTooManyAuthorities),
Section::Additionals => (&mut self.header.additionals, Error::ErrTooManyAdditionals),
Section::NotStarted => return Err(Error::ErrNotStarted),
Section::Done => return Err(Error::ErrSectionDone),
Section::Header => return Err(Error::ErrSectionHeader),
};
if *count == u16::MAX {
Err(err)
} else {
*count += 1;
Ok(())
}
}
// question adds a single question.
pub fn add_question(&mut self, q: &Question) -> Result<()> {
if self.section < Section::Questions {
return Err(Error::ErrNotStarted);
}
if self.section > Section::Questions {
return Err(Error::ErrSectionDone);
}
let msg = self.msg.take();
if let Some(mut msg) = msg {
msg = q.pack(msg, &mut self.compression, self.start)?;
self.increment_section_count()?;
self.msg = Some(msg);
}
Ok(())
}
fn check_resource_section(&self) -> Result<()> {
if self.section < Section::Answers {
return Err(Error::ErrNotStarted);
}
if self.section > Section::Additionals {
return Err(Error::ErrSectionDone);
}
Ok(())
}
// Resource adds a single resource.
pub fn add_resource(&mut self, r: &mut Resource) -> Result<()> {
self.check_resource_section()?;
if let Some(body) = &r.body {
r.header.typ = body.real_type();
} else {
return Err(Error::ErrNilResourceBody);
}
if let Some(msg) = self.msg.take() {
let (mut msg, len_off) = r.header.pack(msg, &mut self.compression, self.start)?;
let pre_len = msg.len();
if let Some(body) = &r.body {
msg = body.pack(msg, &mut self.compression, self.start)?;
r.header.fix_len(&mut msg, len_off, pre_len)?;
self.increment_section_count()?;
}
self.msg = Some(msg);
}
Ok(())
}
// Finish ends message building and generates a binary message.
pub fn finish(&mut self) -> Result<Vec<u8>> {
if self.section < Section::Header {
return Err(Error::ErrNotStarted);
}
self.section = Section::Done;
// Space for the header was allocated in NewBuilder.
let buf = self.header.pack(vec![]);
assert_eq!(buf.len(), HEADER_LEN);
if let Some(mut msg) = self.msg.take() {
msg[..HEADER_LEN].copy_from_slice(&buf[..HEADER_LEN]);
Ok(msg)
} else {
Err(Error::ErrEmptyBuilderMsg)
}
}
}
| {
self.start_check(Section::Answers)?;
self.section = Section::Answers;
Ok(())
} |
led_shift_register.py | from treehopper.libraries.displays import LedDriver
from treehopper.libraries.io.expander.shift_register import ChainableShiftRegisterOutput
class LedShiftRegister(ChainableShiftRegisterOutput, LedDriver):
def __init__(self): | super().__init__() |
|
messages.go | // Copyright (c) 2017 Cisco and/or its affiliates.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package client
import (
"fmt"
"time"
"github.com/Shopify/sarama"
"github.com/golang/protobuf/proto"
"github.com/ligato/cn-infra/db/keyval"
)
// Encoder defines an interface that is used as argument of producer functions.
// It wraps the sarama.Encoder
type Encoder interface {
sarama.Encoder
}
// ConsumerMessage encapsulates a Kafka message returned by the consumer.
type ConsumerMessage struct {
Key, Value, PrevValue []byte
Topic string
Partition int32
Offset int64
Timestamp time.Time
}
// GetTopic returns the topic associated with the message
func (cm *ConsumerMessage) GetTopic() string {
return cm.Topic
}
// GetPartition returns the partition associated with the message
func (cm *ConsumerMessage) GetPartition() int32 {
return cm.Partition
}
// GetOffset returns the offset associated with the message
func (cm *ConsumerMessage) GetOffset() int64 {
return cm.Offset
}
// GetKey returns the key associated with the message.
func (cm *ConsumerMessage) GetKey() string {
return string(cm.Key)
}
// GetValue returns the value associated with the message.
func (cm *ConsumerMessage) GetValue() []byte {
return cm.Value
}
// GetPrevValue returns the previous value associated with the message.
func (cm *ConsumerMessage) GetPrevValue() []byte {
return cm.PrevValue
}
// ProtoConsumerMessage encapsulates a Kafka message returned by the consumer and provides means
// to unmarshal the value into proto.Message.
type ProtoConsumerMessage struct {
*ConsumerMessage
serializer keyval.Serializer
}
// NewProtoConsumerMessage creates new instance of ProtoConsumerMessage
func NewProtoConsumerMessage(msg *ConsumerMessage, serializer keyval.Serializer) *ProtoConsumerMessage {
return &ProtoConsumerMessage{msg, serializer}
}
// GetTopic returns the topic associated with the message.
func (cm *ProtoConsumerMessage) GetTopic() string {
return cm.Topic
}
// GetPartition returns the partition associated with the message.
func (cm *ProtoConsumerMessage) GetPartition() int32 {
return cm.Partition
}
// GetOffset returns the offset associated with the message.
func (cm *ProtoConsumerMessage) GetOffset() int64 {
return cm.Offset
}
// GetKey returns the key associated with the message.
func (cm *ProtoConsumerMessage) GetKey() string {
return string(cm.Key)
}
// GetValue returns the value associated with the message.
func (cm *ProtoConsumerMessage) GetValue(msg proto.Message) error {
err := cm.serializer.Unmarshal(cm.ConsumerMessage.GetValue(), msg)
if err != nil {
return err
}
return nil
}
// GetPrevValue returns the previous value associated with the latest message.
func (cm *ProtoConsumerMessage) GetPrevValue(msg proto.Message) (prevValueExist bool, err error) {
prevVal := cm.ConsumerMessage.GetPrevValue()
if prevVal == nil |
err = cm.serializer.Unmarshal(prevVal, msg)
if err != nil {
return true, err
}
return true, nil
}
// ProducerMessage is the collection of elements passed to the Producer in order to send a message.
type ProducerMessage struct {
// The Kafka topic for this message.
Topic string
// The partitioning key for this message. Pre-existing Encoders include
// StringEncoder and ByteEncoder.
Key Encoder
// The actual message to store in Kafka. Pre-existing Encoders include
// StringEncoder and ByteEncoder.
Value Encoder
// This field is used to hold arbitrary data you wish to include so it
// will be available when receiving on the Successes and Errors channels.
// Sarama completely ignores this field and is only to be used for
// pass-through data.
Metadata interface{}
// Below this point are filled in by the producer as the message is processed
// Offset is the offset of the message stored on the broker. This is only
// guaranteed to be defined if the message was successfully delivered and
// RequiredAcks is not NoResponse.
Offset int64
// Partition is the partition that the message was sent to. This is only
// guaranteed to be defined if the message was successfully delivered.
Partition int32
}
// GetTopic returns the topic associated with the message.
func (pm *ProducerMessage) GetTopic() string {
return pm.Topic
}
// GetPartition returns the partition associated with the message.
func (pm *ProducerMessage) GetPartition() int32 {
return pm.Partition
}
// GetOffset returns the offset associated with the message.
func (pm *ProducerMessage) GetOffset() int64 {
return pm.Offset
}
// GetKey returns the key associated with the message.
func (pm *ProducerMessage) GetKey() string {
key, _ := pm.Key.Encode()
return string(key)
}
// GetValue returns the content of the message.
func (pm *ProducerMessage) GetValue() []byte {
val, _ := pm.Value.Encode()
return val
}
// GetPrevValue returns nil for the producer
func (pm *ProducerMessage) GetPrevValue() []byte {
return nil
}
func (pm *ProducerMessage) String() string {
var meta string
switch t := pm.Metadata.(type) {
default:
meta = fmt.Sprintf("unexpected type %T", t) // %T prints whatever type t has
case string:
meta = t
case *string:
meta = *t
case []byte:
meta = string(t)
case bool:
meta = fmt.Sprintf("%t", t) // t has type bool
case int:
meta = fmt.Sprintf("%d", t) // t has type int
case *bool:
meta = fmt.Sprintf("%t", *t) // t has type *bool
case *int:
meta = fmt.Sprintf("%d", *t) // t has type *int
}
key, _ := pm.Key.Encode()
val, _ := pm.Value.Encode()
return fmt.Sprintf("ProducerMessage - Topic: %s, Key: %s, Value: %s, Meta: %v, Offset: %d, Partition: %d\n", pm.Topic, string(key), string(val), meta, pm.Offset, pm.Partition)
}
// ProducerError is the type of error generated when the producer fails to deliver a message.
// It contains the original ProducerMessage as well as the actual error value.
type ProducerError struct {
*ProducerMessage
Err error
}
func (ref *ProducerError) Error() error {
return ref.Err
}
func (ref *ProducerError) String() string {
return fmt.Sprintf("ProducerError: %s, error: %v\n", ref.ProducerMessage, ref.Err.Error())
}
// ProtoProducerMessage is wrapper of a producer message that simplify work with proto-modelled data.
type ProtoProducerMessage struct {
*ProducerMessage
Serializer keyval.Serializer
}
// GetTopic returns the topic associated with the message.
func (ppm *ProtoProducerMessage) GetTopic() string {
return ppm.Topic
}
// GetPartition returns the partition associated with the message.
func (ppm *ProtoProducerMessage) GetPartition() int32 {
return ppm.Partition
}
// GetOffset returns the offset associated with the message.
func (ppm *ProtoProducerMessage) GetOffset() int64 {
return ppm.Offset
}
// GetKey returns the key associated with the message.
func (ppm *ProtoProducerMessage) GetKey() string {
key, _ := ppm.Key.Encode()
return string(key)
}
// GetValue unmarshalls the content of the msg into provided structure.
func (ppm *ProtoProducerMessage) GetValue(msg proto.Message) error {
err := ppm.Serializer.Unmarshal(ppm.ProducerMessage.GetValue(), msg)
if err != nil {
return err
}
return nil
}
// GetPrevValue for producer returns false (value does not exist)
func (ppm *ProtoProducerMessage) GetPrevValue(msg proto.Message) (prevValueExist bool, err error) {
return false, nil
}
// ProtoProducerMessageErr represents a proto-modelled message that was not published successfully.
type ProtoProducerMessageErr struct {
*ProtoProducerMessage
Err error
}
func (pme *ProtoProducerMessageErr) Error() error {
return pme.Err
}
| {
return false, nil
} |
debug.rs | use shipyard::*;
use crate::components::*;
pub fn log_todos (
todos:View<Todo>,
order:UniqueView<Order>,
) {
//let output = todos.iter().with_id()
let output =
order
.iter()
.map(|id| {
(id, todos.get(*id).unwrap())
})
.fold(String::new(), |acc, (id, todo)| {
let spacer = if acc.is_empty() | else { "\n" };
format!("{}{}{:?}\n{:?}", acc, spacer, id, todo)
});
log::info!("{}", output);
}
| { "" } |
deprecated_validate_participation.rs | use {
crate::{
deprecated_state::{AuctionManagerV1, ParticipationStateV1},
error::MetaplexError,
state::{AuctionManagerStatus, Store},
utils::{
assert_at_least_one_creator_matches_or_store_public_and_all_verified,
assert_authority_correct, assert_derivation, assert_initialized, assert_owned_by,
assert_rent_exempt, assert_store_safety_vault_manager_match,
},
},
borsh::BorshSerialize,
mpl_token_metadata::state::{MasterEditionV1, Metadata},
mpl_token_vault::state::{SafetyDepositBox, Vault},
solana_program::{
account_info::{next_account_info, AccountInfo},
entrypoint::ProgramResult,
program_option::COption,
pubkey::Pubkey,
rent::Rent,
sysvar::Sysvar,
},
spl_token::state::Account,
};
pub fn process_deprecated_validate_participation(
program_id: &Pubkey,
accounts: &[AccountInfo],
) -> ProgramResult | {
let account_info_iter = &mut accounts.iter();
let auction_manager_info = next_account_info(account_info_iter)?;
let open_edition_metadata_info = next_account_info(account_info_iter)?;
let open_master_edition_info = next_account_info(account_info_iter)?;
let printing_authorization_token_account_info = next_account_info(account_info_iter)?;
let authority_info = next_account_info(account_info_iter)?;
let whitelisted_creator_info = next_account_info(account_info_iter)?;
let store_info = next_account_info(account_info_iter)?;
let safety_deposit_box_info = next_account_info(account_info_iter)?;
let safety_deposit_box_token_store_info = next_account_info(account_info_iter)?;
let vault_info = next_account_info(account_info_iter)?;
let rent_info = next_account_info(account_info_iter)?;
let rent = &Rent::from_account_info(&rent_info)?;
let mut auction_manager = AuctionManagerV1::from_account_info(auction_manager_info)?;
let store = Store::from_account_info(store_info)?;
let vault = Vault::from_account_info(vault_info)?;
let safety_deposit_token_store: Account =
assert_initialized(safety_deposit_box_token_store_info)?;
let safety_deposit = SafetyDepositBox::from_account_info(safety_deposit_box_info)?;
let printing_token_account: Account =
assert_initialized(printing_authorization_token_account_info)?;
let open_edition_metadata = Metadata::from_account_info(open_edition_metadata_info)?;
let master_edition = MasterEditionV1::from_account_info(open_master_edition_info)?;
if vault.authority != *auction_manager_info.key {
return Err(MetaplexError::VaultAuthorityMismatch.into());
}
// top level authority and ownership check
assert_authority_correct(&auction_manager.authority, authority_info)?;
assert_owned_by(auction_manager_info, program_id)?;
assert_owned_by(open_edition_metadata_info, &store.token_metadata_program)?;
assert_owned_by(open_master_edition_info, &store.token_metadata_program)?;
assert_owned_by(
printing_authorization_token_account_info,
&store.token_program,
)?;
if *whitelisted_creator_info.key != solana_program::system_program::id() {
if whitelisted_creator_info.data_is_empty() {
return Err(MetaplexError::Uninitialized.into());
}
assert_owned_by(whitelisted_creator_info, program_id)?;
}
assert_owned_by(store_info, program_id)?;
assert_owned_by(safety_deposit_box_info, &store.token_vault_program)?;
assert_owned_by(safety_deposit_box_token_store_info, &store.token_program)?;
assert_owned_by(vault_info, &store.token_vault_program)?;
// is it the right vault, safety deposit, and token store?
assert_store_safety_vault_manager_match(
&auction_manager.vault,
&safety_deposit_box_info,
vault_info,
&store.token_vault_program,
)?;
// do the vault and store belong to this AM?
if auction_manager.store != *store_info.key {
return Err(MetaplexError::AuctionManagerStoreMismatch.into());
}
if auction_manager.vault != *vault_info.key {
return Err(MetaplexError::AuctionManagerVaultMismatch.into());
}
// Check creators
assert_at_least_one_creator_matches_or_store_public_and_all_verified(
program_id,
&auction_manager,
&open_edition_metadata,
whitelisted_creator_info,
store_info,
)?;
// Make sure master edition is the right master edition for this metadata given
assert_derivation(
&store.token_metadata_program,
open_master_edition_info,
&[
mpl_token_metadata::state::PREFIX.as_bytes(),
store.token_metadata_program.as_ref(),
&open_edition_metadata.mint.as_ref(),
mpl_token_metadata::state::EDITION.as_bytes(),
],
)?;
// Assert the holding account for authorization tokens is rent filled, owned correctly, and ours
assert_owned_by(
printing_authorization_token_account_info,
&store.token_program,
)?;
assert_rent_exempt(rent, printing_authorization_token_account_info)?;
if printing_token_account.owner != *auction_manager_info.key {
return Err(MetaplexError::IncorrectOwner.into());
}
if printing_token_account.mint != master_edition.printing_mint {
return Err(MetaplexError::PrintingTokenAccountMintMismatch.into());
}
if printing_token_account.delegate != COption::None {
return Err(MetaplexError::DelegateShouldBeNone.into());
}
if printing_token_account.close_authority != COption::None {
return Err(MetaplexError::CloseAuthorityShouldBeNone.into());
}
if master_edition.max_supply.is_some() {
return Err(MetaplexError::CantUseLimitedSupplyEditionsWithOpenEditionAuction.into());
}
if master_edition.one_time_printing_authorization_mint != safety_deposit_token_store.mint {
return Err(MetaplexError::MasterEditionOneTimeAuthorizationMintMismatch.into());
}
if let Some(participation_config) = &auction_manager.settings.participation_config {
if participation_config.safety_deposit_box_index > vault.token_type_count {
return Err(MetaplexError::InvalidSafetyDepositBox.into());
}
if participation_config.safety_deposit_box_index != safety_deposit.order {
return Err(MetaplexError::SafetyDepositIndexMismatch.into());
}
if let Some(state) = auction_manager.state.participation_state {
if state.validated {
return Err(MetaplexError::AlreadyValidated.into());
}
auction_manager.state.participation_state = Some(ParticipationStateV1 {
collected_to_accept_payment: state.collected_to_accept_payment,
primary_sale_happened: open_edition_metadata.primary_sale_happened,
validated: true,
printing_authorization_token_account: Some(
*printing_authorization_token_account_info.key,
),
});
}
if auction_manager.settings.winning_configs.is_empty() {
auction_manager.state.status = AuctionManagerStatus::Validated;
}
auction_manager.serialize(&mut *auction_manager_info.data.borrow_mut())?;
}
Ok(())
} |
|
html2ts.ts | /// <reference path="../../defs/tsd.d.ts"/>
import _ = require('lodash');
import fs = require('fs');
import path = require('path');
import utils = require('./utils');
var grunt = utils.grunt;
/////////////////////////////////////////////////////////////////////
// HTML -> TS
////////////////////////////////////////////////////////////////////
// html -> js processing functions:
// Originally from karma-html2js-preprocessor
// Refactored nicely in html2js grunt task
// https://github.com/karlgoldstein/grunt-html2js/blob/master/tasks/html2js.js
// Modified nlReplace to be an empty string
var escapeContent = function (content: string, quoteChar= '\''): string {
var quoteRegexp = new RegExp('\\' + quoteChar, 'g');
var nlReplace = '';
return content.replace(quoteRegexp, '\\' + quoteChar).replace(/\r?\n/g, nlReplace);
};
// Convert a string to camelCase
// Inspired by http://jamesroberts.name/blog/2010/02/22/string-functions-for-javascript-trim-to-camel-case-to-dashed-and-to-underscore/
// Solves the issue of serving a module name that includes dashes
var toCamel = function(str){
return str.replace(/(\-[a-z])/g, function($1){return $1.toUpperCase().replace('-', ''); });
};
// Remove bom when reading utf8 files
function stripBOM(str) {
return 0xFEFF === str.charCodeAt(0)
? str.substring(1)
: str;
}
function htmlInternalTemplate(lineEnding: string) {
return '/* tslint:disable:max-line-length */' + lineEnding +
'module <%= modulename %> {' + lineEnding +
' export var <%= varname %> = \'<%= content %>\';' + lineEnding +
'}' + lineEnding;
};
export interface IHtml2TSOptions {
moduleFunction: Function;
varFunction: Function;
htmlOutputTemplate: string;
htmlOutDir: string;
flatten: boolean;
eol: string;
}
// Compile an HTML file to a TS file
// Return the filename. This filename will be required by reference.ts
export function compileHTML(filename: string, options: IHtml2TSOptions): string {
grunt.log.verbose.writeln('Compiling HTML: ' + filename);
var htmlContent = escapeContent(fs.readFileSync(filename).toString());
htmlContent = stripBOM(htmlContent);
// TODO: place a minification pipeline here if you want.
var ext = path.extname(filename).replace('.', '');
var extFreename = path.basename(filename, '.' + ext);
var moduleName = toCamel(options.moduleFunction({ ext: ext, filename: extFreename }));
var varName = toCamel(options.varFunction({ ext: ext, filename: extFreename }).replace(/\./g, '_'));
var fileContent;
if (!options.htmlOutputTemplate) {
fileContent = _.template(
htmlInternalTemplate(options.eol)
)({ modulename: moduleName, varname: varName, content: htmlContent });
} else {
fileContent = _.template(
replaceNewLines(options.htmlOutputTemplate, options.eol)
)({ modulename: moduleName, varname: varName, content: htmlContent });
}
// Write the content to a file
var outputfile = getOutputFile(filename, options.htmlOutDir, options.flatten);
mkdirParent(path.dirname(outputfile));
fs.writeFileSync(outputfile, fileContent);
return outputfile;
}
// Replace user-supplied templates newlines with newlines appropriate for the current OS
function | (input: string, newLines: string) {
return input.replace(/\r/g, '').replace(/\n/g, newLines);
}
function getOutputFile(filename: string, htmlOutDir: string, flatten: boolean): string {
var outputfile = filename;
// NOTE If an htmlOutDir was specified
if (htmlOutDir !== null) {
var dir = getPath(htmlOutDir);
if (fs.existsSync(dir)) {
var relativeFilename = filename;
if (flatten) {
relativeFilename = path.basename(filename);
}
outputfile = path.join(dir, relativeFilename);
}
}
return outputfile + '.ts';
}
function getPath(dir: string): string {
// NOTE If we don't have a valid absolute path
if (!fs.existsSync(dir)) {
// NOTE Try relative from the current working directory
dir = path.join(process.cwd(), dir);
}
return dir;
}
function mkdirParent(dirPath: string, mode?: number) {
// NOTE Call the standard fs.mkdirSync
try {
fs.mkdirSync(dirPath, mode);
} catch (error) {
// NOTE When it fail in this way, do the custom steps
if (error && error.errno === 34) {
// NOTE Create all the parents recursively
mkdirParent(path.dirname(dirPath), mode);
// NOTE And then the directory
mkdirParent(dirPath, mode);
}
}
}
| replaceNewLines |
package.py | # -*- coding: utf-8 -*-
name = 'powershell'
version = '6.0.2'
author = ['microsoft']
tools = ["pwsh"]
requires = []
variants = [
['platform-windows'],
]
def | ():
import os
applications_path = os.environ["APPLICATIONS_PATH"]
env.PATH.append(os.path.join(applications_path, "powershell", "%s"%version).replace('/', os.sep))
| commands |
ueditor.config.js | /**
* ueditor完整配置项
* 可以在这里配置整个编辑器的特性
*/
/**************************提示********************************
* 所有被注释的配置项均为UEditor默认值。
* 修改默认配置请首先确保已经完全明确该参数的真实用途。
* 主要有两种修改方案,一种是取消此处注释,然后修改成对应参数;另一种是在实例化编辑器时传入对应参数。
* 当升级编辑器时,可直接使用旧版配置文件替换新版配置文件,不用担心旧版配置文件中因缺少新功能所需的参数而导致脚本报错。
**************************提示********************************/
(function () {
/**
* 编辑器资源文件根路径。它所表示的含义是:以编辑器实例化页面为当前路径,指向编辑器资源文件(即dialog等文件夹)的路径。
* 鉴于很多同学在使用编辑器的时候出现的种种路径问题,此处强烈建议大家使用"相对于网站根目录的相对路径"进行配置。
* "相对于网站根目录的相对路径"也就是以斜杠开头的形如"/myProject/ueditor/"这样的路径。
* 如果站点中有多个不在同一层级的页面需要实例化编辑器,且引用了同一UEditor的时候,此处的URL可能不适用于每个页面的编辑器。
* 因此,UEditor提供了针对不同页面的编辑器可单独配置的根路径,具体来说,在需要实例化编辑器的页面最顶部写上如下代码即可。当然,需要令此处的URL等于对应的配置。
* window.UEDITOR_HOME_URL = "/xxxx/xxxx/";
*/
var URL = window.UEDITOR_HOME_URL || getUEBasePath();
/**
* 配置项主体。注意,此处所有涉及到路径的配置别遗漏URL变量。
*/
// alert("<?=BASE_URL(adminpanel/news/ueditor)?>");
window.UEDITOR_CONFIG = {
//为编辑器实例添加一个路径,这个不能被注释
UEDITOR_HOME_URL: URL
// 服务器统一请求接口路径
, serverUrl: SITE_URL + "adminpanel/news/ueditor?action=config"
//工具栏上的所有的功能按钮和下拉框,可以在new编辑器的实例时选择自己需要的重新定义
, toolbars: [[
'fullscreen', 'source', '|', 'undo', 'redo', '|',
'bold', 'italic', 'underline', 'fontborder', 'strikethrough', 'superscript', 'subscript', 'removeformat', 'formatmatch', 'autotypeset', 'blockquote', 'pasteplain', '|', 'forecolor', 'backcolor', 'insertorderedlist', 'insertunorderedlist', 'selectall', 'cleardoc', '|',
'rowspacingtop', 'rowspacingbottom', 'lineheight', '|',
'customstyle', 'paragraph', 'fontfamily', 'fontsize', '|',
'directionalityltr', 'directionalityrtl', 'indent', '|',
'justifyleft', 'justifycenter', 'justifyright', 'justifyjustify', '|', 'touppercase', 'tolowercase', '|',
'link', 'unlink', 'anchor', '|', 'imagenone', 'imageleft', 'imageright', 'imagecenter', '|',
'simpleupload', 'insertimage', 'emotion', 'scrawl', 'insertvideo', 'music', 'attachment', 'map', 'gmap', 'insertframe', 'insertcode', 'webapp', 'pagebreak', 'template', 'background', '|',
'horizontal', 'date', 'time', 'spechars', 'snapscreen', 'wordimage', '|',
'inserttable', 'deletetable', 'insertparagraphbeforetable', 'insertrow', 'deleterow', 'insertcol', 'deletecol', 'mergecells', 'mergeright', 'mergedown', 'splittocells', 'splittorows', 'splittocols', 'charts', '|',
'print', 'preview', 'searchreplace', 'drafts', 'help'
]]
//当鼠标放在工具栏上时显示的tooltip提示,留空支持自动多语言配置,否则以配置值为准
//,labelMap:{
// 'anchor':'', 'undo':''
//}
//语言配置项,默认是zh-cn。有需要的话也可以使用如下这样的方式来自动多语言切换,当然,前提条件是lang文件夹下存在对应的语言文件:
//lang值也可以通过自动获取 (navigator.language||navigator.browserLanguage ||navigator.userLanguage).toLowerCase()
//,lang:"zh-cn"
//,langPath:URL +"lang/"
//主题配置项,默认是default。有需要的话也可以使用如下这样的方式来自动多主题切换,当然,前提条件是themes文件夹下存在对应的主题文件:
//现有如下皮肤:default
//,theme:'default'
//,themePath:URL +"themes/"
//,zIndex : 900 //编辑器层级的基数,默认是900
//针对getAllHtml方法,会在对应的head标签中增加该编码设置。
//,charset:"utf-8"
//若实例化编辑器的页面手动修改的domain,此处需要设置为true
//,customDomain:false
//常用配置项目
//,isShow : true //默认显示编辑器
//,textarea:'editorValue' // 提交表单时,服务器获取编辑器提交内容的所用的参数,多实例时可以给容器name属性,会将name给定的值最为每个实例的键值,不用每次实例化的时候都设置这个值
//,initialContent:'欢迎使用ueditor!' //初始化编辑器的内容,也可以通过textarea/script给值,看官网例子
//,autoClearinitialContent:true //是否自动清除编辑器初始内容,注意:如果focus属性设置为true,这个也为真,那么编辑器一上来就会触发导致初始化的内容看不到了
//,focus:false //初始化时,是否让编辑器获得焦点true或false
//如果自定义,最好给p标签如下的行高,要不输入中文时,会有跳动感
//,initialStyle:'p{line-height:1em}'//编辑器层级的基数,可以用来改变字体等
//,iframeCssUrl: URL + '/themes/iframe.css' //给编辑区域的iframe引入一个css文件
//indentValue
//首行缩进距离,默认是2em
//,indentValue:'2em'
//,initialFrameWidth:1000 //初始化编辑器宽度,默认1000
//,initialFrameHeight:320 //初始化编辑器高度,默认320
//,readonly : false //编辑器初始化结束后,编辑区域是否是只读的,默认是false
//,autoClearEmptyNode : true //getContent时,是否删除空的inlineElement节点(包括嵌套的情况)
//启用自动保存
//,enableAutoSave: true
//自动保存间隔时间, 单位ms
//,saveInterval: 500
//,fullscreen : false //是否开启初始化时即全屏,默认关闭
//,imagePopup:true //图片操作的浮层开关,默认打开
//,autoSyncData:true //自动同步编辑器要提交的数据
//,emotionLocalization:false //是否开启表情本地化,默认关闭。若要开启请确保emotion文件夹下包含官网提供的images表情文件夹
//粘贴只保留标签,去除标签所有属性
//,retainOnlyLabelPasted: false
//,pasteplain:false //是否默认为纯文本粘贴。false为不使用纯文本粘贴,true为使用纯文本粘贴
//纯文本粘贴模式下的过滤规则
//'filterTxtRules' : function(){
// function transP(node){
// node.tagName = 'p';
// node.setStyle();
// }
// return {
// //直接删除及其字节点内容
// '-' : 'script style object iframe embed input select',
// 'p': {$:{}},
// 'br':{$:{}},
// 'div':{'$':{}},
// 'li':{'$':{}},
// 'caption':transP,
// 'th':transP,
// 'tr':transP,
// 'h1':transP,'h2':transP,'h3':transP,'h4':transP,'h5':transP,'h6':transP,
// 'td':function(node){
// //没有内容的td直接删掉
// var txt = !!node.innerText();
// if(txt){
// node.parentNode.insertAfter(UE.uNode.createText(' '),node);
// }
// node.parentNode.removeChild(node,node.innerText())
// }
// }
//}()
//,allHtmlEnabled:false //提交到后台的数据是否包含整个html字符串
//insertorderedlist
//有序列表的下拉配置,值留空时支持多语言自动识别,若配置值,则以此值为准
//,'insertorderedlist':{
// //自定的样式
// 'num':'1,2,3...',
// 'num1':'1),2),3)...',
// 'num2':'(1),(2),(3)...',
// 'cn':'一,二,三....',
// 'cn1':'一),二),三)....',
// 'cn2':'(一),(二),(三)....',
// //系统自带
// 'decimal' : '' , //'1,2,3...'
// 'lower-alpha' : '' , // 'a,b,c...'
// 'lower-roman' : '' , //'i,ii,iii...'
// 'upper-alpha' : '' , lang //'A,B,C'
// 'upper-roman' : '' //'I,II,III...'
//}
//insertunorderedlist
//无序列表的下拉配置,值留空时支持多语言自动识别,若配置值,则以此值为准
//,insertunorderedlist : { //自定的样式
// 'dash' :'— 破折号', //-破折号
// 'dot':' 。 小圆圈', //系统自带
// 'circle' : '', // '○ 小圆圈'
// 'disc' : '', // '● 小圆点'
// 'square' : '' //'■ 小方块'
//}
//,listDefaultPaddingLeft : '30'//默认的左边缩进的基数倍
//,listiconpath : 'http://bs.baidu.com/listicon/'//自定义标号的路径
//,maxListLevel : 3 //限制可以tab的级数, 设置-1为不限制
//,autoTransWordToList:false //禁止word中粘贴进来的列表自动变成列表标签
//fontfamily
//字体设置 label留空支持多语言自动切换,若配置,则以配置值为准
//,'fontfamily':[
// { label:'',name:'songti',val:'宋体,SimSun'},
// { label:'',name:'kaiti',val:'楷体,楷体_GB2312, SimKai'},
// { label:'',name:'yahei',val:'微软雅黑,Microsoft YaHei'},
// { label:'',name:'heiti',val:'黑体, SimHei'},
// { label:'',name:'lishu',val:'隶书, SimLi'},
// { label:'',name:'andaleMono',val:'andale mono'},
// { label:'',name:'arial',val:'arial, helvetica,sans-serif'},
// { label:'',name:'arialBlack',val:'arial black,avant garde'},
// { label:'',name:'comicSansMs',val:'comic sans ms'},
// { label:'',name:'impact',val:'impact,chicago'},
// { label:'',name:'timesNewRoman',val:'times new roman'}
//]
//fontsize
//字号
//,'fontsize':[10, 11, 12, 14, 16, 18, 20, 24, 36]
//paragraph
//段落格式 值留空时支持多语言自动识别,若配置,则以配置值为准
//,'paragraph':{'p':'', 'h1':'', 'h2':'', 'h3':'', 'h4':'', 'h5':'', 'h6':''}
//rowspacingtop
//段间距 值和显示的名字相同
//,'rowspacingtop':['5', '10', '15', '20', '25']
//rowspacingBottom
//段间距 值和显示的名字相同
//,'rowspacingbottom':['5', '10', '15', '20', '25']
//lineheight
//行内间距 值和显示的名字相同
//,'lineheight':['1', '1.5','1.75','2', '3', '4', '5']
//customstyle
//自定义样式,不支持国际化,此处配置值即可最后显示值
//block的元素是依据设置段落的逻辑设置的,inline的元素依据BIU的逻辑设置
//尽量使用一些常用的标签
//参数说明
//tag 使用的标签名字
//label 显示的名字也是用来标识不同类型的标识符,注意这个值每个要不同,
//style 添加的样式
//每一个对象就是一个自定义的样式
//,'customstyle':[
// {tag:'h1', name:'tc', label:'', style:'border-bottom:#ccc 2px solid;padding:0 4px 0 0;text-align:center;margin:0 0 20px 0;'},
// {tag:'h1', name:'tl',label:'', style:'border-bottom:#ccc 2px solid;padding:0 4px 0 0;margin:0 0 10px 0;'},
// {tag:'span',name:'im', label:'', style:'font-style:italic;font-weight:bold'},
// {tag:'span',name:'hi', label:'', style:'font-style:italic;font-weight:bold;color:rgb(51, 153, 204)'}
//]
//打开右键菜单功能
//,enableContextMenu: true
//右键菜单的内容,可以参考plugins/contextmenu.js里边的默认菜单的例子,label留空支持国际化,否则以此配置为准
//,contextMenu:[
// {
// label:'', //显示的名称
// cmdName:'selectall',//执行的command命令,当点击这个右键菜单时
// //exec可选,有了exec就会在点击时执行这个function,优先级高于cmdName
// exec:function () {
// //this是当前编辑器的实例
// //this.ui._dialogs['inserttableDialog'].open();
// }
// }
//]
//快捷菜单
//,shortcutMenu:["fontfamily", "fontsize", "bold", "italic", "underline", "forecolor", "backcolor", "insertorderedlist", "insertunorderedlist"]
//elementPathEnabled
//是否启用元素路径,默认是显示
//,elementPathEnabled : true
//wordCount
//,wordCount:true //是否开启字数统计
//,maximumWords:10000 //允许的最大字符数
//字数统计提示,{#count}代表当前字数,{#leave}代表还可以输入多少字符数,留空支持多语言自动切换,否则按此配置显示
//,wordCountMsg:'' //当前已输入 {#count} 个字符,您还可以输入{#leave} 个字符
//超出字数限制提示 留空支持多语言自动切换,否则按此配置显示
//,wordOverFlowMsg:'' //<span style="color:red;">你输入的字符个数已经超出最大允许值,服务器可能会拒绝保存!</span>
//tab
//点击tab键时移动的距离,tabSize倍数,tabNode什么字符做为单位
//,tabSize:4
//,tabNode:' '
//removeFormat
//清除格式时可以删除的标签和属性
//removeForamtTags标签
//,removeFormatTags:'b,big,code,del,dfn,em,font,i,ins,kbd,q,samp,small,span,strike,strong,sub,sup,tt,u,var'
//removeFormatAttributes属性
//,removeFormatAttributes:'class,style,lang,width,height,align,hspace,valign'
//undo
//可以最多回退的次数,默认20
//,maxUndoCount:20
//当输入的字符数超过该值时,保存一次现场
//,maxInputCount:1
//autoHeightEnabled
// 是否自动长高,默认true
//,autoHeightEnabled:true
//scaleEnabled
//是否可以拉伸长高,默认true(当开启时,自动长高失效)
//,scaleEnabled:false
//,minFrameWidth:800 //编辑器拖动时最小宽度,默认800
//,minFrameHeight:220 //编辑器拖动时最小高度,默认220
//autoFloatEnabled
//是否保持toolbar的位置不动,默认true
//,autoFloatEnabled:true
//浮动时工具栏距离浏览器顶部的高度,用于某些具有固定头部的页面
//,topOffset:30
//编辑器底部距离工具栏高度(如果参数大于等于编辑器高度,则设置无效)
//,toolbarTopOffset:400
//设置远程图片是否抓取到本地保存
//,catchRemoteImageEnable: true //设置是否抓取远程图片
//pageBreakTag
//分页标识符,默认是_ueditor_page_break_tag_
//,pageBreakTag:'_ueditor_page_break_tag_'
//autotypeset
//自动排版参数
//,autotypeset: {
// mergeEmptyline: true, //合并空行
// removeClass: true, //去掉冗余的class
// removeEmptyline: false, //去掉空行
// textAlign:"left", //段落的排版方式,可以是 left,right,center,justify 去掉这个属性表示不执行排版
// imageBlockLine: 'center', //图片的浮动方式,独占一行剧中,左右浮动,默认: center,left,right,none 去掉这个属性表示不执行排版
// pasteFilter: false, //根据规则过滤没事粘贴进来的内容
// clearFontSize: false, //去掉所有的内嵌字号,使用编辑器默认的字号
// clearFontFamily: false, //去掉所有的内嵌字体,使用编辑器默认的字体
// removeEmptyNode: false, // 去掉空节点
// //可以去掉的标签
// removeTagNames: {标签名字:1},
// indent: false, // 行首缩进
// indentValue : '2em', //行首缩进的大小
// bdc2sb: false,
// tobdc: false
//}
//tableDragable
//表格是否可以拖拽
//,tableDragable: true
//sourceEditor
//源码的查看方式,codemirror 是代码高亮,textarea是文本框,默认是codemirror
//注意默认codemirror只能在ie8+和非ie中使用
//,sourceEditor:"codemirror"
//如果sourceEditor是codemirror,还用配置一下两个参数
//codeMirrorJsUrl js加载的路径,默认是 URL + "third-party/codemirror/codemirror.js"
//,codeMirrorJsUrl:URL + "third-party/codemirror/codemirror.js"
//codeMirrorCssUrl css加载的路径,默认是 URL + "third-party/codemirror/codemirror.css"
//,codeMirrorCssUrl:URL + "third-party/codemirror/codemirror.css"
//编辑器初始化完成后是否进入源码模式,默认为否。
//,sourceEditorFirst:false
//iframeUrlMap
//dialog内容的路径 ~会被替换成URL,垓属性一旦打开,将覆盖所有的dialog的默认路径
//,iframeUrlMap:{
// 'anchor':'~/dialogs/anchor/anchor.html',
//}
//allowLinkProtocol 允许的链接地址,有这些前缀的链接地址不会自动添加http
//, allowLinkProtocols: ['http:', 'https:', '#', '/', 'ftp:', 'mailto:', 'tel:', 'git:', 'svn:']
//webAppKey 百度应用的APIkey,每个站长必须首先去百度官网注册一个key后方能正常使用app功能,注册介绍,http://app.baidu.com/static/cms/getapikey.html
//, webAppKey: ""
//默认过滤规则相关配置项目
//,disabledTableInTable:true //禁止表格嵌套
//,allowDivTransToP:true //允许进入编辑器的div标签自动变成p标签
//,rgb2Hex:true //默认产出的数据中的color自动从rgb格式变成16进制格式
// xss 过滤是否开启,inserthtml等操作
,xssFilterRules: true
//input xss过滤
,inputXssFilter: true
//output xss过滤
,outputXssFilter: true
// xss过滤白名单 名单来源: https://raw.githubusercontent.com/leizongmin/js-xss/master/lib/default.js
,whitList: {
a: ['target', 'href', 'title', 'class', 'style'],
abbr: ['title', 'class', 'style'],
address: ['class', 'style'],
area: ['shape', 'coords', 'href', 'alt'],
article: [],
aside: [],
audio: ['autoplay', 'controls', 'loop', 'preload', 'src', 'class', 'style'],
b: ['class', 'style'],
bdi: ['dir'],
bdo: ['dir'],
big: [],
blockquote: ['cite', 'class', 'style'],
br: [],
caption: ['class', 'style'],
center: [],
cite: [],
code: ['class', 'style'],
col: ['align', 'valign', 'span', 'width', 'class', 'style'],
colgroup: ['align', 'valign', 'span', 'width', 'class', 'style'],
dd: ['class', 'style'],
del: ['datetime'], | details: ['open'],
div: ['class', 'style'],
dl: ['class', 'style'],
dt: ['class', 'style'],
em: ['class', 'style'],
font: ['color', 'size', 'face'],
footer: [],
h1: ['class', 'style'],
h2: ['class', 'style'],
h3: ['class', 'style'],
h4: ['class', 'style'],
h5: ['class', 'style'],
h6: ['class', 'style'],
header: [],
hr: [],
i: ['class', 'style'],
img: ['src', 'alt', 'title', 'width', 'height', 'id', '_src', 'loadingclass', 'class', 'data-latex'],
ins: ['datetime'],
li: ['class', 'style'],
mark: [],
nav: [],
ol: ['class', 'style'],
p: ['class', 'style'],
pre: ['class', 'style'],
s: [],
section:[],
small: [],
span: ['class', 'style'],
sub: ['class', 'style'],
sup: ['class', 'style'],
strong: ['class', 'style'],
table: ['width', 'border', 'align', 'valign', 'class', 'style'],
tbody: ['align', 'valign', 'class', 'style'],
td: ['width', 'rowspan', 'colspan', 'align', 'valign', 'class', 'style'],
tfoot: ['align', 'valign', 'class', 'style'],
th: ['width', 'rowspan', 'colspan', 'align', 'valign', 'class', 'style'],
thead: ['align', 'valign', 'class', 'style'],
tr: ['rowspan', 'align', 'valign', 'class', 'style'],
tt: [],
u: [],
ul: ['class', 'style'],
video: ['autoplay', 'controls', 'loop', 'preload', 'src', 'height', 'width', 'class', 'style']
}
};
function getUEBasePath(docUrl, confUrl) {
return getBasePath(docUrl || self.document.URL || self.location.href, confUrl || getConfigFilePath());
}
function getConfigFilePath() {
var configPath = document.getElementsByTagName('script');
return configPath[ configPath.length - 1 ].src;
}
function getBasePath(docUrl, confUrl) {
var basePath = confUrl;
if (/^(\/|\\\\)/.test(confUrl)) {
basePath = /^.+?\w(\/|\\\\)/.exec(docUrl)[0] + confUrl.replace(/^(\/|\\\\)/, '');
} else if (!/^[a-z]+:/i.test(confUrl)) {
docUrl = docUrl.split("#")[0].split("?")[0].replace(/[^\\\/]+$/, '');
basePath = docUrl + "" + confUrl;
}
return optimizationPath(basePath);
}
function optimizationPath(path) {
var protocol = /^[a-z]+:\/\//.exec(path)[ 0 ],
tmp = null,
res = [];
path = path.replace(protocol, "").split("?")[0].split("#")[0];
path = path.replace(/\\/g, '/').split(/\//);
path[ path.length - 1 ] = "";
while (path.length) {
if (( tmp = path.shift() ) === "..") {
res.pop();
} else if (tmp !== ".") {
res.push(tmp);
}
}
return protocol + res.join("/");
}
window.UE = {
getUEBasePath: getUEBasePath
};
})(); | |
App.js | import * as React from 'react';
import { ethers } from 'ethers';
import PingPortal from './utils/PingPortal.json';
import './App.css';
export default function | () {
const [loading, setLoading] = React.useState(false);
const [activeAccount, setActiveAccount] = React.useState('');
const [allPings, setAllPings] = React.useState([]);
const [message, setMessage] = React.useState('');
const contractAddress = '0x214d8D01441c940585126752649AbCa64eDa16b6';
const contractABI = PingPortal.abi;
const initWalletConnection = async () => {
try {
const { ethereum } = window;
if (!ethereum) {
console.log('Must install Metamask!');
return;
} else {
console.log('We have ethereum!', ethereum);
}
const accounts = await ethereum.request({ method: 'eth_accounts' });
if (accounts.length > 0) {
const account = accounts[0];
console.log('Connected Account:', account);
setActiveAccount(account);
await getAllPings();
} else {
console.log('No connected account found');
}
} catch (error) {
console.error(error);
}
};
const connectWallet = async () => {
try {
const { ethereum } = window;
if (!ethereum) {
console.log('Must install Metamask!');
return;
}
const accounts = await ethereum.request({
method: 'eth_requestAccounts',
});
console.log('connected', accounts[0]);
setActiveAccount(accounts[0]);
} catch (error) {}
};
React.useEffect(() => {
initWalletConnection();
});
//listen for emitter events
React.useEffect(() => {
let pingPortalContract;
const onNewPing = (from, timestamp, message) => {
console.log('NewPing', from, timestamp, message);
setAllPings(prevState => [
...prevState,
{
address: from,
timestamp: new Date(timestamp * 1000),
message: message,
},
]);
};
if (window.ethereum) {
const provider = new ethers.providers.Web3Provider(window.ethereum);
const signer = provider.getSigner();
pingPortalContract = new ethers.Contract(
contractAddress,
contractABI,
signer
);
pingPortalContract.on('NewPing', onNewPing);
}
return () => {
if (pingPortalContract) {
pingPortalContract.off('NewPing', onNewPing);
}
};
});
const getAllPings = async () => {
try {
const { ethereum } = window;
if (ethereum) {
const provider = new ethers.providers.Web3Provider(ethereum);
const signer = provider.getSigner();
const pingPortalContract = new ethers.Contract(
contractAddress,
contractABI,
signer
);
const pings = await pingPortalContract.getAllPings();
console.log(pings[0]);
// remove obj fields we don't need
let pingsCleaned = pings.map(ping => ({
address: ping.pinger,
timestamp: new Date(ping.timestamp * 1000),
message: ping.message,
isWinner: ping.isWinner,
}));
console.log({ pingsCleaned });
setAllPings(pingsCleaned);
} else {
console.log("Ethereum object doesn't exist!");
}
} catch (error) {
console.log(error);
}
};
const ping = async () => {
setLoading(true);
try {
const { ethereum } = window;
if (ethereum) {
if (message.length > 0) {
const provider = new ethers.providers.Web3Provider(ethereum);
const signer = provider.getSigner();
const pingPortalContract = new ethers.Contract(
contractAddress,
contractABI,
signer
);
let count = await pingPortalContract.getTotalPings();
console.log('Retrieved total ping count...', count.toNumber());
/*
* Execute the actual ping from your smart contract
*/
const pingTxn = await pingPortalContract.ping(message, {
gasLimit: 300000,
});
console.log('Mining...', pingTxn.hash);
await pingTxn.wait();
console.log('Mined -- ', pingTxn.hash);
count = await pingPortalContract.getTotalPings();
console.log('Retrieved total ping count...', count.toNumber());
await getAllPings();
setMessage('');
setLoading(false);
} else {
setLoading(false);
console.warn('Please write a message!');
}
} else {
setLoading(false);
console.log("Ethereum object doesn't exist!");
}
} catch (error) {
setLoading(false);
console.error(error);
}
};
return (
<div className='mainContainer'>
<div className='dataContainer'>
<div className='header'>♟ Rinkeby Ping Contest ♟</div>
<p>
This is the really cool portal to "ping" a smart contract. If you
ping, you have a 25% chance to win some prize money! 0.001 ETH (on
rinkeby) to be exact...
</p>
<p>
Also, you must wait 1 minute between pings. Can't let you spam my
contract for the moneys.
</p>
<p>
<b>Must be on Rinkeby network!</b>
</p>
<p>
Written by{' '}
<a href='https://twitter.com/jacobdcastro'>@jacobdcastro</a>.
</p>
<p>
<a href={`https://rinkeby.etherscan.io/address/${contractAddress}`}>
View contract on etherscan
</a>{' '}
(Deployed to Rinkeby)
<br />
<a href='https://github.com/jacobdcastro/rinkeby'>
View code on Github
</a>
</p>
{!activeAccount && (
<>
<p>Connect Your Wallet First!</p>
<button className='pingButton' onClick={connectWallet}>
Connect Wallet
</button>
</>
)}
<h2 style={{ textAlign: 'center' }}>Ping the Contract</h2>
<input
style={{ padding: '8px' }}
onChange={e => setMessage(e.target.value)}
value={message}
placeholder='Please write your message to ping'
/>
<button className='pingButton' onClick={ping}>
{loading ? 'Awaiting Ping Confirmation...' : 'Send Ping'}
</button>
<h2 style={{ textAlign: 'center' }}>
All Previous Pings ({allPings.length})
</h2>
{allPings.map((ping, index) => {
return (
<div
key={index}
style={{
backgroundColor: ping.isWinner ? 'lightgreen' : 'OldLace',
marginTop: '16px',
padding: '8px',
}}
>
{ping.isWinner && 'This ping won prize money!'}
<div>Address: {ping.address}</div>
<div>Time: {ping.timestamp.toString()}</div>
<div>Message: {ping.message}</div>
{/* <div>Winner: {ping.isWinner.toString()}</div> */}
</div>
);
})}
</div>
</div>
);
}
| App |
unbanchat.js | let handler = async (m, { conn }) => {
global.db.data.chats[m.chat].isBanned = false
m.reply('Done!')
} | handler.command = /^unbanchat$/i
handler.owner = true
module.exports = handler | handler.help = ['unbanchat']
handler.tags = ['owner'] |
de.js | export default {
isoName: 'de',
nativeName: 'Deutsch',
label: {
clear: 'Leeren',
ok: 'Ok',
cancel: 'Abbrechen',
close: 'Schließen',
set: 'Setzen',
select: 'Auswählen',
reset: 'Zurücksetzen',
remove: 'Löschen',
update: 'Aktualisieren',
create: 'Erstellen',
search: 'Suche',
filter: 'Filter',
refresh: 'Aktualisieren'
},
date: { | monthsShort: 'Jan_Feb_Mar_Apr_Mai_Jun_Jul_Aug_Sep_Okt_Nov_Dez'.split('_'),
firstDayOfWeek: 1, // 0-6, 0 - Sunday, 1 Monday, ...
format24h: true,
pluralDay: 'tage'
},
table: {
noData: 'Keine Daten vorhanden.',
noResults: 'Keine Einträge gefunden',
loading: 'Lade...',
selectedRecords: rows => (
rows > 1
? rows + ' ausgewählte Zeilen'
: (rows === 0 ? 'Keine' : '1') + ' ausgewählt.'
),
recordsPerPage: 'Zeilen pro Seite',
allRows: 'Alle',
pagination: (start, end, total) => start + '-' + end + ' von ' + total,
columns: 'Spalten'
},
editor: {
url: 'URL',
bold: 'Fett',
italic: 'Kursiv',
strikethrough: 'Durchgestrichen',
underline: 'Unterstrichen',
unorderedList: 'Ungeordnete Liste',
orderedList: 'Geordnete Liste',
subscript: 'tiefgestellt',
superscript: 'hochgestellt',
hyperlink: 'Link',
toggleFullscreen: 'Vollbild umschalten',
quote: 'Zitat',
left: 'linksbündig',
center: 'zentriert',
right: 'rechtsbündig',
justify: 'Ausrichten',
print: 'Drucken',
outdent: 'ausrücken',
indent: 'einrücken',
removeFormat: 'Entferne Formatierung',
formatting: 'Formatiere',
fontSize: 'Schriftgröße',
align: 'Ausrichten',
hr: 'Horizontale Linie einfügen',
undo: 'Rückgänging',
redo: 'Wiederherstellen',
heading1: 'Überschrift 1',
heading2: 'Überschrift 2',
heading3: 'Überschrift 3',
heading4: 'Überschrift 4',
heading5: 'Überschrift 5',
heading6: 'Überschrift 6',
paragraph: 'Absatz',
code: 'Code',
size1: 'Sehr klein',
size2: 'klein',
size3: 'Normal',
size4: 'Groß',
size5: 'Größer',
size6: 'Sehr groß',
size7: 'Maximum',
defaultFont: 'Standard Schrift',
viewSource: 'Quelltext anzeigen'
},
tree: {
noNodes: 'Keine Knoten verfügbar',
noResults: 'Keine passenden Knoten gefunden'
}
} | days: 'Sonntag_Montag_Dienstag_Mittwoch_Donnerstag_Freitag_Samstag'.split('_'),
daysShort: 'So_Mo_Di_Mi_Do_Fr_Sa'.split('_'),
months: 'Januar_Februar_März_April_Mai_Juni_Juli_August_September_Oktober_November_Dezember'.split('_'), |
hello.py | #!python3
# single-line string:
print("hello")
# multi-line strings: | l
d
"""
)
print("abc"*3)
a = [1,2,3,4]
print(a*3) | print("""
w
o
r |
filter_patient_sample.py | import glob
import numpy as numpy
import cv2
import os
import random
label_list = glob.glob("./iafoss_data/256_filter/label/*.png")
file_list = glob.glob("./iafoss_data/256_filter/train/*.png")
assert len(label_list) == len(file_list), "error"
for i in range(len(label_list)):
label = cv2.imread(label_list[i], 0)
file = cv2.imread(file_list[i], 0)
print(label.sum())
if label.sum() == 0 and random.uniform(0,1) > 0.5: | os.remove(label_list[i])
os.remove(file_list[i]) | |
BlackboardCrawler.py | import json
import time
import requests
import urllib
import re
import os
from functools import reduce
import uuid # in case the error happens
from getpass import getpass
# import xml.etree.ElementTree as ET
# XML parser is not used because the fucking –
import inspect # debugging purpose
import string
from sys import platform, stdout
from utils import mkdir, directory_flatten
class AuthenticationException(Exception):
pass
# Only developer should change it?
class BCFlags(object):
SLEEP_TIME=1
MAX_DEPTH=2
VERBOSE=True
IGNORE_SAME=True
# IGNORE_SAME: ignore same file
# Just a dict with support of middler & get/setattr
class BCPrefs(object):
@staticmethod
def default_dict():
return {'folder_prefix': 'blackboard', 'blackboard_url': 'https://blackboard.cuhk.edu.hk', 'email_suffix': '@link.cuhk.edu.hk', 'folder_name_style': 'CC_ONLY'}
no_verification = staticmethod(lambda x: True)
url_check = staticmethod(lambda x: re.match('https?://', str(x)) is not None)
email_check = staticmethod(lambda x: '@' in str(x))
folder_style = ['CC_ONLY', 'FULL', 'TERM_AND_CC']
folder_name_check = staticmethod(lambda x: x in BCPrefs.folder_style)
test = lambda x: '@' in x
keys = ['folder_prefix', 'blackboard_url', 'email_suffix', 'folder_name_style']
prefs_dict = {}
@staticmethod
def get_option_vals(key):
if(key is 'folder_name_style'):
return BCPrefs.folder_style
return []
@staticmethod
def get_pref_type(key):
if(key is 'folder_prefix'):
return 'text'
elif(key is 'blackboard_url'):
return 'text'
elif(key is 'email_suffix'):
return 'text'
elif(key is 'folder_name_style'):
return 'option'
else:
return 'text'
def __init__(self):
self['folder_prefix']='blackboard'
self['blackboard_url']='https://blackboard.cuhk.edu.hk'
self['email_suffix']='@link.cuhk.edu.hk'
self['folder_name_style']='CC_ONLY'
def __setitem__(self, key, value):
if(key not in self.keys):
# return (False,"{0} not found".format(key)) # Cannot return value in __setitem__...
raise KeyError("{0} not found".format(key))
if(key is 'blackboard_url' and not BCPrefs.url_check(value)):
# return (False,"{0} is not a url".format(value))
raise Exception("{0} is not a url".format(key))
if(key is 'email_suffix' and not BCPrefs.email_check(value)):
# return (False,"{0} is not a email".format(value))
raise Exception("{0} is not a email".format(value))
if(key is 'folder_name_style' and not BCPrefs.folder_name_check(value)):
raise Exception("{0} is not a folder style setting".format(value))
self.prefs_dict[key]=value
# return (True,'')
def __getitem__(self, key):
return self.prefs_dict[key]
def __delitem__(self, key):
del self.prefs_dict[key]
def __setattr__(self, key, value):
self.__setitem__(key, value)
def __getattr__(self, key):
return self.prefs_dict[key]
def __str__(self):
return self.prefs_dict.__str__()
def __repr__(self):
return self.prefs_dict.__str__()
class BlackboardCrawler:
flags = BCFlags()
prefs = BCPrefs()
def __init__(
self,
username,
password,
):
self.username = username
self.password = password
def updatePrefs(self, key, value):
self.prefs[key]=value
def log(self, s, t=0, coding='utf-8'):
if(self.flags.VERBOSE or t!=0):
curframe = inspect.currentframe()
calframe = inspect.getouterframes(curframe, 2)
caller = calframe[1][3]
if(isinstance(s, str)):
print('{0}:{1}'.format(caller, s.encode(coding)))
else:
print('{0}:{1}'.format(caller, s))
def title_print(self, s):
s = '@ {0} @'.format(s)
print('@'*len(s))
print(s)
print('@'*len(s))
# return True/ False
def login(self):
self._init_bb_session()
self._login()
self.userid = self._get_bb_userid()
mkdir(self.prefs.folder_prefix)
# return list of courses
# [course_id, course_code, display_name]
def get_courses(self):
if(not self.userid):
raise AuthenticationException("Not logged in.")
self.log("getting courses...")
courses_resp = self.sess.get("{0}/learn/api/v1/users/{1}/memberships?expand=course.instructorsMembership,course.effectiveAvailability,course.permissions,courseRole&limit=10000&organization=false".format(self.prefs.blackboard_url, self.userid))
courses = courses_resp.json()['results']
courses_info = []
for course in courses:
if(course['course']['isAvailable']):
content_id = course['id']
course_id = course['course']['id']
course_code = course['course']['courseId']
display_name = course['course']['displayName']
courses_info.append((course_id, course_code, display_name))
self.log("finish getting courses...")
return courses_info #[course_id, course_code, course_name]
# download courses
def download(self, selected_courses_info):
# Un-enumerate it
selected_courses_info = map(lambda x: x[1], selected_courses_info)
self._download(selected_courses_info)
def get_metadata_from_file(self, file_name):
try:
if(not os.path.isfile(file_name)):
return {'size': -1}
size = os.path.getsize(file_name)
except Exception as inst:
self.log('err: {0}'.format(inst))
return {'size': -1}
return {'size': size}
# def get_metadata_from_url(self, url):
# try:
# resp = self.sess.get(url)
# headers = resp.headers
# self.log("headers: {0}".format(headers))
# size = int(headers['Content-Length'])
# except Exception as inst:
# self.log('url: {0}'.format(url))
# self.log('err: {0}'.format(inst))
# return {'size': -1}
# return {'size': size}
def file_same(self, file_name, file_size):
if(not self.flags.IGNORE_SAME):
return False
metadata_file = self.get_metadata_from_file(file_name)
# DEBUG: return True if size same, return False if not
# self.log(metadata_file)
# self.log(file_size)
metadata_file['size'] = int(metadata_file['size'])
file_size = int(file_size)
if(metadata_file['size']<0 or file_size<0):
return False
return (metadata_file['size'] == file_size)
def make_course_dir(self, course_info):
course_id, course_code, course_name = course_info
print("{0}; {1}; {2}".format(self.prefs.folder_prefix, course_code, os.path.join(self.prefs.folder_prefix, course_code)))
#course_code: 2018R1-CSCI4180
if(self.prefs.folder_name_style == 'CC_ONLY'):
course_code = course_code.split('-')[1]
if(not reduce(lambda x,y: x and y, map(lambda x: not x.isdigit(), course_code))):
while(not course_code[-1].isdigit()):
course_code = directory_flatten(course_code[:-1])
dir_name = mkdir(os.path.join(self.prefs.folder_prefix, course_code))
elif(self.prefs.folder_name_style == 'FULL'):
dir_name = mkdir(os.path.join(self.prefs.folder_prefix, course_name))
elif(self.prefs.folder_name_style == 'TERM_AND_CC'):
dir_name = mkdir(os.path.join(self.prefs.folder_prefix, course_code))
else:
dir_name = mkdir(os.path.join(self.prefs.folder_prefix, course_code))
return dir_name
def _download(self, courses_info):
for course_info in courses_info:
course_id, course_code, course_name = course_info
sections = self._get_course_sections(course_info)
dirname = self.make_course_dir(course_info)
# Ask if the user want to continue download if the folder exists?
for section in sections:
section_title = directory_flatten(section[1])
# print '---------------------'
# print dirname, section_title, os.path.join(dirname, section_title)
# print '---------------------'
path_prefix = os.path.join(dirname, section_title)
directories, files = self._get_item_from_section(path_prefix, section)
self._download_item_from_directories(path_prefix, directories, self.flags.MAX_DEPTH)
self._download_files(path_prefix, files)
time.sleep(self.flags.SLEEP_TIME)
def _download_file(self, url, path):
if(self.prefs.blackboard_url not in url):
url = self.prefs.blackboard_url+url
resp = self.sess.get(url, stream=True)
headers = resp.headers
url = urllib.parse.unquote(resp.url)
if(platform == "darwin"):
url = url.encode('latin1')
if(isinstance(path, str)):
self.log(u'path: {0}'.format(path))
else:
self.log('path: {0}'.format(path))
self.log('url: {0}'.format(url))
self.log("header: {0}".format(resp.headers))
try:
header_content = headers['Content-Disposition']
coding, local_filename = re.findall("[*]=(.+)''(.+)", header_content)[0]
except:
local_filename = url.split('/')[-1]
# self.log('local_filename1: {0}'.format(repr(header_content)))
# self.log('coding: {0}'.format(repr(coding)))
# self.log('repr local_filename2: {0}'.format(repr(local_filename))) | # self.log('local_filename3: {0}'.format(local_filename_unquoted))
# self.log('str local_filename3: {0}'.format(str(local_filename_unquoted)))
# self.log('repr local_filename3: {0}'.format(repr(local_filename_unquoted)))
# self.log('type local_filename3: {0}'.format(type(local_filename_unquoted)))
final_local_filename = local_filename_unquoted
# final_local_filename = local_filename_unquoted
# self.log(u'local_filename4: {0}'.format(final_local_filename))
# self.log(u'repr local_filename4: {0}'.format(repr(final_local_filename)))
try:
file_size = resp.headers['Content-Length']
except:
file_size = 0
# if(int(file_size)>=1024*1024*100):
# while(1):
# download = raw_input("The file {1} is around {0}MB, still download?(y/n)".format(int(file_size)/1024/1024, local_filename))
# if(download.lower() == 'y'):
# break
# elif(download.lower() == 'n'):
# return local_filename
# else:
# print("Please input only y or n!")
# NOTE the stream=True parameter
if(not self.file_same(os.path.join(path, final_local_filename), file_size)):
self.log(u"Downloading {0}".format(final_local_filename))
r = resp
with open(os.path.join(path, final_local_filename), 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
#f.flush() commented by recommendation from J.F.Sebastian
else:
self.log(u'File are found to be same: {0}'.format(final_local_filename))
return final_local_filename
def _download_files(self, path_prefix, files):
self.log(repr(files))
for f in files:
file_url, file_name = f
if(isinstance(file_name, str)):
self.log(u'url: {0} {1}'.format(file_url, file_name))
else:
self.log('url: {0} {1}'.format(file_url, file_name))
self._download_file(file_url, path_prefix)
def _download_item_from_directories(self, path_prefix, directories, depth):
if(depth<=0):
return
for directory in directories:
directory_url, directory_title = directory
directory_title = directory_flatten(directory_title[:64])
self.log(u'reading: {0} {1}'.format(directory_url, directory_title))
new_prefix = os.path.join(path_prefix, directory_title)
next_directories, files = self._get_item_from_section(new_prefix, directory)
self._download_files(new_prefix, files)
self._download_item_from_directories(new_prefix, next_directories, depth-1)
def _get_item_from_section(self, path_prefix, section):
section_url, section_name = section
section_name = section_name[:64]
if(isinstance(section_name, str)):
self.log(u'----reading sections: {0}'.format(section_name))
else:
self.log('----reading sections: {0}'.format(section_name))
dir_name = mkdir(path_prefix)
# path_prefix = dir_name
if(self.prefs.blackboard_url not in section_url):
section_url = self.prefs.blackboard_url+section_url
course_section_resp = self.sess.get(section_url)
directories = re.findall('<a href="(/webapps/blackboard/content/listContent.jsp?.+?)".+?><span style=".+?">(.+?)</span>', course_section_resp.text)
files = re.findall('<a(?:.+?|)href="(?:https://blackboard.cuhk.edu.hk|)(/bbcswebdav.+?)">(.+?)<', course_section_resp.text)
""" files type 1
<a href="/bbcswebdav/pid-2238145-dt-content-rid-8465171_1/xid-8465171_1" onClick="this.href='/webapps/blackboard/execute/content/file?cmd=view&content_id=_2238145_1&course_id=_87673_1'">
<span style="color:#000000;">lesson 11</span>
</a>
<a href="https://blackboard.cuhk.edu.hk/bbcswebdav/pid-2470832-dt-content-rid-13383234_1/xid-13383234_1">大綱</a>
"""
files2 = re.findall('<a href="(/bbcswebdav.+?)".+?">.+?">(.+)[^</span>]</a>', course_section_resp.text)
""" files type 2
<a href="/bbcswebdav/pid-2233230-dt-content-rid-8062745_1/xid-8062745_1" target="_blank">
<img src="https://d1e7kr0efngifs.cloudfront.net/3400.1.0-rel.35+67d71b7/images/ci/ng/cal_year_event.gif" alt="File">
第十課閱讀材料 (徐復觀).pdf
</a>
"""
time.sleep(self.flags.SLEEP_TIME)
return (directories, (files+files2)) # [(directory_url, directory_name), (file_url, file_name)
def _get_course_sections(self, course_info):
course_id, course_code, course_name = course_info
if(isinstance(course_name, str)):
self.log(u'reading course: {0}'.format(course_name))
else:
self.log('reading course: {0}'.format(course_name))
course_url = "{1}/webapps/blackboard/execute/courseMain?course_id={0}".format(course_id, self.prefs.blackboard_url)
course_url_resp = self.sess.get(course_url)
sections = re.findall('<a href="(/webapps/blackboard/content/listContent.jsp?.+?)".+?">.+?">(.+?)</span>', course_url_resp.text)
return sections
def _init_bb_session(self):
sess = requests.session()
# fake header, otherwise they wont care
sess.headers['User-Agent']='Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'
# go to starting page
blackboard_main_resp = sess.get(self.prefs.blackboard_url)
next_url_1 = re.findall('url=(.+)', blackboard_main_resp.text)[0]
# redirected to login page
next_url_1 = urllib.parse.unquote(next_url_1)
login_page_resp = sess.get(next_url_1)
self.login_page_url = login_page_resp.url
self.sess = sess
def _login(self):
if '@' not in self.username:
self.username = self.username + self.prefs.email_suffix
form_auth_payload={
'UserName': self.username,
'Password': self.password,
'AuthMethod': "FormsAuthentication"
}
self.log("logging in...")
logging_in_resp = self.sess.post(self.login_page_url, data=form_auth_payload)
time.sleep(self.flags.SLEEP_TIME)
next_url_3 = re.findall('action="(.+?)">',logging_in_resp.text)[0]
SAMLResponse = re.findall('name="SAMLResponse" value="(.+?)" />',logging_in_resp.text)[0]
resp4 = self.sess.post(next_url_3, data = {'SAMLResponse': SAMLResponse})
time.sleep(self.flags.SLEEP_TIME)
self.log("logged in...")
def _get_bb_userid(self):
course_url = '{0}/ultra/course'.format(self.prefs.blackboard_url)
course_resp = self.sess.get(course_url)
userid = re.findall('"id":"(.+?)"', course_resp.text)[0]
return userid
def set_auth(self, u, p):
self.username = u
self.password = p
def setFlags():
pass | local_filename_unquoted = urllib.parse.unquote(local_filename)
self.debug = local_filename_unquoted |
adapter.test.js | const Utils = require("../dist/adapterUtils");
const gql = require("graphql-tag");
const assert = require("assert");
const _ = require("lodash");
describe("queryTable", () => {
it("queryTable with one and obj fields", function () {
assert.strictEqual((gql`${Utils.queryTable({
name: 'test', fields: {
a: true
}
})}`), (gql`query testFindOne($where:JSONType,$scope:[String]){
test {
one(where:$where,scope:$scope) {
a
}
}
}`));
});
it("queryTable with one and array fields", function () {
assert.strictEqual((gql`${Utils.queryTable({
name: 'test', fields: ['a']
})}`), (gql`query testFindOne($where:JSONType,$scope:[String]){
test {
one(where:$where,scope:$scope) {
a
}
}
}`));
});
it("queryTable with list and obj fields", function () {
assert.strictEqual((gql`${Utils.queryTable({
name: 'test',
isList: true,
fields: {
a: true
}
})}`), (gql`query testList($limit:Int,$offset:Int,$order:[TestOrderType],$subQuery:Boolean,$where:JSONType,$scope:[String]){
test {
list(limit:$limit,offset:$offset,order:$order,subQuery:$subQuery,where:$where,scope:$scope) {
a
}
}
}`));
});
it("queryTable with list and count and obj fields", function () {
assert.strictEqual((gql`${Utils.queryTable({
name: 'test',
isList: true,
withCount: true,
fields: {
a: true
}
})}`), (gql`query testList($limit:Int,$offset:Int,$order:[TestOrderType],$subQuery:Boolean,$where:JSONType,$scope:[String]){
test {
total:aggregate(fn:COUNT,field: _all,where: $where)
list(limit:$limit,offset:$offset,order:$order,subQuery:$subQuery,where:$where,scope:$scope) {
a
}
}
}`));
});
it("queryTable with list and count and complex fields", function () { | isList: true,
withCount: true,
fields: {
a: true,
b: {
alias: 'name'
},
c: {
fields: ['d', 'e']
}
}
})}`), (gql`query testList($limit:Int,$offset:Int,$order:[TestOrderType],$subQuery:Boolean,$where:JSONType,$scope:[String]){
test {
total:aggregate(fn:COUNT,field: _all,where: $where)
list(limit:$limit,offset:$offset,order:$order,subQuery:$subQuery,where:$where,scope:$scope) {
a,name:b,c{d,e}
}
}
}`));
});
}); | assert.strictEqual((gql`${Utils.queryTable({
name: 'test', |
issue-67945-1.rs | enum | <S> {
Var = {
let x: S = 0; //~ ERROR: mismatched types
0
},
}
fn main() {}
| Bug |
api_op_GetCostForecast.go | // Code generated by smithy-go-codegen DO NOT EDIT.
package costexplorer
import (
"context"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/costexplorer/types"
"github.com/aws/smithy-go/middleware"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// Retrieves a forecast for how much Amazon Web Services predicts that you will
// spend over the forecast time period that you select, based on your past costs.
func (c *Client) GetCostForecast(ctx context.Context, params *GetCostForecastInput, optFns ...func(*Options)) (*GetCostForecastOutput, error) {
if params == nil {
params = &GetCostForecastInput{}
}
result, metadata, err := c.invokeOperation(ctx, "GetCostForecast", params, optFns, c.addOperationGetCostForecastMiddlewares)
if err != nil {
return nil, err
}
out := result.(*GetCostForecastOutput)
out.ResultMetadata = metadata
return out, nil
}
type GetCostForecastInput struct {
// How granular you want the forecast to be. You can get 3 months of DAILY
// forecasts or 12 months of MONTHLY forecasts. The GetCostForecast operation
// supports only DAILY and MONTHLY granularities.
//
// This member is required.
Granularity types.Granularity
// Which metric Cost Explorer uses to create your forecast. For more information
// about blended and unblended rates, see Why does the "blended" annotation appear
// on some line items in my bill?
// (http://aws.amazon.com/premiumsupport/knowledge-center/blended-rates-intro/).
// Valid values for a GetCostForecast call are the following:
//
// * AMORTIZED_COST
//
// *
// BLENDED_COST
//
// * NET_AMORTIZED_COST
//
// * NET_UNBLENDED_COST
//
// * UNBLENDED_COST
//
// This member is required.
Metric types.Metric
// The period of time that you want the forecast to cover. The start date must be
// equal to or no later than the current date to avoid a validation error.
//
// This member is required.
TimePeriod *types.DateInterval
// The filters that you want to use to filter your forecast. The GetCostForecast
// API supports filtering by the following dimensions:
//
// * AZ
//
// * INSTANCE_TYPE
//
// *
// LINKED_ACCOUNT
//
// * LINKED_ACCOUNT_NAME
//
// * OPERATION
//
// * PURCHASE_TYPE
//
// * REGION
//
// *
// SERVICE
//
// * USAGE_TYPE
//
// * USAGE_TYPE_GROUP
//
// * RECORD_TYPE
//
// * OPERATING_SYSTEM
//
// *
// TENANCY
//
// * SCOPE
//
// * PLATFORM
//
// * SUBSCRIPTION_ID
//
// * LEGAL_ENTITY_NAME
//
// *
// DEPLOYMENT_OPTION
//
// * DATABASE_ENGINE
//
// * INSTANCE_TYPE_FAMILY
//
// *
// BILLING_ENTITY
//
// * RESERVATION_ID
//
// * SAVINGS_PLAN_ARN
Filter *types.Expression
// Cost Explorer always returns the mean forecast as a single point. You can
// request a prediction interval around the mean by specifying a confidence level.
// The higher the confidence level, the more confident Cost Explorer is about the
// actual value falling in the prediction interval. Higher confidence levels result
// in wider prediction intervals.
PredictionIntervalLevel *int32
}
type GetCostForecastOutput struct {
// The forecasts for your query, in order. For DAILY forecasts, this is a list of
// days. For MONTHLY forecasts, this is a list of months.
ForecastResultsByTime []types.ForecastResult
// How much you are forecasted to spend over the forecast period, in USD.
Total *types.MetricValue
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
}
func (c *Client) addOperationGetCostForecastMiddlewares(stack *middleware.Stack, options Options) (err error) {
err = stack.Serialize.Add(&awsAwsjson11_serializeOpGetCostForecast{}, middleware.After)
if err != nil {
return err
}
err = stack.Deserialize.Add(&awsAwsjson11_deserializeOpGetCostForecast{}, middleware.After)
if err != nil {
return err
}
if err = addSetLoggerMiddleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
return err
}
if err = addResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
return err
}
if err = addRetryMiddlewares(stack, options); err != nil {
return err
}
if err = addHTTPSignerV4Middleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
return err
}
if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
return err
}
if err = addClientUserAgent(stack); err != nil {
return err
}
if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = addOpGetCostForecastValidationMiddleware(stack); err != nil {
return err
}
if err = stack.Initialize.Add(newServiceMetadataMiddleware_opGetCostForecast(options.Region), middleware.Before); err != nil {
return err
}
if err = addRequestIDRetrieverMiddleware(stack); err != nil {
return err
}
if err = addResponseErrorMiddleware(stack); err != nil {
return err
}
if err = addRequestResponseLogging(stack, options); err != nil {
return err
}
return nil
}
func newServiceMetadataMiddleware_opGetCostForecast(region string) *awsmiddleware.RegisterServiceMetadata | {
return &awsmiddleware.RegisterServiceMetadata{
Region: region,
ServiceID: ServiceID,
SigningName: "ce",
OperationName: "GetCostForecast",
}
} |
|
imageVisualTests.ts | /*
* Power BI Visualizations
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the ""Software""), to deal | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
module powerbitests {
import ImageVisual = powerbi.visuals.ImageVisual;
import imageVisualCapabilities = powerbi.visuals.imageVisualCapabilities;
describe("ImageVisual", () => {
let imageVisualDataBuilder: ImageVisualDataBuilder;
const imageBase64value =
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAMAAABg3Am1AAAAkFBMVEX" +
"////+/v78+/vz8e/29PLr5+Tw7etwWEdbPyzTy8Tj3trn4t78+/mTgHOsnZLCt69PMRzX0cvJwLml" +
"lYm1qJ53YE8zEQBsUj9QMR68sKZHKRREJA+aiHx+aViGcF7q5t6NeWpkSDafjoAvCwA4FgC4rKSnm" +
"JE+HAaKdWhVOCeHeG1TPi5jT0BxX1J5aFtlSznQd83pAAACIklEQVQYGZXBiVraQBiG0ff7s8BMQs" +
"JkEaNsalspXbj/u2us1Scg7TM9B64R/0X8H6FMxBPu53ePiCS6bz/qkljCfatK4hnHIxjRjPURiWj" +
"Cf69AIpYIP44eRCxRdj/bBBFLkHZHh4glQbYOiGgykgoRT2QDIprIhxwRr3AJ8VTuChDvJPFPZQri" +
"jQyQuE6vTC94IUiLHMRHMnFGQmSHu+f7vkBckIDcN0PXfR7qJmR5Caj4+mnR1LN5InFGENrDYVHVL" +
"viiKIJzIWQp65uG0anCmBKu7ytvTCjNdmVwmJWqDogJEU4NIzPTG0YlCEzdAjFhDPOMP/QbryTAWF" +
"cYE4L262lfucx4l7jq0CFAtvGICeGTpN6v5svl/DTrN5t+9rB8/vR0WyMw6kfElHiYMUp8XS22/Wx" +
"1WvWbRRXESOT7RGJKrOYmE5cEIm094oxY32cISTaSkMxMYpQOHnHOcDcO443Eu9TliAtSulxjXFHu" +
"SsQHxn4ucYVAfCT8vZOIV3KYYUSznfLlgBFHZBk0S09JFOEzjM8PASQuyMQFEQpkNLPOQBITxjVJj" +
"WTkj4s6Y0oi/eIQ54QbwAzyuhtclpa8kAT17VODuCCaKgczIE2LXSnJDAj9022D+ECExy5jJEbiRV" +
"r3N3ePCeIKoXq/HnzKq9R3m+XzvMpBXCWQa7f9ZrvdbvrV/O521gbAxN+IURKGqm3brvElIxP/ImN" +
"CJi78AkZVGOZlPDldAAAAAElFTkSuQmCC";
beforeEach(() => {
imageVisualDataBuilder = new ImageVisualDataBuilder();
});
it("ImageVisual registered capabilities", () => {
expect(powerbi.visuals.visualPluginFactory.create().getPlugin("image").capabilities).toBe(imageVisualCapabilities);
});
it("ImageVisual registered capabilities: objects", () => {
expect(powerbi.visuals.visualPluginFactory.create().getPlugin("image").capabilities.objects).toBeDefined();
});
it("Image no visual configuration", () => {
expect(imageVisualDataBuilder.element.children().length).toBe(0);
});
it("Image to about:blank", () => {
imageVisualDataBuilder.imageUrl = "about:blank";
imageVisualDataBuilder.update();
//invalid image data url
expect(imageVisualDataBuilder.imageBackgroundElement.css("background-image")).toBe("none");
});
it("Image from base64 string", () => {
imageVisualDataBuilder.imageUrl = imageBase64value;
imageVisualDataBuilder.update();
expect(imageVisualDataBuilder.imageBackgroundElement.css("background-image")).toBe("url(" + imageBase64value + ")");
});
it("Image from blob", () => {
let blobUrl = window.URL.createObjectURL(new Blob());
imageVisualDataBuilder.imageUrl = blobUrl;
imageVisualDataBuilder.update();
expect(imageVisualDataBuilder.imageBackgroundElement.css("background-image")).toBe("url(" + blobUrl + ")");
});
it("Image DOM Verification", () => {
imageVisualDataBuilder.imageUrl = imageBase64value;
imageVisualDataBuilder.update();
expect(imageVisualDataBuilder.imageBackgroundElement.css('background-image')).toBe('url(' + imageBase64value + ')');
expect(imageVisualDataBuilder.imageBackgroundElement.css('background-size')).toBe('contain');
expect(imageVisualDataBuilder.imageBackgroundElement.css('height')).toBe('200px');
expect(imageVisualDataBuilder.imageBackgroundElement.css('width')).toBe('300px');
});
it('Image scaling types', () => {
imageVisualDataBuilder.imageUrl = imageBase64value;
// Fit
imageVisualDataBuilder.imageScalingType = "Fit";
imageVisualDataBuilder.update();
expect(imageVisualDataBuilder.imageBackgroundElement.css('background-size')).toBe('100% 100%');
// Fill
imageVisualDataBuilder.imageScalingType = "Fill";
imageVisualDataBuilder.update();
expect(imageVisualDataBuilder.imageBackgroundElement.css('background-size')).toBe('cover');
// Reset to default
imageVisualDataBuilder.imageScalingType = null;
imageVisualDataBuilder.update();
expect(imageVisualDataBuilder.imageBackgroundElement.css('background-size')).toBe('contain');
// Normal
imageVisualDataBuilder.imageScalingType = "Normal";
imageVisualDataBuilder.update();
expect(imageVisualDataBuilder.imageBackgroundElement.css('background-size')).toBe('contain');
});
});
class ImageVisualDataBuilder {
private _element: JQuery;
public get element(): JQuery {
return this._element;
}
public get imageBackgroundElement(): JQuery {
return this._element.find('.imageBackground');
}
private _hostService: powerbi.IVisualHostServices;
private _style: powerbi.IVisualStyle;
private _image: ImageVisual;
public get image(): ImageVisual {
return this._image;
}
private _imageUrl: string;
public get imageUrl(): string {
return this._imageUrl;
}
public set imageUrl(value: string) {
this._imageUrl = value;
}
private _imageScalingType: string;
public get imageScalingType(): string {
return this._imageScalingType;
}
public set imageScalingType(value: string) {
this._imageScalingType = value;
}
constructor() {
this._element = powerbitests.helpers.testDom("200", "300");
this._hostService = mocks.createVisualHostServices();
this._style = powerbi.visuals.visualStyles.create();
this._image = new ImageVisual();
this.init();
}
private createViewport(): powerbi.IViewport {
return {
height: this._element.height(),
width: this._element.width(),
};
}
private init() {
this.image.init({
element: this._element,
host: this._hostService,
style: this._style,
viewport: this.createViewport(),
animation: {
transitionImmediate: true
}
});
}
public update() {
let objects: powerbi.DataViewObjects = {
general: {
imageUrl: this.imageUrl
}
};
if (this.imageScalingType) {
objects["imageScaling"] = { imageScalingType: this.imageScalingType };
}
this.image.update({
viewport: this.createViewport(),
dataViews: [{
metadata: {
columns: [],
objects: objects
}
}]
});
}
}
} | * in the Software without restriction, including without limitation the rights |
changing-crates.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-msvc FIXME #31306
// note that these aux-build directives must be in this order
// aux-build:changing-crates-a1.rs
// aux-build:changing-crates-b.rs
// aux-build:changing-crates-a2.rs
extern crate a;
extern crate b; //~ ERROR: found possibly newer version of crate `a` which `b` depends on
//~| NOTE: perhaps that crate needs to be recompiled
//~| NOTE: crate `a` path #1:
//~| NOTE: crate `b` path #1:
fn main() | {} |
|
main.go | package main
import(
"log"
"net/http"
"text/template"
"path/filepath"
"sync"
)
// struck to manage templates
type templateHandler struct {
once sync.Once
filename string
templ *template.Template
}
func (t *templateHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
t.once.Do(func() {
t.templ =
template.Must(template.ParseFiles(filepath.Join("templates", t.filename)))
})
t.templ.Execute(w,nil)
}
func main() {
http.Handle("/", &templateHandler{ filename: "index.html"})
http.Handle("/about", &templateHandler{ filename: "about.html"}) | if err := http.ListenAndServe(":80", nil); err != nil {
log.Fatal("ListenAndServe:", err)
}
} |
// start the web server |
ModalProvider.tsx | import React, {
ReactNode,
useCallback,
useContext,
useEffect,
useMemo,
useRef,
} from 'react';
import noop from 'lodash/noop';
import ModalContainer, { ModalContainerProps } from '../modal-container/ModalContainer';
import Modal, { ModalProps } from '../modal/Modal';
import ModalContext from './ModalContext';
export interface ModalProviderProps extends ModalContainerProps {
children?: ReactNode;
}
const ModalProvider = (props: ModalProviderProps) => {
const { location, children, getContainer } = props;
const ref = useRef<ModalContainer>(null);
const prepareToOpen = useMemo<(ModalProps & { children })[]>(
() => [] as (ModalProps & { children })[],
[],
);
const open = useCallback((modalProps: ModalProps & { children }) => {
const container = ref.current;
if (container) {
const close = async (destroy?: boolean) => {
const { onClose = noop } = modalProps;
if ((await onClose()) !== false) {
if (destroy) {
container.close({ ...modalProps, destroyOnClose: true });
} else {
container.close(modalProps);
}
}
};
const show = () => {
container.open(modalProps);
};
const update = newProps => {
container.update({ ...modalProps, ...newProps });
};
modalProps = {
close,
update,
...Modal.defaultProps as ModalProps,
...modalProps,
};
container.open(modalProps);
return {
close,
open: show,
update,
};
}
prepareToOpen.push(modalProps);
}, []);
useEffect(() => {
if (ref.current) {
prepareToOpen.forEach(prepare => open(prepare));
}
}, [ref, open]);
const context = { open };
return (
<ModalContext.Provider value={context}>
<ModalContainer ref={ref} location={location} getContainer={getContainer} />
{children}
</ModalContext.Provider> | };
export const useModal = () => {
return useContext(ModalContext);
};
export const injectModal = Target => {
const Hoc = props => {
const modal = useModal();
return <Target {...props} Modal={modal} />;
};
Hoc.displayName = `${Target.displayName || 'Anonymous'}-with-inject-modal`;
return Hoc;
};
ModalProvider.displayName = 'ModalProvider';
ModalProvider.useModal = useModal;
ModalProvider.injectModal = injectModal;
export default ModalProvider; | ); |
test_tasks.py | """Tests for tasks.py."""
import collections
import contextlib
import contextvars
import functools
import gc
import io
import random
import re
import sys
import textwrap
import traceback
import types
import unittest
import weakref
from unittest import mock
import asyncio
from asyncio import coroutines
from asyncio import futures
from asyncio import tasks
from test.test_asyncio import utils as test_utils
from test import support
from test.support.script_helper import assert_python_ok
def tearDownModule():
asyncio.set_event_loop_policy(None)
async def coroutine_function():
pass
@contextlib.contextmanager
def set_coroutine_debug(enabled):
coroutines = asyncio.coroutines
old_debug = coroutines._DEBUG
try:
coroutines._DEBUG = enabled
yield
finally:
coroutines._DEBUG = old_debug
def format_coroutine(qualname, state, src, source_traceback, generator=False):
if generator:
state = '%s' % state
else:
state = '%s, defined' % state
if source_traceback is not None:
frame = source_traceback[-1]
return ('coro=<%s() %s at %s> created at %s:%s'
% (qualname, state, src, frame[0], frame[1]))
else:
return 'coro=<%s() %s at %s>' % (qualname, state, src)
def get_innermost_context(exc):
"""
Return information about the innermost exception context in the chain.
"""
depth = 0
while True:
context = exc.__context__
if context is None:
break
exc = context
depth += 1
return (type(exc), exc.args, depth)
class Dummy:
def __repr__(self):
return '<Dummy>'
def __call__(self, *args):
pass
class CoroLikeObject:
def send(self, v):
raise StopIteration(42)
def throw(self, *exc):
pass
def close(self):
pass
def __await__(self):
return self
# The following value can be used as a very small timeout:
# it passes check "timeout > 0", but has almost
# no effect on the test performance
_EPSILON = 0.0001
class BaseTaskTests:
Task = None
Future = None
def new_task(self, loop, coro, name='TestTask'):
return self.__class__.Task(coro, loop=loop, name=name)
def new_future(self, loop):
return self.__class__.Future(loop=loop)
def setUp(self):
super().setUp()
self.loop = self.new_test_loop()
self.loop.set_task_factory(self.new_task)
self.loop.create_future = lambda: self.new_future(self.loop)
def test_task_cancel_message_getter(self):
async def coro():
pass
t = self.new_task(self.loop, coro())
self.assertTrue(hasattr(t, '_cancel_message'))
self.assertEqual(t._cancel_message, None)
t.cancel('my message')
self.assertEqual(t._cancel_message, 'my message')
with self.assertRaises(asyncio.CancelledError):
self.loop.run_until_complete(t)
def test_task_cancel_message_setter(self):
async def coro():
pass
t = self.new_task(self.loop, coro())
t.cancel('my message')
t._cancel_message = 'my new message'
self.assertEqual(t._cancel_message, 'my new message')
with self.assertRaises(asyncio.CancelledError):
self.loop.run_until_complete(t)
def test_task_del_collect(self):
class Evil:
def __del__(self):
gc.collect()
async def run():
return Evil()
self.loop.run_until_complete(
asyncio.gather(*[
self.new_task(self.loop, run()) for _ in range(100)
]))
def test_other_loop_future(self):
other_loop = asyncio.new_event_loop()
fut = self.new_future(other_loop)
async def run(fut):
await fut
try:
with self.assertRaisesRegex(RuntimeError,
r'Task .* got Future .* attached'):
self.loop.run_until_complete(run(fut))
finally:
other_loop.close()
def test_task_awaits_on_itself(self):
async def test():
await task
task = asyncio.ensure_future(test(), loop=self.loop)
with self.assertRaisesRegex(RuntimeError,
'Task cannot await on itself'):
self.loop.run_until_complete(task)
def test_task_class(self):
async def notmuch():
return 'ok'
t = self.new_task(self.loop, notmuch())
self.loop.run_until_complete(t)
self.assertTrue(t.done())
self.assertEqual(t.result(), 'ok')
self.assertIs(t._loop, self.loop)
self.assertIs(t.get_loop(), self.loop)
loop = asyncio.new_event_loop()
self.set_event_loop(loop)
t = self.new_task(loop, notmuch())
self.assertIs(t._loop, loop)
loop.run_until_complete(t)
loop.close()
def test_ensure_future_coroutine(self):
async def notmuch():
return 'ok'
t = asyncio.ensure_future(notmuch(), loop=self.loop)
self.assertIs(t._loop, self.loop)
self.loop.run_until_complete(t)
self.assertTrue(t.done())
self.assertEqual(t.result(), 'ok')
a = notmuch()
self.addCleanup(a.close)
with self.assertWarns(DeprecationWarning) as cm:
with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
asyncio.ensure_future(a)
self.assertEqual(cm.warnings[0].filename, __file__)
async def test():
return asyncio.ensure_future(notmuch())
t = self.loop.run_until_complete(test())
self.assertIs(t._loop, self.loop)
self.loop.run_until_complete(t)
self.assertTrue(t.done())
self.assertEqual(t.result(), 'ok')
# Deprecated in 3.10
asyncio.set_event_loop(self.loop)
self.addCleanup(asyncio.set_event_loop, None)
with self.assertWarns(DeprecationWarning) as cm:
t = asyncio.ensure_future(notmuch())
self.assertEqual(cm.warnings[0].filename, __file__)
self.assertIs(t._loop, self.loop)
self.loop.run_until_complete(t)
self.assertTrue(t.done())
self.assertEqual(t.result(), 'ok')
def test_ensure_future_coroutine_2(self):
with self.assertWarns(DeprecationWarning):
@asyncio.coroutine
def notmuch():
return 'ok'
t = asyncio.ensure_future(notmuch(), loop=self.loop)
self.assertIs(t._loop, self.loop)
self.loop.run_until_complete(t)
self.assertTrue(t.done())
self.assertEqual(t.result(), 'ok')
a = notmuch()
self.addCleanup(a.close)
with self.assertWarns(DeprecationWarning) as cm:
with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
asyncio.ensure_future(a)
self.assertEqual(cm.warnings[0].filename, __file__)
async def test():
return asyncio.ensure_future(notmuch())
t = self.loop.run_until_complete(test())
self.assertIs(t._loop, self.loop)
self.loop.run_until_complete(t)
self.assertTrue(t.done())
self.assertEqual(t.result(), 'ok')
# Deprecated in 3.10
asyncio.set_event_loop(self.loop)
self.addCleanup(asyncio.set_event_loop, None)
with self.assertWarns(DeprecationWarning) as cm:
t = asyncio.ensure_future(notmuch())
self.assertEqual(cm.warnings[0].filename, __file__)
self.assertIs(t._loop, self.loop)
self.loop.run_until_complete(t)
self.assertTrue(t.done())
self.assertEqual(t.result(), 'ok')
def test_ensure_future_future(self):
f_orig = self.new_future(self.loop)
f_orig.set_result('ko')
f = asyncio.ensure_future(f_orig)
self.loop.run_until_complete(f)
self.assertTrue(f.done())
self.assertEqual(f.result(), 'ko')
self.assertIs(f, f_orig)
loop = asyncio.new_event_loop()
self.set_event_loop(loop)
with self.assertRaises(ValueError):
f = asyncio.ensure_future(f_orig, loop=loop)
loop.close()
f = asyncio.ensure_future(f_orig, loop=self.loop)
self.assertIs(f, f_orig)
def test_ensure_future_task(self):
async def notmuch():
return 'ok'
t_orig = self.new_task(self.loop, notmuch())
t = asyncio.ensure_future(t_orig)
self.loop.run_until_complete(t)
self.assertTrue(t.done())
self.assertEqual(t.result(), 'ok')
self.assertIs(t, t_orig)
loop = asyncio.new_event_loop()
self.set_event_loop(loop)
with self.assertRaises(ValueError):
t = asyncio.ensure_future(t_orig, loop=loop)
loop.close()
t = asyncio.ensure_future(t_orig, loop=self.loop)
self.assertIs(t, t_orig)
def test_ensure_future_awaitable(self):
class Aw:
def __init__(self, coro):
self.coro = coro
def __await__(self):
return (yield from self.coro)
with self.assertWarns(DeprecationWarning):
@asyncio.coroutine
def coro():
return 'ok'
loop = asyncio.new_event_loop()
self.set_event_loop(loop)
fut = asyncio.ensure_future(Aw(coro()), loop=loop)
loop.run_until_complete(fut)
assert fut.result() == 'ok'
def test_ensure_future_neither(self):
with self.assertRaises(TypeError):
asyncio.ensure_future('ok')
def test_ensure_future_error_msg(self):
loop = asyncio.new_event_loop()
f = self.new_future(self.loop)
with self.assertRaisesRegex(ValueError, 'The future belongs to a '
'different loop than the one specified as '
'the loop argument'):
asyncio.ensure_future(f, loop=loop)
loop.close()
def test_get_stack(self):
T = None
async def foo():
await bar()
async def bar():
# test get_stack()
f = T.get_stack(limit=1)
try:
self.assertEqual(f[0].f_code.co_name, 'foo')
finally:
f = None
# test print_stack()
file = io.StringIO()
T.print_stack(limit=1, file=file)
file.seek(0)
tb = file.read()
self.assertRegex(tb, r'foo\(\) running')
async def runner():
nonlocal T
T = asyncio.ensure_future(foo(), loop=self.loop)
await T
self.loop.run_until_complete(runner())
    def test_task_repr(self):
        """repr(Task) reflects each lifecycle state: pending, cancelling,
        cancelled, and finished (with result)."""
        self.loop.set_debug(False)
        async def notmuch():
            return 'abc'
        # test coroutine function
        self.assertEqual(notmuch.__name__, 'notmuch')
        self.assertRegex(notmuch.__qualname__,
                         r'\w+.test_task_repr.<locals>.notmuch')
        self.assertEqual(notmuch.__module__, __name__)
        filename, lineno = test_utils.get_function_source(notmuch)
        src = "%s:%s" % (filename, lineno)
        # test coroutine object
        gen = notmuch()
        coro_qualname = 'BaseTaskTests.test_task_repr.<locals>.notmuch'
        self.assertEqual(gen.__name__, 'notmuch')
        self.assertEqual(gen.__qualname__, coro_qualname)
        # test pending Task
        t = self.new_task(self.loop, gen)
        t.add_done_callback(Dummy())
        coro = format_coroutine(coro_qualname, 'running', src,
                                t._source_traceback, generator=True)
        self.assertEqual(repr(t),
                         "<Task pending name='TestTask' %s cb=[<Dummy>()]>" % coro)
        # test cancelling Task
        t.cancel()  # Does not take immediate effect!
        self.assertEqual(repr(t),
                         "<Task cancelling name='TestTask' %s cb=[<Dummy>()]>" % coro)
        # test cancelled Task
        self.assertRaises(asyncio.CancelledError,
                          self.loop.run_until_complete, t)
        coro = format_coroutine(coro_qualname, 'done', src,
                                t._source_traceback)
        self.assertEqual(repr(t),
                         "<Task cancelled name='TestTask' %s>" % coro)
        # test finished Task
        t = self.new_task(self.loop, notmuch())
        self.loop.run_until_complete(t)
        coro = format_coroutine(coro_qualname, 'done', src,
                                t._source_traceback)
        self.assertEqual(repr(t),
                         "<Task finished name='TestTask' %s result='abc'>" % coro)
    def test_task_repr_autogenerated(self):
        """Unnamed tasks get distinct, monotonically numbered 'Task-N' names."""
        async def notmuch():
            return 123
        t1 = self.new_task(self.loop, notmuch(), None)
        t2 = self.new_task(self.loop, notmuch(), None)
        self.assertNotEqual(repr(t1), repr(t2))
        match1 = re.match(r"^<Task pending name='Task-(\d+)'", repr(t1))
        self.assertIsNotNone(match1)
        match2 = re.match(r"^<Task pending name='Task-(\d+)'", repr(t2))
        self.assertIsNotNone(match2)
        # Autogenerated task names should have monotonically increasing numbers
        self.assertLess(int(match1.group(1)), int(match2.group(1)))
        self.loop.run_until_complete(t1)
        self.loop.run_until_complete(t2)
def test_task_repr_name_not_str(self):
async def notmuch():
return 123
t = self.new_task(self.loop, notmuch())
t.set_name({6})
self.assertEqual(t.get_name(), '{6}')
self.loop.run_until_complete(t)
def test_task_repr_coro_decorator(self):
self.loop.set_debug(False)
with self.assertWarns(DeprecationWarning):
@asyncio.coroutine
def notmuch():
# notmuch() function doesn't use yield from: it will be wrapped by
# @coroutine decorator
return 123
# test coroutine function
self.assertEqual(notmuch.__name__, 'notmuch')
self.assertRegex(notmuch.__qualname__,
r'\w+.test_task_repr_coro_decorator'
r'\.<locals>\.notmuch')
self.assertEqual(notmuch.__module__, __name__)
# test coroutine object
gen = notmuch()
# On Python >= 3.5, generators now inherit the name of the
# function, as expected, and have a qualified name (__qualname__
# attribute).
coro_name = 'notmuch'
coro_qualname = ('BaseTaskTests.test_task_repr_coro_decorator'
'.<locals>.notmuch')
self.assertEqual(gen.__name__, coro_name)
self.assertEqual(gen.__qualname__, coro_qualname)
# test repr(CoroWrapper)
if coroutines._DEBUG:
# format the coroutine object
if coroutines._DEBUG:
filename, lineno = test_utils.get_function_source(notmuch)
frame = gen._source_traceback[-1]
coro = ('%s() running, defined at %s:%s, created at %s:%s'
% (coro_qualname, filename, lineno,
frame[0], frame[1]))
else:
code = gen.gi_code
coro = ('%s() running at %s:%s'
% (coro_qualname, code.co_filename,
code.co_firstlineno))
self.assertEqual(repr(gen), '<CoroWrapper %s>' % coro)
# test pending Task
t = self.new_task(self.loop, gen)
t.add_done_callback(Dummy())
# format the coroutine object
if coroutines._DEBUG:
src = '%s:%s' % test_utils.get_function_source(notmuch)
else:
code = gen.gi_code
src = '%s:%s' % (code.co_filename, code.co_firstlineno)
coro = format_coroutine(coro_qualname, 'running', src,
t._source_traceback,
generator=not coroutines._DEBUG)
self.assertEqual(repr(t),
"<Task pending name='TestTask' %s cb=[<Dummy>()]>" % coro)
self.loop.run_until_complete(t)
    def test_task_repr_wait_for(self):
        """repr(Task) includes the future the task is currently waiting on."""
        self.loop.set_debug(False)
        async def wait_for(fut):
            return await fut
        fut = self.new_future(self.loop)
        task = self.new_task(self.loop, wait_for(fut))
        test_utils.run_briefly(self.loop)  # let the task reach the await
        self.assertRegex(repr(task),
                         '<Task .* wait_for=%s>' % re.escape(repr(fut)))
        fut.set_result(None)
        self.loop.run_until_complete(task)
    def test_task_repr_partial_corowrapper(self):
        """repr() of a debug-mode coroutine built from functools.partial
        must not fail (regression test)."""
        # Issue #222: repr(CoroWrapper) must not fail in debug mode if the
        # coroutine is a partial function
        with set_coroutine_debug(True):
            self.loop.set_debug(True)
            async def func(x, y):
                await asyncio.sleep(0)
            with self.assertWarns(DeprecationWarning):
                partial_func = asyncio.coroutine(functools.partial(func, 1))
            task = self.loop.create_task(partial_func(2))
            # make warnings quiet
            task._log_destroy_pending = False
            self.addCleanup(task._coro.close)
        coro_repr = repr(task._coro)
        expected = (
            r'<coroutine object \w+\.test_task_repr_partial_corowrapper'
            r'\.<locals>\.func at'
        )
        self.assertRegex(coro_repr, expected)
def test_task_basics(self):
async def outer():
a = await inner1()
b = await inner2()
return a+b
async def inner1():
return 42
async def inner2():
return 1000
t = outer()
self.assertEqual(self.loop.run_until_complete(t), 1042)
    def test_exception_chaining_after_await(self):
        """An exception raised by an awaited task is chained (__context__)
        to the exception that was already active in the awaiting frame."""
        # Test that when awaiting on a task when an exception is already
        # active, if the task raises an exception it will be chained
        # with the original.
        loop = asyncio.new_event_loop()
        self.set_event_loop(loop)
        async def raise_error():
            raise ValueError
        async def run():
            try:
                raise KeyError(3)
            except Exception as exc:
                task = self.new_task(loop, raise_error())
                try:
                    await task
                except Exception as exc:
                    self.assertEqual(type(exc), ValueError)
                    chained = exc.__context__
                    self.assertEqual((type(chained), chained.args),
                                     (KeyError, (3,)))
        try:
            task = self.new_task(loop, run())
            loop.run_until_complete(task)
        finally:
            loop.close()
    def test_exception_chaining_after_await_with_context_cycle(self):
        """Re-raising the active exception inside an awaited task must not
        create an exception-context cycle (exc.__context__ is exc)."""
        # Check trying to create an exception context cycle:
        # https://bugs.python.org/issue40696
        has_cycle = None
        loop = asyncio.new_event_loop()
        self.set_event_loop(loop)
        async def process_exc(exc):
            raise exc
        async def run():
            nonlocal has_cycle
            try:
                raise KeyError('a')
            except Exception as exc:
                task = self.new_task(loop, process_exc(exc))
                try:
                    await task
                except BaseException as exc:
                    has_cycle = (exc is exc.__context__)
                    # Prevent a hang if has_cycle is True.
                    exc.__context__ = None
        try:
            task = self.new_task(loop, run())
            loop.run_until_complete(task)
        finally:
            loop.close()
        # This also distinguishes from the initial has_cycle=None.
        self.assertEqual(has_cycle, False)
    def test_cancel(self):
        """Cancelling a sleeping task raises CancelledError and marks it
        done/cancelled; a second cancel() returns False."""
        def gen():
            # Mock-clock protocol: each yield receives the next scheduled
            # delay; yielding a value advances the loop's time.
            when = yield
            self.assertAlmostEqual(10.0, when)
            yield 0
        loop = self.new_test_loop(gen)
        async def task():
            await asyncio.sleep(10.0)
            return 12
        t = self.new_task(loop, task())
        loop.call_soon(t.cancel)
        with self.assertRaises(asyncio.CancelledError):
            loop.run_until_complete(t)
        self.assertTrue(t.done())
        self.assertTrue(t.cancelled())
        self.assertFalse(t.cancel())
    def test_cancel_with_message_then_future_result(self):
        """The message passed to cancel() reaches the innermost
        CancelledError observed via Future.result()."""
        # Test Future.result() after calling cancel() with a message.
        cases = [
            ((), ()),
            ((None,), ()),
            (('my message',), ('my message',)),
            # Non-string values should roundtrip.
            ((5,), (5,)),
        ]
        for cancel_args, expected_args in cases:
            with self.subTest(cancel_args=cancel_args):
                loop = asyncio.new_event_loop()
                self.set_event_loop(loop)
                async def sleep():
                    await asyncio.sleep(10)
                async def coro():
                    task = self.new_task(loop, sleep())
                    await asyncio.sleep(0)
                    task.cancel(*cancel_args)
                    done, pending = await asyncio.wait([task])
                    task.result()
                task = self.new_task(loop, coro())
                with self.assertRaises(asyncio.CancelledError) as cm:
                    loop.run_until_complete(task)
                exc = cm.exception
                self.assertEqual(exc.args, ())
                actual = get_innermost_context(exc)
                self.assertEqual(actual,
                                 (asyncio.CancelledError, expected_args, 2))
    def test_cancel_with_message_then_future_exception(self):
        """Same as the result() variant, but observing the cancellation via
        Future.exception()."""
        # Test Future.exception() after calling cancel() with a message.
        cases = [
            ((), ()),
            ((None,), ()),
            (('my message',), ('my message',)),
            # Non-string values should roundtrip.
            ((5,), (5,)),
        ]
        for cancel_args, expected_args in cases:
            with self.subTest(cancel_args=cancel_args):
                loop = asyncio.new_event_loop()
                self.set_event_loop(loop)
                async def sleep():
                    await asyncio.sleep(10)
                async def coro():
                    task = self.new_task(loop, sleep())
                    await asyncio.sleep(0)
                    task.cancel(*cancel_args)
                    done, pending = await asyncio.wait([task])
                    task.exception()
                task = self.new_task(loop, coro())
                with self.assertRaises(asyncio.CancelledError) as cm:
                    loop.run_until_complete(task)
                exc = cm.exception
                self.assertEqual(exc.args, ())
                actual = get_innermost_context(exc)
                self.assertEqual(actual,
                                 (asyncio.CancelledError, expected_args, 2))
    def test_cancel_with_message_before_starting_task(self):
        """The cancel() message is preserved even when the task has not yet
        had a chance to start running."""
        loop = asyncio.new_event_loop()
        self.set_event_loop(loop)
        async def sleep():
            await asyncio.sleep(10)
        async def coro():
            task = self.new_task(loop, sleep())
            # We deliberately leave out the sleep here.
            task.cancel('my message')
            done, pending = await asyncio.wait([task])
            task.exception()
        task = self.new_task(loop, coro())
        with self.assertRaises(asyncio.CancelledError) as cm:
            loop.run_until_complete(task)
        exc = cm.exception
        self.assertEqual(exc.args, ())
        actual = get_innermost_context(exc)
        self.assertEqual(actual,
                         (asyncio.CancelledError, ('my message',), 2))
    def test_cancel_yield(self):
        """A started legacy (yield-based) coroutine task can be cancelled."""
        with self.assertWarns(DeprecationWarning):
            @asyncio.coroutine
            def task():
                yield
                yield
                return 12
        t = self.new_task(self.loop, task())
        test_utils.run_briefly(self.loop)  # start coro
        t.cancel()
        self.assertRaises(
            asyncio.CancelledError, self.loop.run_until_complete, t)
        self.assertTrue(t.done())
        self.assertTrue(t.cancelled())
        self.assertFalse(t.cancel())
def test_cancel_inner_future(self):
f = self.new_future(self.loop)
async def task():
await f
return 12
t = self.new_task(self.loop, task())
test_utils.run_briefly(self.loop) # start task
f.cancel()
with self.assertRaises(asyncio.CancelledError):
self.loop.run_until_complete(t)
self.assertTrue(f.cancelled())
self.assertTrue(t.cancelled())
    def test_cancel_both_task_and_inner_future(self):
        """Cancelling both the task and the future it awaits still results
        in a clean cancelled task."""
        f = self.new_future(self.loop)
        async def task():
            await f
            return 12
        t = self.new_task(self.loop, task())
        test_utils.run_briefly(self.loop)  # let the task reach the await
        f.cancel()
        t.cancel()
        with self.assertRaises(asyncio.CancelledError):
            self.loop.run_until_complete(t)
        self.assertTrue(t.done())
        self.assertTrue(f.cancelled())
        self.assertTrue(t.cancelled())
    def test_cancel_task_catching(self):
        """A task that catches CancelledError keeps running and is not
        considered cancelled."""
        fut1 = self.new_future(self.loop)
        fut2 = self.new_future(self.loop)
        async def task():
            await fut1
            try:
                await fut2
            except asyncio.CancelledError:
                return 42
        t = self.new_task(self.loop, task())
        test_utils.run_briefly(self.loop)
        self.assertIs(t._fut_waiter, fut1)  # White-box test.
        fut1.set_result(None)
        test_utils.run_briefly(self.loop)
        self.assertIs(t._fut_waiter, fut2)  # White-box test.
        t.cancel()
        self.assertTrue(fut2.cancelled())
        res = self.loop.run_until_complete(t)
        self.assertEqual(res, 42)
        self.assertFalse(t.cancelled())
    def test_cancel_task_ignoring(self):
        """A task that swallows CancelledError can continue awaiting other
        futures and complete normally."""
        fut1 = self.new_future(self.loop)
        fut2 = self.new_future(self.loop)
        fut3 = self.new_future(self.loop)
        async def task():
            await fut1
            try:
                await fut2
            except asyncio.CancelledError:
                pass
            res = await fut3
            return res
        t = self.new_task(self.loop, task())
        test_utils.run_briefly(self.loop)
        self.assertIs(t._fut_waiter, fut1)  # White-box test.
        fut1.set_result(None)
        test_utils.run_briefly(self.loop)
        self.assertIs(t._fut_waiter, fut2)  # White-box test.
        t.cancel()
        self.assertTrue(fut2.cancelled())
        test_utils.run_briefly(self.loop)
        self.assertIs(t._fut_waiter, fut3)  # White-box test.
        fut3.set_result(42)
        res = self.loop.run_until_complete(t)
        self.assertEqual(res, 42)
        self.assertFalse(fut3.cancelled())
        self.assertFalse(t.cancelled())
    def test_cancel_current_task(self):
        """A task cancelling itself while running sets _must_cancel and is
        cancelled at its next suspension point."""
        loop = asyncio.new_event_loop()
        self.set_event_loop(loop)
        async def task():
            t.cancel()
            self.assertTrue(t._must_cancel)  # White-box test.
            # The sleep should be cancelled immediately.
            await asyncio.sleep(100)
            return 12
        t = self.new_task(loop, task())
        self.assertFalse(t.cancelled())
        self.assertRaises(
            asyncio.CancelledError, loop.run_until_complete, t)
        self.assertTrue(t.done())
        self.assertTrue(t.cancelled())
        self.assertFalse(t._must_cancel)  # White-box test.
        self.assertFalse(t.cancel())
    def test_cancel_at_end(self):
        """coroutine end right after task is cancelled"""
        loop = asyncio.new_event_loop()
        self.set_event_loop(loop)
        async def task():
            t.cancel()
            self.assertTrue(t._must_cancel)  # White-box test.
            # Returning here means the coroutine finishes with the
            # cancellation request still pending.
            return 12
        t = self.new_task(loop, task())
        self.assertFalse(t.cancelled())
        self.assertRaises(
            asyncio.CancelledError, loop.run_until_complete, t)
        self.assertTrue(t.done())
        self.assertTrue(t.cancelled())
        self.assertFalse(t._must_cancel)  # White-box test.
        self.assertFalse(t.cancel())
    def test_cancel_awaited_task(self):
        """Cancelling a running (not blocked) task must also cancel whatever
        future/task it blocks on next."""
        # This tests for a relatively rare condition when
        # a task cancellation is requested for a task which is not
        # currently blocked, such as a task cancelling itself.
        # In this situation we must ensure that whatever next future
        # or task the cancelled task blocks on is cancelled correctly
        # as well.  See also bpo-34872.
        loop = asyncio.new_event_loop()
        self.addCleanup(lambda: loop.close())
        task = nested_task = None
        fut = self.new_future(loop)
        async def nested():
            await fut
        async def coro():
            nonlocal nested_task
            # Create a sub-task and wait for it to run.
            nested_task = self.new_task(loop, nested())
            await asyncio.sleep(0)
            # Request the current task to be cancelled.
            task.cancel()
            # Block on the nested task, which should be immediately
            # cancelled.
            await nested_task
        task = self.new_task(loop, coro())
        with self.assertRaises(asyncio.CancelledError):
            loop.run_until_complete(task)
        self.assertTrue(task.cancelled())
        self.assertTrue(nested_task.cancelled())
        self.assertTrue(fut.cancelled())
def assert_text_contains(self, text, substr):
if substr not in text:
raise RuntimeError(f'text {substr!r} not found in:\n>>>{text}<<<')
    def test_cancel_traceback_for_future_result(self):
        """The traceback of a cancelled task (via result()) includes the
        interrupted line and the intermediate await."""
        # When calling Future.result() on a cancelled task, check that the
        # line of code that was interrupted is included in the traceback.
        loop = asyncio.new_event_loop()
        self.set_event_loop(loop)
        async def nested():
            # This will get cancelled immediately.
            await asyncio.sleep(10)
        async def coro():
            task = self.new_task(loop, nested())
            await asyncio.sleep(0)
            task.cancel()
            await task  # search target
        task = self.new_task(loop, coro())
        try:
            loop.run_until_complete(task)
        except asyncio.CancelledError:
            tb = traceback.format_exc()
            self.assert_text_contains(tb, "await asyncio.sleep(10)")
            # The intermediate await should also be included.
            self.assert_text_contains(tb, "await task  # search target")
        else:
            self.fail('CancelledError did not occur')
    def test_cancel_traceback_for_future_exception(self):
        """Same traceback check as the result() variant, but triggering the
        re-raise through Future.exception()."""
        # When calling Future.exception() on a cancelled task, check that the
        # line of code that was interrupted is included in the traceback.
        loop = asyncio.new_event_loop()
        self.set_event_loop(loop)
        async def nested():
            # This will get cancelled immediately.
            await asyncio.sleep(10)
        async def coro():
            task = self.new_task(loop, nested())
            await asyncio.sleep(0)
            task.cancel()
            done, pending = await asyncio.wait([task])
            task.exception()  # search target
        task = self.new_task(loop, coro())
        try:
            loop.run_until_complete(task)
        except asyncio.CancelledError:
            tb = traceback.format_exc()
            self.assert_text_contains(tb, "await asyncio.sleep(10)")
            # The intermediate await should also be included.
            self.assert_text_contains(tb,
                "task.exception()  # search target")
        else:
            self.fail('CancelledError did not occur')
    def test_stop_while_run_in_complete(self):
        """loop.stop() during run_until_complete() raises RuntimeError and
        leaves the task unfinished."""
        def gen():
            # Mock-clock schedule: three 0.1s sleeps are expected.
            when = yield
            self.assertAlmostEqual(0.1, when)
            when = yield 0.1
            self.assertAlmostEqual(0.2, when)
            when = yield 0.1
            self.assertAlmostEqual(0.3, when)
            yield 0.1
        loop = self.new_test_loop(gen)
        x = 0
        async def task():
            nonlocal x
            while x < 10:
                await asyncio.sleep(0.1)
                x += 1
                if x == 2:
                    loop.stop()
        t = self.new_task(loop, task())
        with self.assertRaises(RuntimeError) as cm:
            loop.run_until_complete(t)
        self.assertEqual(str(cm.exception),
                         'Event loop stopped before Future completed.')
        self.assertFalse(t.done())
        self.assertEqual(x, 2)
        self.assertAlmostEqual(0.3, loop.time())
        t.cancel()
        self.assertRaises(asyncio.CancelledError, loop.run_until_complete, t)
def test_log_traceback(self):
async def coro():
pass
task = self.new_task(self.loop, coro())
with self.assertRaisesRegex(ValueError, 'can only be set to False'):
task._log_traceback = True
self.loop.run_until_complete(task)
    def test_wait_for_timeout_less_then_0_or_0_future_done(self):
        """wait_for(fut, 0) on an already-done future returns its result
        without consuming any loop time."""
        def gen():
            when = yield
            self.assertAlmostEqual(0, when)
        loop = self.new_test_loop(gen)
        fut = self.new_future(loop)
        fut.set_result('done')
        ret = loop.run_until_complete(asyncio.wait_for(fut, 0))
        self.assertEqual(ret, 'done')
        self.assertTrue(fut.done())
        self.assertAlmostEqual(0, loop.time())
    def test_wait_for_timeout_less_then_0_or_0_coroutine_do_not_started(self):
        """wait_for(coro, 0) times out before the coroutine ever starts."""
        def gen():
            when = yield
            self.assertAlmostEqual(0, when)
        loop = self.new_test_loop(gen)
        foo_started = False
        async def foo():
            nonlocal foo_started
            foo_started = True
        with self.assertRaises(asyncio.TimeoutError):
            loop.run_until_complete(asyncio.wait_for(foo(), 0))
        self.assertAlmostEqual(0, loop.time())
        self.assertEqual(foo_started, False)
    def test_wait_for_timeout_less_then_0_or_0(self):
        """wait_for() with timeout 0 or negative cancels an already-running
        task immediately."""
        def gen():
            when = yield
            self.assertAlmostEqual(0.2, when)
            when = yield 0
            self.assertAlmostEqual(0, when)
        for timeout in [0, -1]:
            with self.subTest(timeout=timeout):
                loop = self.new_test_loop(gen)
                foo_running = None
                async def foo():
                    nonlocal foo_running
                    foo_running = True
                    try:
                        await asyncio.sleep(0.2)
                    finally:
                        foo_running = False
                    return 'done'
                fut = self.new_task(loop, foo())
                with self.assertRaises(asyncio.TimeoutError):
                    loop.run_until_complete(asyncio.wait_for(fut, timeout))
                self.assertTrue(fut.done())
                # it should have been cancelled due to the timeout
                self.assertTrue(fut.cancelled())
                self.assertAlmostEqual(0, loop.time())
                self.assertEqual(foo_running, False)
    def test_wait_for(self):
        """wait_for() cancels the inner task when the timeout expires before
        the task finishes."""
        def gen():
            when = yield
            self.assertAlmostEqual(0.2, when)
            when = yield 0
            self.assertAlmostEqual(0.1, when)
            when = yield 0.1
        loop = self.new_test_loop(gen)
        foo_running = None
        async def foo():
            nonlocal foo_running
            foo_running = True
            try:
                await asyncio.sleep(0.2)
            finally:
                foo_running = False
            return 'done'
        fut = self.new_task(loop, foo())
        with self.assertRaises(asyncio.TimeoutError):
            loop.run_until_complete(asyncio.wait_for(fut, 0.1))
        self.assertTrue(fut.done())
        # it should have been cancelled due to the timeout
        self.assertTrue(fut.cancelled())
        self.assertAlmostEqual(0.1, loop.time())
        self.assertEqual(foo_running, False)
def test_wait_for_blocking(self):
loop = self.new_test_loop()
async def coro():
return 'done'
res = loop.run_until_complete(asyncio.wait_for(coro(), timeout=None))
self.assertEqual(res, 'done')
def test_wait_for_race_condition(self):
def gen():
yield 0.1
yield 0.1
yield 0.1
loop = self.new_test_loop(gen)
fut = self.new_future(loop)
task = asyncio.wait_for(fut, timeout=0.2)
loop.call_later(0.1, fut.set_result, "ok")
res = loop.run_until_complete(task)
self.assertEqual(res, "ok")
    def test_wait_for_cancellation_race_condition(self):
        """If the inner future completes at the same tick the wait_for task
        is cancelled, the result is still delivered."""
        def gen():
            yield 0.1
            yield 0.1
            yield 0.1
            yield 0.1
        loop = self.new_test_loop(gen)
        fut = self.new_future(loop)
        loop.call_later(0.1, fut.set_result, "ok")
        task = loop.create_task(asyncio.wait_for(fut, timeout=1))
        loop.call_later(0.1, task.cancel)
        res = loop.run_until_complete(task)
        self.assertEqual(res, "ok")
    def test_wait_for_waits_for_task_cancellation(self):
        """On timeout, wait_for() waits until the inner task has actually
        finished cancelling before raising TimeoutError."""
        loop = asyncio.new_event_loop()
        self.addCleanup(loop.close)
        task_done = False
        async def foo():
            async def inner():
                nonlocal task_done
                try:
                    await asyncio.sleep(0.2)
                except asyncio.CancelledError:
                    # Delay the cancellation to prove wait_for waits for it.
                    await asyncio.sleep(_EPSILON)
                    raise
                finally:
                    task_done = True
            inner_task = self.new_task(loop, inner())
            await asyncio.wait_for(inner_task, timeout=_EPSILON)
        with self.assertRaises(asyncio.TimeoutError) as cm:
            loop.run_until_complete(foo())
        self.assertTrue(task_done)
        chained = cm.exception.__context__
        self.assertEqual(type(chained), asyncio.CancelledError)
    def test_wait_for_waits_for_task_cancellation_w_timeout_0(self):
        """Same as above but with timeout=0: cancellation must still be
        fully awaited before TimeoutError is raised."""
        loop = asyncio.new_event_loop()
        self.addCleanup(loop.close)
        task_done = False
        async def foo():
            async def inner():
                nonlocal task_done
                try:
                    await asyncio.sleep(10)
                except asyncio.CancelledError:
                    # Delay the cancellation to prove wait_for waits for it.
                    await asyncio.sleep(_EPSILON)
                    raise
                finally:
                    task_done = True
            inner_task = self.new_task(loop, inner())
            await asyncio.sleep(_EPSILON)
            await asyncio.wait_for(inner_task, timeout=0)
        with self.assertRaises(asyncio.TimeoutError) as cm:
            loop.run_until_complete(foo())
        self.assertTrue(task_done)
        chained = cm.exception.__context__
        self.assertEqual(type(chained), asyncio.CancelledError)
    def test_wait_for_reraises_exception_during_cancellation(self):
        """An exception raised while the timed-out task is being cancelled
        propagates instead of TimeoutError."""
        loop = asyncio.new_event_loop()
        self.addCleanup(loop.close)
        class FooException(Exception):
            pass
        async def foo():
            async def inner():
                try:
                    await asyncio.sleep(0.2)
                finally:
                    raise FooException
            inner_task = self.new_task(loop, inner())
            await asyncio.wait_for(inner_task, timeout=_EPSILON)
        with self.assertRaises(FooException):
            loop.run_until_complete(foo())
    def test_wait_for_raises_timeout_error_if_returned_during_cancellation(self):
        """If the task swallows the cancellation and returns a value,
        wait_for() still raises TimeoutError (the value is discarded)."""
        loop = asyncio.new_event_loop()
        self.addCleanup(loop.close)
        async def foo():
            async def inner():
                try:
                    await asyncio.sleep(0.2)
                except asyncio.CancelledError:
                    return 42
            inner_task = self.new_task(loop, inner())
            await asyncio.wait_for(inner_task, timeout=_EPSILON)
        with self.assertRaises(asyncio.TimeoutError):
            loop.run_until_complete(foo())
    def test_wait_for_self_cancellation(self):
        """wait_for() itself stays cancellable even if the inner task keeps
        deferring its own cancellation."""
        loop = asyncio.new_event_loop()
        self.addCleanup(loop.close)
        async def foo():
            async def inner():
                # Swallow cancellation twice to stall wait_for's cleanup.
                try:
                    await asyncio.sleep(0.3)
                except asyncio.CancelledError:
                    try:
                        await asyncio.sleep(0.3)
                    except asyncio.CancelledError:
                        await asyncio.sleep(0.3)
                return 42
            inner_task = self.new_task(loop, inner())
            wait = asyncio.wait_for(inner_task, timeout=0.1)
            # Test that wait_for itself is properly cancellable
            # even when the initial task holds up the initial cancellation.
            task = self.new_task(loop, wait)
            await asyncio.sleep(0.2)
            task.cancel()
            with self.assertRaises(asyncio.CancelledError):
                await task
            self.assertEqual(await inner_task, 42)
        loop.run_until_complete(foo())
    def test_wait(self):
        """asyncio.wait() returns all tasks as done once both complete."""
        def gen():
            when = yield
            self.assertAlmostEqual(0.1, when)
            when = yield 0
            self.assertAlmostEqual(0.15, when)
            yield 0.15
        loop = self.new_test_loop(gen)
        a = self.new_task(loop, asyncio.sleep(0.1))
        b = self.new_task(loop, asyncio.sleep(0.15))
        async def foo():
            done, pending = await asyncio.wait([b, a])
            self.assertEqual(done, set([a, b]))
            self.assertEqual(pending, set())
            return 42
        res = loop.run_until_complete(self.new_task(loop, foo()))
        self.assertEqual(res, 42)
        self.assertAlmostEqual(0.15, loop.time())
        # Doing it again should take no time and exercise a different path.
        res = loop.run_until_complete(self.new_task(loop, foo()))
        self.assertAlmostEqual(0.15, loop.time())
        self.assertEqual(res, 42)
    def test_wait_duplicate_coroutines(self):
        """Passing the same coroutine object twice to wait() deduplicates it."""
        with self.assertWarns(DeprecationWarning):
            @asyncio.coroutine
            def coro(s):
                return s
        c = coro('test')
        task = self.new_task(
            self.loop,
            asyncio.wait([c, c, coro('spam')]))
        with self.assertWarns(DeprecationWarning):
            # Deprecation warning comes from passing bare coroutines to wait().
            done, pending = self.loop.run_until_complete(task)
        self.assertFalse(pending)
        self.assertEqual(set(f.result() for f in done), {'test', 'spam'})
def test_wait_errors(self):
self.assertRaises(
ValueError, self.loop.run_until_complete,
asyncio.wait(set()))
# -1 is an invalid return_when value
sleep_coro = asyncio.sleep(10.0)
wait_coro = asyncio.wait([sleep_coro], return_when=-1)
self.assertRaises(ValueError,
self.loop.run_until_complete, wait_coro)
sleep_coro.close()
    def test_wait_first_completed(self):
        """wait(..., FIRST_COMPLETED) returns as soon as one task finishes,
        leaving the other pending."""
        def gen():
            when = yield
            self.assertAlmostEqual(10.0, when)
            when = yield 0
            self.assertAlmostEqual(0.1, when)
            yield 0.1
        loop = self.new_test_loop(gen)
        a = self.new_task(loop, asyncio.sleep(10.0))
        b = self.new_task(loop, asyncio.sleep(0.1))
        task = self.new_task(
            loop,
            asyncio.wait([b, a], return_when=asyncio.FIRST_COMPLETED))
        done, pending = loop.run_until_complete(task)
        self.assertEqual({b}, done)
        self.assertEqual({a}, pending)
        self.assertFalse(a.done())
        self.assertTrue(b.done())
        self.assertIsNone(b.result())
        self.assertAlmostEqual(0.1, loop.time())
        # move forward to close generator
        loop.advance_time(10)
        loop.run_until_complete(asyncio.wait([a, b]))
    def test_wait_really_done(self):
        """Tasks whose done callbacks have not yet run still count as done."""
        # there is possibility that some tasks in the pending list
        # became done but their callbacks haven't all been called yet
        async def coro1():
            await asyncio.sleep(0)
        async def coro2():
            await asyncio.sleep(0)
            await asyncio.sleep(0)
        a = self.new_task(self.loop, coro1())
        b = self.new_task(self.loop, coro2())
        task = self.new_task(
            self.loop,
            asyncio.wait([b, a], return_when=asyncio.FIRST_COMPLETED))
        done, pending = self.loop.run_until_complete(task)
        self.assertEqual({a, b}, done)
        self.assertTrue(a.done())
        self.assertIsNone(a.result())
        self.assertTrue(b.done())
        self.assertIsNone(b.result())
    def test_wait_first_exception(self):
        """wait(..., FIRST_EXCEPTION) returns immediately when a task
        already holds an exception."""
        def gen():
            when = yield
            self.assertAlmostEqual(10.0, when)
            yield 0
        loop = self.new_test_loop(gen)
        # first_exception, task already has exception
        a = self.new_task(loop, asyncio.sleep(10.0))
        async def exc():
            raise ZeroDivisionError('err')
        b = self.new_task(loop, exc())
        task = self.new_task(
            loop,
            asyncio.wait([b, a], return_when=asyncio.FIRST_EXCEPTION))
        done, pending = loop.run_until_complete(task)
        self.assertEqual({b}, done)
        self.assertEqual({a}, pending)
        self.assertAlmostEqual(0, loop.time())
        # move forward to close generator
        loop.advance_time(10)
        loop.run_until_complete(asyncio.wait([a, b]))
    def test_wait_first_exception_in_wait(self):
        """wait(..., FIRST_EXCEPTION) returns when an exception occurs while
        already waiting."""
        def gen():
            when = yield
            self.assertAlmostEqual(10.0, when)
            when = yield 0
            self.assertAlmostEqual(0.01, when)
            yield 0.01
        loop = self.new_test_loop(gen)
        # first_exception, exception during waiting
        a = self.new_task(loop, asyncio.sleep(10.0))
        async def exc():
            await asyncio.sleep(0.01)
            raise ZeroDivisionError('err')
        b = self.new_task(loop, exc())
        task = asyncio.wait([b, a], return_when=asyncio.FIRST_EXCEPTION)
        done, pending = loop.run_until_complete(task)
        self.assertEqual({b}, done)
        self.assertEqual({a}, pending)
        self.assertAlmostEqual(0.01, loop.time())
        # move forward to close generator
        loop.advance_time(10)
        loop.run_until_complete(asyncio.wait([a, b]))
    def test_wait_with_exception(self):
        """With the default ALL_COMPLETED, wait() returns failed tasks in
        the done set rather than raising."""
        def gen():
            when = yield
            self.assertAlmostEqual(0.1, when)
            when = yield 0
            self.assertAlmostEqual(0.15, when)
            yield 0.15
        loop = self.new_test_loop(gen)
        a = self.new_task(loop, asyncio.sleep(0.1))
        async def sleeper():
            await asyncio.sleep(0.15)
            raise ZeroDivisionError('really')
        b = self.new_task(loop, sleeper())
        async def foo():
            done, pending = await asyncio.wait([b, a])
            self.assertEqual(len(done), 2)
            self.assertEqual(pending, set())
            errors = set(f for f in done if f.exception() is not None)
            self.assertEqual(len(errors), 1)
        loop.run_until_complete(self.new_task(loop, foo()))
        self.assertAlmostEqual(0.15, loop.time())
        loop.run_until_complete(self.new_task(loop, foo()))
        self.assertAlmostEqual(0.15, loop.time())
    def test_wait_with_timeout(self):
        """wait(..., timeout=...) returns early, leaving unfinished tasks
        in the pending set."""
        def gen():
            when = yield
            self.assertAlmostEqual(0.1, when)
            when = yield 0
            self.assertAlmostEqual(0.15, when)
            when = yield 0
            self.assertAlmostEqual(0.11, when)
            yield 0.11
        loop = self.new_test_loop(gen)
        a = self.new_task(loop, asyncio.sleep(0.1))
        b = self.new_task(loop, asyncio.sleep(0.15))
        async def foo():
            done, pending = await asyncio.wait([b, a], timeout=0.11)
            self.assertEqual(done, set([a]))
            self.assertEqual(pending, set([b]))
        loop.run_until_complete(self.new_task(loop, foo()))
        self.assertAlmostEqual(0.11, loop.time())
        # move forward to close generator
        loop.advance_time(10)
        loop.run_until_complete(asyncio.wait([a, b]))
    def test_wait_concurrent_complete(self):
        """A task finishing exactly at the wait() timeout counts as done."""
        def gen():
            when = yield
            self.assertAlmostEqual(0.1, when)
            when = yield 0
            self.assertAlmostEqual(0.15, when)
            when = yield 0
            self.assertAlmostEqual(0.1, when)
            yield 0.1
        loop = self.new_test_loop(gen)
        a = self.new_task(loop, asyncio.sleep(0.1))
        b = self.new_task(loop, asyncio.sleep(0.15))
        done, pending = loop.run_until_complete(
            asyncio.wait([b, a], timeout=0.1))
        self.assertEqual(done, set([a]))
        self.assertEqual(pending, set([b]))
        self.assertAlmostEqual(0.1, loop.time())
        # move forward to close generator
        loop.advance_time(10)
        loop.run_until_complete(asyncio.wait([a, b]))
    def test_wait_with_iterator_of_tasks(self):
        """wait() accepts any iterable of tasks, not just lists/sets."""
        def gen():
            when = yield
            self.assertAlmostEqual(0.1, when)
            when = yield 0
            self.assertAlmostEqual(0.15, when)
            yield 0.15
        loop = self.new_test_loop(gen)
        a = self.new_task(loop, asyncio.sleep(0.1))
        b = self.new_task(loop, asyncio.sleep(0.15))
        async def foo():
            done, pending = await asyncio.wait(iter([b, a]))
            self.assertEqual(done, set([a, b]))
            self.assertEqual(pending, set())
            return 42
        res = loop.run_until_complete(self.new_task(loop, foo()))
        self.assertEqual(res, 42)
        self.assertAlmostEqual(0.15, loop.time())
    def test_as_completed(self):
        """as_completed() yields futures in completion order, not input
        order ('a'/'b' first, 'c' last)."""
        def gen():
            yield 0
            yield 0
            yield 0.01
            yield 0
        loop = self.new_test_loop(gen)
        # disable "slow callback" warning
        loop.slow_callback_duration = 1.0
        completed = set()
        time_shifted = False
        with self.assertWarns(DeprecationWarning):
            @asyncio.coroutine
            def sleeper(dt, x):
                nonlocal time_shifted
                yield from  asyncio.sleep(dt)
                completed.add(x)
                if not time_shifted and 'a' in completed and 'b' in completed:
                    time_shifted = True
                    # Jump the mock clock forward so 'c' can finish.
                    loop.advance_time(0.14)
                return x
        a = sleeper(0.01, 'a')
        b = sleeper(0.01, 'b')
        c = sleeper(0.15, 'c')
        async def foo():
            values = []
            for f in asyncio.as_completed([b, c, a]):
                values.append(await f)
            return values
        res = loop.run_until_complete(self.new_task(loop, foo()))
        self.assertAlmostEqual(0.15, loop.time())
        self.assertTrue('a' in res[:2])
        self.assertTrue('b' in res[:2])
        self.assertEqual(res[2], 'c')
        # Doing it again should take no time and exercise a different path.
        res = loop.run_until_complete(self.new_task(loop, foo()))
        self.assertAlmostEqual(0.15, loop.time())
    def test_as_completed_with_timeout(self):
        """as_completed(..., timeout=...) yields a TimeoutError-raising
        future for work that misses the deadline."""
        def gen():
            yield
            yield 0
            yield 0
            yield 0.1
        loop = self.new_test_loop(gen)
        a = loop.create_task(asyncio.sleep(0.1, 'a'))
        b = loop.create_task(asyncio.sleep(0.15, 'b'))
        async def foo():
            values = []
            for f in asyncio.as_completed([a, b], timeout=0.12):
                if values:
                    # Nudge the clock so the second future hits the timeout.
                    loop.advance_time(0.02)
                try:
                    v = await f
                    values.append((1, v))
                except asyncio.TimeoutError as exc:
                    values.append((2, exc))
            return values
        res = loop.run_until_complete(self.new_task(loop, foo()))
        self.assertEqual(len(res), 2, res)
        self.assertEqual(res[0], (1, 'a'))
        self.assertEqual(res[1][0], 2)
        self.assertIsInstance(res[1][1], asyncio.TimeoutError)
        self.assertAlmostEqual(0.12, loop.time())
        # move forward to close generator
        loop.advance_time(10)
        loop.run_until_complete(asyncio.wait([a, b]))
    def test_as_completed_with_unused_timeout(self):
        """A timeout that never fires does not disturb normal completion."""
        def gen():
            yield
            yield 0
            yield 0.01
        loop = self.new_test_loop(gen)
        a = asyncio.sleep(0.01, 'a')
        async def foo():
            for f in asyncio.as_completed([a], timeout=1):
                v = await f
                self.assertEqual(v, 'a')
        loop.run_until_complete(self.new_task(loop, foo()))
    def test_as_completed_reverse_wait(self):
        """Awaiting as_completed() futures out of order still delivers
        results in completion order."""
        def gen():
            yield 0
            yield 0.05
            yield 0
        loop = self.new_test_loop(gen)
        a = asyncio.sleep(0.05, 'a')
        b = asyncio.sleep(0.10, 'b')
        fs = {a, b}
        async def test():
            futs = list(asyncio.as_completed(fs))
            self.assertEqual(len(futs), 2)
            # Await the later-listed future first: it still gets the first
            # completed result ('a').
            x = await futs[1]
            self.assertEqual(x, 'a')
            self.assertAlmostEqual(0.05, loop.time())
            loop.advance_time(0.05)
            y = await futs[0]
            self.assertEqual(y, 'b')
            self.assertAlmostEqual(0.10, loop.time())
        loop.run_until_complete(test())
    def test_as_completed_concurrent(self):
        """Futures completing at the same instant are both delivered."""
        def gen():
            when = yield
            self.assertAlmostEqual(0.05, when)
            when = yield 0
            self.assertAlmostEqual(0.05, when)
            yield 0.05
        a = asyncio.sleep(0.05, 'a')
        b = asyncio.sleep(0.05, 'b')
        fs = {a, b}
        async def test():
            futs = list(asyncio.as_completed(fs))
            self.assertEqual(len(futs), 2)
            waiter = asyncio.wait(futs)
            # Deprecation from passing coros in futs to asyncio.wait()
            with self.assertWarns(DeprecationWarning) as cm:
                done, pending = await waiter
            self.assertEqual(cm.warnings[0].filename, __file__)
            self.assertEqual(set(f.result() for f in done), {'a', 'b'})
        loop = self.new_test_loop(gen)
        loop.run_until_complete(test())
def test_as_completed_duplicate_coroutines(self):
    """Passing the same coroutine object twice yields its result only once."""
    with self.assertWarns(DeprecationWarning):
        @asyncio.coroutine
        def coro(s):
            return s

    with self.assertWarns(DeprecationWarning):
        @asyncio.coroutine
        def runner():
            result = []
            c = coro('ham')
            # 'c' appears twice; as_completed() deduplicates it
            for f in asyncio.as_completed([c, c, coro('spam')]):
                result.append((yield from f))
            return result

    fut = self.new_task(self.loop, runner())
    self.loop.run_until_complete(fut)
    result = fut.result()
    self.assertEqual(set(result), {'ham', 'spam'})
    self.assertEqual(len(result), 2)
def test_as_completed_coroutine_without_loop(self):
    """With no running or set event loop, as_completed() warns and fails."""
    async def coro():
        return 42

    a = coro()
    self.addCleanup(a.close)

    futs = asyncio.as_completed([a])
    with self.assertWarns(DeprecationWarning) as cm:
        with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
            list(futs)
    self.assertEqual(cm.warnings[0].filename, __file__)
def test_as_completed_coroutine_use_running_loop(self):
    """as_completed() picks up the currently running loop without warning."""
    loop = self.new_test_loop()

    async def coro():
        return 42

    async def test():
        futs = list(asyncio.as_completed([coro()]))
        self.assertEqual(len(futs), 1)
        self.assertEqual(await futs[0], 42)

    loop.run_until_complete(test())
def test_as_completed_coroutine_use_global_loop(self):
    """Falling back to the global event loop works but is deprecated."""
    # Deprecated in 3.10
    async def coro():
        return 42

    loop = self.new_test_loop()
    asyncio.set_event_loop(loop)
    self.addCleanup(asyncio.set_event_loop, None)
    futs = asyncio.as_completed([coro()])
    with self.assertWarns(DeprecationWarning) as cm:
        futs = list(futs)
    self.assertEqual(cm.warnings[0].filename, __file__)
    self.assertEqual(len(futs), 1)
    self.assertEqual(loop.run_until_complete(futs[0]), 42)
def test_sleep(self):
    """Two chained sleeps advance the mock clock and return the result arg."""
    def gen():
        when = yield
        self.assertAlmostEqual(0.05, when)
        when = yield 0.05
        self.assertAlmostEqual(0.1, when)
        yield 0.05

    loop = self.new_test_loop(gen)

    async def sleeper(dt, arg):
        await asyncio.sleep(dt/2)
        res = await asyncio.sleep(dt/2, arg)
        return res

    t = self.new_task(loop, sleeper(0.1, 'yeah'))
    loop.run_until_complete(t)
    self.assertTrue(t.done())
    self.assertEqual(t.result(), 'yeah')
    self.assertAlmostEqual(0.1, loop.time())
def test_sleep_cancel(self):
    """Cancelling a sleeping task cancels its internal timer handle."""
    def gen():
        when = yield
        self.assertAlmostEqual(10.0, when)
        yield 0

    loop = self.new_test_loop(gen)

    t = self.new_task(loop, asyncio.sleep(10.0, 'yeah'))

    # Intercept call_later to capture the handle sleep() schedules.
    handle = None
    orig_call_later = loop.call_later

    def call_later(delay, callback, *args):
        nonlocal handle
        handle = orig_call_later(delay, callback, *args)
        return handle

    loop.call_later = call_later
    test_utils.run_briefly(loop)

    self.assertFalse(handle._cancelled)

    t.cancel()
    test_utils.run_briefly(loop)
    self.assertTrue(handle._cancelled)
def test_task_cancel_sleeping_task(self):
    """A task blocked in sleep() can be cancelled from a timer callback."""
    def gen():
        when = yield
        self.assertAlmostEqual(0.1, when)
        when = yield 0
        self.assertAlmostEqual(5000, when)
        yield 0.1

    loop = self.new_test_loop(gen)

    async def sleep(dt):
        await asyncio.sleep(dt)

    async def doit():
        sleeper = self.new_task(loop, sleep(5000))
        loop.call_later(0.1, sleeper.cancel)
        try:
            await sleeper
        except asyncio.CancelledError:
            return 'cancelled'
        else:
            return 'slept in'

    doer = doit()
    self.assertEqual(loop.run_until_complete(doer), 'cancelled')
    self.assertAlmostEqual(0.1, loop.time())
def test_task_cancel_waiter_future(self):
    """Cancelling a task cancels the future it is awaiting on."""
    fut = self.new_future(self.loop)

    async def coro():
        await fut

    task = self.new_task(self.loop, coro())
    test_utils.run_briefly(self.loop)
    # _fut_waiter is the task's internal "currently awaited future" slot
    self.assertIs(task._fut_waiter, fut)

    task.cancel()
    test_utils.run_briefly(self.loop)
    self.assertRaises(
        asyncio.CancelledError, self.loop.run_until_complete, task)
    self.assertIsNone(task._fut_waiter)
    self.assertTrue(fut.cancelled())
def test_task_set_methods(self):
    """Tasks reject the Future set_result()/set_exception() API."""
    async def notmuch():
        return 'ko'

    gen = notmuch()
    task = self.new_task(self.loop, gen)

    with self.assertRaisesRegex(RuntimeError, 'not support set_result'):
        task.set_result('ok')

    with self.assertRaisesRegex(RuntimeError, 'not support set_exception'):
        task.set_exception(ValueError())

    self.assertEqual(
        self.loop.run_until_complete(task),
        'ko')
def test_step_result(self):
    """Yielding a bare non-None value from a coroutine is a RuntimeError."""
    with self.assertWarns(DeprecationWarning):
        @asyncio.coroutine
        def notmuch():
            yield None
            yield 1
            return 'ko'

    self.assertRaises(
        RuntimeError, self.loop.run_until_complete, notmuch())
def test_step_result_future(self):
    """When a coroutine awaits a future, the task registers a done callback
    on it and resumes with the future's result."""
    # If coroutine returns future, task waits on this future.

    class Fut(asyncio.Future):
        def __init__(self, *args, **kwds):
            self.cb_added = False
            super().__init__(*args, **kwds)

        def add_done_callback(self, *args, **kwargs):
            self.cb_added = True
            super().add_done_callback(*args, **kwargs)

    fut = Fut(loop=self.loop)
    result = None

    async def wait_for_future():
        nonlocal result
        result = await fut

    t = self.new_task(self.loop, wait_for_future())
    test_utils.run_briefly(self.loop)
    self.assertTrue(fut.cb_added)

    res = object()
    fut.set_result(res)
    test_utils.run_briefly(self.loop)
    self.assertIs(res, result)
    self.assertTrue(t.done())
    self.assertIsNone(t.result())
def test_baseexception_during_cancel(self):
    """A BaseException raised while handling CancelledError is stored as
    the task's exception; the task is done but not 'cancelled'."""
    def gen():
        when = yield
        self.assertAlmostEqual(10.0, when)
        yield 0

    loop = self.new_test_loop(gen)

    async def sleeper():
        await asyncio.sleep(10)

    base_exc = SystemExit()

    async def notmutch():
        try:
            await sleeper()
        except asyncio.CancelledError:
            raise base_exc

    task = self.new_task(loop, notmutch())
    test_utils.run_briefly(loop)

    task.cancel()
    self.assertFalse(task.done())

    self.assertRaises(SystemExit, test_utils.run_briefly, loop)

    self.assertTrue(task.done())
    self.assertFalse(task.cancelled())
    self.assertIs(task.exception(), base_exc)
def test_iscoroutinefunction(self):
    """iscoroutinefunction(): plain funcs and generators are False,
    @asyncio.coroutine-decorated funcs are True, mocks are False."""
    def fn():
        pass

    self.assertFalse(asyncio.iscoroutinefunction(fn))

    def fn1():
        yield
    self.assertFalse(asyncio.iscoroutinefunction(fn1))

    with self.assertWarns(DeprecationWarning):
        @asyncio.coroutine
        def fn2():
            yield
    self.assertTrue(asyncio.iscoroutinefunction(fn2))

    self.assertFalse(asyncio.iscoroutinefunction(mock.Mock()))
def test_yield_vs_yield_from(self):
    """Using 'yield' (not 'yield from') on a future raises RuntimeError."""
    fut = self.new_future(self.loop)

    with self.assertWarns(DeprecationWarning):
        @asyncio.coroutine
        def wait_for_future():
            yield fut

    task = wait_for_future()
    with self.assertRaises(RuntimeError):
        self.loop.run_until_complete(task)

    self.assertFalse(fut.done())
def test_yield_vs_yield_from_generator(self):
    """Using 'yield' (not 'yield from') on a coroutine raises RuntimeError."""
    with self.assertWarns(DeprecationWarning):
        @asyncio.coroutine
        def coro():
            yield

    with self.assertWarns(DeprecationWarning):
        @asyncio.coroutine
        def wait_for_future():
            gen = coro()
            try:
                yield gen
            finally:
                gen.close()

    task = wait_for_future()
    self.assertRaises(
        RuntimeError,
        self.loop.run_until_complete, task)
def test_coroutine_non_gen_function(self):
    """@asyncio.coroutine turns a plain function into a coroutine function."""
    with self.assertWarns(DeprecationWarning):
        @asyncio.coroutine
        def func():
            return 'test'

    self.assertTrue(asyncio.iscoroutinefunction(func))

    coro = func()
    self.assertTrue(asyncio.iscoroutine(coro))

    res = self.loop.run_until_complete(coro)
    self.assertEqual(res, 'test')
def test_coroutine_non_gen_function_return_future(self):
    """A decorated plain function returning a future waits on that future."""
    fut = self.new_future(self.loop)

    with self.assertWarns(DeprecationWarning):
        @asyncio.coroutine
        def func():
            return fut

    async def coro():
        fut.set_result('test')

    t1 = self.new_task(self.loop, func())
    t2 = self.new_task(self.loop, coro())
    res = self.loop.run_until_complete(t1)
    self.assertEqual(res, 'test')
    self.assertIsNone(t2.result())
def test_current_task(self):
    """current_task() is the running task inside a coroutine, None outside."""
    self.assertIsNone(asyncio.current_task(loop=self.loop))

    async def coro(loop):
        self.assertIs(asyncio.current_task(), task)

        self.assertIs(asyncio.current_task(None), task)
        self.assertIs(asyncio.current_task(), task)

    task = self.new_task(self.loop, coro(self.loop))
    self.loop.run_until_complete(task)
    self.assertIsNone(asyncio.current_task(loop=self.loop))
def test_current_task_with_interleaving_tasks(self):
    """current_task() tracks the active task across interleaved switches."""
    self.assertIsNone(asyncio.current_task(loop=self.loop))

    fut1 = self.new_future(self.loop)
    fut2 = self.new_future(self.loop)

    async def coro1(loop):
        self.assertTrue(asyncio.current_task() is task1)
        await fut1
        self.assertTrue(asyncio.current_task() is task1)
        fut2.set_result(True)

    async def coro2(loop):
        self.assertTrue(asyncio.current_task() is task2)
        fut1.set_result(True)
        await fut2
        self.assertTrue(asyncio.current_task() is task2)

    task1 = self.new_task(self.loop, coro1(self.loop))
    task2 = self.new_task(self.loop, coro2(self.loop))

    self.loop.run_until_complete(asyncio.wait((task1, task2)))
    self.assertIsNone(asyncio.current_task(loop=self.loop))
# Some thorough tests for cancellation propagation through
# coroutines, tasks and wait().
def test_yield_future_passes_cancel(self):
    """Cancellation propagates through nested awaits down to the waiter."""
    # Cancelling outer() cancels inner() cancels waiter.
    proof = 0
    waiter = self.new_future(self.loop)

    async def inner():
        nonlocal proof
        try:
            await waiter
        except asyncio.CancelledError:
            proof += 1
            raise
        else:
            self.fail('got past sleep() in inner()')

    async def outer():
        nonlocal proof
        try:
            await inner()
        except asyncio.CancelledError:
            proof += 100  # Expect this path.
        else:
            proof += 10

    f = asyncio.ensure_future(outer(), loop=self.loop)
    test_utils.run_briefly(self.loop)
    f.cancel()
    self.loop.run_until_complete(f)
    self.assertEqual(proof, 101)
    self.assertTrue(waiter.cancelled())
def test_yield_wait_does_not_shield_cancel(self):
    """Cancelling a task awaiting wait() cancels the wait but the inner
    task keeps running to completion."""
    # Cancelling outer() makes wait() return early, leaves inner()
    # running.
    proof = 0
    waiter = self.new_future(self.loop)

    async def inner():
        nonlocal proof
        await waiter
        proof += 1

    async def outer():
        nonlocal proof
        with self.assertWarns(DeprecationWarning):
            d, p = await asyncio.wait([inner()])
        proof += 100

    f = asyncio.ensure_future(outer(), loop=self.loop)
    test_utils.run_briefly(self.loop)
    f.cancel()
    self.assertRaises(
        asyncio.CancelledError, self.loop.run_until_complete, f)
    waiter.set_result(None)
    test_utils.run_briefly(self.loop)
    self.assertEqual(proof, 1)
def test_shield_result(self):
    """A shielded future's result propagates to the outer future."""
    source = self.new_future(self.loop)
    shielded = asyncio.shield(source)
    source.set_result(42)
    outcome = self.loop.run_until_complete(shielded)
    self.assertEqual(outcome, 42)
def test_shield_exception(self):
    """An exception set on the inner future propagates to the shield."""
    inner = self.new_future(self.loop)
    outer = asyncio.shield(inner)
    test_utils.run_briefly(self.loop)
    exc = RuntimeError('expected')
    inner.set_exception(exc)
    test_utils.run_briefly(self.loop)
    self.assertIs(outer.exception(), exc)
def test_shield_cancel_inner(self):
    """Cancelling the inner future cancels the shielded outer future."""
    inner = self.new_future(self.loop)
    outer = asyncio.shield(inner)
    test_utils.run_briefly(self.loop)
    inner.cancel()
    test_utils.run_briefly(self.loop)
    self.assertTrue(outer.cancelled())
def test_shield_cancel_outer(self):
    """Cancelling the outer future detaches it from the inner future's
    callback list (no callback leak)."""
    inner = self.new_future(self.loop)
    outer = asyncio.shield(inner)
    test_utils.run_briefly(self.loop)
    outer.cancel()
    test_utils.run_briefly(self.loop)
    self.assertTrue(outer.cancelled())
    self.assertEqual(0, 0 if outer._callbacks is None else len(outer._callbacks))
def test_shield_shortcut(self):
    """shield() of an already-finished future resolves immediately."""
    finished = self.new_future(self.loop)
    finished.set_result(42)
    outcome = self.loop.run_until_complete(asyncio.shield(finished))
    self.assertEqual(outcome, 42)
def test_shield_effect(self):
    """shield() protects the inner coroutine from the outer cancellation."""
    # Cancelling outer() does not affect inner().
    proof = 0
    waiter = self.new_future(self.loop)

    async def inner():
        nonlocal proof
        await waiter
        proof += 1

    async def outer():
        nonlocal proof
        await asyncio.shield(inner())
        proof += 100

    f = asyncio.ensure_future(outer(), loop=self.loop)
    test_utils.run_briefly(self.loop)
    f.cancel()
    with self.assertRaises(asyncio.CancelledError):
        self.loop.run_until_complete(f)
    waiter.set_result(None)
    test_utils.run_briefly(self.loop)
    self.assertEqual(proof, 1)
def test_shield_gather(self):
    """Cancelling a shield around gather() leaves the gather running."""
    child1 = self.new_future(self.loop)
    child2 = self.new_future(self.loop)
    parent = asyncio.gather(child1, child2)
    outer = asyncio.shield(parent)
    test_utils.run_briefly(self.loop)
    outer.cancel()
    test_utils.run_briefly(self.loop)
    self.assertTrue(outer.cancelled())
    child1.set_result(1)
    child2.set_result(2)
    test_utils.run_briefly(self.loop)
    self.assertEqual(parent.result(), [1, 2])
def test_gather_shield(self):
    """Cancelling gather() cancels the shields but not the shielded futures."""
    child1 = self.new_future(self.loop)
    child2 = self.new_future(self.loop)
    inner1 = asyncio.shield(child1)
    inner2 = asyncio.shield(child2)
    parent = asyncio.gather(inner1, inner2)
    test_utils.run_briefly(self.loop)
    parent.cancel()
    # This should cancel inner1 and inner2 but not child1 and child2.
    test_utils.run_briefly(self.loop)
    self.assertIsInstance(parent.exception(), asyncio.CancelledError)
    self.assertTrue(inner1.cancelled())
    self.assertTrue(inner2.cancelled())
    child1.set_result(1)
    child2.set_result(2)
    test_utils.run_briefly(self.loop)
def test_shield_coroutine_without_loop(self):
    """With no running or set event loop, shield() warns and fails."""
    async def coro():
        return 42

    inner = coro()
    self.addCleanup(inner.close)
    with self.assertWarns(DeprecationWarning) as cm:
        with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
            asyncio.shield(inner)
    self.assertEqual(cm.warnings[0].filename, __file__)
def test_shield_coroutine_use_running_loop(self):
    """shield() picks up the currently running event loop."""
    async def coro():
        return 42

    async def test():
        return asyncio.shield(coro())
    outer = self.loop.run_until_complete(test())
    self.assertEqual(outer._loop, self.loop)
    res = self.loop.run_until_complete(outer)
    self.assertEqual(res, 42)
def test_shield_coroutine_use_global_loop(self):
    """shield() falling back to the global loop works but is deprecated."""
    # Deprecated in 3.10
    async def coro():
        return 42

    asyncio.set_event_loop(self.loop)
    self.addCleanup(asyncio.set_event_loop, None)
    with self.assertWarns(DeprecationWarning) as cm:
        outer = asyncio.shield(coro())
    self.assertEqual(cm.warnings[0].filename, __file__)
    self.assertEqual(outer._loop, self.loop)
    res = self.loop.run_until_complete(outer)
    self.assertEqual(res, 42)
def test_as_completed_invalid_args(self):
    """Passing a bare future or coroutine (not an iterable) is a TypeError."""
    fut = self.new_future(self.loop)

    # as_completed() expects a list of futures, not a future instance
    self.assertRaises(TypeError, self.loop.run_until_complete,
        asyncio.as_completed(fut))
    coro = coroutine_function()
    self.assertRaises(TypeError, self.loop.run_until_complete,
        asyncio.as_completed(coro))
    coro.close()
def test_wait_invalid_args(self):
    """wait() rejects bare futures/coroutines and an empty iterable."""
    fut = self.new_future(self.loop)

    # wait() expects a list of futures, not a future instance
    self.assertRaises(TypeError, self.loop.run_until_complete,
        asyncio.wait(fut))
    coro = coroutine_function()
    self.assertRaises(TypeError, self.loop.run_until_complete,
        asyncio.wait(coro))
    coro.close()

    # wait() expects at least a future
    self.assertRaises(ValueError, self.loop.run_until_complete,
        asyncio.wait([]))
def test_corowrapper_mocks_generator(self):
    """CoroWrapper (debug mode) exposes generator attributes (gi_frame,
    gi_running, gi_code) just like the wrapped generator."""

    def check():
        # A function that asserts various things.
        # Called twice, with different debug flag values.

        with self.assertWarns(DeprecationWarning):
            @asyncio.coroutine
            def coro():
                # The actual coroutine.
                self.assertTrue(gen.gi_running)
                yield from fut

        # A completed Future used to run the coroutine.
        fut = self.new_future(self.loop)
        fut.set_result(None)

        # Call the coroutine.
        gen = coro()

        # Check some properties.
        self.assertTrue(asyncio.iscoroutine(gen))
        self.assertIsInstance(gen.gi_frame, types.FrameType)
        self.assertFalse(gen.gi_running)
        self.assertIsInstance(gen.gi_code, types.CodeType)

        # Run it.
        self.loop.run_until_complete(gen)

        # The frame should have changed.
        self.assertIsNone(gen.gi_frame)

    # Test with debug flag cleared.
    with set_coroutine_debug(False):
        check()

    # Test with debug flag set.
    with set_coroutine_debug(True):
        check()
def test_yield_from_corowrapper(self):
    """'yield from' through chained CoroWrappers delivers the final value."""
    with set_coroutine_debug(True):
        with self.assertWarns(DeprecationWarning):
            @asyncio.coroutine
            def t1():
                return (yield from t2())

        with self.assertWarns(DeprecationWarning):
            @asyncio.coroutine
            def t2():
                f = self.new_future(self.loop)
                self.new_task(self.loop, t3(f))
                return (yield from f)

        with self.assertWarns(DeprecationWarning):
            @asyncio.coroutine
            def t3(f):
                f.set_result((1, 2, 3))

        task = self.new_task(self.loop, t1())
        val = self.loop.run_until_complete(task)
        self.assertEqual(val, (1, 2, 3))
def test_yield_from_corowrapper_send(self):
    """CoroWrapper.send() forwards values into the wrapped generator."""
    def foo():
        a = yield
        return a

    def call(arg):
        cw = asyncio.coroutines.CoroWrapper(foo())
        cw.send(None)
        try:
            cw.send(arg)
        except StopIteration as ex:
            return ex.args[0]
        else:
            raise AssertionError('StopIteration was expected')

    self.assertEqual(call((1, 2)), (1, 2))
    self.assertEqual(call('spam'), 'spam')
def test_corowrapper_weakref(self):
    """CoroWrapper instances support weak references."""
    wd = weakref.WeakValueDictionary()
    def foo(): yield from []
    cw = asyncio.coroutines.CoroWrapper(foo())
    wd['cw'] = cw  # Would fail without __weakref__ slot.
    cw.gen = None  # Suppress warning from __del__.
def test_corowrapper_throw(self):
    """CoroWrapper.throw accepts the same argument forms as gen.throw."""
    # Issue 429: CoroWrapper.throw must be compatible with gen.throw
    def foo():
        value = None
        while True:
            try:
                value = yield value
            except Exception as e:
                value = e

    exception = Exception("foo")
    cw = asyncio.coroutines.CoroWrapper(foo())
    cw.send(None)
    self.assertIs(exception, cw.throw(exception))

    cw = asyncio.coroutines.CoroWrapper(foo())
    cw.send(None)
    self.assertIs(exception, cw.throw(Exception, exception))

    cw = asyncio.coroutines.CoroWrapper(foo())
    cw.send(None)
    exception = cw.throw(Exception, "foo")
    self.assertIsInstance(exception, Exception)
    self.assertEqual(exception.args, ("foo", ))

    cw = asyncio.coroutines.CoroWrapper(foo())
    cw.send(None)
    exception = cw.throw(Exception, "foo", None)
    self.assertIsInstance(exception, Exception)
    self.assertEqual(exception.args, ("foo", ))
def test_log_destroyed_pending_task(self):
    """A pending task destroyed by the GC is reported via the loop's
    exception handler, including its source traceback."""
    Task = self.__class__.Task

    with self.assertWarns(DeprecationWarning):
        @asyncio.coroutine
        def kill_me(loop):
            future = self.new_future(loop)
            yield from future
            # at this point, the only reference to kill_me() task is
            # the Task._wakeup() method in future._callbacks
            raise Exception("code never reached")

    mock_handler = mock.Mock()
    self.loop.set_debug(True)
    self.loop.set_exception_handler(mock_handler)

    # schedule the task
    coro = kill_me(self.loop)
    task = asyncio.ensure_future(coro, loop=self.loop)

    self.assertEqual(asyncio.all_tasks(loop=self.loop), {task})

    asyncio.set_event_loop(None)

    # execute the task so it waits for future
    self.loop._run_once()
    self.assertEqual(len(self.loop._ready), 0)

    # remove the future used in kill_me(), and references to the task
    del coro.gi_frame.f_locals['future']
    coro = None
    source_traceback = task._source_traceback
    task = None

    # no more reference to kill_me() task: the task is destroyed by the GC
    support.gc_collect()

    self.assertEqual(asyncio.all_tasks(loop=self.loop), set())

    mock_handler.assert_called_with(self.loop, {
        'message': 'Task was destroyed but it is pending!',
        'task': mock.ANY,
        'source_traceback': source_traceback,
    })
    mock_handler.reset_mock()
@mock.patch('asyncio.base_events.logger')
def test_tb_logger_not_called_after_cancel(self, m_log):
    """Cancelling a task whose exception was never retrieved must not
    log that exception."""
    loop = asyncio.new_event_loop()
    self.set_event_loop(loop)

    async def coro():
        raise TypeError

    async def runner():
        task = self.new_task(loop, coro())
        await asyncio.sleep(0.05)
        task.cancel()
        task = None

    loop.run_until_complete(runner())
    self.assertFalse(m_log.error.called)
@mock.patch('asyncio.coroutines.logger')
def test_coroutine_never_yielded(self, m_log):
    """In debug mode, a coroutine object that is created but never
    awaited logs an error (with creation traceback) when collected."""
    with set_coroutine_debug(True):
        with self.assertWarns(DeprecationWarning):
            @asyncio.coroutine
            def coro_noop():
                pass

    tb_filename = __file__
    tb_lineno = sys._getframe().f_lineno + 2
    # create a coroutine object but don't use it
    coro_noop()

    support.gc_collect()

    self.assertTrue(m_log.error.called)
    message = m_log.error.call_args[0][0]
    func_filename, func_lineno = test_utils.get_function_source(coro_noop)

    regex = (r'^<CoroWrapper %s\(?\)? .* at %s:%s, .*> '
             r'was never yielded from\n'
             r'Coroutine object created at \(most recent call last, truncated to \d+ last lines\):\n'
             r'.*\n'
             r' File "%s", line %s, in test_coroutine_never_yielded\n'
             r' coro_noop\(\)$'
             % (re.escape(coro_noop.__qualname__),
                re.escape(func_filename), func_lineno,
                re.escape(tb_filename), tb_lineno))
    self.assertRegex(message, re.compile(regex, re.DOTALL))
def test_return_coroutine_from_coroutine(self):
    """Return of @asyncio.coroutine()-wrapped function generator object
    from @asyncio.coroutine()-wrapped function should have same effect as
    returning generator object or Future."""
    def check():
        with self.assertWarns(DeprecationWarning):
            @asyncio.coroutine
            def outer_coro():
                with self.assertWarns(DeprecationWarning):
                    @asyncio.coroutine
                    def inner_coro():
                        return 1

                return inner_coro()

        result = self.loop.run_until_complete(outer_coro())
        self.assertEqual(result, 1)

    # Test with debug flag cleared.
    with set_coroutine_debug(False):
        check()

    # Test with debug flag set.
    with set_coroutine_debug(True):
        check()
def test_task_source_traceback(self):
    """In debug mode a Task records the stack where it was created."""
    self.loop.set_debug(True)

    task = self.new_task(self.loop, coroutine_function())
    lineno = sys._getframe().f_lineno - 1
    self.assertIsInstance(task._source_traceback, list)
    self.assertEqual(task._source_traceback[-2][:3],
                     (__file__,
                      lineno,
                      'test_task_source_traceback'))
    self.loop.run_until_complete(task)
def _test_cancel_wait_for(self, timeout):
    """Common driver: cancelling a wait_for() must cancel the inner task.

    ``timeout`` may be None (wait forever) or a float.
    """
    loop = asyncio.new_event_loop()
    self.addCleanup(loop.close)

    async def blocking_coroutine():
        fut = self.new_future(loop)
        # Block: fut result is never set
        await fut

    task = loop.create_task(blocking_coroutine())

    wait = loop.create_task(asyncio.wait_for(task, timeout))
    loop.call_soon(wait.cancel)

    self.assertRaises(asyncio.CancelledError,
                      loop.run_until_complete, wait)

    # Python issue #23219: cancelling the wait must also cancel the task
    self.assertTrue(task.cancelled())
def test_cancel_blocking_wait_for(self):
    """wait_for() with timeout=None: cancellation reaches the inner task."""
    self._test_cancel_wait_for(None)
def test_cancel_wait_for(self):
    """wait_for() with a finite timeout: cancellation reaches the inner task."""
    self._test_cancel_wait_for(60.0)
def test_cancel_gather_1(self):
    """Ensure that a gathering future refuses to be cancelled once all
    children are done"""
    loop = asyncio.new_event_loop()
    self.addCleanup(loop.close)

    fut = self.new_future(loop)
    async def create():
        # The indirection fut->child_coro is needed since otherwise the
        # gathering task is done at the same time as the child future
        def child_coro():
            return (yield from fut)
        gather_future = asyncio.gather(child_coro())
        return asyncio.ensure_future(gather_future)
    gather_task = loop.run_until_complete(create())

    cancel_result = None
    def cancelling_callback(_):
        nonlocal cancel_result
        cancel_result = gather_task.cancel()
    fut.add_done_callback(cancelling_callback)

    fut.set_result(42)  # calls the cancelling_callback after fut is done()

    # At this point the task should complete.
    loop.run_until_complete(gather_task)

    # Python issue #26923: asyncio.gather drops cancellation
    self.assertEqual(cancel_result, False)
    self.assertFalse(gather_task.cancelled())
    self.assertEqual(gather_task.result(), [42])
def test_cancel_gather_2(self):
    """cancel(msg) args propagate through gather() to the awaiter."""
    cases = [
        ((), ()),
        ((None,), ()),
        (('my message',), ('my message',)),
        # Non-string values should roundtrip.
        ((5,), (5,)),
    ]
    for cancel_args, expected_args in cases:
        with self.subTest(cancel_args=cancel_args):
            loop = asyncio.new_event_loop()
            self.addCleanup(loop.close)

            async def test():
                time = 0
                while True:
                    time += 0.05
                    await asyncio.gather(asyncio.sleep(0.05),
                                         return_exceptions=True)
                    if time > 1:
                        return

            async def main():
                qwe = self.new_task(loop, test())
                await asyncio.sleep(0.2)
                qwe.cancel(*cancel_args)
                await qwe

            try:
                loop.run_until_complete(main())
            except asyncio.CancelledError as exc:
                self.assertEqual(exc.args, ())
                exc_type, exc_args, depth = get_innermost_context(exc)
                self.assertEqual((exc_type, exc_args),
                                 (asyncio.CancelledError, expected_args))
                # The exact traceback seems to vary in CI.
                self.assertIn(depth, (2, 3))
            else:
                self.fail('gather did not propagate the cancellation '
                          'request')
def test_exception_traceback(self):
    """A task's stored exception keeps its __traceback__."""
    # See http://bugs.python.org/issue28843

    async def foo():
        1 / 0

    async def main():
        task = self.new_task(self.loop, foo())
        await asyncio.sleep(0)  # skip one loop iteration
        self.assertIsNotNone(task.exception().__traceback__)

    self.loop.run_until_complete(main())
@mock.patch('asyncio.base_events.logger')
def test_error_in_call_soon(self, m_log):
    """If scheduling the first task step fails, the half-created task is
    logged as destroyed-but-pending."""
    def call_soon(callback, *args, **kwargs):
        raise ValueError
    self.loop.call_soon = call_soon

    with self.assertWarns(DeprecationWarning):
        @asyncio.coroutine
        def coro():
            pass

    self.assertFalse(m_log.error.called)

    with self.assertRaises(ValueError):
        gen = coro()
        try:
            self.new_task(self.loop, gen)
        finally:
            gen.close()

    self.assertTrue(m_log.error.called)
    message = m_log.error.call_args[0][0]
    self.assertIn('Task was destroyed but it is pending', message)

    self.assertEqual(asyncio.all_tasks(self.loop), set())
def test_create_task_with_noncoroutine(self):
    """Creating a task from a non-coroutine raises TypeError."""
    with self.assertRaisesRegex(TypeError,
                                "a coroutine was expected, got 123"):
        self.new_task(self.loop, 123)

    # test it for the second time to ensure that caching
    # in asyncio.iscoroutine() doesn't break things.
    with self.assertRaisesRegex(TypeError,
                                "a coroutine was expected, got 123"):
        self.new_task(self.loop, 123)
def test_create_task_with_oldstyle_coroutine(self):
    """Tasks can be created from @asyncio.coroutine generators."""
    with self.assertWarns(DeprecationWarning):
        @asyncio.coroutine
        def coro():
            pass

    task = self.new_task(self.loop, coro())
    self.assertIsInstance(task, self.Task)
    self.loop.run_until_complete(task)

    # test it for the second time to ensure that caching
    # in asyncio.iscoroutine() doesn't break things.
    task = self.new_task(self.loop, coro())
    self.assertIsInstance(task, self.Task)
    self.loop.run_until_complete(task)
def test_create_task_with_async_function(self):
    """Tasks can be created from native 'async def' coroutines."""
    async def coro():
        pass

    task = self.new_task(self.loop, coro())
    self.assertIsInstance(task, self.Task)
    self.loop.run_until_complete(task)

    # test it for the second time to ensure that caching
    # in asyncio.iscoroutine() doesn't break things.
    task = self.new_task(self.loop, coro())
    self.assertIsInstance(task, self.Task)
    self.loop.run_until_complete(task)
def test_create_task_with_asynclike_function(self):
    """Tasks can be created from objects implementing __await__."""
    task = self.new_task(self.loop, CoroLikeObject())
    self.assertIsInstance(task, self.Task)
    self.assertEqual(self.loop.run_until_complete(task), 42)

    # test it for the second time to ensure that caching
    # in asyncio.iscoroutine() doesn't break things.
    task = self.new_task(self.loop, CoroLikeObject())
    self.assertIsInstance(task, self.Task)
    self.assertEqual(self.loop.run_until_complete(task), 42)
def test_bare_create_task(self):
    """asyncio.create_task() inside a coroutine uses the running loop."""
    async def inner():
        return 1

    async def coro():
        task = asyncio.create_task(inner())
        self.assertIsInstance(task, self.Task)
        ret = await task
        self.assertEqual(1, ret)

    self.loop.run_until_complete(coro())
def test_bare_create_named_task(self):
    """asyncio.create_task(name=...) sets the task's name."""
    async def coro_noop():
        pass

    async def coro():
        task = asyncio.create_task(coro_noop(), name='No-op')
        self.assertEqual(task.get_name(), 'No-op')
        await task

    self.loop.run_until_complete(coro())
def test_context_1(self):
    """Each task runs in a copy of the creating context: the child task
    sees the default value (it was created before main() set the var),
    and the child's own write does not leak back into main()."""
    cvar = contextvars.ContextVar('cvar', default='nope')

    # Bug fix: this coroutine was defined without a name
    # ("async def  ():", a syntax error); it is instantiated below as
    # sub(), so the definition must be "async def sub():".
    async def sub():
        await asyncio.sleep(0.01)
        # Created before main() called cvar.set('yes'), so the copied
        # context still holds the default.
        self.assertEqual(cvar.get(), 'nope')
        # This write must stay local to the subtask's context.
        cvar.set('something else')

    async def main():
        self.assertEqual(cvar.get(), 'nope')
        subtask = self.new_task(loop, sub())
        cvar.set('yes')
        self.assertEqual(cvar.get(), 'yes')
        await subtask
        # The subtask's cvar.set() must not be visible here.
        self.assertEqual(cvar.get(), 'yes')

    loop = asyncio.new_event_loop()
    try:
        task = self.new_task(loop, main())
        loop.run_until_complete(task)
    finally:
        loop.close()
def test_context_2(self):
    """Future done-callbacks run in the task's context but their writes
    do not pollute the task's own context."""
    cvar = contextvars.ContextVar('cvar', default='nope')

    async def main():
        def fut_on_done(fut):
            # This change must not pollute the context
            # of the "main()" task.
            cvar.set('something else')

        self.assertEqual(cvar.get(), 'nope')

        for j in range(2):
            fut = self.new_future(loop)
            fut.add_done_callback(fut_on_done)
            cvar.set(f'yes{j}')
            loop.call_soon(fut.set_result, None)
            await fut
            self.assertEqual(cvar.get(), f'yes{j}')

            for i in range(3):
                # Test that task passed its context to add_done_callback:
                cvar.set(f'yes{i}-{j}')
                await asyncio.sleep(0.001)
                self.assertEqual(cvar.get(), f'yes{i}-{j}')

    loop = asyncio.new_event_loop()
    try:
        task = self.new_task(loop, main())
        loop.run_until_complete(task)
    finally:
        loop.close()

    self.assertEqual(cvar.get(), 'nope')
def test_context_3(self):
    # Run 100 Tasks in parallel, each modifying cvar.

    cvar = contextvars.ContextVar('cvar', default=-1)

    async def sub(num):
        for i in range(10):
            cvar.set(num + i)
            await asyncio.sleep(random.uniform(0.001, 0.05))
            # each task's context is isolated, so its own write survives
            self.assertEqual(cvar.get(), num + i)

    async def main():
        tasks = []
        for i in range(100):
            task = loop.create_task(sub(random.randint(0, 10)))
            tasks.append(task)

        await asyncio.gather(*tasks)

    loop = asyncio.new_event_loop()
    try:
        loop.run_until_complete(main())
    finally:
        loop.close()

    self.assertEqual(cvar.get(), -1)
def test_get_coro(self):
    """Task.get_coro() returns the exact coroutine object it wraps."""
    loop = asyncio.new_event_loop()
    coro = coroutine_function()
    try:
        task = self.new_task(loop, coro)
        loop.run_until_complete(task)
        self.assertIs(task.get_coro(), coro)
    finally:
        loop.close()
def add_subclass_tests(cls):
    """Class decorator: replace cls.Task/cls.Future with call-counting
    subclasses and add a test verifying the subclass hooks are used.

    Returns cls unchanged when it has no Task/Future to patch.
    """
    BaseTask = cls.Task
    BaseFuture = cls.Future

    if BaseTask is None or BaseFuture is None:
        return cls

    class CommonFuture:
        def __init__(self, *args, **kwargs):
            self.calls = collections.defaultdict(lambda: 0)
            super().__init__(*args, **kwargs)

        def add_done_callback(self, *args, **kwargs):
            self.calls['add_done_callback'] += 1
            return super().add_done_callback(*args, **kwargs)

    class Task(CommonFuture, BaseTask):
        pass

    class Future(CommonFuture, BaseFuture):
        pass

    def test_subclasses_ctask_cfuture(self):
        fut = self.Future(loop=self.loop)

        async def func():
            self.loop.call_soon(lambda: fut.set_result('spam'))
            return await fut

        task = self.Task(func(), loop=self.loop)

        result = self.loop.run_until_complete(task)

        self.assertEqual(result, 'spam')

        self.assertEqual(
            dict(task.calls),
            {'add_done_callback': 1})

        self.assertEqual(
            dict(fut.calls),
            {'add_done_callback': 1})

    # Add patched Task & Future back to the test case
    cls.Task = Task
    cls.Future = Future

    # Add an extra unit-test
    cls.test_subclasses_ctask_cfuture = test_subclasses_ctask_cfuture

    # Disable the "test_task_source_traceback" test
    # (the test is hardcoded for a particular call stack, which
    # is slightly different for Task subclasses)
    cls.test_task_source_traceback = None

    return cls
class SetMethodsTest:
    """Mixin: calling Future.set_result()/set_exception() directly on a
    Task corrupts its state machine; the error surfaces through the
    loop's exception handler as InvalidStateError."""

    def test_set_result_causes_invalid_state(self):
        Future = type(self).Future
        self.loop.call_exception_handler = exc_handler = mock.Mock()

        async def foo():
            await asyncio.sleep(0.1)
            return 10

        coro = foo()
        task = self.new_task(self.loop, coro)
        # Bypass Task's override and force-set a result on the future part.
        Future.set_result(task, 'spam')

        self.assertEqual(
            self.loop.run_until_complete(task),
            'spam')

        exc_handler.assert_called_once()
        exc = exc_handler.call_args[0][0]['exception']
        with self.assertRaisesRegex(asyncio.InvalidStateError,
                                    r'step\(\): already done'):
            raise exc

        coro.close()

    def test_set_exception_causes_invalid_state(self):
        class MyExc(Exception):
            pass

        Future = type(self).Future
        self.loop.call_exception_handler = exc_handler = mock.Mock()

        async def foo():
            await asyncio.sleep(0.1)
            return 10

        coro = foo()
        task = self.new_task(self.loop, coro)
        # Bypass Task's override and force-set an exception.
        Future.set_exception(task, MyExc())

        with self.assertRaises(MyExc):
            self.loop.run_until_complete(task)

        exc_handler.assert_called_once()
        exc = exc_handler.call_args[0][0]['exception']
        with self.assertRaisesRegex(asyncio.InvalidStateError,
                                    r'step\(\): already done'):
            raise exc

        coro.close()
@unittest.skipUnless(hasattr(futures, '_CFuture') and
                     hasattr(tasks, '_CTask'),
                     'requires the C _asyncio module')
class CTask_CFuture_Tests(BaseTaskTests, SetMethodsTest,
                          test_utils.TestCase):
    """Run the task test suite against the C-accelerated Task/Future."""

    Task = getattr(tasks, '_CTask', None)
    Future = getattr(futures, '_CFuture', None)

    @support.refcount_test
    def test_refleaks_in_task___init__(self):
        # Re-running Task.__init__ must not leak references.
        gettotalrefcount = support.get_attribute(sys, 'gettotalrefcount')
        async def coro():
            pass
        task = self.new_task(self.loop, coro())
        self.loop.run_until_complete(task)
        refs_before = gettotalrefcount()
        for i in range(100):
            task.__init__(coro(), loop=self.loop)
            self.loop.run_until_complete(task)
        self.assertAlmostEqual(gettotalrefcount() - refs_before, 0, delta=10)

    def test_del__log_destroy_pending_segfault(self):
        # Deleting the C-level attribute must raise, not crash.
        async def coro():
            pass
        task = self.new_task(self.loop, coro())
        self.loop.run_until_complete(task)
        with self.assertRaises(AttributeError):
            del task._log_destroy_pending
@unittest.skipUnless(hasattr(futures, '_CFuture') and
                     hasattr(tasks, '_CTask'),
                     'requires the C _asyncio module')
@add_subclass_tests
class CTask_CFuture_SubclassTests(BaseTaskTests, test_utils.TestCase):
    """Task suite with subclassed C Task and subclassed C Future."""

    Task = getattr(tasks, '_CTask', None)
    Future = getattr(futures, '_CFuture', None)
@unittest.skipUnless(hasattr(tasks, '_CTask'),
                     'requires the C _asyncio module')
@add_subclass_tests
class CTaskSubclass_PyFuture_Tests(BaseTaskTests, test_utils.TestCase):
    """Task suite with subclassed C Task and pure-Python Future."""

    Task = getattr(tasks, '_CTask', None)
    Future = futures._PyFuture
@unittest.skipUnless(hasattr(futures, '_CFuture'),
'requires the C _asyncio module')
@add_subclass_tests
class PyTask_CFutureSubclass_Tests(BaseTaskTests, test_utils.TestCase):
Future = getattr(futures, '_CFuture', None)
Task = tasks._PyTask
@unittest.skipUnless(hasattr(tasks, '_CTask'),
'requires the C _asyncio module')
class CTask_PyFuture_Tests(BaseTaskTests, test_utils.TestCase):
Task = getattr(tasks, '_CTask', None)
Future = futures._PyFuture
@unittest.skipUnless(hasattr(futures, '_CFuture'),
'requires the C _asyncio module')
class PyTask_CFuture_Tests(BaseTaskTests, test_utils.TestCase):
Task = tasks._PyTask
Future = getattr(futures, '_CFuture', None)
class PyTask_PyFuture_Tests(BaseTaskTests, SetMethodsTest,
test_utils.TestCase):
Task = tasks._PyTask
Future = futures._PyFuture
@add_subclass_tests
class PyTask_PyFuture_SubclassTests(BaseTaskTests, test_utils.TestCase):
Task = tasks._PyTask
Future = futures._PyFuture
@unittest.skipUnless(hasattr(tasks, '_CTask'),
'requires the C _asyncio module')
class CTask_Future_Tests(test_utils.TestCase):
def test_foobar(self):
class Fut(asyncio.Future):
@property
def get_loop(self):
raise AttributeError
async def coro():
await fut
return 'spam'
self.loop = asyncio.new_event_loop()
try:
fut = Fut(loop=self.loop)
self.loop.call_later(0.1, fut.set_result, 1)
task = self.loop.create_task(coro())
res = self.loop.run_until_complete(task)
finally:
self.loop.close()
self.assertEqual(res, 'spam')
class BaseTaskIntrospectionTests:
_register_task = None
_unregister_task = None
_enter_task = None
_leave_task = None
def test__register_task_1(self):
class TaskLike:
@property
def _loop(self):
return loop
def done(self):
return False
task = TaskLike()
loop = mock.Mock()
self.assertEqual(asyncio.all_tasks(loop), set())
self._register_task(task)
self.assertEqual(asyncio.all_tasks(loop), {task})
self._unregister_task(task)
def test__register_task_2(self):
class TaskLike:
def get_loop(self):
return loop
def done(self):
return False
task = TaskLike()
loop = mock.Mock()
self.assertEqual(asyncio.all_tasks(loop), set())
self._register_task(task)
self.assertEqual(asyncio.all_tasks(loop), {task})
self._unregister_task(task)
def test__register_task_3(self):
class TaskLike:
def get_loop(self):
return loop
def done(self):
return True
task = TaskLike()
loop = mock.Mock()
self.assertEqual(asyncio.all_tasks(loop), set())
self._register_task(task)
self.assertEqual(asyncio.all_tasks(loop), set())
self._unregister_task(task)
def test__enter_task(self):
task = mock.Mock()
loop = mock.Mock()
self.assertIsNone(asyncio.current_task(loop))
self._enter_task(loop, task)
self.assertIs(asyncio.current_task(loop), task)
self._leave_task(loop, task)
def test__enter_task_failure(self):
task1 = mock.Mock()
task2 = mock.Mock()
loop = mock.Mock()
self._enter_task(loop, task1)
with self.assertRaises(RuntimeError):
self._enter_task(loop, task2)
self.assertIs(asyncio.current_task(loop), task1)
self._leave_task(loop, task1)
def test__leave_task(self):
task = mock.Mock()
loop = mock.Mock()
self._enter_task(loop, task)
self._leave_task(loop, task)
self.assertIsNone(asyncio.current_task(loop))
def test__leave_task_failure1(self):
task1 = mock.Mock()
task2 = mock.Mock()
loop = mock.Mock()
self._enter_task(loop, task1)
with self.assertRaises(RuntimeError):
self._leave_task(loop, task2)
self.assertIs(asyncio.current_task(loop), task1)
self._leave_task(loop, task1)
def test__leave_task_failure2(self):
task = mock.Mock()
loop = mock.Mock()
with self.assertRaises(RuntimeError):
self._leave_task(loop, task)
self.assertIsNone(asyncio.current_task(loop))
def test__unregister_task(self):
task = mock.Mock()
loop = mock.Mock()
task.get_loop = lambda: loop
self._register_task(task)
self._unregister_task(task)
self.assertEqual(asyncio.all_tasks(loop), set())
def test__unregister_task_not_registered(self):
task = mock.Mock()
loop = mock.Mock()
self._unregister_task(task)
self.assertEqual(asyncio.all_tasks(loop), set())
class PyIntrospectionTests(test_utils.TestCase, BaseTaskIntrospectionTests):
_register_task = staticmethod(tasks._py_register_task)
_unregister_task = staticmethod(tasks._py_unregister_task)
_enter_task = staticmethod(tasks._py_enter_task)
_leave_task = staticmethod(tasks._py_leave_task)
@unittest.skipUnless(hasattr(tasks, '_c_register_task'),
'requires the C _asyncio module')
class CIntrospectionTests(test_utils.TestCase, BaseTaskIntrospectionTests):
if hasattr(tasks, '_c_register_task'):
_register_task = staticmethod(tasks._c_register_task)
_unregister_task = staticmethod(tasks._c_unregister_task)
_enter_task = staticmethod(tasks._c_enter_task)
_leave_task = staticmethod(tasks._c_leave_task)
else:
_register_task = _unregister_task = _enter_task = _leave_task = None
class BaseCurrentLoopTests:
def setUp(self):
super().setUp()
self.loop = asyncio.new_event_loop()
self.set_event_loop(self.loop)
def new_task(self, coro):
raise NotImplementedError
def test_current_task_no_running_loop(self):
self.assertIsNone(asyncio.current_task(loop=self.loop))
def test_current_task_no_running_loop_implicit(self):
with self.assertRaises(RuntimeError):
asyncio.current_task()
def test_current_task_with_implicit_loop(self):
async def coro():
self.assertIs(asyncio.current_task(loop=self.loop), task)
self.assertIs(asyncio.current_task(None), task)
self.assertIs(asyncio.current_task(), task)
task = self.new_task(coro())
self.loop.run_until_complete(task)
self.assertIsNone(asyncio.current_task(loop=self.loop))
class PyCurrentLoopTests(BaseCurrentLoopTests, test_utils.TestCase):
def new_task(self, coro):
return tasks._PyTask(coro, loop=self.loop)
@unittest.skipUnless(hasattr(tasks, '_CTask'),
'requires the C _asyncio module')
class CCurrentLoopTests(BaseCurrentLoopTests, test_utils.TestCase):
def new_task(self, coro):
return getattr(tasks, '_CTask')(coro, loop=self.loop)
class GenericTaskTests(test_utils.TestCase):
def test_future_subclass(self):
self.assertTrue(issubclass(asyncio.Task, asyncio.Future))
@support.cpython_only
def test_asyncio_module_compiled(self):
# Because of circular imports it's easy to make _asyncio
# module non-importable. This is a simple test that will
# fail on systems where C modules were successfully compiled
# (hence the test for _functools etc), but _asyncio somehow didn't.
try:
import _functools
import _json
import _pickle
except ImportError:
self.skipTest('C modules are not available')
else:
try:
import _asyncio
except ImportError:
self.fail('_asyncio module is missing')
class GatherTestsBase:
def setUp(self):
super().setUp()
self.one_loop = self.new_test_loop()
self.other_loop = self.new_test_loop()
self.set_event_loop(self.one_loop, cleanup=False)
def _run_loop(self, loop):
while loop._ready:
test_utils.run_briefly(loop)
def _check_success(self, **kwargs):
a, b, c = [self.one_loop.create_future() for i in range(3)]
fut = self._gather(*self.wrap_futures(a, b, c), **kwargs)
cb = test_utils.MockCallback()
fut.add_done_callback(cb)
b.set_result(1)
a.set_result(2)
self._run_loop(self.one_loop)
self.assertEqual(cb.called, False)
self.assertFalse(fut.done())
c.set_result(3)
self._run_loop(self.one_loop)
cb.assert_called_once_with(fut)
self.assertEqual(fut.result(), [2, 1, 3])
def test_success(self):
self._check_success()
self._check_success(return_exceptions=False)
def test_result_exception_success(self):
self._check_success(return_exceptions=True)
def test_one_exception(self):
a, b, c, d, e = [self.one_loop.create_future() for i in range(5)]
fut = self._gather(*self.wrap_futures(a, b, c, d, e))
cb = test_utils.MockCallback()
fut.add_done_callback(cb)
exc = ZeroDivisionError()
a.set_result(1)
b.set_exception(exc)
self._run_loop(self.one_loop)
self.assertTrue(fut.done())
cb.assert_called_once_with(fut)
self.assertIs(fut.exception(), exc)
# Does nothing
c.set_result(3)
d.cancel()
e.set_exception(RuntimeError())
e.exception()
def test_return_exceptions(self):
a, b, c, d = [self.one_loop.create_future() for i in range(4)]
fut = self._gather(*self.wrap_futures(a, b, c, d),
return_exceptions=True)
cb = test_utils.MockCallback()
fut.add_done_callback(cb)
exc = ZeroDivisionError()
exc2 = RuntimeError()
b.set_result(1)
c.set_exception(exc)
a.set_result(3)
self._run_loop(self.one_loop)
self.assertFalse(fut.done())
d.set_exception(exc2)
self._run_loop(self.one_loop)
self.assertTrue(fut.done())
cb.assert_called_once_with(fut)
self.assertEqual(fut.result(), [3, 1, exc, exc2])
def test_env_var_debug(self):
code = '\n'.join((
'import asyncio.coroutines',
'print(asyncio.coroutines._DEBUG)'))
# Test with -E to not fail if the unit test was run with
# PYTHONASYNCIODEBUG set to a non-empty string
sts, stdout, stderr = assert_python_ok('-E', '-c', code)
self.assertEqual(stdout.rstrip(), b'False')
sts, stdout, stderr = assert_python_ok('-c', code,
PYTHONASYNCIODEBUG='',
PYTHONDEVMODE='')
self.assertEqual(stdout.rstrip(), b'False')
sts, stdout, stderr = assert_python_ok('-c', code,
PYTHONASYNCIODEBUG='1',
PYTHONDEVMODE='')
self.assertEqual(stdout.rstrip(), b'True')
sts, stdout, stderr = assert_python_ok('-E', '-c', code,
PYTHONASYNCIODEBUG='1',
PYTHONDEVMODE='')
self.assertEqual(stdout.rstrip(), b'False')
# -X dev
sts, stdout, stderr = assert_python_ok('-E', '-X', 'dev',
'-c', code)
self.assertEqual(stdout.rstrip(), b'True')
class FutureGatherTests(GatherTestsBase, test_utils.TestCase):
def wrap_futures(self, *futures):
return futures
def _gather(self, *args, **kwargs):
return asyncio.gather(*args, **kwargs)
def test_constructor_empty_sequence_without_loop(self):
with self.assertWarns(DeprecationWarning) as cm:
with self.assertRaises(RuntimeError):
asyncio.gather()
self.assertEqual(cm.warnings[0].filename, __file__)
def test_constructor_empty_sequence_use_running_loop(self):
async def gather():
return asyncio.gather()
fut = self.one_loop.run_until_complete(gather())
self.assertIsInstance(fut, asyncio.Future)
self.assertIs(fut._loop, self.one_loop)
self._run_loop(self.one_loop)
self.assertTrue(fut.done())
self.assertEqual(fut.result(), [])
def test_constructor_empty_sequence_use_global_loop(self):
# Deprecated in 3.10
asyncio.set_event_loop(self.one_loop)
self.addCleanup(asyncio.set_event_loop, None)
with self.assertWarns(DeprecationWarning) as cm:
fut = asyncio.gather()
self.assertEqual(cm.warnings[0].filename, __file__)
self.assertIsInstance(fut, asyncio.Future)
self.assertIs(fut._loop, self.one_loop)
self._run_loop(self.one_loop)
self.assertTrue(fut.done())
self.assertEqual(fut.result(), [])
def test_constructor_heterogenous_futures(self):
fut1 = self.one_loop.create_future()
fut2 = self.other_loop.create_future()
with self.assertRaises(ValueError):
asyncio.gather(fut1, fut2)
def test_constructor_homogenous_futures(self):
children = [self.other_loop.create_future() for i in range(3)]
fut = asyncio.gather(*children)
self.assertIs(fut._loop, self.other_loop)
self._run_loop(self.other_loop)
self.assertFalse(fut.done())
fut = asyncio.gather(*children)
self.assertIs(fut._loop, self.other_loop)
self._run_loop(self.other_loop)
self.assertFalse(fut.done())
def test_one_cancellation(self):
a, b, c, d, e = [self.one_loop.create_future() for i in range(5)]
fut = asyncio.gather(a, b, c, d, e)
cb = test_utils.MockCallback()
fut.add_done_callback(cb)
a.set_result(1)
b.cancel()
self._run_loop(self.one_loop)
self.assertTrue(fut.done())
cb.assert_called_once_with(fut)
self.assertFalse(fut.cancelled())
self.assertIsInstance(fut.exception(), asyncio.CancelledError)
# Does nothing
c.set_result(3)
d.cancel()
e.set_exception(RuntimeError())
e.exception()
def test_result_exception_one_cancellation(self):
a, b, c, d, e, f = [self.one_loop.create_future()
for i in range(6)]
fut = asyncio.gather(a, b, c, d, e, f, return_exceptions=True)
cb = test_utils.MockCallback()
fut.add_done_callback(cb)
a.set_result(1)
zde = ZeroDivisionError()
b.set_exception(zde)
c.cancel()
self._run_loop(self.one_loop)
self.assertFalse(fut.done())
d.set_result(3)
e.cancel()
rte = RuntimeError()
f.set_exception(rte)
res = self.one_loop.run_until_complete(fut)
self.assertIsInstance(res[2], asyncio.CancelledError)
self.assertIsInstance(res[4], asyncio.CancelledError)
res[2] = res[4] = None
self.assertEqual(res, [1, zde, None, 3, None, rte])
cb.assert_called_once_with(fut)
class CoroutineGatherTests(GatherTestsBase, test_utils.TestCase):
def wrap_futures(self, *futures):
coros = []
for fut in futures:
async def coro(fut=fut):
return await fut
coros.append(coro())
return coros
def _gather(self, *args, **kwargs):
async def coro():
return asyncio.gather(*args, **kwargs)
return self.one_loop.run_until_complete(coro())
def test_constructor_without_loop(self):
async def coro():
return 'abc'
gen1 = coro()
self.addCleanup(gen1.close)
gen2 = coro()
self.addCleanup(gen2.close)
with self.assertWarns(DeprecationWarning) as cm:
with self.assertRaises(RuntimeError):
asyncio.gather(gen1, gen2)
self.assertEqual(cm.warnings[0].filename, __file__)
def test_constructor_use_running_loop(self):
async def coro():
return 'abc'
gen1 = coro()
gen2 = coro()
async def gather():
return asyncio.gather(gen1, gen2)
fut = self.one_loop.run_until_complete(gather())
self.assertIs(fut._loop, self.one_loop)
self.one_loop.run_until_complete(fut)
def test_constructor_use_global_loop(self):
# Deprecated in 3.10
async def coro():
return 'abc'
asyncio.set_event_loop(self.other_loop)
self.addCleanup(asyncio.set_event_loop, None)
gen1 = coro()
gen2 = coro()
with self.assertWarns(DeprecationWarning) as cm:
fut = asyncio.gather(gen1, gen2)
self.assertEqual(cm.warnings[0].filename, __file__)
self.assertIs(fut._loop, self.other_loop)
self.other_loop.run_until_complete(fut)
def test_duplicate_coroutines(self):
with self.assertWarns(DeprecationWarning):
@asyncio.coroutine
def coro(s):
return s
c = coro('abc')
fut = self._gather(c, c, coro('def'), c)
self._run_loop(self.one_loop)
self.assertEqual(fut.result(), ['abc', 'abc', 'def', 'abc'])
def test_cancellation_broadcast(self):
# Cancelling outer() cancels all children.
proof = 0
waiter = self.one_loop.create_future()
async def inner():
nonlocal proof
await waiter
proof += 1
child1 = asyncio.ensure_future(inner(), loop=self.one_loop)
child2 = asyncio.ensure_future(inner(), loop=self.one_loop)
gatherer = None
async def outer():
nonlocal proof, gatherer
gatherer = asyncio.gather(child1, child2)
await gatherer
proof += 100
f = asyncio.ensure_future(outer(), loop=self.one_loop)
test_utils.run_briefly(self.one_loop)
self.assertTrue(f.cancel())
with self.assertRaises(asyncio.CancelledError):
self.one_loop.run_until_complete(f)
self.assertFalse(gatherer.cancel())
self.assertTrue(waiter.cancelled())
self.assertTrue(child1.cancelled())
self.assertTrue(child2.cancelled())
test_utils.run_briefly(self.one_loop)
self.assertEqual(proof, 0)
def test_exception_marking(self):
# Test for the first line marked "Mark exception retrieved."
async def inner(f):
await f
raise RuntimeError('should not be ignored')
a = self.one_loop.create_future()
b = self.one_loop.create_future()
async def outer():
await asyncio.gather(inner(a), inner(b))
f = asyncio.ensure_future(outer(), loop=self.one_loop)
test_utils.run_briefly(self.one_loop)
a.set_result(None)
test_utils.run_briefly(self.one_loop)
b.set_result(None)
test_utils.run_briefly(self.one_loop)
self.assertIsInstance(f.exception(), RuntimeError)
class RunCoroutineThreadsafeTests(test_utils.TestCase):
"""Test case for asyncio.run_coroutine_threadsafe."""
def setUp(self):
super().setUp()
self.loop = asyncio.new_event_loop()
self.set_event_loop(self.loop) # Will cleanup properly
async def add(self, a, b, fail=False, cancel=False):
"""Wait 0.05 second and return a + b."""
await asyncio.sleep(0.05)
if fail:
raise RuntimeError("Fail!")
if cancel:
asyncio.current_task(self.loop).cancel()
await asyncio.sleep(0)
return a + b
def target(self, fail=False, cancel=False, timeout=None,
advance_coro=False):
"""Run add coroutine in the event loop."""
coro = self.add(1, 2, fail=fail, cancel=cancel)
future = asyncio.run_coroutine_threadsafe(coro, self.loop)
if advance_coro:
# this is for test_run_coroutine_threadsafe_task_factory_exception;
# otherwise it spills errors and breaks **other** unittests, since
# 'target' is interacting with threads.
# With this call, `coro` will be advanced, so that
# CoroWrapper.__del__ won't do anything when asyncio tests run
# in debug mode.
self.loop.call_soon_threadsafe(coro.send, None)
try:
return future.result(timeout)
finally:
future.done() or future.cancel()
def test_run_coroutine_threadsafe(self):
"""Test coroutine submission from a thread to an event loop."""
future = self.loop.run_in_executor(None, self.target)
result = self.loop.run_until_complete(future)
self.assertEqual(result, 3)
def test_run_coroutine_threadsafe_with_exception(self):
"""Test coroutine submission from a thread to an event loop
when an exception is raised."""
future = self.loop.run_in_executor(None, self.target, True)
with self.assertRaises(RuntimeError) as exc_context:
self.loop.run_until_complete(future)
self.assertIn("Fail!", exc_context.exception.args)
def test_run_coroutine_threadsafe_with_timeout(self):
"""Test coroutine submission from a thread to an event loop
when a timeout is raised."""
callback = lambda: self.target(timeout=0)
future = self.loop.run_in_executor(None, callback)
with self.assertRaises(asyncio.TimeoutError):
self.loop.run_until_complete(future)
test_utils.run_briefly(self.loop)
# Check that there's no pending task (add has been cancelled)
for task in asyncio.all_tasks(self.loop):
self.assertTrue(task.done())
def test_run_coroutine_threadsafe_task_cancelled(self):
"""Test coroutine submission from a tread to an event loop
when the task is cancelled."""
callback = lambda: self.target(cancel=True)
future = self.loop.run_in_executor(None, callback)
with self.assertRaises(asyncio.CancelledError):
self.loop.run_until_complete(future)
def test_run_coroutine_threadsafe_task_factory_exception(self):
"""Test coroutine submission from a tread to an event loop
when the task factory raise an exception."""
def task_factory(loop, coro):
raise NameError
run = self.loop.run_in_executor(
None, lambda: self.target(advance_coro=True))
# Set exception handler
callback = test_utils.MockCallback()
self.loop.set_exception_handler(callback)
# Set corrupted task factory
self.addCleanup(self.loop.set_task_factory,
self.loop.get_task_factory())
self.loop.set_task_factory(task_factory)
# Run event loop
with self.assertRaises(NameError) as exc_context:
self.loop.run_until_complete(run)
# Check exceptions
self.assertEqual(len(callback.call_args_list), 1)
(loop, context), kwargs = callback.call_args
self.assertEqual(context['exception'], exc_context.exception)
class SleepTests(test_utils.TestCase):
def setUp(self):
super().setUp()
self.loop = asyncio.new_event_loop()
self.set_event_loop(self.loop)
def tearDown(self):
self.loop.close()
self.loop = None
super().tearDown()
def test_sleep_zero(self):
result = 0
def inc_result(num):
nonlocal result
result += num
async def coro():
self.loop.call_soon(inc_result, 1)
self.assertEqual(result, 0)
num = await asyncio.sleep(0, result=10)
self.assertEqual(result, 1) # inc'ed by call_soon
inc_result(num) # num should be 11
self.loop.run_until_complete(coro())
self.assertEqual(result, 11)
class WaitTests(test_utils.TestCase):
def setUp(self):
super().setUp()
self.loop = asyncio.new_event_loop()
self.set_event_loop(self.loop)
def tearDown(self):
self.loop.close()
self.loop = None
super().tearDown()
def test_coro_is_deprecated_in_wait(self):
# Remove test when passing coros to asyncio.wait() is removed in 3.11
with self.assertWarns(DeprecationWarning):
self.loop.run_until_complete(
asyncio.wait([coroutine_function()]))
task = self.loop.create_task(coroutine_function())
with self.assertWarns(DeprecationWarning):
self.loop.run_until_complete(
asyncio.wait([task, coroutine_function()]))
class CompatibilityTests(test_utils.TestCase):
# Tests for checking a bridge between old-styled coroutines
# and async/await syntax
def setUp(self):
super().setUp()
self.loop = asyncio.new_event_loop()
self.set_event_loop(self.loop)
def tearDown(self):
self.loop.close()
self.loop = None
super().tearDown()
def test_yield_from_awaitable(self):
with self.assertWarns(DeprecationWarning):
@asyncio.coroutine
def coro():
yield from asyncio.sleep(0)
return 'ok'
result = self.loop.run_until_complete(coro())
self.assertEqual('ok', result)
def test_await_old_style_coro(self):
with self.assertWarns(DeprecationWarning):
@asyncio.coroutine
def coro1():
return 'ok1'
with self.assertWarns(DeprecationWarning):
@asyncio.coroutine
def coro2():
yield from asyncio.sleep(0)
return 'ok2'
async def inner():
return await asyncio.gather(coro1(), coro2())
result = self.loop.run_until_complete(inner())
self.assertEqual(['ok1', 'ok2'], result)
def test_debug_mode_interop(self):
# https://bugs.python.org/issue32636
code = textwrap.dedent("""
import asyncio
async def native_coro():
pass
@asyncio.coroutine
def old_style_coro():
yield from native_coro()
asyncio.run(old_style_coro())
""")
assert_python_ok("-Wignore::DeprecationWarning", "-c", code,
PYTHONASYNCIODEBUG="1")
if __name__ == '__main__':
unittest.main()
| sub |
client.rs | // This file is part of Substrate.
// Copyright (C) 2019-2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! Utilities to build a `TestClient` for `node-runtime`.
use sc_service::client;
use sp_runtime::BuildStorage;
/// Re-export test-client utilities.
pub use substrate_test_client::*;
/// Call executor for `node-runtime` `TestClient`.
pub type Executor = sc_executor::NativeExecutor<node_executor::Executor>;
/// Default backend type.
pub type Backend = sc_client_db::Backend<node_primitives::Block>;
/// Test client type.
pub type Client = client::Client<
Backend,
client::LocalCallExecutor<Backend, Executor>,
node_primitives::Block,
node_runtime::RuntimeApi,
>;
/// Transaction for node-runtime.
pub type Transaction = sc_client_api::backend::TransactionFor<Backend, node_primitives::Block>;
/// Genesis configuration parameters for `TestClient`.
#[derive(Default)]
pub struct GenesisParameters {
support_changes_trie: bool,
}
impl substrate_test_client::GenesisInit for GenesisParameters {
fn genesis_storage(&self) -> Storage {
crate::genesis::config(self.support_changes_trie, None)
.build_storage()
.unwrap()
}
}
/// A `test-runtime` extensions to `TestClientBuilder`.
pub trait TestClientBuilderExt: Sized {
/// Create test client builder.
fn new() -> Self;
/// Build the test client.
fn build(self) -> Client;
}
impl TestClientBuilderExt
for substrate_test_client::TestClientBuilder<
node_primitives::Block,
client::LocalCallExecutor<Backend, Executor>,
Backend,
GenesisParameters,
>
{
fn | () -> Self {
Self::default()
}
fn build(self) -> Client {
self.build_with_native_executor(None).0
}
}
| new |
naics.py | from usaspending_api.common.exceptions import InvalidParameterException
from usaspending_api.search.filters.elasticsearch.filter import _Filter, _QueryType
from usaspending_api.search.filters.elasticsearch.HierarchicalFilter import HierarchicalFilter, Node
from elasticsearch_dsl import Q as ES_Q
class NaicsCodes(_Filter, HierarchicalFilter):
underscore_name = "naics_codes"
@classmethod
def generate_elasticsearch_query(cls, filter_values, query_type: _QueryType) -> ES_Q:
# legacy functionality permits sending a single list of naics codes, which is treated as the required list
if isinstance(filter_values, list):
require = [cls.naics_code_to_naics_code_path(str(code)) for code in filter_values]
exclude = []
elif isinstance(filter_values, dict):
require = [cls.naics_code_to_naics_code_path(str(code)) for code in filter_values.get("require") or []]
exclude = [cls.naics_code_to_naics_code_path(str(code)) for code in filter_values.get("exclude") or []]
else:
raise InvalidParameterException(f"naics_codes must be an array or object")
if [value for value in require if len(value[-1]) not in [2, 4, 6]] or [
value for value in exclude if len(value[-1]) not in [2, 4, 6]
]:
raise InvalidParameterException("naics code filtering only supported for codes with lengths of 2, 4, and 6")
require = [code for code in require]
exclude = [code for code in exclude] |
return ES_Q("query_string", query=cls._query_string(require, exclude), default_field="naics_code.keyword")
@staticmethod
def code_is_parent_of(code, other):
return len(str(other)) == len(str(code)) + 2 and other[: len(str(code))] == str(code)
@staticmethod
def node(code, positive, positive_naics, negative_naics):
return NaicsNode(code, positive, positive_naics, negative_naics)
@staticmethod
def naics_code_to_naics_code_path(code):
"""Special scotch-tape code to convert a single naics into a path to match the heirarchical filter API"""
retval = []
if len(code) > 2:
retval.append(code[:2])
if len(code) > 4:
retval.append(code[:4])
retval.append(code)
return retval
class NaicsNode(Node):
def _basic_search_unit(self):
retval = f"{self.code}"
if len(self.code) < 6:
retval += "*"
return retval
def clone(self, code, positive, positive_naics, negative_naics):
return NaicsNode(code, positive, positive_naics, negative_naics) | |
rank.go | package api
import (
"singo/service"
"github.com/gin-gonic/gin"
)
// DailyRank 每日排行
func DailyRank(c *gin.Context) {
servi | ce := service.DailyRankService{}
if err := c.ShouldBind(&service); err == nil {
res := service.Get()
c.JSON(200, res)
} else {
c.JSON(200, ErrorResponse(err))
}
}
|
|
metrics.py | """Tools to fetch and extract Facebook Insights metrics.
>>> graph_id = '1234567890'
>>> metrics = ['page_impressions', 'page_engaged_users']
>>> page_metrics = fetch_metrics(graph_id, metrics)
>>> page_impressions = page_metrics['page_impressions']
>>> page_impressions.values
{'day': [
{'end_time': '2016-11-15T08:00:00+0000', 'value': 0},
{'end_time': '2016-11-16T08:00:00+0000', 'value': 1},
{'end_time': '2016-11-17T08:00:00+0000', 'value': 2},
],
'week': [
{'end_time': '2016-11-15T08:00:00+0000', 'value': 10},
{'end_time': '2016-11-16T08:00:00+0000', 'value': 11},
{'end_time': '2016-11-17T08:00:00+0000', 'value': 12},
],
'days_28': [
{'end_time': '2016-11-15T08:00:00+0000', 'value': 100},
{'end_time': '2016-11-16T08:00:00+0000', 'value': 101},
{'end_time': '2016-11-17T08:00:00+0000', 'value': 102},
]
}
>>> page_impressions.get_value('day')
{'end_time': '2016-11-17T08:00:00+0000', 'value': 2}
>>> page_impressions.get_value('day', extract=True)
2
>>> page_impressions.get_value('week', index=0)
{'end_time': '2016-11-15T08:00:00+0000', 'value': 10}
>>> page_impressions.get_value('week', index=0, extract=True)
10
>>> get_all_values()
{'day': {'end_time': '2016-11-17T08:00:00+0000', 'value': 2},
'week': {'end_time': '2016-11-17T08:00:00+0000', 'value': 12},
'days_28': {'end_time': '2016-11-17T08:00:00+0000', 'value': 102}}
>>> get_all_values(extract=True)
{'day': 2, 'week': 12, 'days_28': 102}
>>> get_all_values(index=0, extract=True)
{'day': 0, 'week': 10, 'days_28': 100}
"""
import json
from django.conf import settings
from facebook import GraphAPI, GraphAPIError
from facebook_insights.exceptions import EmptyData, MetricsNotSpecified
__all__ = ['fetch_metrics', 'Metric']
access_token = settings.FACEBOOK_INSIGHTS_ACCESS_TOKEN
api_version = getattr(settings, 'FACEBOOK_INSIGHTS_API_VERSION', None)
graph_api = GraphAPI(access_token=access_token, version=api_version)
def fetch_metrics(graph_id, metrics, token=None):
"""Fetch Facebook Insights metrics for an object with a given id.
Parameters
----------
graph_id : str
The Facebook ID of a Graph API object.
metrics : iterable of str
The object's metrics to fetch (e.g. 'page_engaged_users').
token: str
A Facebook Graph API access token
Returns
-------
dict
A dictionary of mappings between metric names and instances
of class 'Metric'.
"""
if not metrics:
raise MetricsNotSpecified('Specify metrics you want to fetch.')
batch = []
for metric in metrics:
request_data = {
'method': 'GET',
'relative_url': '{}/insights/{}/'.format(graph_id, metric)
}
batch.append(request_data)
# ##TODON'T##
global graph_api
if token and (token != graph_api.access_token):
graph_api = GraphAPI(access_token=token, version=api_version)
batch_response = graph_api.put_object(
parent_object='/',
connection_name='',
batch=json.dumps(batch),
)
extracted_metrics = {}
for response in batch_response:
body = json.loads(response['body'])
# (nevimov/2016-11-09): Currently facebook-sdk is not
# able to catch errors in responses to batch requests, so
# we have to take care of those ourselves.
if 'error' in body:
raise GraphAPIError(body)
data = body['data']
if not data:
# We need a better middle ground for this but just
# raising exceptions doesn't work when some of a
# set can legitimately be empty
continue
# raise EmptyData
rearranged_values = {}
for datum in data:
name = datum['name']
period = datum['period']
rearranged_values[period] = datum['values']
extracted_metrics[name] = Metric(name, rearranged_values)
return extracted_metrics
class Metric(object):
"""A Facebook Insights metric.
Parameters
----------
name : str
The name of a metric (e.g. 'post_impressions' or 'page_engaged_users').
values : dict of list of dict
Values to associate with the metric. Must be a dictionary of mappings
between periods ('day', 'week', 'days_28', 'lifetime') and lists of
their respective values, for example:
# The format typical for post metrics
{'lifetime': [{'value': 1000}]}
# The format typical for page metrics
{'day': [
{'end_time': '2016-11-15T08:00:00+0000', 'value': 0},
{'end_time': '2016-11-16T08:00:00+0000', 'value': 1},
{'end_time': '2016-11-17T08:00:00+0000', 'value': 2},
],
'week': [
{'end_time': '2016-11-15T08:00:00+0000', 'value': 10},
{'end_time': '2016-11-16T08:00:00+0000', 'value': 11},
{'end_time': '2016-11-17T08:00:00+0000', 'value': 12},
],
'days_28': [
{'end_time': '2016-11-15T08:00:00+0000', 'value': 100},
{'end_time': '2016-11-16T08:00:00+0000', 'value': 101},
{'end_time': '2016-11-17T08:00:00+0000', 'value': 102},
]}
Attributes
----------
name : str
The name of the metric.
values : list of dict of list
The values associated with the metric.
"""
def __init__(self, name, values):
self.name = name
self.values = values
def get_value(self, period=None, index=-1, extract=False):
"""Get the metric's value for a given period.
Parameters
----------
period: {None, 'day', 'week', 'days_28', 'lifetime'}
A period for which you want to get the value.
Can be omitted for metrics available only for one period
(e.g. all the post_impressions_* metrics).
index : int
For many metrics (e.g. most of page metrics) Facebook sends
values for 3 consecutive days. By default this method returns
the last value. If you want to get a previous value, pass
`index` in range from 0 to 2 (or from -1 to -3).
extract : bool
By default the return value is a dictionary containing key
'value' (most of page metrics also have 'end_time').
If `extract` is True, then simply the value associated with
this key is returned.
Returns
-------
The return value can be either:
* dictionary containing one key, 'value' (most of post metrics)
* dictionary containing two keys, 'value' and 'end_time'
(most of page metrics)
Pass `extract=True`, if you don't care about the 'end_time' and
need only the value.
"""
values = self.values
if not period:
if len(values) == 1:
period = list(values.keys())[0]
else:
raise TypeError(
"Can't get a period. Argument 'period' can be omitted "
"only for metrics that have one period."
)
value = values[period][index]
if extract:
return value['value']
return value
def get_all_values(self, index=-1, extract=False):
"""Get values for all periods.
Parameters
----------
Arguments `index` and `extract` have the same meaning as for
get_value().
Returns
-------
dict | for period in self.values:
all_values[period] = self.get_value(period, index, extract)
return all_values | A mapping of periods to values.
"""
all_values = {} |
text_generation.py | # AUTOGENERATED! DO NOT EDIT! File to edit: nbs/09_text_generation.ipynb (unless otherwise specified).
__all__ = ['logger', 'TransformersTextGenerator', 'EasyTextGenerator']
# Cell
import logging
from typing import List, Dict, Union
from collections import defaultdict
import torch
from torch.utils.data import TensorDataset
from transformers import (
AutoTokenizer,
AutoModelForCausalLM,
PreTrainedTokenizer,
PreTrainedModel,
)
from fastprogress.fastprogress import progress_bar
from ..model import AdaptiveModel, DataLoader
from ..model_hub import HFModelResult
from fastai.torch_core import apply, default_device, to_device
# Cell
logger = logging.getLogger(__name__)
# Cell
class TransformersTextGenerator(AdaptiveModel):
"Adaptive model for Transformer's Language Models"
    def __init__(
        self,
        tokenizer: PreTrainedTokenizer, # A tokenizer object from Huggingface's transformers (TODO) and tokenizers
        model: PreTrainedModel # A transformers Language model
    ):
        """Wrap a Huggingface causal language model and its tokenizer."""
        # Load up model and tokenizer
        self.tokenizer = tokenizer
        super().__init__()
        # Sets internal model (AdaptiveModel bookkeeping via set_model)
        self.set_model(model)
        # Move the model to GPU when available, otherwise run on CPU.
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.model.to(self.device)
@classmethod
def load(
cls,
model_name_or_path: str # A key string of one of Transformer's pre-trained Language Model
) -> AdaptiveModel:
"Class method for loading and constructing this Model"
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, pad_token="<PAD>")
model = AutoModelForCausalLM.from_pretrained(model_name_or_path)
generator = cls(tokenizer, model)
return generator
    def predict(
        self,
        text: Union[List[str], str], # Sentences to run inference on
        mini_batch_size: int = 32, # Mini batch size
        num_tokens_to_produce: int = 50, # Number of tokens you want to generate
    ) -> Dict[str, List[str]]: # Mapping with key 'generated_text' -> list of generated sentences
        "Predict method for running inference using the pre-trained sequence classifier model. Keyword arguments for parameters of the method `Transformers.PreTrainedModel.generate()` can be used as well."
        with torch.no_grad():
            # Make all inputs lists
            if isinstance(text, str):
                text = [text]
            dataset = self._tokenize(text)
            dataloader = DataLoader(dataset, batch_size=mini_batch_size)
            results = []
            logger.info(f'Running text generator on {len(dataset)} text sequences')
            logger.info(f'Batch size = {mini_batch_size}')
            for batch in progress_bar(dataloader):
                self.model.eval()
                batch = apply(to_device, batch)
                # NOTE(review): _tokenize in this file only yields
                # (input_ids, attention_mask) pairs, so the 3-element branch
                # looks unreachable from here — confirm against other callers.
                if len(batch) == 3:
                    inputs = {
                        'input_ids': batch[0],
                        'attention_masks': batch[1],
                        'token_type_ids': batch[2],
                    }
                else:
                    inputs = {
                        'input_ids': batch[0],
                        'attention_masks': batch[1],
                    }
                # model.generate() does not have batch inference implemented yet
                generated_text = self._batch_generate(
                    inputs=inputs,
                    seq_len=batch[0].shape[1],
                    num_tokens_to_produce=num_tokens_to_produce,
                )
                results += generated_text
            return {"generated_text":results}
def _tokenize(self, text: Union[List[str], str]) -> TensorDataset:
""" Batch tokenizes text and produces a `TensorDataset` with text """
tokenized_text = self.tokenizer.batch_encode_plus(
text,
return_tensors="pt",
padding="longest",
)
dataset = TensorDataset(
tokenized_text["input_ids"],
tokenized_text["attention_mask"],
)
return dataset
    def _batch_generate(
        self, inputs: Dict, seq_len: int, num_tokens_to_produce: int
    ) -> List[str]:
        """Generates text data with varying text sizes.

        Greedy token-by-token decoding over a padded batch: repeatedly runs
        the model, appends the argmax token per sequence, and substitutes
        padding once a sequence has emitted EOS. Expects `inputs` to carry
        keys 'input_ids' and 'attention_masks' (note the plural spelling).
        """
        input_ids = inputs["input_ids"]
        attn_mask = inputs["attention_masks"]
        pad_token_id = self.tokenizer.pad_token_id
        eos_token_id = self.tokenizer.eos_token_id
        # 1 while a sequence has NOT yet produced EOS; flips to 0 permanently after.
        eos_not_in_sents = torch.ones(input_ids.shape[0]).long().to(self.device)
        # we need to get the token ids of the last non-padded value
        last_non_masked_idx = torch.sum(attn_mask, dim=1) - 1
        # Index tensor used with gather() to pick each sequence's logits at its
        # real (non-padded) last position on the first step.
        start_idx = (
            (last_non_masked_idx)
            .view(-1, 1)
            .repeat(1, self.tokenizer.vocab_size)
            .unsqueeze(1)
        )
        # get correct position ids
        position_ids = torch.tensor(
            [list(range(seq_len)) for i in range(input_ids.shape[0])]
        ).to(self.device)
        # Freeze position ids past each sequence's last real token so padding
        # does not advance positions.
        for i, position_ids_slice in enumerate(position_ids):
            position_ids_slice[last_non_masked_idx[i] :] = position_ids_slice[
                last_non_masked_idx[i]
            ]
        for step in range(num_tokens_to_produce):
            outputs = self.model(
                input_ids, attention_mask=attn_mask, position_ids=position_ids
            )
            # in the first decoding step, we want to use the 'real' last position for each sentence
            if step == 0:
                next_token_logits = outputs[0].gather(1, start_idx).squeeze(1)
            else:
                next_token_logits = outputs[0][:, -1, :]
            # Greedy decoding: pick the highest-probability token.
            next_tokens = torch.argmax(next_token_logits, dim=-1)
            # this updates which sentences have not seen an <EOS> token so far
            # if one <EOS> token was seen the sentence is finished
            eos_not_in_sents.mul_(next_tokens.ne(eos_token_id).long())
            # either append a padding token here if <EOS> has been seen or append next token
            tokens_to_add = next_tokens * (eos_not_in_sents) + pad_token_id * (
                1 - eos_not_in_sents
            )
            # Update input_ids, attn_mask and position_ids
            input_ids = torch.cat([input_ids, tokens_to_add.unsqueeze(-1)], dim=-1)
            attn_mask = torch.cat(
                [attn_mask, torch.ones((attn_mask.shape[0], 1)).long().to(self.device)],
                dim=1,
            )
            position_ids = torch.cat(
                [position_ids, (position_ids[:, -1] + 1).unsqueeze(-1)], dim=1
            )
        return [
            self.tokenizer.decode(output, skip_special_tokens=True)
            for output in input_ids
        ]
# Cell
class EasyTextGenerator:
    "Text Generation Module"
    def __init__(self):
        # Cache of loaded generators keyed by model name; defaultdict(bool)
        # makes "not yet loaded" probes return False.
        self.generators: Dict[str, AdaptiveModel] = defaultdict(bool)

    def generate(
        self,
        text: Union[List[str], str], # List of sentences to run inference on
        model_name_or_path: "Union[str, HFModelResult]" = "gpt2", # A model id or path to a pre-trained model repository or custom trained model directory
        mini_batch_size: int = 32, # Mini batch size
        num_tokens_to_produce: int = 50, # Number of tokens you want to generate
    ) -> Dict[str, List[str]]: # Mapping with key 'generated_text' -> generated sentences
        """Run text generation, loading and caching the model on first use.

        BUG FIX: the annotation `[str, HFModelResult]` was not a valid type
        (a plain list evaluated at def time); it is now a proper Union,
        written as a string so it needs no import at runtime. The return
        annotation also now matches the dict actually returned by predict().
        """
        # HFModelResult carries the model id in .name; plain strings pass through.
        name = getattr(model_name_or_path, 'name', model_name_or_path)
        if not self.generators[name]:
            self.generators[name] = TransformersTextGenerator.load(name)
        generator = self.generators[name]
        return generator.predict(
            text=text,
            mini_batch_size=mini_batch_size,
            num_tokens_to_produce=num_tokens_to_produce
        )
currency.py | """Codec for currency property inside an XRPL issued currency amount json."""
from __future__ import annotations # Requires Python 3.7+
from typing import Optional, Type
from typing_extensions import Final
from xrpl.constants import HEX_CURRENCY_REGEX, ISO_CURRENCY_REGEX
from xrpl.core.binarycodec.exceptions import XRPLBinaryCodecException
from xrpl.core.binarycodec.types.hash160 import Hash160
_CURRENCY_CODE_LENGTH: Final[int] = 20 # bytes
def _is_iso_code(value: str) -> bool:
    """Tests if value is a valid 3-char iso code."""
    # Name restored from the garbled header: this helper is called as
    # _is_iso_code elsewhere in this module.
    return bool(ISO_CURRENCY_REGEX.fullmatch(value))
def _iso_code_from_hex(value: bytes) -> Optional[str]:
    """Decode *value* as ASCII and return it when it is a valid ISO code.

    Returns None for non-standard codes. A literal "XRP" is rejected:
    XRP must be encoded as 20 zero bytes instead.
    """
    decoded = value.decode("ascii")
    if decoded == "XRP":
        raise XRPLBinaryCodecException(
            "Disallowed currency code: to indicate the currency "
            "XRP you must use 20 bytes of 0s"
        )
    return decoded if _is_iso_code(decoded) else None
def _is_hex(value: str) -> bool:
    """Tests if value is a valid 40-char hex string."""
    return HEX_CURRENCY_REGEX.fullmatch(value) is not None
def _iso_to_bytes(iso: str) -> bytes:
    """
    Convert an ISO code to a 160-bit (20 byte) encoded representation.

    See "Currency codes" subheading in
    `Amount Fields <https://xrpl.org/serialization.html#amount-fields>`_
    """
    if not _is_iso_code(iso):
        raise XRPLBinaryCodecException(f"Invalid ISO code: {iso}")
    if iso == "XRP":
        # This code (160 bit all zeroes) is used to indicate XRP in
        # rare cases where a field must specify a currency code for XRP.
        return bytes(_CURRENCY_CODE_LENGTH)
    # Standard layout per https://xrpl.org/currency-formats.html#standard-currency-codes:
    # 8-bit type code (0x00), 88 reserved bits, 24 bits of ASCII,
    # 16-bit version (0x00), 24 reserved bits.
    return b"".join([bytes(12), iso.encode("ASCII"), bytes(5)])
class Currency(Hash160):
    """
    Codec for serializing and deserializing currency codes in issued currency amounts.
    `Amount fields <https://xrpl.org/serialization.html#amount-fields>`_

    Attributes:
        buffer: The byte encoding of this currency.
        _iso: The three-character ISO currency code if standard format, else None.
    """

    LENGTH: Final[int] = 20
    _iso: Optional[str] = None

    def __init__(self: Currency, buffer: Optional[bytes] = None) -> None:
        """Construct a Currency."""
        if buffer is not None:
            super().__init__(buffer)
        else:
            super().__init__(bytes(self.LENGTH))
        # Bytes 12-14 hold the 3-character ASCII code in the standard layout.
        code_bytes = self.buffer[12:15]
        # Determine whether this currency code is in standard or nonstandard format:
        # https://xrpl.org/currency-formats.html#nonstandard-currency-codes
        if self.buffer[0] != 0:
            # non-standard currency
            self._iso = None
        elif self.buffer.hex() == "0" * 40:  # all 0s
            # the special case for literal XRP
            self._iso = "XRP"
        else:
            self._iso = _iso_code_from_hex(code_bytes)

    @classmethod
    def from_value(cls: Type[Currency], value: str) -> Currency:
        """
        Construct a Currency object from a string representation of a currency.

        Args:
            value: The string to construct a Currency object from.

        Returns:
            A Currency object constructed from value.

        Raises:
            XRPLBinaryCodecException: If the Currency representation is invalid.
        """
        if not isinstance(value, str):
            raise XRPLBinaryCodecException(
                "Invalid type to construct a Currency: expected str,"
                f" received {value.__class__.__name__}."
            )
        if _is_iso_code(value):
            # Use cls (not Currency) so subclasses round-trip correctly,
            # consistent with the hex branch below.
            return cls(_iso_to_bytes(value))
        if _is_hex(value):
            return cls(bytes.fromhex(value))
        # BUG FIX: the message was missing its f-prefix, so the literal text
        # "{value}" was emitted instead of the offending input.
        raise XRPLBinaryCodecException(f"Unsupported Currency representation: {value}")

    def to_json(self: Currency) -> str:
        """
        Returns the JSON representation of a currency.

        Returns:
            The JSON representation of a Currency.
        """
        if self._iso is not None:
            return self._iso
        return self.buffer.hex().upper()
| _is_iso_code |
main.rs | pub mod components;
pub mod pages;
pub mod routes;
use yew::prelude::*; | fn app() -> Html {
html! {
<div>
<routes.GlobalLayout></routes.GlobalLayout>
</div>
}
}
fn main() {
yew::start_app::<App>();
} |
#[function_component(App)] |
mod.rs | pub mod part1;
pub mod part2;
// Reassembled: the part1 expectation tuples had been displaced past the
// closing brace by a formatting accident.
#[macro_use]
#[cfg(test)]
mod tests {
    use crate::aoc_test_suite;

    aoc_test_suite!(
        super::part1::run,
        (part1_main, 798147, include_str!("input.txt")),
        (part1_sanity, 739785, include_str!("input.sanity.txt")),
    );
    aoc_test_suite!(
        super::part2::run,
        (part2_main, 809953813657517, include_str!("input.txt")),
        (part2_sanity, 444356092776315, include_str!("input.sanity.txt")),
    );
}
test_config_helpers.py | # ------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# ------------------------------------------------------------------------------------------
from typing import List, Optional
import pytest
import torch
from InnerEye.ML.common import ModelExecutionMode
from InnerEye.ML.config import SegmentationModelBase, equally_weighted_classes
from InnerEye.ML.models.architectures.base_model import BaseSegmentationModel
from Tests.ML.configs.DummyModel import DummyModel
def test_validate_inference_stride_size() -> None:
    """validate_inference_stride_size accepts strides that fit the output size
    (or no output size at all) and rejects invalid strides."""
    SegmentationModelBase.validate_inference_stride_size(inference_stride_size=(5, 3, 1), output_size=(5, 3, 1))
    SegmentationModelBase.validate_inference_stride_size(inference_stride_size=(5, 3, 1), output_size=None)
    # BUG FIX: both failing calls previously lived inside a single
    # pytest.raises block, so the second call was never executed once the
    # first one raised. Each expected failure now gets its own block.
    with pytest.raises(ValueError):
        # Stride exceeds the output size.
        SegmentationModelBase.validate_inference_stride_size(inference_stride_size=(5, 3, 1), output_size=(3, 3, 3))
    with pytest.raises(ValueError):
        # Presumably rejected because of the zero stride component — matches
        # the original author's placement of this call in the failing section.
        SegmentationModelBase.validate_inference_stride_size(inference_stride_size=(5, 3, 0), output_size=None)
def test_inference_stride_size_setter() -> None:
    """Tests setter function raises an error when stride size is larger than output patch size"""
    test_output_size = (7, 3, 5)
    test_stride_size = (3, 3, 3)
    test_fail_stride_size = (1, 1, 9)  # 9 > 5 in the last dimension, so this must be rejected
    model = IdentityModel()
    model_config = SegmentationModelBase(test_crop_size=test_output_size, should_validate=False)
    # An explicitly set stride must survive set_derived_model_properties unchanged.
    model_config.inference_stride_size = test_stride_size
    assert model_config.inference_stride_size == test_stride_size
    model_config.set_derived_model_properties(model)
    assert model_config.inference_stride_size == test_stride_size
    # When no stride is set, set_derived_model_properties defaults it to the output size.
    model_config.inference_stride_size = None
    model_config.set_derived_model_properties(model)
    assert model_config.inference_stride_size == test_output_size
    # Oversized stride is rejected at assignment time.
    with pytest.raises(ValueError):
        model_config.inference_stride_size = test_fail_stride_size
def test_crop_size() -> None:
    """When no test crop size is given at init time, it must default to the training crop size."""
    config = DummyModel()
    assert config.test_crop_size == config.crop_size
def test_set_model_config_attributes() -> None:
    """set_derived_model_properties must default the inference stride to the test crop size."""
    crop = (3, 5, 3)
    test_crop = (7, 7, 7)
    config = SegmentationModelBase(crop_size=crop,
                                   test_crop_size=test_crop,
                                   should_validate=False)
    config.set_derived_model_properties(IdentityModel())
    assert config.inference_stride_size == test_crop
# noinspection PyArgumentList
def test_get_output_size() -> None:
    """Tests config properties related to output tensor size"""
    train_output_size = (5, 5, 5)
    test_output_size = (7, 7, 7)
    model_config = SegmentationModelBase(crop_size=train_output_size,
                                         test_crop_size=test_output_size,
                                         should_validate=False)
    # Before a model is attached, no output size can be derived.
    assert model_config.get_output_size(execution_mode=ModelExecutionMode.TRAIN) is None
    assert model_config.get_output_size(execution_mode=ModelExecutionMode.TEST) is None
    model = IdentityModel()
    model_config.set_derived_model_properties(model)
    # IdentityModel preserves spatial size, so derived output sizes equal the crop sizes.
    assert model_config.get_output_size(execution_mode=ModelExecutionMode.TRAIN) == train_output_size
    assert model_config.get_output_size(execution_mode=ModelExecutionMode.TEST) == test_output_size
class IdentityModel(BaseSegmentationModel):
    """Minimal segmentation model used as a test fixture: forward is the identity.

    (Repaired: a stray '|' separator fused into the forward signature line
    has been removed.)
    """

    def __init__(self) -> None:
        super().__init__(input_channels=1, name='IdentityModel')

    def forward(self, x: torch.Tensor) -> torch.Tensor:  # type: ignore
        # returns the input as it is
        return x

    def get_all_child_layers(self) -> List[torch.nn.Module]:
        return list()
@pytest.mark.parametrize(["num_fg_classes", "background_weight", "expected"],
                         [
                             (1, 0.2, [0.2, 0.8]),
                             (1, None, [0.5] * 2),
                             (9, None, [0.1] * 10),
                             (3, None, [1 / 4] * 4),
                             (3, 0.4, [0.4, 0.2, 0.2, 0.2]),
                         ])
def test_equally_weighted_classes(num_fg_classes: int, background_weight: Optional[float],
                                  expected: List[float]) -> None:
    # Class names are irrelevant to the weighting; only the count matters.
    classes = [""] * num_fg_classes
    actual = equally_weighted_classes(classes, background_weight)
    assert isinstance(actual, list)
    # One extra entry accounts for the implicit background class.
    assert len(actual) == num_fg_classes + 1
    assert sum(actual) == pytest.approx(1.0)
    assert actual == pytest.approx(expected)
@pytest.mark.parametrize(["num_fg_classes", "background_weight"],
                         [
                             (0, 0.5),
                             (1, 1.0),
                             (1, -0.1)
                         ])
def test_equally_weighted_classes_fails(num_fg_classes: int, background_weight: Optional[float]) -> None:
    """equally_weighted_classes must reject empty class lists and
    out-of-range background weights. (Fixed the 'num_fg_clases' typo and
    removed a stray '|' separator left at the end of the function.)"""
    classes = [""] * num_fg_classes
    with pytest.raises(ValueError):
        equally_weighted_classes(classes, background_weight)
list-keys.py | import json
import lzma
from glob import glob
from pprint import pprint
import click
import smart_open
from tqdm import tqdm
@click.command()
@click.option("--path", help="Path. Wildcard '*' enabled")
@click.option("--tar", default=False, help="True for .xz files")
@click.option(
    "--flavor",
    default="sm",
    help="Examples reported if <flavor> is lg. Default " "<flavor> is sm.",
)
@click.option("--limit", default=None, type=int, help="Break after <limit> iterations")
def main(path, tar, flavor, limit):
    """Walk JSON-lines files matching PATH and report, per key, how many
    records carry it and the value type (plus an example when flavor='lg').

    (Repaired: the function's name had been garbled out of the def line; it
    is `main`, as the __main__ guard below calls it. Also fixed the
    'Wilcard'/'falvor' typos in the option help texts.)
    """
    assert flavor in ["sm", "lg"]
    key_val = {}
    records_seen = 0
    # Pick the opener once: .xz archives need lzma, everything else goes
    # through smart_open (local paths, s3://, etc.).
    opener = lzma.open if tar else smart_open.open
    for file in tqdm(glob(path)):
        with opener(file) as handle:
            for line in tqdm(handle):
                records_seen += 1
                for key, value in json.loads(line).items():
                    if key in key_val:
                        prev = key_val[key]
                        if flavor == "lg":
                            # Keep the first-seen type and example, bump the count.
                            key_val[key] = (prev[0] + 1, prev[1], prev[2])
                        else:
                            key_val[key] = (prev[0] + 1, prev[1])
                    else:
                        if flavor == "lg":
                            key_val[key] = (1, type(value), value)
                        else:
                            key_val[key] = (1, type(value))
                if limit and records_seen > limit:
                    break
    pprint(key_val)


if __name__ == "__main__":
    main()
admin-backend-api.service.ts | // Copyright 2021 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Backend api service for fetching the admin data;
*/
import { downgradeInjectable } from '@angular/upgrade/static';
import { HttpClient } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { AdminPageConstants } from
'pages/admin-page/admin-page.constants';
import {
TopicSummary,
TopicSummaryBackendDict
} from 'domain/topic/topic-summary.model';
import {
ComputationData,
ComputationDataBackendDict,
} from 'domain/admin/computation-data.model';
import {
Job,
JobDataBackendDict,
} from 'domain/admin/job.model';
import {
JobStatusSummary,
JobStatusSummaryBackendDict,
} from 'domain/admin/job-status-summary.model';
import {
PlatformParameter,
PlatformParameterBackendDict
} from 'domain/platform_feature/platform-parameter.model';
import { UrlInterpolationService } from
'domain/utilities/url-interpolation.service';
// Maps a role id to its human-readable description.
interface UserRolesBackendResponse {
  [role: string]: string;
}

// Node/edge description of the role hierarchy graph rendered on the admin page.
interface RoleGraphDataBackendResponse {
  nodes: {
    [role: string]: string;
  };
  links: {
    target: string;
    source: string;
  }[];
}

// Maps a config property name to its (arbitrarily-typed) value.
interface ConfigPropertiesBackendResponse {
  [property: string]: Object;
}

// Output lines of a finished one-off job.
interface JobOutputBackendResponse {
  output: string[];
}

// Usernames holding a given contribution right.
interface ViewContributionBackendResponse {
  usernames: string[];
}

// Contribution review/submission rights for a single user.
interface ContributionRightsBackendResponse {
  'can_review_questions': boolean,
  'can_review_translation_for_language_codes': string[],
  'can_review_voiceover_for_language_codes': string[],
  'can_submit_questions': boolean
}

// Memory cache statistics (values are pre-formatted strings from the backend).
interface MemoryCacheProfileBackendResponse {
  'peak_allocation': string,
  'total_allocation': string,
  'total_keys_stored': string
}

// Count of user-deletion requests still awaiting processing.
interface PendingDeletionRequestBackendResponse {
  'number_of_pending_deletion_models': string
}

// Whether any storage models still reference a (deleted) user.
interface ModelsRelatedToUserBackendResponse {
  'related_models_exist': boolean
}

// HTML body and subject of the signup confirmation email.
interface SignupEmailContent {
  'html_body': string,
  'subject': string
}

// Static content describing one classroom page.
interface ClassroomPageData {
  'name': string,
  'topic_ids': string[],
  'course_details': string,
  'url_fragment': string,
  'topic_list_intro': string
}

// Credentials used by the training VM to authenticate with Oppia.
interface VmidSharedSecretKeyMapping {
  'shared_secret_key': string,
  'vm_id': string
}

// Full set of site-wide config properties editable from the admin config tab.
interface ConfigPropertyValues {
  'always_ask_learners_for_answer_details': boolean,
  'classroom_page_is_accessible': boolean,
  'classroom_pages_data': ClassroomPageData,
  'classroom_promos_are_enabled': boolean,
  'contributor_can_suggest_questions': boolean,
  'contributor_dashboard_is_enabled': boolean,
  'contributor_dashboard_reviewer_emails_is_enabled': boolean,
  'email_footer': string,
  'email_sender_name': string,
  'enable_admin_notifications_for_reviewer_shortage': boolean,
  'featured_translation_languages': string[],
  'high_bounce_rate_task_minimum_exploration_starts': number,
  'high_bounce_rate_task_state_bounce_rate_creation_threshold': number,
  'high_bounce_rate_task_state_bounce_rate_obsoletion_threshold': number,
  'is_improvements_tab_enabled': boolean,
  'max_number_of_explorations_in_math_svgs_batch': number,
  'max_number_of_suggestions_per_reviewer': number,
  'max_number_of_svgs_in_math_svgs_batch': number,
  'notification_user_ids_for_failed_tasks': string[],
  'notify_admins_suggestions_waiting_too_long_is_enabled': boolean,
  'oppia_csrf_secret': string,
  'promo_bar_enabled': boolean,
  'promo_bar_message': string,
  'record_playthrough_probability': number,
  'signup_email_content': SignupEmailContent,
  'unpublish_exploration_email_html_body': string,
  'vmid_shared_secret_key_mapping': VmidSharedSecretKeyMapping,
  'whitelisted_exploration_ids_for_playthroughs': string[]
}

// Raw (snake_case) admin page payload as sent by the backend.
export interface AdminPageDataBackendDict {
  'demo_explorations': string[][];
  'demo_collections': string[][];
  'demo_exploration_ids': string[];
  'one_off_job_status_summaries': JobStatusSummaryBackendDict[];
  'human_readable_current_time': string;
  'audit_job_status_summaries': JobStatusSummaryBackendDict[];
  'updatable_roles': UserRolesBackendResponse;
  'role_graph_data': RoleGraphDataBackendResponse;
  'config_properties': ConfigPropertiesBackendResponse;
  'viewable_roles': UserRolesBackendResponse;
  'unfinished_job_data': JobDataBackendDict[];
  'recent_job_data': JobDataBackendDict[];
  'continuous_computations_data': ComputationDataBackendDict[];
  'topic_summaries': TopicSummaryBackendDict[];
  'feature_flags': PlatformParameterBackendDict[];
}

// camelCase, domain-object view of AdminPageDataBackendDict used by the frontend.
export interface AdminPageData {
  demoExplorations: string[][];
  demoCollections: string[][];
  demoExplorationIds: string[];
  oneOffJobStatusSummaries: JobStatusSummary[];
  humanReadableCurrentTime: string;
  auditJobStatusSummaries: JobStatusSummary[];
  updatableRoles: UserRolesBackendResponse;
  roleGraphData: RoleGraphDataBackendResponse;
  configProperties: ConfigPropertiesBackendResponse;
  viewableRoles: UserRolesBackendResponse;
  unfinishedJobData: Job[];
  recentJobData: Job[];
  continuousComputationsData: ComputationData[];
  topicSummaries: TopicSummary[];
  featureFlags: PlatformParameter[];
}
@Injectable({
providedIn: 'root'
})
export class | {
constructor(
private http: HttpClient,
private urlInterpolationService: UrlInterpolationService) {}
async getDataAsync(): Promise<AdminPageData> {
return new Promise((resolve, reject) => {
this.http.get<AdminPageDataBackendDict>(
AdminPageConstants.ADMIN_HANDLER_URL).toPromise().then(response => {
resolve({
demoExplorations: response.demo_explorations,
demoCollections: response.demo_collections,
demoExplorationIds: response.demo_exploration_ids,
oneOffJobStatusSummaries: response.one_off_job_status_summaries.map(
JobStatusSummary.createFromBackendDict),
humanReadableCurrentTime: response.human_readable_current_time,
auditJobStatusSummaries: response.audit_job_status_summaries.map(
JobStatusSummary.createFromBackendDict),
updatableRoles: response.updatable_roles,
roleGraphData: response.role_graph_data,
configProperties: response.config_properties,
viewableRoles: response.viewable_roles,
unfinishedJobData: response.unfinished_job_data.map(
Job.createFromBackendDict),
recentJobData: response.recent_job_data.map(
Job.createFromBackendDict),
continuousComputationsData: response.continuous_computations_data.map(
ComputationData.createFromBackendDict),
topicSummaries: response.topic_summaries.map(
TopicSummary.createFromBackendDict),
featureFlags: response.feature_flags.map(
dict => PlatformParameter.createFromBackendDict(
dict)
)
});
}, errorResponse => {
reject(errorResponse.error.error);
});
});
}
private _postRequestAsync(
handlerUrl: string, payload?: Object, action?: string): Promise<void> {
return new Promise((resolve, reject) => {
this.http.post<void>(
handlerUrl, { action, ...payload }).toPromise()
.then(response => {
resolve(response);
}, errorResonse => {
reject(errorResonse.error.error);
});
});
}
// Admin Jobs Tab Services.
async startNewJobAsync(jobType: string): Promise<void> {
let action = 'start_new_job';
let payload = {
job_type: jobType
};
return this._postRequestAsync (
AdminPageConstants.ADMIN_HANDLER_URL, payload, action);
}
async cancelJobAsync(jobId: string, jobType: string): Promise<void> {
let action = 'cancel_job';
let payload = {
job_id: jobId,
job_type: jobType
};
return this._postRequestAsync (
AdminPageConstants.ADMIN_HANDLER_URL, payload, action);
}
async startComputationAsync(computationType: string): Promise<void> {
let action = 'start_computation';
let payload = {
computation_type: computationType
};
return this._postRequestAsync (
AdminPageConstants.ADMIN_HANDLER_URL, payload, action);
}
async stopComputationAsync(computationType: string): Promise<void> {
let action = 'stop_computation';
let payload = {
computation_type: computationType
};
return this._postRequestAsync (
AdminPageConstants.ADMIN_HANDLER_URL, payload, action);
}
async fetchJobOutputAsync(jobId: string): Promise<string[]> {
let adminJobOutputUrl = this.urlInterpolationService.interpolateUrl(
AdminPageConstants.ADMIN_JOB_OUTPUT_URL_TEMPLATE, {
jobId: jobId
});
return new Promise((resolve, reject) => {
this.http.get<JobOutputBackendResponse>(
adminJobOutputUrl).toPromise().then(response => {
resolve(Array.isArray(response.output) ? response.output.sort() : []);
}, errorResponse => {
reject(errorResponse.error.error);
});
});
}
// Admin Roles Tab Services.
async viewUsersRoleAsync(
filterCriterion: string, role: string, username: string
): Promise<UserRolesBackendResponse> {
return new Promise((resolve, reject) => {
this.http.get<UserRolesBackendResponse>(
AdminPageConstants.ADMIN_ROLE_HANDLER_URL, {
params: {
filter_criterion: filterCriterion,
role: role,
username: username
}
}
).toPromise().then(response => {
resolve(response);
}, errorResponse => {
reject(errorResponse.error.error);
});
});
}
async updateUserRoleAsync(
newRole: string, username: string, topicId: string
): Promise<void> {
return new Promise((resolve, reject) => {
this.http.post<void>(
AdminPageConstants.ADMIN_ROLE_HANDLER_URL, {
role: newRole,
username: username,
topic_id: topicId
}
).toPromise().then(response => {
resolve(response);
}, errorResponse => {
reject(errorResponse.error.error);
});
});
}
async addContributionReviewerAsync(
category: string, username: string, languageCode: string
): Promise<void> {
return new Promise((resolve, reject) => {
this.http.post<void>(
AdminPageConstants.ADMIN_ADD_CONTRIBUTION_RIGHTS_HANDLER, {
category: category,
username: username,
language_code: languageCode
}
).toPromise().then(response => {
resolve(response);
}, errorResponse => {
reject(errorResponse.error.error);
});
});
}
async viewContributionReviewersAsync(
category: string, languageCode: string
): Promise<ViewContributionBackendResponse> {
return new Promise((resolve, reject) => {
this.http.get<ViewContributionBackendResponse>(
AdminPageConstants.ADMIN_GET_CONTRIBUTOR_USERS_HANDLER, {
params: {
category: category,
language_code: languageCode
}
}
).toPromise().then(response => {
resolve(response);
}, errorResponse => {
reject(errorResponse.error.error);
});
});
}
async contributionReviewerRightsAsync(
username: string): Promise<ContributionRightsBackendResponse> {
return new Promise((resolve, reject) => {
this.http.get<ContributionRightsBackendResponse>(
AdminPageConstants.ADMIN_CONTRIBUTION_RIGHTS_HANDLER, {
params: {
username: username
}
}
).toPromise().then(response => {
resolve(response);
}, errorResponse => {
reject(errorResponse.error.error);
});
});
}
async removeContributionReviewerAsync(
username: string, method: string,
category: string, languageCode: string
): Promise<void> {
return new Promise((resolve, reject) => {
this.http.put<void>(
AdminPageConstants.ADMIN_REMOVE_CONTRIBUTION_RIGHTS_HANDLER, {
username: username,
removal_type: method,
category: category,
language_code: languageCode
}
).toPromise().then(response => {
resolve(response);
}, errorResponse => {
reject(errorResponse.error.error);
});
});
}
// Admin Misc Tab Services.
async flushMemoryCacheAsync(): Promise<void> {
return this._postRequestAsync (
AdminPageConstants.ADMIN_MEMORY_CACHE_HANDLER_URL);
}
async clearSearchIndexAsync(): Promise<void> {
return this._postRequestAsync (
AdminPageConstants.ADMIN_HANDLER_URL);
}
async populateExplorationStatsRegenerationCsvResultAsync(
expIdToRegenerate: string): Promise<void> {
let action = 'regenerate_missing_exploration_stats';
let payload = {
exp_id: expIdToRegenerate
};
return this._postRequestAsync (
AdminPageConstants.ADMIN_HANDLER_URL, payload, action);
}
async regenerateOpportunitiesRelatedToTopicAsync(
topicId: string): Promise<void> {
let action = 'regenerate_topic_related_opportunities';
let payload = {
topic_id: topicId
};
return this._postRequestAsync (
AdminPageConstants.ADMIN_HANDLER_URL, payload, action);
}
async uploadTopicSimilaritiesAsync(data: string): Promise<void> {
let action = 'upload_topic_similarities';
let payload = {
data: data
};
return this._postRequestAsync (
AdminPageConstants.ADMIN_HANDLER_URL, payload, action);
}
async sendDummyMailToAdminAsync(): Promise<void> {
return this._postRequestAsync (
AdminPageConstants.ADMIN_SEND_DUMMY_MAIL_HANDLER_URL);
}
async getMemoryCacheProfileAsync(
): Promise<MemoryCacheProfileBackendResponse> {
return new Promise((resolve, reject) => {
this.http.get<MemoryCacheProfileBackendResponse>(
AdminPageConstants.ADMIN_MEMORY_CACHE_HANDLER_URL, {}
).toPromise().then(response => {
resolve(response);
}, errorResponse => {
reject(errorResponse.error.error);
});
});
}
async updateUserNameAsync(
oldUsername: string, newUsername: string): Promise<void> {
return new Promise((resolve, reject) => {
this.http.put<void>(
AdminPageConstants.ADMIN_UPDATE_USERNAME_HANDLER_URL, {
old_username: oldUsername,
new_username: newUsername
}
).toPromise().then(response => {
resolve(response);
}, errorResponse => {
reject(errorResponse.error.error);
});
});
}
async getNumberOfPendingDeletionRequestAsync(
): Promise<PendingDeletionRequestBackendResponse> {
return new Promise((resolve, reject) => {
this.http.get<PendingDeletionRequestBackendResponse>(
AdminPageConstants.ADMIN_NUMBER_OF_DELETION_REQUEST_HANDLER_URL, {}
).toPromise().then(response => {
resolve(response);
}, errorResponse => {
reject(errorResponse.error.error);
});
});
}
async grantSuperAdminPrivilegesAsync(username: string): Promise<void> {
return this.http.get<void>(
AdminPageConstants.ADMIN_GRANT_SUPER_ADMIN_PRIVILEGES_HANDLER_URL, {
params: {username: username},
}
).toPromise();
}
async revokeSuperAdminPrivilegesAsync(username: string): Promise<void> {
return this.http.get<void>(
AdminPageConstants.ADMIN_REVOKE_SUPER_ADMIN_PRIVILEGES_HANDLER_URL, {
params: {username: username},
}
).toPromise();
}
async getModelsRelatedToUserAsync(userId: string): Promise<boolean> {
return new Promise((resolve, reject) => {
this.http.get<ModelsRelatedToUserBackendResponse>(
AdminPageConstants.ADMIN_VERIFY_USER_MODELS_DELETED_HANDLER_URL, {
params: {
user_id: userId
}
}
).toPromise().then(response => {
resolve(response.related_models_exist);
}, errorResponse => {
reject(errorResponse.error.error);
});
});
}
async deleteUserAsync(userId: string, username: string): Promise<void> {
return new Promise((resolve, reject) => {
// eslint-disable-next-line dot-notation
this.http.delete<void>(
AdminPageConstants.ADMIN_DELETE_USER_HANDLER_URL, {
params: {
user_id: userId,
username: username
}
}
).toPromise().then(response => {
resolve(response);
}, errorResponse => {
reject(errorResponse.error.error);
});
});
}
// Admin Config Tab Services.
async revertConfigPropertyAsync(configPropertyId: string): Promise<void> {
let action = 'revert_config_property';
let payload = {
config_property_id: configPropertyId
};
return this._postRequestAsync (
AdminPageConstants.ADMIN_HANDLER_URL, payload, action);
}
// Persists the supplied config property values via the generic admin
// handler's 'save_config_properties' action.
async saveConfigPropertiesAsync(
    newConfigPropertyValues: ConfigPropertyValues): Promise<void> {
  return this._postRequestAsync(
    AdminPageConstants.ADMIN_HANDLER_URL,
    {new_config_property_values: newConfigPropertyValues},
    'save_config_properties');
}
}
// Expose the Angular service to legacy AngularJS code via ngUpgrade's
// downgradeInjectable, so both frameworks share one instance during the
// migration.
angular.module('oppia').factory(
  'AdminBackendApiService',
  downgradeInjectable(AdminBackendApiService));
| AdminBackendApiService |
chunk_executor.rs | // Copyright (c) Aptos
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use crate::{
components::{
apply_chunk_output::{ensure_no_discard, ensure_no_retry},
chunk_commit_queue::ChunkCommitQueue,
chunk_output::ChunkOutput,
},
logging::{LogEntry, LogSchema},
metrics::{
DIEM_EXECUTOR_APPLY_CHUNK_SECONDS, DIEM_EXECUTOR_COMMIT_CHUNK_SECONDS,
DIEM_EXECUTOR_EXECUTE_CHUNK_SECONDS, DIEM_EXECUTOR_VM_EXECUTE_CHUNK_SECONDS,
},
};
use anyhow::Result;
use aptos_infallible::Mutex;
use aptos_logger::prelude::*;
use aptos_state_view::StateViewId;
use aptos_types::{
contract_event::ContractEvent,
ledger_info::LedgerInfoWithSignatures,
transaction::{
Transaction, TransactionInfo, TransactionListWithProof, TransactionOutputListWithProof,
},
};
use aptos_vm::VMExecutor;
use executor_types::{ChunkExecutorTrait, ExecutedChunk, ExecutedTrees, TransactionReplayer};
use fail::fail_point;
use std::{marker::PhantomData, sync::Arc};
use storage_interface::{state_view::VerifiedStateView, DbReaderWriter};
/// Executes/applies chunks of transactions on top of the ledger for state
/// sync, queueing results until they are committed to storage.
pub struct ChunkExecutor<V> {
    /// Handle to ledger storage (reader and writer).
    db: DbReaderWriter,
    /// Executed-but-uncommitted chunks, committed strictly in order.
    commit_queue: Mutex<ChunkCommitQueue>,
    /// Ties this executor to a concrete `VMExecutor` implementation.
    _phantom: PhantomData<V>,
}
impl<V> ChunkExecutor<V> {
    /// Creates an executor whose commit queue is initialized from the DB's
    /// currently persisted state.
    pub fn new(db: DbReaderWriter) -> Result<Self> {
        let commit_queue = Mutex::new(ChunkCommitQueue::new_from_db(&db.reader)?);
        Ok(Self {
            db,
            commit_queue,
            _phantom: PhantomData,
        })
    }

    /// Creates an executor starting from an explicit in-memory view instead
    /// of reading the persisted state back from the DB.
    pub fn new_with_view(db: DbReaderWriter, persisted_view: ExecutedTrees) -> Self {
        let commit_queue = Mutex::new(ChunkCommitQueue::new(persisted_view));
        Self {
            db,
            commit_queue,
            _phantom: PhantomData,
        }
    }

    /// Discards all queued (uncommitted) chunks and re-syncs the queue with
    /// the DB's persisted state.
    pub fn reset(&self) -> Result<()> {
        *self.commit_queue.lock() = ChunkCommitQueue::new_from_db(&self.db.reader)?;
        Ok(())
    }

    /// Builds a state view for executing the next chunk. The first version is
    /// the number of leaves already in the accumulator, i.e. the next version
    /// to be executed.
    fn state_view(
        &self,
        latest_view: &ExecutedTrees,
        persisted_view: &ExecutedTrees,
    ) -> VerifiedStateView {
        latest_view.state_view(
            persisted_view,
            StateViewId::ChunkExecution {
                first_version: latest_view.txn_accumulator().num_leaves(),
            },
            Arc::clone(&self.db.reader),
        )
    }

    /// Applies VM output of one chunk on top of `latest_view`. Fails if any
    /// transaction was discarded or needs retry (state sync chunks must apply
    /// cleanly), selects the chunk-ending ledger info if applicable, and
    /// verifies the result against the proof's `transaction_infos`.
    fn apply_chunk_output_for_state_sync(
        verified_target_li: &LedgerInfoWithSignatures,
        epoch_change_li: Option<&LedgerInfoWithSignatures>,
        latest_view: &ExecutedTrees,
        chunk_output: ChunkOutput,
        transaction_infos: &[TransactionInfo],
    ) -> Result<ExecutedChunk> {
        let (mut executed_chunk, to_discard, to_retry) =
            chunk_output.apply_to_ledger(latest_view.txn_accumulator())?;
        ensure_no_discard(to_discard)?;
        ensure_no_retry(to_retry)?;
        executed_chunk.ledger_info = executed_chunk
            .maybe_select_chunk_ending_ledger_info(verified_target_li, epoch_change_li)?;
        executed_chunk.ensure_transaction_infos_match(transaction_infos)?;
        Ok(executed_chunk)
    }

    /// Persists the next queued chunk (its transactions, plus the ending
    /// ledger info when present) and dequeues it. Skips the write entirely
    /// when there is nothing to save.
    fn commit_chunk_impl(&self) -> Result<Arc<ExecutedChunk>> {
        let (base_view, to_commit) = self.commit_queue.lock().next_chunk_to_commit()?;
        let txns_to_commit = to_commit.transactions_to_commit()?;
        let ledger_info = to_commit.ledger_info.as_ref();
        if ledger_info.is_some() || !txns_to_commit.is_empty() {
            // Test-only fault injection point.
            fail_point!("executor::commit_chunk", |_| {
                Err(anyhow::anyhow!("Injected error in commit_chunk"))
            });
            self.db.writer.save_transactions(
                &txns_to_commit,
                base_view.txn_accumulator().num_leaves(),
                ledger_info,
            )?;
        }
        self.commit_queue.lock().dequeue()?;
        Ok(to_commit)
    }
}
impl<V: VMExecutor> ChunkExecutorTrait for ChunkExecutor<V> {
    /// Verifies a chunk of transactions against `verified_target_li`, skips
    /// any prefix already in the ledger, executes the rest through the VM and
    /// enqueues the result for later commit.
    fn execute_chunk(
        &self,
        txn_list_with_proof: TransactionListWithProof,
        verified_target_li: &LedgerInfoWithSignatures,
        epoch_change_li: Option<&LedgerInfoWithSignatures>,
    ) -> Result<()> {
        let _timer = DIEM_EXECUTOR_EXECUTE_CHUNK_SECONDS.start_timer();

        let num_txns = txn_list_with_proof.transactions.len();
        let first_version_in_request = txn_list_with_proof.first_transaction_version;
        let (persisted_view, latest_view) = self.commit_queue.lock().persisted_and_latest_view();

        // Verify input transaction list.
        txn_list_with_proof.verify(verified_target_li.ledger_info(), first_version_in_request)?;

        // Skip transactions already in ledger.
        let txns_to_skip = txn_list_with_proof.proof.verify_extends_ledger(
            latest_view.txn_accumulator().num_leaves(),
            latest_view.txn_accumulator().root_hash(),
            first_version_in_request,
        )?;
        let mut transactions = txn_list_with_proof.transactions;
        transactions.drain(..txns_to_skip as usize);
        if txns_to_skip == num_txns {
            info!(
                "Skipping all transactions in the given chunk! Num transactions: {:?}",
                num_txns
            );
        }

        // Execute transactions.
        let state_view = self.state_view(&latest_view, &persisted_view);
        let chunk_output = {
            let _timer = DIEM_EXECUTOR_VM_EXECUTE_CHUNK_SECONDS.start_timer();
            ChunkOutput::by_transaction_execution::<V>(transactions, state_view)?
        };
        // Only the non-skipped suffix of the proof's infos is checked.
        let executed_chunk = Self::apply_chunk_output_for_state_sync(
            verified_target_li,
            epoch_change_li,
            &latest_view,
            chunk_output,
            &txn_list_with_proof.proof.transaction_infos[txns_to_skip..],
        )?;

        // Add result to commit queue.
        self.commit_queue.lock().enqueue(executed_chunk);

        info!(
            LogSchema::new(LogEntry::ChunkExecutor)
                .local_synced_version(latest_view.version().unwrap_or(0))
                .first_version_in_request(first_version_in_request)
                .num_txns_in_request(num_txns),
            "sync_request_executed",
        );
        Ok(())
    }

    /// Same flow as `execute_chunk`, but applies pre-computed transaction
    /// outputs instead of running the VM.
    fn apply_chunk(
        &self,
        txn_output_list_with_proof: TransactionOutputListWithProof,
        verified_target_li: &LedgerInfoWithSignatures,
        epoch_change_li: Option<&LedgerInfoWithSignatures>,
    ) -> Result<()> {
        let _timer = DIEM_EXECUTOR_APPLY_CHUNK_SECONDS.start_timer();

        let num_txns = txn_output_list_with_proof.transactions_and_outputs.len();
        let first_version_in_request = txn_output_list_with_proof.first_transaction_output_version;
        let (persisted_view, latest_view) = self.commit_queue.lock().persisted_and_latest_view();

        // Verify input transaction list.
        txn_output_list_with_proof
            .verify(verified_target_li.ledger_info(), first_version_in_request)?;

        // Skip transactions already in ledger.
        let txns_to_skip = txn_output_list_with_proof.proof.verify_extends_ledger(
            latest_view.txn_accumulator().num_leaves(),
            latest_view.txn_accumulator().root_hash(),
            first_version_in_request,
        )?;
        let mut txns_and_outputs = txn_output_list_with_proof.transactions_and_outputs;
        txns_and_outputs.drain(..txns_to_skip as usize);

        // Apply transaction outputs.
        let state_view = self.state_view(&latest_view, &persisted_view);
        let chunk_output = ChunkOutput::by_transaction_output(txns_and_outputs, state_view)?;
        let executed_chunk = Self::apply_chunk_output_for_state_sync(
            verified_target_li,
            epoch_change_li,
            &latest_view,
            chunk_output,
            &txn_output_list_with_proof.proof.transaction_infos[txns_to_skip..],
        )?;

        // Add result to commit queue.
        self.commit_queue.lock().enqueue(executed_chunk);

        info!(
            LogSchema::new(LogEntry::ChunkExecutor)
                .local_synced_version(latest_view.version().unwrap_or(0))
                .first_version_in_request(first_version_in_request)
                .num_txns_in_request(num_txns),
            "sync_request_applied",
        );
        Ok(())
    }

    /// Commits the next queued chunk and returns its committed events and
    /// transactions.
    fn commit_chunk(&self) -> Result<(Vec<ContractEvent>, Vec<Transaction>)> {
        let _timer = DIEM_EXECUTOR_COMMIT_CHUNK_SECONDS.start_timer();
        let executed_chunk = self.commit_chunk_impl()?;
        Ok((
            executed_chunk.events_to_commit(),
            executed_chunk.transactions(),
        ))
    }

    /// Convenience wrapper: resets the queue, executes one chunk and commits
    /// it in a single call.
    fn execute_and_commit_chunk(
        &self,
        txn_list_with_proof: TransactionListWithProof,
        verified_target_li: &LedgerInfoWithSignatures,
        epoch_change_li: Option<&LedgerInfoWithSignatures>,
    ) -> Result<(Vec<ContractEvent>, Vec<Transaction>)> {
        // Re-sync with DB, make sure the queue is empty.
        self.reset()?;
        self.execute_chunk(txn_list_with_proof, verified_target_li, epoch_change_li)?;
        self.commit_chunk()
    }

    /// Convenience wrapper: resets the queue, applies one chunk of outputs and
    /// commits it in a single call.
    fn apply_and_commit_chunk(
        &self,
        txn_output_list_with_proof: TransactionOutputListWithProof,
        verified_target_li: &LedgerInfoWithSignatures,
        epoch_change_li: Option<&LedgerInfoWithSignatures>,
    ) -> Result<(Vec<ContractEvent>, Vec<Transaction>)> {
        // Re-sync with DB, make sure the queue is empty.
        self.reset()?;
        self.apply_chunk(
            txn_output_list_with_proof,
            verified_target_li,
            epoch_change_li,
        )?;
        self.commit_chunk()
    }
}
// NOTE(review): empty impl block — appears vestigial; candidate for removal.
impl<V: VMExecutor> ChunkExecutor<V> {}
impl<V: VMExecutor> TransactionReplayer for ChunkExecutor<V> {
fn replay(
&self,
transactions: Vec<Transaction>,
mut transaction_infos: Vec<TransactionInfo>,
) -> Result<()> {
let (persisted_view, mut latest_view) =
self.commit_queue.lock().persisted_and_latest_view();
let mut executed_chunk = ExecutedChunk::default();
let mut to_run = Some(transactions);
while !to_run.as_ref().unwrap().is_empty() {
// Execute transactions.
let state_view = self.state_view(&latest_view, &persisted_view);
let txns = to_run.take().unwrap();
let (executed, to_discard, to_retry) =
ChunkOutput::by_transaction_execution::<V>(txns, state_view)?
.apply_to_ledger(latest_view.txn_accumulator())?;
// Accumulate result and deal with retry | executed.ensure_transaction_infos_match(&transaction_infos[..n])?;
transaction_infos.drain(..n);
to_run = Some(to_retry);
executed_chunk = executed_chunk.combine(executed)?;
latest_view = executed_chunk.result_view.clone();
}
// Add result to commit queue.
self.commit_queue.lock().enqueue(executed_chunk);
Ok(())
}
fn commit(&self) -> Result<Arc<ExecutedChunk>> {
self.commit_chunk_impl()
}
} | ensure_no_discard(to_discard)?;
let n = executed.to_commit.len(); |
mod.rs | //! Prediction agents module. | pub trait ValuePredictor<S> {
/// Compute the estimated value of V(s).
fn predict_v(&self, s: &S) -> f64;
}
impl<S, T: ValuePredictor<S>> ValuePredictor<S> for Shared<T> {
fn predict_v(&self, s: &S) -> f64 { self.borrow().predict_v(s) }
}
pub trait ActionValuePredictor<S, A> {
/// Compute the estimated value of Q(s, a).
fn predict_q(&self, s: &S, a: &A) -> f64;
}
impl<S, A, T: ActionValuePredictor<S, A>> ActionValuePredictor<S, A> for Shared<T> {
fn predict_q(&self, s: &S, a: &A) -> f64 { self.borrow().predict_q(s, a) }
}
pub mod gtd;
pub mod lstd;
pub mod mc;
pub mod td;
// TODO:
// Implement the algorithms discussed in https://arxiv.org/pdf/1304.3999.pdf | use crate::Shared;
|
vstream_manager.go | /*
Copyright 2019 The Vitess Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package vtgate
import (
"context"
"fmt"
"io"
"sync"
"time"
"vitess.io/vitess/go/vt/discovery"
querypb "vitess.io/vitess/go/vt/proto/query"
"vitess.io/vitess/go/vt/topo"
vtgatepb "vitess.io/vitess/go/vt/proto/vtgate"
"google.golang.org/protobuf/proto"
"vitess.io/vitess/go/vt/log"
binlogdatapb "vitess.io/vitess/go/vt/proto/binlogdata"
topodatapb "vitess.io/vitess/go/vt/proto/topodata"
vtrpcpb "vitess.io/vitess/go/vt/proto/vtrpc"
"vitess.io/vitess/go/vt/srvtopo"
"vitess.io/vitess/go/vt/vterrors"
)
// vstreamManager manages vstream requests.
type vstreamManager struct {
	// resolver maps keyspaces/shards to serving tablets.
	resolver *srvtopo.Resolver
	// toposerv provides access to the topology service.
	toposerv srvtopo.Server
	// cell is the local cell this vtgate serves from.
	cell string
}
// vstream contains the metadata for one VStream request.
type vstream struct {
	// mu protects parts of vgtid, the semantics of a send, and journaler.
	// Once streaming begins, the Gtid within each ShardGtid will be updated on each event.
	// Also, the list of ShardGtids can change on a journaling event.
	// All other parts of vgtid can be read without a lock.
	// The lock is also held to ensure that all grouped events are sent together.
	// This can happen if vstreamer breaks up large transactions into smaller chunks.
	mu        sync.Mutex
	vgtid     *binlogdatapb.VGtid
	// send is the client callback that receives grouped events.
	send      func(events []*binlogdatapb.VEvent) error
	// journaler tracks in-progress journal events by journal id.
	journaler map[int64]*journalEvent

	// err can only be set once.
	// errMu protects err by ensuring its value is read or written by only one goroutine at a time.
	once  sync.Once
	err   error
	errMu sync.Mutex

	// Other input parameters
	tabletType topodatapb.TabletType
	filter     *binlogdatapb.Filter
	resolver   *srvtopo.Resolver

	// cancel stops every per-shard stream; wg waits for them to finish.
	cancel context.CancelFunc
	wg     sync.WaitGroup

	// this flag is set by the client, default false
	// if true skew detection is enabled and we align the streams so that they receive events from
	// about the same time as each other. Note that there is no exact ordering of events across shards
	minimizeSkew bool

	// this flag is set by the client, default false
	// if true when a reshard is detected the client will send the corresponding journal event to the client
	// default behavior is to automatically migrate the resharded streams from the old to the new shards
	stopOnReshard bool

	// mutex used to synchronize access to skew detection parameters
	skewMu sync.Mutex
	// channel is created whenever there is a skew detected. closing it implies the current skew has been fixed
	skewCh chan bool
	// if a skew lasts for this long, we timeout the vstream call. currently hardcoded
	skewTimeoutSeconds int64
	// the slow streamId which is causing the skew. streamId is of the form <keyspace>.<shard>
	laggard string
	// transaction timestamp of the slowest stream
	lowestTS int64
	// the timestamp of the most recent event, keyed by streamId. streamId is of the form <keyspace>.<shard>
	timestamps map[string]int64

	vsm *vstreamManager

	// rss holds the resolved shards for the request; eventCh funnels events
	// from all shard streams to the single sender goroutine.
	rss     []*srvtopo.ResolvedShard
	eventCh chan []*binlogdatapb.VEvent

	// heartbeatInterval (seconds); 0 disables synthetic heartbeats.
	heartbeatInterval uint32
	ts                *topo.Server
}
// journalEvent tracks the convergence of all shard streams that participate
// in one journal (reshard) event; done is closed once all have joined.
type journalEvent struct {
	journal      *binlogdatapb.Journal
	// participants maps each matching ShardGtid to whether its stream has
	// reached the journal event yet.
	participants map[*binlogdatapb.ShardGtid]bool
	done         chan struct{}
}
func newVStreamManager(resolver *srvtopo.Resolver, serv srvtopo.Server, cell string) *vstreamManager |
// VStream streams binlog events matching vgtid/filter to the send callback
// until the context is canceled or an unrecoverable error occurs. It fills in
// defaults for the inputs, then delegates to a per-request vstream instance.
func (vsm *vstreamManager) VStream(ctx context.Context, tabletType topodatapb.TabletType, vgtid *binlogdatapb.VGtid,
	filter *binlogdatapb.Filter, flags *vtgatepb.VStreamFlags, send func(events []*binlogdatapb.VEvent) error) error {
	vgtid, filter, flags, err := vsm.resolveParams(ctx, tabletType, vgtid, filter, flags)
	if err != nil {
		return err
	}
	ts, err := vsm.toposerv.GetTopoServer()
	if err != nil {
		return err
	}
	if ts == nil {
		log.Errorf("unable to get topo server in VStream()")
		return fmt.Errorf("unable to get topo server")
	}
	vs := &vstream{
		vgtid:              vgtid,
		tabletType:         tabletType,
		filter:             filter,
		send:               send,
		resolver:           vsm.resolver,
		journaler:          make(map[int64]*journalEvent),
		minimizeSkew:       flags.GetMinimizeSkew(),
		stopOnReshard:      flags.GetStopOnReshard(),
		// Skew timeout is hardcoded to 10 minutes.
		skewTimeoutSeconds: 10 * 60,
		timestamps:         make(map[string]int64),
		vsm:                vsm,
		eventCh:            make(chan []*binlogdatapb.VEvent),
		heartbeatInterval:  flags.GetHeartbeatInterval(),
		ts:                 ts,
	}
	return vs.stream(ctx)
}
// resolveParams provides defaults for the inputs if they're not specified:
// a match-all filter, empty flags, all keyspaces when the single ShardGtid has
// an empty keyspace, and all shards of a keyspace when a ShardGtid has an
// empty shard (both expansions require Gtid == "current").
func (vsm *vstreamManager) resolveParams(ctx context.Context, tabletType topodatapb.TabletType, vgtid *binlogdatapb.VGtid,
	filter *binlogdatapb.Filter, flags *vtgatepb.VStreamFlags) (*binlogdatapb.VGtid, *binlogdatapb.Filter, *vtgatepb.VStreamFlags, error) {
	if filter == nil {
		filter = &binlogdatapb.Filter{
			Rules: []*binlogdatapb.Rule{{
				Match: "/.*",
			}},
		}
	}
	if flags == nil {
		flags = &vtgatepb.VStreamFlags{}
	}
	if vgtid == nil || len(vgtid.ShardGtids) == 0 {
		return nil, nil, nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, "vgtid must have at least one value with a starting position")
	}
	// To fetch from all keyspaces, the input must contain a single ShardGtid
	// that has an empty keyspace, and the Gtid must be "current". In the
	// future, we'll allow the Gtid to be empty which will also support
	// copying of existing data.
	if len(vgtid.ShardGtids) == 1 && vgtid.ShardGtids[0].Keyspace == "" {
		if vgtid.ShardGtids[0].Gtid != "current" {
			return nil, nil, nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, "for an empty keyspace, the Gtid value must be 'current': %v", vgtid)
		}
		keyspaces, err := vsm.toposerv.GetSrvKeyspaceNames(ctx, vsm.cell, false)
		if err != nil {
			return nil, nil, nil, err
		}
		newvgtid := &binlogdatapb.VGtid{}
		for _, keyspace := range keyspaces {
			newvgtid.ShardGtids = append(newvgtid.ShardGtids, &binlogdatapb.ShardGtid{
				Keyspace: keyspace,
				Gtid:     "current",
			})
		}
		vgtid = newvgtid
	}
	newvgtid := &binlogdatapb.VGtid{}
	for _, sgtid := range vgtid.ShardGtids {
		if sgtid.Shard == "" {
			if sgtid.Gtid != "current" {
				return nil, nil, nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, "if shards are unspecified, the Gtid value must be 'current': %v", vgtid)
			}
			// TODO(sougou): this should work with the new Migrate workflow
			_, _, allShards, err := vsm.resolver.GetKeyspaceShards(ctx, sgtid.Keyspace, tabletType)
			if err != nil {
				return nil, nil, nil, err
			}
			for _, shard := range allShards {
				newvgtid.ShardGtids = append(newvgtid.ShardGtids, &binlogdatapb.ShardGtid{
					Keyspace: sgtid.Keyspace,
					Shard:    shard.Name,
					Gtid:     sgtid.Gtid,
				})
			}
		} else {
			newvgtid.ShardGtids = append(newvgtid.ShardGtids, sgtid)
		}
	}
	//TODO add tablepk validations
	return newvgtid, filter, flags, nil
}
// RecordStreamDelay bumps the counter of skew-induced stream delays.
func (vsm *vstreamManager) RecordStreamDelay() {
	vstreamSkewDelayCount.Add(1)
}
// GetTotalStreamDelay returns the cumulative count of skew-induced delays.
func (vsm *vstreamManager) GetTotalStreamDelay() int64 {
	return vstreamSkewDelayCount.Get()
}
// stream starts the sender goroutine and one stream per ShardGtid, then waits
// for all of them to finish and returns the first recorded error (if any).
func (vs *vstream) stream(ctx context.Context) error {
	ctx, vs.cancel = context.WithCancel(ctx)
	defer vs.cancel()

	go vs.sendEvents(ctx)

	// Make a copy first, because the ShardGtids list can change once streaming starts.
	copylist := append(([]*binlogdatapb.ShardGtid)(nil), vs.vgtid.ShardGtids...)
	for _, sgtid := range copylist {
		vs.startOneStream(ctx, sgtid)
	}
	vs.wg.Wait()
	return vs.getError()
}
// sendEvents is the single sender goroutine: it forwards grouped events from
// eventCh to the client callback and injects synthetic HEARTBEAT events at
// heartbeatInterval (a zero interval disables them — the channel never fires).
// The first error (send failure or context cancellation) is recorded via
// once/setError and ends the loop.
func (vs *vstream) sendEvents(ctx context.Context) {
	var heartbeat <-chan time.Time
	var resetHeartbeat func()
	if vs.heartbeatInterval == 0 {
		// Never fires: nil-like unclosed channel keeps the select case idle.
		heartbeat = make(chan time.Time)
		resetHeartbeat = func() {}
	} else {
		d := time.Duration(vs.heartbeatInterval) * time.Second
		timer := time.NewTicker(d)
		defer timer.Stop()
		heartbeat = timer.C
		// Real events reset the ticker so heartbeats only fill idle gaps.
		resetHeartbeat = func() { timer.Reset(d) }
	}
	send := func(evs []*binlogdatapb.VEvent) error {
		if err := vs.send(evs); err != nil {
			vs.once.Do(func() {
				vs.setError(err)
			})
			return err
		}
		return nil
	}
	for {
		select {
		case <-ctx.Done():
			vs.once.Do(func() {
				vs.setError(fmt.Errorf("context canceled"))
			})
			return
		case evs := <-vs.eventCh:
			if err := send(evs); err != nil {
				// send() already recorded the error; this Do is a no-op
				// duplicate kept for safety.
				vs.once.Do(func() {
					vs.setError(err)
				})
				return
			}
			resetHeartbeat()
		case t := <-heartbeat:
			now := t.UnixNano()
			evs := []*binlogdatapb.VEvent{{
				Type:        binlogdatapb.VEventType_HEARTBEAT,
				Timestamp:   now / 1e9,
				CurrentTime: now,
			}}
			if err := send(evs); err != nil {
				vs.once.Do(func() {
					vs.setError(err)
				})
				return
			}
		}
	}
}
// startOneStream sets up one shard stream. It runs the stream in its own
// goroutine tracked by wg; the first stream to fail records the error and
// cancels all siblings.
func (vs *vstream) startOneStream(ctx context.Context, sgtid *binlogdatapb.ShardGtid) {
	vs.wg.Add(1)
	go func() {
		defer vs.wg.Done()
		err := vs.streamFromTablet(ctx, sgtid)

		// Set the error on exit. First one wins.
		if err != nil {
			log.Errorf("Error in vstream for %+v: %s", sgtid, err)
			vs.once.Do(func() {
				vs.setError(err)
				vs.cancel()
			})
		}
	}()
}
// MaxSkew is the threshold for a skew to be detected. Since MySQL timestamps are in seconds we account for
// two round-offs: one for the actual event and another while accounting for the clock skew
const MaxSkew = int64(2)
// computeSkew sets the timestamp of the current event for the calling stream, accounts for a clock skew
// and declares that a skew has arisen if the streams are too far apart.
// Returns whether the calling stream must pause (see mustPause).
func (vs *vstream) computeSkew(streamID string, event *binlogdatapb.VEvent) bool {
	vs.skewMu.Lock()
	defer vs.skewMu.Unlock()
	// account for skew between this vtgate and the source mysql server
	secondsInThePast := event.CurrentTime/1e9 - event.Timestamp
	vs.timestamps[streamID] = time.Now().Unix() - secondsInThePast

	var minTs, maxTs int64
	var laggardStream string
	// With a single stream there is nothing to compare against.
	if len(vs.timestamps) <= 1 {
		return false
	}
	for k, ts := range vs.timestamps {
		if ts < minTs || minTs == 0 {
			minTs = ts
			laggardStream = k
		}
		if ts > maxTs {
			maxTs = ts
		}
	}
	if vs.laggard != "" { // we are skewed, check if this event has fixed the skew
		if (maxTs - minTs) <= MaxSkew {
			vs.laggard = ""
			// Closing skewCh wakes all streams waiting in alignStreams.
			close(vs.skewCh)
		}
	} else {
		if (maxTs - minTs) > MaxSkew { // check if we are skewed due to this event
			log.Infof("Skew found, laggard is %s, %+v", laggardStream, vs.timestamps)
			vs.laggard = laggardStream
			vs.skewCh = make(chan bool)
		}
	}
	// NOTE(review): vs.lowestTS is read in mustPause but never updated here;
	// it appears to stay at its zero value — confirm whether minTs was meant
	// to be stored in it.
	return vs.mustPause(streamID)
}
// mustPause returns true if a skew exists and the stream calling this is not the slowest one.
// Caller holds skewMu (called from computeSkew).
func (vs *vstream) mustPause(streamID string) bool {
	switch vs.laggard {
	case "":
		return false
	case streamID:
		// current stream is the laggard, not pausing
		return false
	}
	if (vs.timestamps[streamID] - vs.lowestTS) <= MaxSkew {
		// current stream is not the laggard, but the skew is still within the limit
		return false
	}
	vs.vsm.RecordStreamDelay()
	return true
}
// alignStreams is called by each individual shard's stream before an event is sent to the client or after each heartbeat.
// It checks for skew (if the minimizeSkew option is set). If skew is present this stream is delayed until the skew is fixed.
// The faster stream detects the skew and waits. The slower stream resets the skew when it catches up.
// A stream stuck waiting longer than skewTimeoutSeconds fails the vstream call.
func (vs *vstream) alignStreams(ctx context.Context, event *binlogdatapb.VEvent, keyspace, shard string) error {
	if !vs.minimizeSkew || event.Timestamp == 0 {
		return nil
	}
	streamID := fmt.Sprintf("%s/%s", keyspace, shard)
	for {
		mustPause := vs.computeSkew(streamID, event)
		// Heartbeats only update the bookkeeping; they never pause.
		if event.Type == binlogdatapb.VEventType_HEARTBEAT {
			return nil
		}
		if !mustPause {
			return nil
		}
		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-time.After(time.Duration(vs.skewTimeoutSeconds) * time.Second):
			log.Errorf("timed out while waiting for skew to reduce: %s", streamID)
			return fmt.Errorf("timed out while waiting for skew to reduce: %s", streamID)
		case <-vs.skewCh:
			// once skew is fixed the channel is closed and all waiting streams "wake up"
		}
	}
}
// streamFromTablet streams from one shard. If transactions come in separate chunks, they are grouped and sent.
// The outer loop restarts the tablet stream on transient errors
// (FAILED_PRECONDITION / UNAVAILABLE), giving up after three consecutive
// failures; any other error is fatal for the whole vstream.
func (vs *vstream) streamFromTablet(ctx context.Context, sgtid *binlogdatapb.ShardGtid) error {
	// journalDone is assigned a channel when a journal event is encountered.
	// It will be closed when all journal events converge.
	var journalDone chan struct{}

	errCount := 0
	for {
		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-journalDone:
			// NOTE(review): comment below looks copy-pasted from the inner
			// select; here receiving means the journal converged.
			// Unreachable.
			// This can happen if a server misbehaves and does not end
			// the stream after we return an error.
			return nil
		default:
		}

		var eventss [][]*binlogdatapb.VEvent
		var err error
		// Pick a healthy serving tablet for this shard in the local cell.
		tp, err := discovery.NewTabletPicker(vs.ts, []string{vs.vsm.cell}, sgtid.Keyspace, sgtid.Shard, vs.tabletType.String())
		if err != nil {
			log.Errorf(err.Error())
			return err
		}
		tablet, err := tp.PickForStreaming(ctx)
		if err != nil {
			log.Errorf(err.Error())
			return err
		}
		log.Infof("Picked tablet %s for for %s/%s/%s/%s", tablet.Alias.String(), vs.vsm.cell, sgtid.Keyspace, sgtid.Shard, vs.tabletType.String())
		target := &querypb.Target{
			Keyspace:   sgtid.Keyspace,
			Shard:      sgtid.Shard,
			TabletType: vs.tabletType,
			Cell:       vs.vsm.cell,
		}
		tabletConn, err := vs.vsm.resolver.GetGateway().QueryServiceByAlias(tablet.Alias, target)
		if err != nil {
			log.Errorf(err.Error())
			return err
		}

		// Watch the tablet's health in parallel; any problem is funneled to
		// errCh so the event callback can abort the stream.
		errCh := make(chan error, 1)
		go func() {
			_ = tabletConn.StreamHealth(ctx, func(shr *querypb.StreamHealthResponse) error {
				var err error
				if ctx.Err() != nil {
					err = fmt.Errorf("context has ended")
				} else if shr == nil || shr.RealtimeStats == nil || shr.Target == nil {
					err = fmt.Errorf("health check failed")
				} else if vs.tabletType != shr.Target.TabletType {
					err = fmt.Errorf("tablet type has changed from %s to %s, restarting vstream",
						vs.tabletType, shr.Target.TabletType)
				} else if shr.RealtimeStats.HealthError != "" {
					err = fmt.Errorf("tablet %s is no longer healthy: %s, restarting vstream",
						tablet.Alias, shr.RealtimeStats.HealthError)
				}
				if err != nil {
					errCh <- err
				}
				return nil
			})
		}()

		log.Infof("Starting to vstream from %s", tablet.Alias.String())
		// Safe to access sgtid.Gtid here (because it can't change until streaming begins).
		err = tabletConn.VStream(ctx, target, sgtid.Gtid, sgtid.TablePKs, vs.filter, func(events []*binlogdatapb.VEvent) error {
			// We received a valid event. Reset error count.
			errCount = 0

			select {
			case <-ctx.Done():
				return ctx.Err()
			case streamErr := <-errCh:
				log.Warningf("Tablet state changed: %s, attempting to restart", streamErr)
				return vterrors.New(vtrpcpb.Code_UNAVAILABLE, streamErr.Error())
			case <-journalDone:
				// Unreachable.
				// This can happen if a server misbehaves and does not end
				// the stream after we return an error.
				return io.EOF
			default:
			}

			sendevents := make([]*binlogdatapb.VEvent, 0, len(events))
			for _, event := range events {
				switch event.Type {
				case binlogdatapb.VEventType_FIELD:
					// Update table names and send.
					// If we're streaming from multiple keyspaces, this will disambiguate
					// duplicate table names.
					ev := proto.Clone(event).(*binlogdatapb.VEvent)
					ev.FieldEvent.TableName = sgtid.Keyspace + "." + ev.FieldEvent.TableName
					sendevents = append(sendevents, ev)
				case binlogdatapb.VEventType_ROW:
					// Update table names and send.
					ev := proto.Clone(event).(*binlogdatapb.VEvent)
					ev.RowEvent.TableName = sgtid.Keyspace + "." + ev.RowEvent.TableName
					sendevents = append(sendevents, ev)
				case binlogdatapb.VEventType_COMMIT, binlogdatapb.VEventType_DDL, binlogdatapb.VEventType_OTHER:
					// Transaction boundary: flush everything buffered so far.
					sendevents = append(sendevents, event)
					eventss = append(eventss, sendevents)

					if err := vs.alignStreams(ctx, event, sgtid.Keyspace, sgtid.Shard); err != nil {
						return err
					}

					if err := vs.sendAll(sgtid, eventss); err != nil {
						return err
					}
					eventss = nil
					sendevents = nil
				case binlogdatapb.VEventType_HEARTBEAT:
					// Remove all heartbeat events for now.
					// Otherwise they can accumulate indefinitely if there are no real events.
					// TODO(sougou): figure out a model for this.
					if err := vs.alignStreams(ctx, event, sgtid.Keyspace, sgtid.Shard); err != nil {
						return err
					}
				case binlogdatapb.VEventType_JOURNAL:
					journal := event.Journal
					// Journal events are not sent to clients by default, but only when StopOnReshard is set
					if vs.stopOnReshard && journal.MigrationType == binlogdatapb.MigrationType_SHARDS {
						sendevents = append(sendevents, event)
						eventss = append(eventss, sendevents)
						if err := vs.sendAll(sgtid, eventss); err != nil {
							return err
						}
						eventss = nil
						sendevents = nil
					}
					je, err := vs.getJournalEvent(ctx, sgtid, journal)
					if err != nil {
						return err
					}
					if je != nil {
						// Wait till all other participants converge and return EOF.
						journalDone = je.done
						select {
						case <-ctx.Done():
							return ctx.Err()
						case <-journalDone:
							return io.EOF
						}
					}
				default:
					sendevents = append(sendevents, event)
				}
			}
			if len(sendevents) != 0 {
				eventss = append(eventss, sendevents)
			}
			return nil
		})
		// If stream was ended (by a journal event), return nil without checking for error.
		select {
		case <-journalDone:
			return nil
		default:
		}
		if err == nil {
			// Unreachable.
			err = vterrors.Errorf(vtrpcpb.Code_UNKNOWN, "vstream ended unexpectedly")
		}
		if vterrors.Code(err) != vtrpcpb.Code_FAILED_PRECONDITION && vterrors.Code(err) != vtrpcpb.Code_UNAVAILABLE {
			log.Errorf("vstream for %s/%s error: %v", sgtid.Keyspace, sgtid.Shard, err)
			return err
		}
		errCount++
		if errCount >= 3 {
			log.Errorf("vstream for %s/%s had three consecutive failures: %v", sgtid.Keyspace, sgtid.Shard, err)
			return err
		}
		log.Infof("vstream for %s/%s error, retrying: %v", sgtid.Keyspace, sgtid.Shard, err)
	}
}
// sendAll sends a group of events together while holding the lock.
// It also rewrites GTID/LASTPK events into VGTID events reflecting the
// request-wide vgtid, updating sgtid's Gtid/TablePKs in the process.
func (vs *vstream) sendAll(sgtid *binlogdatapb.ShardGtid, eventss [][]*binlogdatapb.VEvent) error {
	vs.mu.Lock()
	defer vs.mu.Unlock()
	// Send all chunks while holding the lock.
	for _, events := range eventss {
		if err := vs.getError(); err != nil {
			return err
		}
		// convert all gtids to vgtids. This should be done here while holding the lock.
		for j, event := range events {
			if event.Type == binlogdatapb.VEventType_GTID {
				// Update the VGtid and send that instead.
				sgtid.Gtid = event.Gtid
				events[j] = &binlogdatapb.VEvent{
					Type:     binlogdatapb.VEventType_VGTID,
					Vgtid:    proto.Clone(vs.vgtid).(*binlogdatapb.VGtid),
					Keyspace: event.Keyspace,
					Shard:    event.Shard,
				}
			} else if event.Type == binlogdatapb.VEventType_LASTPK {
				var foundIndex = -1
				eventTablePK := event.LastPKEvent.TableLastPK
				for idx, pk := range sgtid.TablePKs {
					if pk.TableName == eventTablePK.TableName {
						foundIndex = idx
						break
					}
				}
				if foundIndex == -1 {
					if !event.LastPKEvent.Completed {
						sgtid.TablePKs = append(sgtid.TablePKs, eventTablePK)
					}
				} else {
					if event.LastPKEvent.Completed {
						// remove tablepk from sgtid
						// (swap-with-last removal; order is not preserved)
						sgtid.TablePKs[foundIndex] = sgtid.TablePKs[len(sgtid.TablePKs)-1]
						sgtid.TablePKs[len(sgtid.TablePKs)-1] = nil
						sgtid.TablePKs = sgtid.TablePKs[:len(sgtid.TablePKs)-1]
					} else {
						sgtid.TablePKs[foundIndex] = eventTablePK
					}
				}
				events[j] = &binlogdatapb.VEvent{
					Type:     binlogdatapb.VEventType_VGTID,
					Vgtid:    proto.Clone(vs.vgtid).(*binlogdatapb.VGtid),
					Keyspace: event.Keyspace,
					Shard:    event.Shard,
				}
			}
		}
		vs.eventCh <- events
	}
	return nil
}
// getError returns the first recorded stream error (nil if none), guarded by errMu.
func (vs *vstream) getError() error {
	vs.errMu.Lock()
	defer vs.errMu.Unlock()
	return vs.err
}
// setError records a stream error, guarded by errMu. Callers use vs.once to
// ensure only the first error is recorded.
func (vs *vstream) setError(err error) {
	vs.errMu.Lock()
	defer vs.errMu.Unlock()
	vs.err = err
}
// getJournalEvent returns a journalEvent. The caller has to wait on its done channel.
// Once it closes, the caller has to return (end their stream).
// The function has three parts:
// Part 1: For the first stream that encounters an event, it creates a journal event.
// Part 2: Every stream joins the journalEvent. If all have not joined, the journalEvent
// is returned to the caller.
// Part 3: If all streams have joined, then new streams are created to replace existing
// streams, the done channel is closed and returned. This section is executed exactly
// once after the last stream joins.
func (vs *vstream) getJournalEvent(ctx context.Context, sgtid *binlogdatapb.ShardGtid, journal *binlogdatapb.Journal) (*journalEvent, error) {
	if journal.MigrationType == binlogdatapb.MigrationType_TABLES {
		// We cannot support table migrations yet because there is no
		// good model for it yet. For example, what if a table is migrated
		// out of the current keyspace we're streaming from.
		return nil, nil
	}

	vs.mu.Lock()
	defer vs.mu.Unlock()

	je, ok := vs.journaler[journal.Id]
	if !ok {
		log.Infof("Journal event received: %v", journal)
		// Identify the list of ShardGtids that match the participants of the journal.
		je = &journalEvent{
			journal:      journal,
			participants: make(map[*binlogdatapb.ShardGtid]bool),
			done:         make(chan struct{}),
		}
		const (
			undecided = iota
			matchAll
			matchNone
		)
		// We start off as undecided. Once we transition to
		// matchAll or matchNone, we have to stay in that state.
		mode := undecided
	nextParticipant:
		for _, jks := range journal.Participants {
			for _, inner := range vs.vgtid.ShardGtids {
				if inner.Keyspace == jks.Keyspace && inner.Shard == jks.Shard {
					switch mode {
					case undecided, matchAll:
						mode = matchAll
						je.participants[inner] = false
					case matchNone:
						return nil, fmt.Errorf("not all journaling participants are in the stream: journal: %v, stream: %v", journal.Participants, vs.vgtid.ShardGtids)
					}
					continue nextParticipant
				}
			}
			switch mode {
			case undecided, matchNone:
				mode = matchNone
			case matchAll:
				return nil, fmt.Errorf("not all journaling participants are in the stream: journal: %v, stream: %v", journal.Participants, vs.vgtid.ShardGtids)
			}
		}
		if mode == matchNone {
			// Unreachable. Journal events are only added to participants.
			// But if we do receive such an event, the right action will be to ignore it.
			return nil, nil
		}
		vs.journaler[journal.Id] = je
	}

	if _, ok := je.participants[sgtid]; !ok {
		// Unreachable. See above.
		return nil, nil
	}
	// Mark the calling stream as having reached the journal event.
	je.participants[sgtid] = true

	for _, waiting := range je.participants {
		if !waiting {
			// Some participants are yet to join the wait.
			return je, nil
		}
	}

	if !vs.stopOnReshard { // stop streaming from current shards and start streaming the new shards
		// All participants are waiting. Replace old shard gtids with new ones.
		newsgtids := make([]*binlogdatapb.ShardGtid, 0, len(vs.vgtid.ShardGtids)-len(je.participants)+len(je.journal.ShardGtids))
		log.Infof("Removing shard gtids: %v", je.participants)
		for _, cursgtid := range vs.vgtid.ShardGtids {
			if je.participants[cursgtid] {
				continue
			}
			newsgtids = append(newsgtids, cursgtid)
		}

		log.Infof("Adding shard gtids: %v", je.journal.ShardGtids)
		for _, sgtid := range je.journal.ShardGtids {
			newsgtids = append(newsgtids, sgtid)
			// It's ok to start the streams even though ShardGtids are not updated yet.
			// This is because we're still holding the lock.
			vs.startOneStream(ctx, sgtid)
		}
		vs.vgtid.ShardGtids = newsgtids
	}
	close(je.done)
	return je, nil
}
| {
return &vstreamManager{
resolver: resolver,
toposerv: serv,
cell: cell,
}
} |
query_store.rs | use std::collections::BTreeMap;
use web3::types::H256;
use crate::deployment_store::{DeploymentStore, ReplicaId};
use graph::components::store::QueryStore as QueryStoreTrait;
use graph::prelude::*;
use crate::primary::Site;
/// A store handle bound to a single deployment (`site`) and a fixed
/// database replica, used to execute entity queries for that deployment.
pub(crate) struct QueryStore {
    /// The deployment this store is scoped to.
    site: Arc<Site>,
    /// The replica from which connections are taken for queries.
    replica_id: ReplicaId,
    /// Underlying deployment store that actually runs the queries.
    store: Arc<DeploymentStore>,
    /// Chain store used for block hash/number lookups on the deployment's network.
    chain_store: Arc<crate::ChainStore>,
}
impl QueryStore {
pub(crate) fn new(
store: Arc<DeploymentStore>,
chain_store: Arc<crate::ChainStore>,
site: Arc<Site>,
replica_id: ReplicaId,
) -> Self {
QueryStore {
site,
replica_id,
store,
chain_store,
}
}
}
#[async_trait]
impl QueryStoreTrait for QueryStore {
fn find_query_values(
&self,
query: EntityQuery,
) -> Result<Vec<BTreeMap<String, q::Value>>, QueryExecutionError> {
assert_eq!(&self.site.deployment, &query.subgraph_id);
let conn = self
.store
.get_replica_conn(self.replica_id)
.map_err(|e| QueryExecutionError::StoreError(e.into()))?;
self.store.execute_query(&conn, self.site.clone(), query)
}
/// Return true if the deployment with the given id is fully synced,
/// and return false otherwise. Errors from the store are passed back up
async fn is_deployment_synced(&self) -> Result<bool, Error> {
Ok(self
.store
.exists_and_synced(self.site.deployment.cheap_clone())
.await?)
}
fn block_ptr(&self) -> Result<Option<BlockPtr>, Error> {
self.store.block_ptr(&self.site)
}
fn | (&self, block_hash: H256) -> Result<Option<BlockNumber>, StoreError> {
// We should also really check that the block with the given hash is
// on the chain starting at the subgraph's current head. That check is
// very expensive though with the data structures we have currently
// available. Ideally, we'd have the last REORG_THRESHOLD blocks in
// memory so that we can check against them, and then mark in the
// database the blocks on the main chain that we consider final
let subgraph_network = self.network_name();
self.chain_store
.block_number(block_hash)?
.map(|(network_name, number)| {
if &network_name == subgraph_network {
BlockNumber::try_from(number)
.map_err(|e| StoreError::QueryExecutionError(e.to_string()))
} else {
Err(StoreError::QueryExecutionError(format!(
"subgraph {} belongs to network {} but block {:x} belongs to network {}",
&self.site.deployment, subgraph_network, block_hash, network_name
)))
}
})
.transpose()
}
fn wait_stats(&self) -> PoolWaitStats {
self.store.wait_stats(self.replica_id)
}
async fn has_non_fatal_errors(&self, block: Option<BlockNumber>) -> Result<bool, StoreError> {
let id = self.site.deployment.clone();
self.store
.with_conn(move |conn, _| {
crate::deployment::has_non_fatal_errors(conn, &id, block).map_err(|e| e.into())
})
.await
}
async fn deployment_state(&self) -> Result<DeploymentState, QueryExecutionError> {
Ok(self
.store
.deployment_state_from_id(self.site.deployment.clone())
.await?)
}
fn api_schema(&self) -> Result<Arc<ApiSchema>, QueryExecutionError> {
let info = self.store.subgraph_info(&self.site)?;
Ok(info.api)
}
fn network_name(&self) -> &str {
&self.site.network
}
async fn query_permit(&self) -> tokio::sync::OwnedSemaphorePermit {
self.store.query_permit(self.replica_id).await
}
}
| block_number |
v4.rs | use crate::iv4::*;
use core::intrinsics;
use core::ops::{Add, AddAssign, Div, DivAssign, Mul, Sub};
/// A 4-component `f32` vector.
#[derive(Clone, Copy)]
pub struct V4 {
    pub x: f32,
    pub y: f32,
    pub z: f32,
    pub w: f32,
}
impl V4 {
    /// Build a vector from four components, converting each into `f32`.
    pub fn new(x: impl Into<f32>, y: impl Into<f32>, z: impl Into<f32>, w: impl Into<f32>) -> V4 {
        V4 {
            x: x.into(),
            y: y.into(),
            z: z.into(),
            w: w.into(),
        }
    }

    /// Broadcast a single scalar into all four components.
    pub fn splat(value: impl Into<f32>) -> V4 {
        let v = value.into();
        V4 { x: v, y: v, z: v, w: v }
    }

    /// The zero vector (all components 0.0).
    pub fn zero() -> V4 {
        V4 { x: 0.0, y: 0.0, z: 0.0, w: 0.0 }
    }

    /// Euclidean length of the vector.
    pub fn len(self) -> f32 {
        // sqrt via the core intrinsic; `f32::sqrt` lives in std and this
        // crate imports only `core` — presumably a no_std build (TODO confirm).
        unsafe { intrinsics::sqrtf32(self.dot(self)) }
    }

    /// A unit-length vector pointing in the same direction.
    pub fn normalize(self) -> V4 {
        self / self.len()
    }

    /// Four-component dot product.
    pub fn dot(self, rhs: V4) -> f32 {
        self.x * rhs.x + self.y * rhs.y + self.z * rhs.z + self.w * rhs.w
    }

    /// Component-wise minimum of `self` and `rhs`.
    pub fn min(self, rhs: V4) -> V4 {
        V4::new(
            self.x.min(rhs.x),
            self.y.min(rhs.y),
            self.z.min(rhs.z),
            self.w.min(rhs.w),
        )
    }

    /// Component-wise maximum of `self` and `rhs`.
    pub fn max(self, rhs: V4) -> V4 {
        V4::new(
            self.x.max(rhs.x),
            self.y.max(rhs.y),
            self.z.max(rhs.z),
            self.w.max(rhs.w),
        )
    }
}
impl Add for V4 {
type Output = V4;
fn add(self, other: V4) -> V4 {
V4 {
x: self.x + other.x,
y: self.y + other.y,
z: self.z + other.z,
w: self.w + other.w,
}
}
}
impl AddAssign for V4 {
    /// In-place component-wise addition.
    fn add_assign(&mut self, rhs: V4) {
        self.x += rhs.x;
        self.y += rhs.y;
        self.z += rhs.z;
        self.w += rhs.w;
    }
}
impl Div for V4 {
type Output = V4;
fn div(self, other: V4) -> V4 {
V4 {
x: self.x / other.x,
y: self.y / other.y,
z: self.z / other.z,
w: self.w / other.w,
}
}
}
impl Div<f32> for V4 {
type Output = V4;
fn div(self, other: f32) -> V4 {
V4 {
x: self.x / other,
y: self.y / other,
z: self.z / other,
w: self.w / other,
}
}
}
impl DivAssign<f32> for V4 {
    /// In-place division of every component by the scalar `rhs`.
    fn div_assign(&mut self, rhs: f32) {
        self.x /= rhs;
        self.y /= rhs;
        self.z /= rhs;
        self.w /= rhs;
    }
}
/// Convert an `Iv4` into a float vector component-wise.
// NOTE(review): Iv4 appears to be a fixed-point vector (FRACT_BITS) whose
// components convert into f32 via `Into` — confirm against iv4.rs.
impl<const FRACT_BITS: u32> From<Iv4<FRACT_BITS>> for V4 {
    fn from(v: Iv4<FRACT_BITS>) -> Self {
        Self::new(v.x, v.y, v.z, v.w)
    }
}
impl Mul for V4 {
type Output = V4;
fn mul(self, other: V4) -> V4 {
V4 {
x: self.x * other.x,
y: self.y * other.y,
z: self.z * other.z,
w: self.w * other.w,
}
}
}
impl Mul<f32> for V4 {
type Output = V4;
fn mul(self, other: f32) -> V4 {
V4 {
x: self.x * other,
y: self.y * other,
z: self.z * other,
w: self.w * other,
}
}
} | type Output = V4;
fn sub(self, other: V4) -> V4 {
V4 {
x: self.x - other.x,
y: self.y - other.y,
z: self.z - other.z,
w: self.w - other.w,
}
}
} |
impl Sub for V4 { |
dash_tr.ts | <?xml version="1.0" ?><!DOCTYPE TS><TS language="tr" version="2.0">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="14"/>
<source>About Moneta Core</source>
<translation>Moneta Çekirdeği Hakkında</translation>
</message>
<message>
<location filename="../forms/aboutdialog.ui" line="53"/>
<source><b>Moneta Core</b> version</source>
<translation><b>Moneta Çekirdek</b> versiyon</translation>
</message>
<message>
<location filename="../forms/aboutdialog.ui" line="94"/>
<source>Copyright &copy; 2009-2014 The Bitcoin Core developers.
Copyright &copy; 2014-YYYY The Moneta Core developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/aboutdialog.ui" line="111"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
Bu yazılım deneme safhasındadır.
MIT/X11 yazılım lisansı kapsamında yayınlanmıştır, COPYING dosyasına ya da http://www.opensource.org/licenses/mit-license.php sayfasına bakınız.
Bu ürün OpenSSL projesi tarafından OpenSSL araç takımı (http://www.openssl.org/) için geliştirilen yazılımlar, Eric Young ([email protected]) tarafından hazırlanmış şifreleme yazılımları ve Thomas Bernard tarafından programlanmış UPnP yazılımı içerir.</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="30"/>
<location filename="../utilitydialog.cpp" line="31"/>
<source>Copyright</source>
<translation>Telif hakkı</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="30"/>
<source>The Bitcoin Core developers</source>
<translation>Bitcoin Çekirdeği geliştiricileri</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="31"/>
<source>The Moneta Core developers</source>
<translation>Moneta Çekirdek Geliştiricileri</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="43"/>
<location filename="../utilitydialog.cpp" line="45"/>
<source>(%1-bit)</source>
<translation>(%1-bit)</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="30"/>
<source>Double-click to edit address or label</source>
<translation>Adresi ya da etiketi düzenlemek için çift tıklayınız</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="57"/>
<source>Create a new address</source>
<translation>Yeni bir adres oluştur</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="60"/>
<source>&New</source>
<translation>&Yeni</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="71"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Şu anda seçili olan adresi sistem panosuna kopyala</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="74"/>
<source>&Copy</source>
<translation>&Kopyala</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="85"/>
<source>Delete the currently selected address from the list</source>
<translation>Seçili adresi listeden sil</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="88"/>
<source>&Delete</source>
<translation>&Sil</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="112"/>
<source>Export the data in the current tab to a file</source>
<translation>Güncel sekmedeki verileri bir dosyaya aktar</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="115"/>
<source>&Export</source>
<translation>&Dışa aktar</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="132"/>
<source>C&lose</source>
<translation>K&apat</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="45"/>
<source>Choose the address to send coins to</source>
<translation>Bitcoin yollanacak adresi seç</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="46"/>
<source>Choose the address to receive coins with</source>
<translation>Bitcoin alınacak adresi seç</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="51"/>
<source>C&hoose</source>
<translation>S&eç</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="57"/>
<source>Sending addresses</source>
<translation>Yollama adresleri</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="58"/>
<source>Receiving addresses</source>
<translation>Alım adresleri</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="65"/>
<source>These are your Moneta addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
        <translation>Bunlar ödeme yapmak için kullanacağınız Moneta adreslerinizdir. Moneta yollamadan önce meblağı ve alıcı adresini daima kontrol ediniz.</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="69"/>
<source>These are your Moneta addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>Bunlar ödeme almak için kullanacağınız Moneta adreslerinizdir. Her muamele için yeni bir alım adresi kullanmanız tavsiye edilir.</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="75"/>
<source>&Copy Address</source>
<translation>Adresi &kopyala</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="76"/>
<source>Copy &Label</source>
<translation>&Etiketi kopyala</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="77"/>
<source>&Edit</source>
<translation>&Düzenle</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="271"/>
<source>Export Address List</source>
<translation>Adres listesini dışa aktar</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="272"/>
<source>Comma separated file (*.csv)</source>
<translation>Virgülle ayrılmış değerler dosyası (*.csv)</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="285"/>
<source>Exporting Failed</source>
<translation>Dışa aktarım başarısız oldu</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="286"/>
<source>There was an error trying to save the address list to %1.</source>
<translation>Adres listesinin %1 konumuna kaydedilmesi sırasında bir hata meydana geldi.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="169"/>
<source>Label</source>
<translation>Etiket</translation>
</message>
<message>
<location filename="../addresstablemodel.cpp" line="169"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location filename="../addresstablemodel.cpp" line="205"/>
<source>(no label)</source>
<translation>(boş etiket)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="26"/>
<source>Passphrase Dialog</source>
<translation>Parola diyaloğu</translation>
</message>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="47"/>
<source>Enter passphrase</source>
<translation>Parolayı giriniz</translation>
</message>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="61"/>
<source>New passphrase</source>
<translation>Yeni parola</translation>
</message>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="75"/>
<source>Repeat new passphrase</source>
<translation>Yeni parolayı tekrarlayınız</translation>
</message>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="108"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="111"/>
<source>For anonymization only</source>
<translation>Sadece anonimleştirme için</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="41"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Cüzdanınız için yeni parolayı giriniz.<br/>Lütfen <b>10 ya da daha fazla rastgele karakter</b> veya <b>sekiz ya da daha fazla kelime</b> içeren bir parola seçiniz.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="42"/>
<source>Encrypt wallet</source>
<translation>Cüzdanı şifrele</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="48"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Bu işlem cüzdan kilidini açmak için cüzdan parolanızı gerektirir.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="53"/>
<source>Unlock wallet</source>
<translation>Cüzdan kilidini aç</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="56"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Bu işlem, cüzdan şifresini açmak için cüzdan parolasını gerektirir.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="61"/>
<source>Decrypt wallet</source>
<translation>Cüzdan şifresini aç</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="64"/>
<source>Change passphrase</source>
<translation>Parolayı değiştir</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="65"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Cüzdan için eski ve yeni parolaları giriniz.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="112"/>
<source>Confirm wallet encryption</source>
<translation>Cüzdan şifrelenmesini teyit eder</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="113"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR DASH</b>!</source>
        <translation>Uyarı: Eğer cüzdanınızı şifreler ve şifrenizi kaybederseniz, <b>TÜM DASHLERİNİZİ</b> kaybedersiniz.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="113"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Cüzdanınızı şifrelemek istediğinizden emin misiniz?</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="122"/>
<location filename="../askpassphrasedialog.cpp" line="181"/>
<source>Wallet encrypted</source>
<translation>Cüzdan şifrelendi</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="124"/>
<source>Moneta will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your monetas from being stolen by malware infecting your computer.</source>
<translation>Moneta, şifreleme işlemini tamamlamak için kapanacak. Cüzdanınızı şifrelemenizin, bilgisayarınıza bulaşacak zararlı yazılımlar tarafından Moneta'lerinizin çalınmasından sizi tam olarak korumayacağını unutmayınız.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="128"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>ÖNEMLİ: Önceden yapmış olduğunuz cüzdan dosyası yedeklemelerinin yeni oluşturulan şifrelenmiş cüzdan dosyası ile değiştirilmeleri gerekir. Güvenlik nedenleriyle yeni, şifrelenmiş cüzdanı kullanmaya başladığınızda eski şifrelenmemiş cüzdan dosyaları işe yaramaz hale gelecektir.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="137"/>
<location filename="../askpassphrasedialog.cpp" line="144"/>
<location filename="../askpassphrasedialog.cpp" line="187"/>
<location filename="../askpassphrasedialog.cpp" line="193"/>
<source>Wallet encryption failed</source>
<translation>Cüzdan şifrelemesi başarısız oldu</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="138"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Dahili bir hata sebebiyle cüzdan şifrelemesi başarısız oldu. Cüzdanınız şifrelenmedi.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="145"/>
<location filename="../askpassphrasedialog.cpp" line="194"/>
<source>The supplied passphrases do not match.</source>
<translation>Girilen parolalar birbirleriyle uyumlu değil.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="157"/>
<source>Wallet unlock failed</source>
<translation>Cüzdan kilidinin açılması başarısız oldu</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="158"/>
<location filename="../askpassphrasedialog.cpp" line="169"/>
<location filename="../askpassphrasedialog.cpp" line="188"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Cüzdan şifresinin açılması için girilen parola yanlıştı.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="168"/>
<source>Wallet decryption failed</source>
<translation>Cüzdan şifresinin açılması başarısız oldu</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="182"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Cüzdan parolası başarılı bir şekilde değiştirildi.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="230"/>
<location filename="../askpassphrasedialog.cpp" line="254"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Uyarı: Caps Lock tuşu faal durumda!</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="76"/>
<source>Moneta Core</source>
<translation>Moneta Çekirdeği</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="85"/>
<source>Wallet</source>
<translation>Cüzdan</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="87"/>
<source>Node</source>
<translation>Düğüm</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="101"/>
<location filename="../bitcoingui.cpp" line="498"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="227"/>
<source>&Overview</source>
<translation>&Genel bakış</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="228"/>
<source>Show general overview of wallet</source>
<translation>Cüzdana genel bakışı göster</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="234"/>
<source>&Send</source>
<translation>&Gönder</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="235"/>
<source>Send coins to a Moneta address</source>
<translation>Bir Moneta adresine para gönder</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="241"/>
<source>&Receive</source>
<translation>&Al</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="242"/>
<source>Request payments (generates QR codes and moneta: URIs)</source>
<translation>Ödeme iste (QR kodu ve moneta: URIs oluşturur)</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="248"/>
<source>&Transactions</source>
<translation>&Muameleler</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="249"/>
<source>Browse transaction history</source>
<translation>Muamele tarihçesini tara</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="266"/>
<source>E&xit</source>
<translation>&Çık</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="267"/>
<source>Quit application</source>
<translation>Uygulamadan çık</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="271"/>
<location filename="../bitcoingui.cpp" line="273"/>
<source>&About Moneta Core</source>
<translation>&Moneta Çekirdeği Hakkında</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="274"/>
<source>Show information about Moneta</source>
<translation>Moneta hakkında bilgi göster</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="277"/>
<location filename="../bitcoingui.cpp" line="279"/>
<source>About &Qt</source>
<translation>&Qt hakkında</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="281"/>
<source>Show information about Qt</source>
<translation>Qt hakkında bilgi görüntü</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="283"/>
<source>&Options...</source>
<translation>&Seçenekler...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="284"/>
<source>Modify configuration options for Moneta</source>
<translation>Moneta seçeneklerinin yapılandırmasını değiştir</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="287"/>
<location filename="../bitcoingui.cpp" line="289"/>
<source>&Show / Hide</source>
<translation>&Göster / Sakla</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="290"/>
<source>Show or hide the main Window</source>
<translation>Ana pencereyi görüntüle ya da sakla</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="292"/>
<source>&Encrypt Wallet...</source>
<translation>Cüzdanı &şifrele...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="293"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Cüzdanınızın özel anahtarlarını şifrele</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="295"/>
<source>&Backup Wallet...</source>
<translation>Cüzdanı &yedekle...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="296"/>
<source>Backup wallet to another location</source>
<translation>Cüzdanı diğer bir konumda yedekle</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="297"/>
<source>&Change Passphrase...</source>
<translation>Parolayı &değiştir...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="298"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Cüzdan şifrelemesi için kullanılan parolayı değiştir</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="299"/>
<source>&Unlock Wallet...</source>
<translation>&Cüzdan Kilidini Kaldır</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="300"/>
<source>Unlock wallet</source>
<translation>Cüzdan kilidini aç</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="301"/>
<source>&Lock Wallet</source>
<translation>&Cüzdanı Kilitle</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="302"/>
<source>Sign &message...</source>
<translation>&Mesaj imzala...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="303"/>
<source>Sign messages with your Moneta addresses to prove you own them</source>
<translation>Mesajları, adreslerin size ait olduğunu ispatlamak için Moneta adresleri ile imzala</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="304"/>
<source>&Verify message...</source>
<translation>Mesaj &kontrol et...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="305"/>
<source>Verify messages to ensure they were signed with specified Moneta addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="307"/>
<source>&Information</source>
<translation>&Malumat</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="308"/>
<source>Show diagnostic information</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="309"/>
<source>&Debug console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="310"/>
<source>Open debugging console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="311"/>
<source>&Network Monitor</source>
<translation>&Ağ Monitörü</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="312"/>
<source>Show network monitor</source>
<translation>Ağ monitörünü göster</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="313"/>
<source>Open &Configuration File</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="314"/>
<source>Open configuration file</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="316"/>
<source>&Sending addresses...</source>
<translation>&Gönderme adresleri...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="317"/>
<source>Show the list of used sending addresses and labels</source>
<translation>Kullanılmış gönderme adresleri ve etiketlerin listesini göster</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="318"/>
<source>&Receiving addresses...</source>
<translation>&Alma adresleri...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="319"/>
<source>Show the list of used receiving addresses and labels</source>
<translation>Kullanılmış alım adresleri ve etiketlerin listesini göster</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="321"/>
<source>Open &URI...</source>
<translation>&URI aç...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="322"/>
<source>Open a moneta: URI or payment request</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="324"/>
<source>&Command-line options</source>
<translation>&Komut satırı seçenekleri</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="325"/>
<source>Show the Moneta Core help message to get a list with possible Moneta command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="361"/>
<source>&File</source>
<translation>&Dosya</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="375"/>
<source>&Settings</source>
<translation>&Ayarlar</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="388"/>
<source>&Tools</source>
<translation>&Araçlar</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="395"/>
<source>&Help</source>
<translation>&Yardım</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="406"/>
<source>Tabs toolbar</source>
<translation>Sekme araç çubuğu</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="493"/>
<location filename="../bitcoingui.cpp" line="498"/>
<source>Moneta client</source>
<translation>Moneta istemcisi</translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="643"/>
<source>%n active connection(s) to Moneta network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="655"/>
<source>Synchronizing with network...</source>
<translation>Şebeke ile senkronizasyon...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="658"/>
<source>Importing blocks from disk...</source>
<translation>Bloklar diskten içe aktarılıyor...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="661"/>
<source>Reindexing blocks on disk...</source>
<translation>Diskteki bloklar yeniden endeksleniyor...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="665"/>
<source>No block source available...</source>
<translation>Hiçbir blok kaynağı mevcut değil...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="675"/>
<source>Processed %1 blocks of transaction history.</source>
<translation>Muamele tarihçesinde %1 blok işlendi.</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="680"/>
<source>Up to date</source>
<translation>Güncel</translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="701"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="705"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="709"/>
<location filename="../bitcoingui.cpp" line="715"/>
<source>%n week(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="715"/>
<source>%1 and %2</source>
<translation>%1 ve %2</translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="715"/>
<source>%n year(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="719"/>
<source>%1 behind</source>
<translation>%1 geride</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="724"/>
<source>Catching up...</source>
<translation>Aralık kapatılıyor...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="740"/>
<source>Last received block was generated %1 ago.</source>
<translation>Son alınan blok %1 evvel oluşturulmuştu.</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="742"/>
<source>Transactions after this will not yet be visible.</source>
<translation>Bundan sonraki muameleler henüz görüntülenemez.</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="755"/>
<source>Moneta</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="769"/>
<source>Error</source>
<translation>Hata</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="772"/>
<source>Warning</source>
<translation>Uyarı</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="775"/>
<source>Information</source>
<translation>Bilgi</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="854"/>
<source>Sent transaction</source>
<translation>Muamele yollandı</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="854"/>
<source>Incoming transaction</source>
<translation>Gelen muamele</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="855"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Tarih: %1
Meblağ: %2
Tür: %3
Adres: %4
</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="926"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Cüzdan <b>şifrelenmiştir</b> ve şu anda <b>kilidi açıktır</b></translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="936"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b> for anonimization only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="946"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Cüzdan <b>şifrelenmiştir</b> ve şu anda <b>kilitlidir</b></translation>
</message>
<message>
<location filename="../moneta.cpp" line="449"/>
<source>A fatal error occurred. Moneta can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="164"/>
<source>Network Alert</source>
<translation>Şebeke hakkında uyarı</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="14"/>
<source>Coin Control Address Selection</source>
<translation>Para kontrolü adres seçimi</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="48"/>
<source>Quantity:</source>
<translation>Miktar:</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="77"/>
<source>Bytes:</source>
<translation>Bayt:</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="122"/>
<source>Amount:</source>
<translation>Meblağ:</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="151"/>
<source>Priority:</source>
<translation>Öncelik:</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="196"/>
<source>Fee:</source>
<translation>Ücret:</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="228"/>
<source>Low Output:</source>
<translation>Düşük çıktı:</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="276"/>
<source>After Fee:</source>
<translation>Ücretten sonra:</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="308"/>
<source>Change:</source>
<translation>Para üstü:</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="364"/>
<source>(un)select all</source>
<translation>tümünü seç(me)</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="377"/>
<source>Tree mode</source>
<translation>Ağaç kipi</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="393"/>
<source>List mode</source>
<translation>Liste kipi</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="400"/>
<source>(1 locked)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="446"/>
<source>Amount</source>
<translation>Meblağ</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="451"/>
<source>Label</source>
<translation>Etiket</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="456"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="461"/>
<source>Darksend Rounds</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="466"/>
<source>Date</source>
<translation>Tarih</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="471"/>
<source>Confirmations</source>
<translation>Doğrulamalar</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="474"/>
<source>Confirmed</source>
<translation>Doğrulandı</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="479"/>
<source>Priority</source>
<translation>Öncelik</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="43"/>
<source>Copy address</source>
<translation>Adresi kopyala</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="44"/>
<source>Copy label</source>
<translation>Etiketi kopyala</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="45"/>
<location filename="../coincontroldialog.cpp" line="71"/>
<source>Copy amount</source>
<translation>Meblağı kopyala</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="46"/>
<source>Copy transaction ID</source>
<translation>Muamele kimliğini kopyala</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="47"/>
<source>Lock unspent</source>
<translation>Harcanmamışı kilitle</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="48"/>
<source>Unlock unspent</source>
<translation>Harcanmamışın kilidini aç</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="70"/>
<source>Copy quantity</source>
<translation>Miktarı kopyala</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="72"/>
<source>Copy fee</source>
<translation>Ücreti kopyala</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="73"/>
<source>Copy after fee</source>
<translation>Ücretten sonrakini kopyala</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="74"/>
<source>Copy bytes</source>
<translation>Baytları kopyala</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="75"/>
<source>Copy priority</source>
<translation>Önceliği kopyala</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="76"/>
<source>Copy low output</source>
<translation>Düşük çıktıyı kopyala</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="77"/>
<source>Copy change</source>
<translation>Para üstünü kopyala</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="410"/>
<source>highest</source>
<translation>azami</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="411"/>
<source>higher</source>
<translation>daha yüksek</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="412"/>
<source>high</source>
<translation>yüksek</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="413"/>
<source>medium-high</source>
<translation>orta-yüksek</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="759"/>
<source>n/a</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="164"/>
<location filename="../coincontroldialog.cpp" line="414"/>
<source>medium</source>
<translation>orta</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="418"/>
<source>low-medium</source>
<translation>düşük-orta</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="419"/>
<source>low</source>
<translation>düşük</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="420"/>
<source>lower</source>
<translation>daha düşük</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="421"/>
<source>lowest</source>
<translation>asgari</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="432"/>
<source>(%1 locked)</source>
<translation>(%1 kilitlendi)</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="464"/>
<source>none</source>
<translation>boş</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="615"/>
<source>Dust</source>
<translation>Toz</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="615"/>
<source>yes</source>
<translation>evet</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="244"/>
<location filename="../coincontroldialog.cpp" line="615"/>
<source>no</source>
<translation>hayır</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="625"/>
<source>This label turns red, if the transaction size is greater than 1000 bytes.</source>
<translation>Eğer muamele boyutu 1000 bayttan büyükse bu etiket kırmızı olur.</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="626"/>
<location filename="../coincontroldialog.cpp" line="631"/>
<source>This means a fee of at least %1 per kB is required.</source>
<translation>Bu, kB başına en az %1 ücret gerektiği anlamına gelir.</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="627"/>
<source>Can vary +/- 1 byte per input.</source>
<translation>Girdi başına +/- 1 bayt değişebilir.</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="629"/>
<source>Transactions with higher priority are more likely to get included into a block.</source>
<translation>Yüksek öncelikli muamelelerin bir bloğa dahil olmaları daha olasıdır.</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="630"/>
<source>This label turns red, if the priority is smaller than "medium".</source>
<translation>Eğer öncelik "ortadan" düşükse bu etiket kırmızı olur.</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="633"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.</source>
<translation>Eğer herhangi bir alıcı %1'den düşük bir meblağ alırsa bu etiket kırmızı olur.</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="634"/>
<location filename="../coincontroldialog.cpp" line="638"/>
<source>This means a fee of at least %1 is required.</source>
<translation>Bu, en az %1 tutarında bir ücret gerektiği anlamına gelir.</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="635"/>
<source>Amounts below 0.546 times the minimum relay fee are shown as dust.</source>
<translation>Asgari yönlendirme ücretinin 0.546 oranının altındaki meblağlar toz olarak gösterilir.</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="637"/>
<source>This label turns red, if the change is smaller than %1.</source>
<translation>Eğer para üstü %1'den düşükse bu etiket kırmızı olur.</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="680"/>
<location filename="../coincontroldialog.cpp" line="741"/>
<source>(no label)</source>
<translation>(boş etiket)</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="734"/>
<source>change from %1 (%2)</source>
<translation>%1 unsurundan para üstü (%2)</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="735"/>
<source>(change)</source>
<translation>(para üstü)</translation>
</message>
</context>
<context>
<name>DarksendConfig</name>
<message>
<location filename="../forms/darksendconfig.ui" line="14"/>
<source>Configure Darksend</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/darksendconfig.ui" line="26"/>
<source>Basic Privacy</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/darksendconfig.ui" line="39"/>
<source>High Privacy</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/darksendconfig.ui" line="52"/>
<source>Maximum Privacy</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/darksendconfig.ui" line="65"/>
<source>Please select a privacy level.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/darksendconfig.ui" line="78"/>
<source>Use 2 separate masternodes to mix funds up to 1000 DASH</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/darksendconfig.ui" line="91"/>
<source>Use 8 separate masternodes to mix funds up to 1000 DASH</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/darksendconfig.ui" line="104"/>
<source>Use 16 separate masternodes</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/darksendconfig.ui" line="117"/>
<source>This option is the quickest and will cost about ~0.025 DASH to anonymize 1000 DASH</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/darksendconfig.ui" line="130"/>
<source>This option is moderately fast and will cost about 0.05 DASH to anonymize 1000 DASH</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/darksendconfig.ui" line="156"/>
<source>0.1 DASH per 1000 DASH you anonymize.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/darksendconfig.ui" line="143"/>
<source>This is the slowest and most secure option. Using maximum anonymity will cost</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../darksendconfig.cpp" line="43"/>
<location filename="../darksendconfig.cpp" line="58"/>
<location filename="../darksendconfig.cpp" line="73"/>
<source>Darksend Configuration</source>
<translation>Darksend Konfigürasyonu</translation>
</message>
<message>
<location filename="../darksendconfig.cpp" line="44"/>
<source>Darksend was successfully set to basic (%1 and 2 rounds). You can change this at any time by opening Moneta's configuration screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../darksendconfig.cpp" line="59"/>
<source>Darksend was successfully set to high (%1 and 8 rounds). You can change this at any time by opening Moneta's configuration screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../darksendconfig.cpp" line="74"/>
<source>Darksend was successfully set to maximum (%1 and 16 rounds). You can change this at any time by opening Moneta's configuration screen.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="14"/>
<source>Edit Address</source>
<translation>Adresi düzenle</translation>
</message>
<message>
<location filename="../forms/editaddressdialog.ui" line="25"/>
<source>&Label</source>
<translation>&Etiket</translation>
</message>
<message>
<location filename="../forms/editaddressdialog.ui" line="35"/>
<source>The label associated with this address list entry</source>
<translation>Bu adres listesi girdisi ile ilişkili etiket</translation>
</message>
<message>
<location filename="../forms/editaddressdialog.ui" line="42"/>
<source>&Address</source>
<translation>&Adres</translation>
</message>
<message>
<location filename="../forms/editaddressdialog.ui" line="52"/>
<source>The address associated with this address list entry. This can only be modified for sending addresses.</source>
<translation>Bu adres listesi girdisi ile ilişkili adres. Sadece gönderme adresleri için değiştirilebilir.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="29"/>
<source>New receiving address</source>
<translation>Yeni alım adresi</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="33"/>
<source>New sending address</source>
<translation>Yeni gönderi adresi</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="36"/>
<source>Edit receiving address</source>
<translation>Alım adresini düzenle</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="40"/>
<source>Edit sending address</source>
<translation>Gönderi adresini düzenle</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="111"/>
<source>The entered address "%1" is not a valid Moneta address.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="116"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Girilen "%1" adresi hâlihazırda adres defterinde mevcuttur.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="121"/>
<source>Could not unlock wallet.</source>
<translation>Cüzdan kilidi açılamadı.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="126"/>
<source>New key generation failed.</source>
<translation>Yeni anahtar oluşturulması başarısız oldu.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<location filename="../intro.cpp" line="69"/>
<source>A new data directory will be created.</source>
<translation>Yeni bir veri klasörü oluşturulacaktır.</translation>
</message>
<message>
<location filename="../intro.cpp" line="91"/>
<source>name</source>
<translation>isim</translation>
</message>
<message>
<location filename="../intro.cpp" line="93"/>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>Klasör hâlihazırda mevcuttur. Burada yeni bir klasör oluşturmak istiyorsanız, %1 ilâve ediniz.</translation>
</message>
<message>
<location filename="../intro.cpp" line="96"/>
<source>Path already exists, and is not a directory.</source>
<translation>Erişim yolu zaten mevcuttur ve klasör değildir.</translation>
</message>
<message>
<location filename="../intro.cpp" line="103"/>
<source>Cannot create data directory here.</source>
<translation>Burada veri klasörü oluşturulamaz.</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<location filename="../forms/helpmessagedialog.ui" line="19"/>
<source>Moneta Core - Command-line options</source>
<translation>Moneta Çekirdeği - Komut satırı seçenekleri</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="69"/>
<source>Moneta Core</source>
<translation>Moneta Çekirdeği</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="69"/>
<source>version</source>
<translation>sürüm</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="71"/>
<source>Usage:</source>
<translation>Kullanım:</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="72"/>
<source>command-line options</source>
<translation>komut satırı seçenekleri</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="76"/>
<source>UI options</source>
<translation>Kullanıcı arayüzü seçenekleri</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="77"/>
<source>Choose data directory on startup (default: 0)</source>
<translation>Başlangıçta veri klasörü seç (varsayılan: 0)</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="78"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Lisan belirt, mesela "de_DE" (varsayılan: sistem dili)</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="79"/>
<source>Start minimized</source>
<translation>Küçültülmüş olarak başlat</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="80"/>
<source>Set SSL root certificates for payment request (default: -system-)</source>
<translation>Ödeme talebi için SSL kök sertifikalarını belirle (varsayılan: -system-)</translation>
</message>
<message>
<location filename="../utilitydialog.cpp" line="81"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>Başlatıldığında başlangıç ekranını göster (varsayılan: 1)</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<location filename="../forms/intro.ui" line="14"/>
<source>Welcome</source>
<translation>Hoş geldiniz</translation>
</message>
<message>
<location filename="../forms/intro.ui" line="23"/>
<source>Welcome to Moneta Core.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/intro.ui" line="49"/>
<source>As this is the first time the program is launched, you can choose where Moneta Core will store its data.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/intro.ui" line="59"/>
<source>Moneta Core will download and store a copy of the Moneta block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/intro.ui" line="69"/>
<source>Use the default data directory</source>
<translation>Varsayılan veri klasörünü kullan</translation>
</message>
<message>
<location filename="../forms/intro.ui" line="76"/>
<source>Use a custom data directory:</source>
<translation>Özel bir veri klasörü kullan:</translation>
</message>
<message>
<location filename="../intro.cpp" line="185"/>
<source>Moneta</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../intro.cpp" line="186"/>
<source>Error: Specified data directory "%1" can not be created.</source>
<translation>Hata: belirtilen "%1" veri klasörü oluşturulamaz.</translation>
</message>
<message>
<location filename="../intro.cpp" line="210"/>
<source>Error</source>
<translation>Hata</translation>
</message>
<message>
<location filename="../intro.cpp" line="219"/>
<source>GB of free space available</source>
<translation>GB boş alan mevcuttur</translation>
</message>
<message>
<location filename="../intro.cpp" line="222"/>
<source>(of %1GB needed)</source>
<translation>(gereken boyut: %1GB)</translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<location filename="../forms/openuridialog.ui" line="14"/>
<source>Open URI</source>
<translation>URI aç</translation>
</message>
<message>
<location filename="../forms/openuridialog.ui" line="20"/>
<source>Open payment request from URI or file</source>
<translation>Dosyadan veya URI'den ödeme talebi aç</translation>
</message>
<message>
<location filename="../forms/openuridialog.ui" line="29"/>
<source>URI:</source>
<translation>URI:</translation>
</message>
<message>
<location filename="../forms/openuridialog.ui" line="40"/>
<source>Select payment request file</source>
<translation>Ödeme talebi dosyasını seç</translation>
</message>
<message>
<location filename="../openuridialog.cpp" line="48"/>
<source>Select payment request file to open</source>
<translation>Açılacak ödeme talebi dosyasını seç</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="14"/>
<source>Options</source>
<translation>Seçenekler</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="27"/>
<source>&Main</source>
<translation>&Esas ayarlar</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="33"/>
<source>Automatically start Moneta after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="36"/>
<source>&Start Moneta on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="45"/>
<source>Size of &database cache</source>
<translation>&Veritabanı tamponunun boyutu</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="61"/>
<source>MB</source>
<translation>MB</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="88"/>
<source>Number of script &verification threads</source>
<translation>Betik &doğrulama iş parçacığı sayısı</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="101"/>
<source>(0 = auto, <0 = leave that many cores free)</source>
<translation>(0 = otomatik, <0 = bu kadar çekirdeği kullanma)</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="138"/>
<source><html><head/><body><p>This setting determines the amount of individual masternodes that an input will be anonymized through. More rounds of anonymization gives a higher degree of privacy, but also costs more in fees.</p></body></html></source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="141"/>
<source>Darksend rounds to use</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="165"/>
<source>This amount acts as a threshold to turn off Darksend once it's reached.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="180"/>
<source>Amount of Moneta to keep anonymized</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="206"/>
<source>W&allet</source>
<translation>&Cüzdan</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="212"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation>Muamelelerin hızlı işlenmesini garantilemeye yardım eden, seçime dayalı kB başı muamele ücreti. Muamelelerin çoğunluğunun boyutu 1 kB'dir.</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="227"/>
<source>Pay transaction &fee</source>
<translation>Muamele ücreti &öde</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="271"/>
<source>Expert</source>
<translation>Gelişmiş</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="277"/>
<source>Whether to show coin control features or not.</source>
<translation>Para kontrol özelliklerinin gösterilip gösterilmeyeceğini ayarlar.</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="280"/>
<source>Enable coin &control features</source>
<translation>Para &kontrolü özelliklerini etkinleştir</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="287"/>
<source>If you disable the spending of unconfirmed change, the change from a transaction cannot be used until that transaction has at least one confirmation. This also affects how your balance is computed.</source>
<translation>Teyit edilmemiş para üstünü harcamayı devre dışı bırakırsanız, bir muamelenin para üstü bu muamele için en az bir teyit olana dek harcanamaz. Bu, aynı zamanda bakiyenizin nasıl hesaplandığını da etkiler.</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="290"/>
<source>&Spend unconfirmed change</source>
<translation>Teyit edilmemiş para üstünü &harca</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="301"/>
<source>&Network</source>
<translation>&Şebeke</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="307"/>
<source>Automatically open the Moneta client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="310"/>
<source>Map port using &UPnP</source>
<translation>Portları &UPnP kullanarak haritala</translation> | <translation type="unfinished"/>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="320"/>
<source>&Connect through SOCKS proxy (default proxy):</source>
<translation>SOCKS vekil sunucusuyla &bağlan (varsayılan vekil):</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="329"/>
<source>Proxy &IP:</source>
<translation>Vekil &İP:</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="354"/>
<source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source>
<translation>Vekil sunucusunun IP adresi (mesela IPv4: 127.0.0.1 / IPv6: ::1)</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="361"/>
<source>&Port:</source>
<translation>&Port:</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="386"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Vekil sunucunun portu (mesela 9050)</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="393"/>
<source>SOCKS &Version:</source>
<translation>SOCKS &sürümü:</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="406"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>Vekil sunucunun SOCKS sürümü (mesela 5)</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="442"/>
<source>&Window</source>
<translation>&Pencere</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="448"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Küçültüldükten sonra sadece çekmece ikonu göster.</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="451"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>İşlem çubuğu yerine sistem çekmecesine &küçült</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="458"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Pencere kapatıldığında uygulamadan çıkmak yerine uygulamayı küçültür. Bu seçenek etkinleştirildiğinde, uygulama sadece menüden çıkış seçildiğinde kapanacaktır.</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="461"/>
<source>M&inimize on close</source>
<translation>Kapatma sırasında k&üçült</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="482"/>
<source>&Display</source>
<translation>&Görünüm</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="490"/>
<source>User Interface &language:</source>
<translation>Kullanıcı arayüzü &lisanı:</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="503"/>
<source>The user interface language can be set here. This setting will take effect after restarting Moneta.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="524"/>
<source>Language missing or translation incomplete? Help contributing translations here:
https://www.transifex.com/projects/p/moneta/</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="550"/>
<source>&Unit to show amounts in:</source>
<translation>Meblağları göstermek için &birim:</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="563"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Bitcoin gönderildiğinde arayüzde gösterilecek varsayılan alt birimi seçiniz.</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="572"/>
<source>Whether to show Moneta addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="575"/>
<source>&Display addresses in transaction list</source>
<translation>Muamele listesinde adresleri &göster</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="584"/>
<location filename="../forms/optionsdialog.ui" line="597"/>
<source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source>
<translation>Muameleler sekmesinde bağlam menüsü unsurları olarak görünen üçüncü taraf bağlantıları (mesela bir blok tarayıcısı). URL'deki %s, muamele hash değeri ile değiştirilecektir. Birden çok bağlantılar düşey çubuklar | ile ayrılacaktır.</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="587"/>
<source>Third party transaction URLs</source>
<translation>Üçüncü taraf muamele URL'leri</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="628"/>
<source>Active command-line options that override above options:</source>
<translation>Yukarıdaki seçeneklerin yerine geçen faal komut satırı seçenekleri:</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="671"/>
<source>Reset all client options to default.</source>
<translation>İstemcinin tüm seçeneklerini varsayılan değerlere geri al.</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="674"/>
<source>&Reset Options</source>
<translation>Seçenekleri Sıfı&rla</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="735"/>
<source>&OK</source>
<translation>&Tamam</translation>
</message>
<message>
<location filename="../forms/optionsdialog.ui" line="742"/>
<source>&Cancel</source>
<translation>&İptal</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="73"/>
<source>default</source>
<translation>varsayılan</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="134"/>
<source>none</source>
<translation>boş</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="222"/>
<source>Confirm options reset</source>
<translation>Seçeneklerin sıfırlanmasını teyit et</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="223"/>
<location filename="../optionsdialog.cpp" line="253"/>
<source>Client restart required to activate changes.</source>
<translation>Değişikliklerin uygulanması için istemcinin yeniden başlatılması lazımdır.</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="223"/>
<source>Client will be shutdown, do you want to proceed?</source>
<translation>İstemci kapanacaktır, devam etmek istiyor musunuz?</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="257"/>
<source>This change would require a client restart.</source>
<translation>Bu değişiklik istemcinin tekrar başlatılmasını gerektirir.</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="291"/>
<source>The supplied proxy address is invalid.</source>
<translation>Girilen vekil sunucu adresi geçersizdir.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="20"/>
<source>Form</source>
<translation>Form</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="63"/>
<source>Wallet</source>
<translation>Cüzdan</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="70"/>
<location filename="../forms/overviewpage.ui" line="953"/>
<location filename="../forms/overviewpage.ui" line="1021"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Moneta network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="114"/>
<source>Available:</source>
<translation>Mevcut:</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="130"/>
<source>Your current spendable balance</source>
<translation>Güncel harcanabilir bakiyeniz</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="146"/>
<source>Pending:</source>
<translation>Beklemede:</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="162"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source>
<translation>Henüz teyit edilmemiş ve harcanabilir bakiyeye eklenmemiş muamelelerin toplamı</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="178"/>
<source>Immature:</source>
<translation>Olgunlaşmamış:</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="191"/>
<source>Mined balance that has not yet matured</source>
<translation>Oluşturulan bakiye henüz olgunlaşmamıştır</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="207"/>
<source>Total:</source>
<translation>Toplam:</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="223"/>
<source>Your current total balance</source>
<translation>Güncel toplam bakiyeniz</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="302"/>
<source>Status:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="309"/>
<source>Enabled/Disabled</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="316"/>
<source>Completion:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="336"/>
<source>Darksend Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="349"/>
<source>0 DASH</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="356"/>
<source>Amount and Rounds:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="363"/>
<source>0 DASH / 0 Rounds</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="370"/>
<source>Submitted Denom:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="377"/>
<source>The denominations you submitted to the Masternode. To mix, other users must submit the exact same denominations.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="380"/>
<source>n/a</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="946"/>
<location filename="../overviewpage.cpp" line="453"/>
<location filename="../overviewpage.cpp" line="467"/>
<location filename="../overviewpage.cpp" line="481"/>
<source>Darksend</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="839"/>
<source>Start/Stop Mixing</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="871"/>
<source>(Last Message)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="896"/>
<source>Try to manually submit a Darksend request.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="899"/>
<source>Try Mix</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="918"/>
<source>Reset the current status of Darksend (can interrupt Darksend if it's in the process of Mixing, which can cost you money!)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="924"/>
<source>Reset</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="1014"/>
<source><b>Recent transactions</b></source>
<translation><b>Son muameleler</b></translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="126"/>
<location filename="../overviewpage.cpp" line="127"/>
<location filename="../overviewpage.cpp" line="128"/>
<source>out of sync</source>
<translation>eşleşme dışı</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="143"/>
<location filename="../overviewpage.cpp" line="350"/>
<source>Disabled</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="146"/>
<location filename="../overviewpage.cpp" line="352"/>
<location filename="../overviewpage.cpp" line="495"/>
<source>Start Darksend Mixing</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="148"/>
<location filename="../overviewpage.cpp" line="497"/>
<source>Stop Darksend Mixing</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="270"/>
<source>No inputs detected</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="278"/>
<source>Found unconfirmed denominated outputs, will wait till they confirm to recalculate.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="335"/>
<source>Rounds</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="366"/>
<source>Enabled</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="424"/>
<source>Last Darksend message:
</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="377"/>
<location filename="../overviewpage.cpp" line="406"/>
<source>Darksend is idle.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="381"/>
<source>Mixing in progress...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="387"/>
<source>Darksend request complete: Your transaction was accepted into the pool!</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="393"/>
<source>Submitted following entries to masternode:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="394"/>
<location filename="../overviewpage.cpp" line="395"/>
<location filename="../overviewpage.cpp" line="396"/>
<source>Submitted to masternode, Waiting for more entries</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="399"/>
<source>Found enough users, signing ...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="400"/>
<source>Found enough users, signing ( waiting. )</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="401"/>
<source>Found enough users, signing ( waiting.. )</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="402"/>
<source>Found enough users, signing ( waiting... )</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="404"/>
<source>Transmitting final transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="408"/>
<source>Finalizing transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="410"/>
<source>Darksend request incomplete:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="410"/>
<source>Will retry...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="412"/>
<source>Darksend request complete:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="414"/>
<source>Submitted to masternode, waiting in queue .</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="415"/>
<source>Submitted to masternode, waiting in queue ..</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="416"/>
<source>Submitted to masternode, waiting in queue ...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="418"/>
<source>Unknown state:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="432"/>
<source>N/A</source>
<translation>Mevcut değil</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="454"/>
<source>Darksend was successfully reset.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="468"/>
<source>Darksend requires at least %1 to use.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="482"/>
<source>Wallet is locked and user declined to unlock. Disabling Darksend.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="293"/>
<location filename="../paymentserver.cpp" line="505"/>
<location filename="../paymentserver.cpp" line="518"/>
<location filename="../paymentserver.cpp" line="613"/>
<location filename="../paymentserver.cpp" line="631"/>
<location filename="../paymentserver.cpp" line="647"/>
<source>Payment request error</source>
<translation>Ödeme talebi hatası</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="294"/>
<source>Cannot start moneta: click-to-pay handler</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../paymentserver.cpp" line="352"/>
<source>Net manager warning</source>
<translation>Şebeke yöneticisi uyarısı</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="353"/>
<source>Your active proxy doesn't support SOCKS5, which is required for payment requests via proxy.</source>
<translation>Faal vekil sunucunuz, vekil vasıtasıyla ödeme talepleri için gereken SOCKS5'i desteklememektedir.</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="404"/>
<location filename="../paymentserver.cpp" line="417"/>
<source>URI handling</source>
<translation>URI yönetimi</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="405"/>
<source>Payment request fetch URL is invalid: %1</source>
<translation>Ödeme talebini alma URL'i geçersiz: %1</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="418"/>
<source>URI can not be parsed! This can be caused by an invalid Moneta address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../paymentserver.cpp" line="432"/>
<source>Payment request file handling</source>
<translation>Ödeme talebi dosyası yönetimi</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="433"/>
<source>Payment request file can not be read or processed! This can be caused by an invalid payment request file.</source>
<translation>Ödeme talebi okunamaz ya da işlenemez! Bunun sebebi geçersiz bir ödeme talebi dosyası olabilir.</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="506"/>
<source>Unverified payment requests to custom payment scripts are unsupported.</source>
<translation>Özel ödeme betiklerine teyit edilmemiş ödeme talepleri desteklenmez.</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="514"/>
<source>Requested payment amount of %1 is too small (considered dust).</source>
<translation>Talep edilen %1 meblağında ödeme çok düşüktür (toz olarak kabul edilir).</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="565"/>
<source>Refund from %1</source>
<translation>%1 öğesinden iade</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="608"/>
<source>Error communicating with %1: %2</source>
<translation>%1 ile iletişimde hata: %2</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="632"/>
<source>Payment request can not be parsed or processed!</source>
<translation>Ödeme talebi ayrıştırılamaz ya da işlenemez!</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="643"/>
<source>Bad response from server %1</source>
<translation>%1 sunucusundan hatalı cevap</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="665"/>
<source>Network request error</source>
<translation>Şebeke talebi hatası</translation>
</message>
<message>
<location filename="../paymentserver.cpp" line="676"/>
<source>Payment acknowledged</source>
<translation>Ödeme teyit edildi</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<location filename="../moneta.cpp" line="525"/>
<location filename="../moneta.cpp" line="532"/>
<location filename="../moneta.cpp" line="539"/>
<location filename="../moneta.cpp" line="552"/>
<source>Moneta</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../moneta.cpp" line="526"/>
<source>Error: Specified data directory "%1" does not exist.</source>
<translation>Hata: belirtilen "%1" veri klasörü yoktur.</translation>
</message>
<message>
<location filename="../moneta.cpp" line="533"/>
<source>Error: Cannot parse configuration file: %1. Only use key=value syntax.</source>
<translation>Hata: %1 yapılandırma dosyası ayrıştırılamadı. Sadece anahtar=değer dizimini kullanınız.</translation>
</message>
<message>
<location filename="../moneta.cpp" line="540"/>
<source>Error reading masternode configuration file: %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../moneta.cpp" line="552"/>
<source>Error: Invalid combination of -regtest and -testnet.</source>
<translation>Hata: -regtest ve -testnet'in geçersiz kombinasyonu.</translation>
</message>
<message>
<location filename="../moneta.cpp" line="612"/>
<source>Moneta Core didn't yet exit safely...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../guiutil.cpp" line="101"/>
<source>Enter a Moneta address (e.g. XwnLY9Tf7Zsef8gMGL2fhWA9ZmMjt4KPwg)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRImageWidget</name>
<message>
<location filename="../receiverequestdialog.cpp" line="36"/>
<source>&Save Image...</source>
<translation>Resmi k&aydet...</translation>
</message>
<message>
<location filename="../receiverequestdialog.cpp" line="39"/>
<source>&Copy Image</source>
<translation>Resmi &kopyala</translation>
</message>
<message>
<location filename="../receiverequestdialog.cpp" line="71"/>
<source>Save QR Code</source>
<translation>QR kodu kaydet</translation>
</message>
<message>
<location filename="../receiverequestdialog.cpp" line="71"/>
<source>PNG Image (*.png)</source>
<translation>PNG resim (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="14"/>
<source>Tools window</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="24"/>
<source>&Information</source>
<translation>&Malumat</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="33"/>
<source>Masternode Count</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="46"/>
<source>General</source>
<translation>Genel</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="53"/>
<source>Name</source>
<translation>İsim</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="60"/>
<source>Client name</source>
<translation>İstemci ismi</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="70"/>
<location filename="../forms/rpcconsole.ui" line="139"/>
<location filename="../forms/rpcconsole.ui" line="192"/>
<location filename="../forms/rpcconsole.ui" line="215"/>
<location filename="../forms/rpcconsole.ui" line="231"/>
<location filename="../forms/rpcconsole.ui" line="247"/>
<location filename="../forms/rpcconsole.ui" line="276"/>
<location filename="../forms/rpcconsole.ui" line="292"/>
<location filename="../forms/rpcconsole.ui" line="315"/>
<location filename="../forms/rpcconsole.ui" line="341"/>
<source>N/A</source>
<translation>Mevcut değil</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="83"/>
<source>Number of connections</source>
<translation>Bağlantı sayısı</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="90"/>
<source>Open the Moneta debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="93"/>
<source>&Open</source>
<translation>&Aç</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="116"/>
<source>Startup time</source>
<translation>Başlama zamanı</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="129"/>
<source>Network</source>
<translation>Şebeke</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="152"/>
<source>Last block time</source>
<translation>Son blok zamanı</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="165"/>
<source>Debug log file</source>
<translation>Hata ayıklama kütük dosyası</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="172"/>
<source>Using OpenSSL version</source>
<translation>Kullanılan OpenSSL sürümü</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="182"/>
<source>Build date</source>
<translation>Derleme tarihi</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="205"/>
<source>Current number of blocks</source>
<translation>Güncel blok sayısı</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="305"/>
<source>Client version</source>
<translation>İstemci sürümü</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="334"/>
<source>Block chain</source>
<translation>Blok zinciri</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="349"/>
<source>&Console</source>
<translation>&Konsol</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="398"/>
<source>Clear console</source>
<translation>Konsolu temizle</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="421"/>
<source>&Network Traffic</source>
<translation>&Şebeke trafiği</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="473"/>
<source>&Clear</source>
<translation>&Temizle</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="486"/>
<source>Totals</source>
<translation>Toplamlar</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="550"/>
<location filename="../rpcconsole.cpp" line="367"/>
<source>In:</source>
<translation>İçeri:</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="630"/>
<location filename="../rpcconsole.cpp" line="368"/>
<source>Out:</source>
<translation>Dışarı:</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="333"/>
<source>Welcome to the Moneta RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="334"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Tarihçede gezinmek için imleç tuşlarını kullanınız, <b>Ctrl-L</b> ile de ekranı temizleyebilirsiniz.</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="335"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Mevcut komutların listesi için <b>help</b> yazınız.</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="474"/>
<source>%1 B</source>
<translation>%1 B</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="476"/>
<source>%1 KB</source>
<translation>%1 KB</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="478"/>
<source>%1 MB</source>
<translation>%1 MB</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="480"/>
<source>%1 GB</source>
<translation>%1 GB</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="487"/>
<source>%1 m</source>
<translation>%1 d</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="492"/>
<source>%1 h</source>
<translation>%1 s</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="494"/>
<source>%1 h %2 m</source>
<translation>%1 s %2 d</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="34"/>
<source>Reuse one of the previously used receiving addresses. Reusing addresses has security and privacy issues. Do not use this unless re-generating a payment request made before.</source>
<translation>Daha önce kullanılmış bir alım adresini kullan. Adresleri tekrar kullanmak güvenlik ve gizlilik sorunları doğurur. Bunu, daha önce yaptığınız bir talebi tekrar oluşturmak durumu dışında kullanmayınız.</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="37"/>
<source>R&euse an existing receiving address (not recommended)</source>
<translation>&Hâlihazırda bulunan bir alım adresini kullan (önerilmez)</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="51"/>
<location filename="../forms/receivecoinsdialog.ui" line="74"/>
<source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the Moneta network.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="54"/>
<source>&Message:</source>
<translation>Me&saj:</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="67"/>
<location filename="../forms/receivecoinsdialog.ui" line="88"/>
<source>An optional label to associate with the new receiving address.</source>
<translation>Yeni alım adresi ile ilişkili, seçiminize dayalı etiket.</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="81"/>
<source>Use this form to request payments. All fields are <b>optional</b>.</source>
<translation>Ödeme talep etmek için bu formu kullanın. Tüm alanlar <b>seçime dayalıdır</b>.</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="91"/>
<source>&Label:</source>
<translation>&Etiket:</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="104"/>
<location filename="../forms/receivecoinsdialog.ui" line="126"/>
<source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source>
<translation>Seçiminize dayalı talep edilecek meblağ. Belli bir meblağ talep etmemek için bunu boş bırakın veya sıfır değerini kullanın.</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="107"/>
<source>&Amount:</source>
<translation>&Meblağ:</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="141"/>
<source>&Request payment</source>
<translation>Ödeme &talep et</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="158"/>
<source>Clear all fields of the form.</source>
<translation>Formdaki tüm alanları temizle.</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="161"/>
<source>Clear</source>
<translation>Temizle</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="239"/>
<source>Requested payments history</source>
<translation>Talep edilen ödemelerin tarihçesi</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="261"/>
<source>Show the selected request (does the same as double clicking an entry)</source>
<translation>Seçilen talebi göster (bir unsura çift tıklamakla aynı anlama gelir)</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="264"/>
<source>Show</source>
<translation>Göster</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="278"/>
<source>Remove the selected entries from the list</source>
<translation>Seçilen unsurları listeden kaldır</translation>
</message>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="281"/>
<source>Remove</source>
<translation>Kaldır</translation>
</message>
<message>
<location filename="../receivecoinsdialog.cpp" line="39"/>
<source>Copy label</source>
<translation>Etiketi kopyala</translation>
</message>
<message>
<location filename="../receivecoinsdialog.cpp" line="40"/>
<source>Copy message</source>
<translation>Mesajı kopyala</translation>
</message>
<message>
<location filename="../receivecoinsdialog.cpp" line="41"/>
<source>Copy amount</source>
<translation>Meblağı kopyala</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<location filename="../forms/receiverequestdialog.ui" line="29"/>
<source>QR Code</source>
<translation>QR Kodu</translation>
</message>
<message>
<location filename="../forms/receiverequestdialog.ui" line="75"/>
<source>Copy &URI</source>
<translation>&URI'yi kopyala</translation>
</message>
<message>
<location filename="../forms/receiverequestdialog.ui" line="82"/>
<source>Copy &Address</source>
<translation>&Adresi kopyala</translation>
</message>
<message>
<location filename="../forms/receiverequestdialog.ui" line="89"/>
<source>&Save Image...</source>
<translation>Resmi ka&ydet...</translation>
</message>
<message>
<location filename="../receiverequestdialog.cpp" line="136"/>
<source>Request payment to %1</source>
<translation>%1 unsuruna ödeme talep et</translation>
</message>
<message>
<location filename="../receiverequestdialog.cpp" line="142"/>
<source>Payment information</source>
<translation>Ödeme bilgisi</translation>
</message>
<message>
<location filename="../receiverequestdialog.cpp" line="143"/>
<source>URI</source>
<translation>URI</translation>
</message>
<message>
<location filename="../receiverequestdialog.cpp" line="145"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location filename="../receiverequestdialog.cpp" line="147"/>
<source>Amount</source>
<translation>Meblağ</translation>
</message>
<message>
<location filename="../receiverequestdialog.cpp" line="149"/>
<source>Label</source>
<translation>Etiket</translation>
</message>
<message>
<location filename="../receiverequestdialog.cpp" line="151"/>
<source>Message</source>
<translation>Mesaj</translation>
</message>
<message>
<location filename="../receiverequestdialog.cpp" line="161"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>Sonuç URI çok uzun, etiket ya da mesaj metnini kısaltmayı deneyiniz.</translation>
</message>
<message>
<location filename="../receiverequestdialog.cpp" line="166"/>
<source>Error encoding URI into QR Code.</source>
<translation>URI'nin QR koduna kodlanmasında hata oluştu.</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<location filename="../recentrequeststablemodel.cpp" line="24"/>
<source>Date</source>
<translation>Tarih</translation>
</message>
<message>
<location filename="../recentrequeststablemodel.cpp" line="24"/>
<source>Label</source>
<translation>Etiket</translation>
</message>
<message>
<location filename="../recentrequeststablemodel.cpp" line="24"/>
<source>Message</source>
<translation>Mesaj</translation>
</message>
<message>
<location filename="../recentrequeststablemodel.cpp" line="24"/>
<source>Amount</source>
<translation>Meblağ</translation>
</message>
<message>
<location filename="../recentrequeststablemodel.cpp" line="62"/>
<source>(no label)</source>
<translation>(boş etiket)</translation>
</message>
<message>
<location filename="../recentrequeststablemodel.cpp" line="71"/>
<source>(no message)</source>
<translation>(boş mesaj)</translation>
</message>
<message>
<location filename="../recentrequeststablemodel.cpp" line="79"/>
<source>(no amount)</source>
<translation>(boş meblağ)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="14"/>
<location filename="../sendcoinsdialog.cpp" line="429"/>
<location filename="../sendcoinsdialog.cpp" line="521"/>
<location filename="../sendcoinsdialog.cpp" line="531"/>
<source>Send Coins</source>
        <translation>Para yolla</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="90"/>
<source>Coin Control Features</source>
<translation>Para kontrolü özellikleri</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="110"/>
<source>Inputs...</source>
<translation>Girdiler...</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="117"/>
<source>automatically selected</source>
<translation>otomatik seçilmiş</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="136"/>
<source>Insufficient funds!</source>
<translation>Yetersiz fon!</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="225"/>
<source>Quantity:</source>
<translation>Miktar:</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="260"/>
<source>Bytes:</source>
<translation>Bayt:</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="308"/>
<source>Amount:</source>
<translation>Meblağ:</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="340"/>
<source>Priority:</source>
<translation>Öncelik:</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="353"/>
<source>medium</source>
<translation>orta</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="388"/>
<source>Fee:</source>
<translation>Ücret:</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="420"/>
<source>Low Output:</source>
<translation>Düşük çıktı:</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="433"/>
<source>no</source>
<translation>hayır</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="468"/>
<source>After Fee:</source>
<translation>Ücretten sonra:</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="500"/>
<source>Change:</source>
<translation>Para üstü:</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="544"/>
<source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source>
<translation>Bu etkinleştirildiyse fakat para üstü adresi boş ya da geçersizse para üstü yeni oluşturulan bir adrese gönderilecektir.</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="547"/>
<source>Custom change address</source>
<translation>Özel para üstü adresi</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="668"/>
<source>Confirm the send action</source>
<translation>Yollama etkinliğini teyit ediniz</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="671"/>
<source>S&end</source>
<translation>G&önder</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="691"/>
<source>Clear all fields of the form.</source>
<translation>Formdaki tüm alanları temizle.</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="694"/>
<source>Clear &All</source>
<translation>Tümünü &temizle</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="711"/>
<source>Send to multiple recipients at once</source>
<translation>Birçok alıcıya aynı anda gönder</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="714"/>
<source>Add &Recipient</source>
<translation>&Alıcı ekle</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="752"/>
<source>Darksend</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="771"/>
<source>InstantX</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="778"/>
<source>Balance:</source>
<translation>Bakiye:</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="53"/>
<source>Copy quantity</source>
<translation>Miktarı kopyala</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="54"/>
<source>Copy amount</source>
<translation>Meblağı kopyala</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="55"/>
<source>Copy fee</source>
<translation>Ücreti kopyala</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="56"/>
<source>Copy after fee</source>
<translation>Ücretten sonrakini kopyala</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="57"/>
<source>Copy bytes</source>
<translation>Baytları kopyala</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="58"/>
<source>Copy priority</source>
<translation>Önceliği kopyala</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="59"/>
<source>Copy low output</source>
<translation>Düşük çıktıyı kopyala</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="60"/>
<source>Copy change</source>
<translation>Para üstünü kopyala</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="145"/>
<location filename="../sendcoinsdialog.cpp" line="151"/>
<location filename="../sendcoinsdialog.cpp" line="160"/>
<source>using</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="145"/>
<location filename="../sendcoinsdialog.cpp" line="151"/>
<source>anonymous funds</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="155"/>
<source>(darksend requires this amount to be rounded up to the nearest %1).</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="160"/>
<source>any available funds (not recommended)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="166"/>
<source>and InstantX</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="190"/>
<location filename="../sendcoinsdialog.cpp" line="195"/>
<location filename="../sendcoinsdialog.cpp" line="200"/>
<location filename="../sendcoinsdialog.cpp" line="204"/>
<source>%1 to %2</source>
<translation>%1 öğesinden %2 unsuruna</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="253"/>
<source>Are you sure you want to send?</source>
<translation>Göndermek istediğinizden emin misiniz?</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="262"/>
<source>are added as transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="276"/>
<source>Total Amount %1 (= %2)</source>
<translation>Toplam meblağ %1 (= %2)</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="278"/>
<source>or</source>
<translation>veya</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="280"/>
<source>Confirm send coins</source>
<translation>Gönderiyi teyit ediniz</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="435"/>
<source>Payment request expired</source>
<translation>Ödeme talebinin ömrü doldu</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="443"/>
<source>Invalid payment address %1</source>
<translation>Geçersiz ödeme adresi %1</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="498"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>Alıcı adresi geçerli değildir, lütfen denetleyiniz.</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="501"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Ödeyeceğiniz tutarın sıfırdan yüksek olması gerekir.</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="504"/>
<source>The amount exceeds your balance.</source>
<translation>Tutar bakiyenizden yüksektir.</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="507"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Toplam, %1 muamele ücreti ilâve edildiğinde bakiyenizi geçmektedir.</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="510"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Çift adres bulundu, belli bir gönderi sırasında her adrese sadece tek bir gönderide bulunulabilir.</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="513"/>
<source>Transaction creation failed!</source>
<translation>Muamelenin oluşturulması başarısız oldu!</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="517"/>
<source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Muamele reddedildi! Cüzdanınızdaki madenî paraların bazıları zaten harcanmış olduğunda bu meydana gelebilir. Örneğin wallet.dat dosyasının bir kopyasını kullandıysanız ve kopyada para harcandığında ancak burada harcandığı işaretlenmediğinde.</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="522"/>
<source>Error: The wallet was unlocked only to anonymize coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="635"/>
<source>Warning: Invalid Moneta address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="644"/>
<source>Warning: Unknown change address</source>
        <translation>Uyarı: bilinmeyen para üstü adresi</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="655"/>
<source>(no label)</source>
<translation>(boş etiket)</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="21"/>
<source>This is a normal payment.</source>
<translation>Bu, normal bir ödemedir.</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="36"/>
<source>Pay &To:</source>
<translation>&Şu adrese öde:</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="54"/>
<source>The address to send the payment to (e.g. XwnLY9Tf7Zsef8gMGL2fhWA9ZmMjt4KPwg)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="61"/>
<source>Choose previously used address</source>
<translation>Önceden kullanılmış adres seç</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="71"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="78"/>
<source>Paste address from clipboard</source>
<translation>Panodan adres yapıştır</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="88"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="95"/>
<location filename="../forms/sendcoinsentry.ui" line="619"/>
<location filename="../forms/sendcoinsentry.ui" line="1155"/>
<source>Remove this entry</source>
<translation>Bu unsuru kaldır</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="111"/>
<source>&Label:</source>
<translation>&Etiket:</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="124"/>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation>Kullanılmış adres listesine eklemek için bu adrese bir etiket girin</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="131"/>
<location filename="../forms/sendcoinsentry.ui" line="652"/>
<location filename="../forms/sendcoinsentry.ui" line="1188"/>
<source>A&mount:</source>
<translation>Mebla&ğ:</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="147"/>
<source>Message:</source>
<translation>Mesaj:</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="157"/>
<source>A message that was attached to the moneta: URI which will be stored with the transaction for your reference. Note: This message will not be sent over the Moneta network.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="583"/>
<source>This is an unverified payment request.</source>
<translation>Bu, teyit edilmemiş bir ödeme talebidir.</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="601"/>
<location filename="../forms/sendcoinsentry.ui" line="1133"/>
<source>Pay To:</source>
<translation>Şu adrese öde:</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="635"/>
<location filename="../forms/sendcoinsentry.ui" line="1171"/>
<source>Memo:</source>
<translation>Not:</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="1115"/>
<source>This is a verified payment request.</source>
<translation>Bu, teyit edilmiş bir ödeme talebidir.</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="31"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Adres defterinize eklemek için bu adrese ilişik bir etiket giriniz</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
<message>
<location filename="../utilitydialog.cpp" line="129"/>
<source>Moneta Core is shutting down...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../utilitydialog.cpp" line="130"/>
<source>Do not shut down the computer until this window disappears.</source>
<translation>Bu pencere kalkıncaya dek bilgisayarı kapatmayınız.</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>İmzalar - Mesaj İmzala / Kontrol et</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="24"/>
<source>&Sign Message</source>
<translation>Mesaj &imzala</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="30"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Bir adresin sizin olduğunu ispatlamak için adresinizle mesaj imzalayabilirsiniz. Oltalama saldırılarının kimliğinizi imzanızla elde etmeyi deneyebilecekleri için belirsiz hiçbir şey imzalamamaya dikkat ediniz. Sadece ayrıntılı açıklaması olan ve tümüne katıldığınız ifadeleri imzalayınız.</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="48"/>
<source>The address to sign the message with (e.g. XwnLY9Tf7Zsef8gMGL2fhWA9ZmMjt4KPwg)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="55"/>
<location filename="../forms/signverifymessagedialog.ui" line="265"/>
<source>Choose previously used address</source>
<translation>Önceden kullanılmış adres seç</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="65"/>
<location filename="../forms/signverifymessagedialog.ui" line="275"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="75"/>
<source>Paste address from clipboard</source>
<translation>Panodan adres yapıştır</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="85"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="97"/>
<source>Enter the message you want to sign here</source>
<translation>İmzalamak istediğiniz mesajı burada giriniz</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="104"/>
<source>Signature</source>
<translation>İmza</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="131"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Güncel imzayı sistem panosuna kopyala</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="152"/>
<source>Sign the message to prove you own this Moneta address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="155"/>
<source>Sign &Message</source>
<translation>&Mesajı imzala</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="169"/>
<source>Reset all sign message fields</source>
        <translation>Tüm mesaj imzalama alanlarını sıfırla</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="172"/>
<location filename="../forms/signverifymessagedialog.ui" line="315"/>
<source>Clear &All</source>
<translation>Tümünü &temizle</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="231"/>
<source>&Verify Message</source>
<translation>Mesaj &kontrol et</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="237"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>İmza için kullanılan adresi, mesajı (satır sonları, boşluklar, sekmeler vs. karakterleri tam olarak kopyaladığınızdan emin olunuz) ve imzayı aşağıda giriniz. Bir ortadaki adam saldırısı tarafından kandırılmaya mâni olmak için imzadan, imzalı mesajın içeriğini aşan bir anlam çıkarmamaya dikkat ediniz.</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="258"/>
<source>The address the message was signed with (e.g. XwnLY9Tf7Zsef8gMGL2fhWA9ZmMjt4KPwg)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="295"/>
<source>Verify the message to ensure it was signed with the specified Moneta address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="298"/>
<source>Verify &Message</source>
<translation>&Mesaj kontrol et</translation>
</message>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="312"/>
<source>Reset all verify message fields</source>
<translation>Tüm mesaj kontrolü alanlarını sıfırla</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="30"/>
<source>Click "Sign Message" to generate signature</source>
<translation>İmzayı oluşturmak için "Mesaj İmzala" unsurunu tıklayın</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="31"/>
<source>Enter a Moneta address (e.g. XwnLY9Tf7Zsef8gMGL2fhWA9ZmMjt4KPwg)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="114"/>
<location filename="../signverifymessagedialog.cpp" line="194"/>
<source>The entered address is invalid.</source>
<translation>Girilen adres geçersizdir.</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="114"/>
<location filename="../signverifymessagedialog.cpp" line="122"/>
<location filename="../signverifymessagedialog.cpp" line="194"/>
<location filename="../signverifymessagedialog.cpp" line="202"/>
<source>Please check the address and try again.</source>
<translation>Adresi kontrol edip tekrar deneyiniz.</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="122"/>
<location filename="../signverifymessagedialog.cpp" line="202"/>
<source>The entered address does not refer to a key.</source>
<translation>Girilen adres herhangi bir anahtara işaret etmemektedir.</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="130"/>
<source>Wallet unlock was cancelled.</source>
<translation>Cüzdan kilidinin açılması iptal edildi.</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="138"/>
<source>Private key for the entered address is not available.</source>
<translation>Girilen adres için özel anahtar mevcut değildir.</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="150"/>
<source>Message signing failed.</source>
<translation>Mesajın imzalanması başarısız oldu.</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="155"/>
<source>Message signed.</source>
<translation>Mesaj imzalandı.</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="213"/>
<source>The signature could not be decoded.</source>
<translation>İmzanın kodu çözülemedi.</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="213"/>
<location filename="../signverifymessagedialog.cpp" line="226"/>
<source>Please check the signature and try again.</source>
<translation>İmzayı kontrol edip tekrar deneyiniz.</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="226"/>
<source>The signature did not match the message digest.</source>
<translation>İmza mesajın hash değeri ile eşleşmedi.</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="233"/>
<source>Message verification failed.</source>
<translation>Mesaj doğrulaması başarısız oldu.</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="238"/>
<source>Message verified.</source>
<translation>Mesaj doğrulandı.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="33"/>
<source>Moneta Core</source>
<translation>Moneta Çekirdeği</translation>
</message>
<message>
<location filename="../splashscreen.cpp" line="34"/>
<source>Version %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../splashscreen.cpp" line="35"/>
<source>The Bitcoin Core developers</source>
<translation>Bitcoin Çekirdeği geliştiricileri</translation>
</message>
<message>
<location filename="../splashscreen.cpp" line="36"/>
<source>The Moneta Core developers</source>
<translation>Moneta Çekirdek Geliştiricileri</translation>
</message>
<message>
<location filename="../splashscreen.cpp" line="37"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<location filename="../trafficgraphwidget.cpp" line="79"/>
<source>KB/s</source>
<translation>KB/s</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message numerus="yes">
<location filename="../transactiondesc.cpp" line="28"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="30"/>
<source>Open until %1</source>
<translation>%1 değerine dek açık</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="41"/>
<location filename="../transactiondesc.cpp" line="52"/>
<location filename="../transactiondesc.cpp" line="62"/>
<location filename="../transactiondesc.cpp" line="74"/>
<source>conflicted</source>
<translation>çakışma</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="43"/>
<source>%1/offline (verified via instantx)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiondesc.cpp" line="45"/>
<source>%1/confirmed (verified via instantx)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiondesc.cpp" line="47"/>
<source>%1 confirmations (verified via instantx)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiondesc.cpp" line="76"/>
<source>%1/offline</source>
<translation>%1/çevrim dışı</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="78"/>
<source>%1/unconfirmed</source>
<translation>%1/doğrulanmadı</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="68"/>
<location filename="../transactiondesc.cpp" line="80"/>
<source>%1 confirmations</source>
<translation>%1 teyit</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="54"/>
<source>%1/offline (InstantX verification in progress - %2 of %3 signatures)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiondesc.cpp" line="56"/>
<source>%1/confirmed (InstantX verification in progress - %2 of %3 signatures )</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiondesc.cpp" line="58"/>
<source>%1 confirmations (InstantX verification in progress - %2 of %3 signatures)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiondesc.cpp" line="64"/>
<source>%1/offline (InstantX verification failed)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiondesc.cpp" line="66"/>
<source>%1/confirmed (InstantX verification failed)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiondesc.cpp" line="98"/>
<source>Status</source>
<translation>Durum</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="103"/>
<source>, has not been successfully broadcast yet</source>
<translation>, henüz başarılı bir şekilde yayınlanmadı</translation>
</message>
<message numerus="yes">
<location filename="../transactiondesc.cpp" line="105"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="109"/>
<source>Date</source>
<translation>Tarih</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="116"/>
<source>Source</source>
<translation>Kaynak</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="116"/>
<source>Generated</source>
<translation>Oluşturuldu</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="121"/>
<location filename="../transactiondesc.cpp" line="138"/>
<source>From</source>
<translation>Gönderen</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="138"/>
<source>unknown</source>
<translation>bilinmiyor</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="139"/>
<location filename="../transactiondesc.cpp" line="161"/>
<location filename="../transactiondesc.cpp" line="219"/>
<source>To</source>
<translation>Alıcı</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="142"/>
<location filename="../transactiondesc.cpp" line="144"/>
<source>own address</source>
<translation>kendi adresiniz</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="142"/>
<source>label</source>
<translation>etiket</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="179"/>
<location filename="../transactiondesc.cpp" line="191"/>
<location filename="../transactiondesc.cpp" line="236"/>
<location filename="../transactiondesc.cpp" line="253"/>
<location filename="../transactiondesc.cpp" line="306"/>
<source>Credit</source>
<translation>Gelir</translation>
</message>
<message numerus="yes">
<location filename="../transactiondesc.cpp" line="181"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="183"/>
<source>not accepted</source>
<translation>kabul edilmedi</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="227"/>
<location filename="../transactiondesc.cpp" line="235"/>
<location filename="../transactiondesc.cpp" line="250"/>
<location filename="../transactiondesc.cpp" line="303"/>
<source>Debit</source>
<translation>Gider</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="241"/>
<source>Transaction fee</source>
<translation>Muamele ücreti</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="257"/>
<source>Net amount</source>
<translation>Net meblağ</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="263"/>
<location filename="../transactiondesc.cpp" line="272"/>
<source>Message</source>
<translation>Mesaj</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="265"/>
<source>Comment</source>
<translation>Yorum</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="267"/>
<source>Transaction ID</source>
<translation>Muamele tanımlayıcısı</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="285"/>
<source>Merchant</source>
<translation>Tüccar</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="292"/>
<source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Oluşturulan madenî paraların harcanabilmelerinden önce %1 blok beklemeleri gerekmektedir. Bu blok, oluşturduğunuzda, blok zincirine eklenmesi için ağda yayınlandı. Zincire eklenmesi başarısız olursa, durumu "kabul edilmedi" olarak değiştirilecek ve harcanamayacaktır. Bu, bazen başka bir düğüm sizden birkaç saniye önce ya da sonra blok oluşturursa meydana gelebilir.</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="300"/>
<source>Debug information</source>
<translation>Hata ayıklama verileri</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="308"/>
<source>Transaction</source>
<translation>Muamele</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="311"/>
<source>Inputs</source>
<translation>Girdiler</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="332"/>
<source>Amount</source>
<translation>Meblağ</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="333"/>
<source>true</source>
<translation>doğru</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="333"/>
<source>false</source>
<translation>yanlış</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="14"/>
<source>Transaction details</source>
<translation>Muamele detayları</translation>
</message>
<message>
<location filename="../forms/transactiondescdialog.ui" line="20"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Bu pano muamelenin ayrıntılı açıklamasını gösterir</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="233"/>
<source>Date</source>
<translation>Tarih</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="233"/>
<source>Type</source>
<translation>Tür</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="233"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="233"/>
<source>Amount</source>
<translation>Meblağ</translation>
</message>
<message numerus="yes">
<location filename="../transactiontablemodel.cpp" line="285"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="288"/>
<source>Open until %1</source>
<translation>%1 değerine dek açık</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="291"/>
<source>Offline</source>
<translation>Çevrim dışı</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="294"/>
<source>Unconfirmed</source>
<translation>Teyit edilmemiş</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="297"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation>Teyit ediliyor (tavsiye edilen %2 teyit üzerinden %1 doğrulama)</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="300"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Doğrulandı (%1 teyit)</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="303"/>
<source>Conflicted</source>
<translation>Çakışma</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="306"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation>Olgunlaşmamış (%1 teyit, %2 teyit ardından kullanılabilir olacaktır)</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="309"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Bu blok başka hiçbir düğüm tarafından alınmamıştır ve muhtemelen kabul edilmeyecektir!</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="312"/>
<source>Generated but not accepted</source>
<translation>Oluşturuldu ama kabul edilmedi</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="354"/>
<source>Received with</source>
<translation>Şununla alındı</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="356"/>
<source>Received from</source>
<translation>Alındığı kişi</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="358"/>
<source>Received via Darksend</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="361"/>
<source>Sent to</source>
<translation>Gönderildiği adres</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="363"/>
<source>Payment to yourself</source>
<translation>Kendinize ödeme</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="365"/>
<source>Mined</source>
<translation>Madenden çıkarılan</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="368"/>
<source>Darksend Denominate</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="370"/>
<source>Darksend Collateral Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="372"/>
<source>Darksend Make Collateral Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="374"/>
<source>Darksend Create Denominations</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="376"/>
<source>Darksent</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="418"/>
<source>(n/a)</source>
<translation>(mevcut değil)</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="610"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Muamele durumu. Doğrulama sayısını görüntülemek için imleci bu alanda tutunuz.</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="612"/>
<source>Date and time that the transaction was received.</source>
<translation>Muamelenin alındığı tarih ve zaman.</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="614"/>
<source>Type of transaction.</source>
<translation>Muamele türü.</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="616"/>
<source>Destination address of transaction.</source>
<translation>Muamelenin alıcı adresi.</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="618"/>
<source>Amount removed from or added to balance.</source>
<translation>Bakiyeden alınan ya da bakiyeye eklenen meblağ.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="60"/>
<location filename="../transactionview.cpp" line="76"/>
<source>All</source>
<translation>Hepsi</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="61"/>
<source>Today</source>
<translation>Bugün</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="62"/>
<source>This week</source>
<translation>Bu hafta</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="63"/>
<source>This month</source>
<translation>Bu ay</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="64"/>
<source>Last month</source>
<translation>Geçen ay</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="65"/>
<source>This year</source>
<translation>Bu sene</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="66"/>
<source>Range...</source>
<translation>Aralık...</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="77"/>
<source>Received with</source>
<translation>Şununla alınan</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="79"/>
<source>Sent to</source>
<translation>Gönderildiği adres</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="81"/>
<source>Darksent</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactionview.cpp" line="82"/>
<source>Darksend Make Collateral Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactionview.cpp" line="83"/>
<source>Darksend Create Denominations</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactionview.cpp" line="84"/>
<source>Darksend Denominate</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactionview.cpp" line="85"/>
<source>Darksend Collateral Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactionview.cpp" line="86"/>
<source>To yourself</source>
<translation>Kendinize</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="87"/>
<source>Mined</source>
<translation>Madenden çıkarılan</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="88"/>
<source>Other</source>
<translation>Diğer</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="94"/>
<source>Enter address or label to search</source>
<translation>Aranacak adres ya da etiket giriniz</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="100"/>
<source>Min amount</source>
<translation>Asgari meblağ</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="134"/>
<source>Copy address</source>
<translation>Adresi kopyala</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="135"/>
<source>Copy label</source>
<translation>Etiketi kopyala</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="136"/>
<source>Copy amount</source>
<translation>Meblağı kopyala</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="137"/>
<source>Copy transaction ID</source>
<translation>Muamele kimliğini kopyala</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="138"/>
<source>Edit label</source>
<translation>Etiketi düzenle</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="139"/>
<source>Show transaction details</source>
<translation>Muamele detaylarını göster</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="302"/>
<source>Export Transaction History</source>
<translation>Muamele tarihçesini dışa aktar</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="303"/>
<source>Comma separated file (*.csv)</source>
<translation>Virgülle ayrılmış değerler dosyası (*.csv)</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="312"/>
<source>Confirmed</source>
<translation>Doğrulandı</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="313"/>
<source>Date</source>
<translation>Tarih</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="314"/>
<source>Type</source>
<translation>Tür</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="315"/>
<source>Label</source>
<translation>Etiket</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="316"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="317"/>
<source>Amount</source>
<translation>Meblağ</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="318"/>
<source>ID</source>
<translation>Tanımlayıcı</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="321"/>
<source>Exporting Failed</source>
<translation>Dışa aktarım başarısız oldu</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="321"/>
<source>There was an error trying to save the transaction history to %1.</source>
<translation>Muamele tarihçesinin %1 konumuna kaydedilmesi sırasında bir hata meydana geldi.</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="325"/>
<source>Exporting Successful</source>
<translation>Dışa aktarım başarılı oldu</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="325"/>
<source>The transaction history was successfully saved to %1.</source>
<translation>Muamele tarihçesi başarılı bir şekilde %1 konumuna kaydedildi.</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="434"/>
<source>Range:</source>
<translation>Aralık:</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="442"/>
<source>to</source>
<translation>ilâ</translation>
</message>
</context>
<context>
<name>WalletFrame</name>
<message>
<location filename="../walletframe.cpp" line="26"/>
<source>No wallet has been loaded.</source>
<translation>Hiçbir cüzdan yüklenmemiştir.</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="271"/>
<location filename="../walletmodel.cpp" line="285"/>
<source>Send Coins</source>
<translation>Moneta yolla</translation>
</message>
<message numerus="yes">
<location filename="../walletmodel.cpp" line="271"/>
<source>InstantX doesn't support sending values that high yet. Transactions are currently limited to %n DASH.</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="44"/>
<source>&amp;Export</source>
<translation>&amp;Dışa aktar</translation>
</message>
<message>
<location filename="../walletview.cpp" line="45"/>
<source>Export the data in the current tab to a file</source>
<translation>Güncel sekmedeki verileri bir dosyaya aktar</translation>
</message>
<message>
<location filename="../walletview.cpp" line="231"/>
<source>Backup Wallet</source>
<translation>Cüzdanı Yedekle</translation>
</message>
<message>
<location filename="../walletview.cpp" line="232"/>
<source>Wallet Data (*.dat)</source>
<translation>Cüzdan verileri (*.dat)</translation>
</message>
<message>
<location filename="../walletview.cpp" line="238"/>
<source>Backup Failed</source>
<translation>Yedekleme başarısız oldu</translation>
</message>
<message>
<location filename="../walletview.cpp" line="238"/>
<source>There was an error trying to save the wallet data to %1.</source>
<translation>Cüzdan verilerinin %1 konumuna kaydedilmesi sırasında bir hata meydana geldi.</translation>
</message>
<message>
<location filename="../walletview.cpp" line="242"/>
<source>Backup Successful</source>
<translation>Yedekleme başarılı</translation>
</message>
<message>
<location filename="../walletview.cpp" line="242"/>
<source>The wallet data was successfully saved to %1.</source>
<translation>Cüzdan verileri %1 konumuna başarıyla kaydedildi.</translation>
</message>
</context>
<context>
<name>moneta-core</name>
<message>
<location filename="../monetastrings.cpp" line="16"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=monetarpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Moneta Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="29"/>
<source>Acceptable ciphers (default: TLSv1.2+HIGH:TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!3DES:@STRENGTH)</source>
<translation>Kabul edilebilir şifreler (varsayılan: TLSv1.2+HIGH:TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!3DES:@STRENGTH)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="32"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>IPv4 üzerinde dinlemek için %u numaralı RPC portunun kurulumu sırasında hata meydana geldi: %s</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="34"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>IPv6 üzerinde dinlemek için %u numaralı RPC portu kurulurken bir hata meydana geldi, IPv4'e dönülüyor: %s</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="37"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Belirtilen adrese bağlan ve daima ondan dinle. IPv6 için [makine]:port yazımını kullanınız</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="40"/>
<source>Cannot obtain a lock on data directory %s. Moneta Core is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="43"/>
<source>Continuously rate-limit free transactions to &lt;n&gt;*1000 bytes per minute (default:15)</source>
<translation>Devamlı olarak ücretsiz muameleleri dakikada &lt;n&gt;*1000 bayt olarak sınırla (varsayılan: 15)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="46"/>
<source>Darksend uses exact denominated amounts to send funds, you might simply need to anonymize some more coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="49"/>
<source>Disable all Masternode and Darksend related functionality (0-1, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="51"/>
<source>Enable instantx, show confirmations for locked transactions (bool, default: true)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="54"/>
<source>Enable use of automated darksend for funds stored in this wallet (0-1, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="57"/>
<source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly. This is intended for regression testing tools and app development.</source>
<translation>Anında çözümlenebilen bloklar içeren ve özel zincir kullanan regresyon test kipine gir. Bu, uygulama geliştirme ve regresyon testi araçları için tasarlanmıştır.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="61"/>
<source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</source>
<translation>Blokların anında çözülebileceği özel bir zincir kullanan regresyon deneme kipine gir.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="64"/>
<source>Error: Listening for incoming connections failed (listen returned error %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="66"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Hata: Muamele reddedildi! Cüzdanınızdaki madenî paraların bazıları zaten harcanmış olduğunda bu meydana gelebilir. Örneğin wallet.dat dosyasının bir kopyasını kullandıysanız ve kopyada para harcandığında ancak burada harcandığı işaretlenmediğinde.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="70"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation>Hata: Muamelenin meblağı, karmaşıklığı ya da yakın geçmişte alınan fonların kullanılması nedeniyle bu muamele en az %s tutarında ücret gerektirmektedir!</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="73"/>
<source>Error: Wallet unlocked for anonymization only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="75"/>
<source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source>
<translation>İlgili bir uyarı alındığında ya da gerçekten uzun bir çatallama gördüğümüzde komutu çalıştır (komuttaki %s mesaj ile değiştirilir)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="78"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Bir cüzdan muamelesi değiştiğinde komutu çalıştır (komuttaki %s TxID ile değiştirilecektir)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="81"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>En iyi blok değiştiğinde komutu çalıştır (komut için %s parametresi blok hash değeri ile değiştirilecektir)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="84"/>
<source>Fees smaller than this are considered zero fee (for transaction creation) (default:</source>
<translation>Bundan düşük ücretler sıfır değerinde sayılır (muamele oluşturulması için) (varsayılan:</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="87"/>
<source>Flush database activity from memory pool to disk log every &lt;n&gt; megabytes (default: 100)</source>
<translation>Veritabanı etkinliğini bellekten disk kütüğüne her &lt;n&gt; megabaytta aktar (varsayılan: 100)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="90"/>
<source>Found unconfirmed denominated outputs, will wait till they confirm to continue.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="93"/>
<source>How thorough the block verification of -checkblocks is (0-4, default: 3)</source>
<translation>-checkblocks'un blok kontrolünün ne kadar kapsamlı olacağı (0 ilâ 4, varsayılan: 3)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="95"/>
<source>In this mode -genproclimit controls how many blocks are generated immediately.</source>
<translation>Bu kipte -genproclimit kaç sayıda bloğun anında oluşturulduğunu kontrol eder.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="98"/>
<source>InstantX requires inputs with at least 6 confirmations, you might need to wait a few minutes and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="101"/>
<source>Listen for JSON-RPC connections on &lt;port&gt; (default: 9998 or testnet: 19998)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="103"/>
<source>Name to construct url for KeePass entry that stores the wallet passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="105"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Aksaklık gösteren eşlerle yeni bağlantıları engelleme süresi, saniye olarak (varsayılan: 86400)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="108"/>
<source>Output debugging information (default: 0, supplying &lt;category&gt; is optional)</source>
<translation>Hata ayıklama bilgisi dök (varsayılan: 0, &lt;kategori&gt; sağlanması seçime dayalıdır)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="110"/>
<source>Provide liquidity to Darksend by infrequently mixing coins on a continual basis (0-100, default: 0, 1=very frequent, high fees, 100=very infrequent, low fees)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="114"/>
<source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="117"/>
<source>Set external address:port to get to this masternode (example: address:port)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="119"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source>
<translation>Yüksek öncelikli/düşük ücretli muamelelerin azami boyutunu bayt olarak ayarla (varsayılan: %d)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="121"/>
<source>Set the number of script verification threads (%u to %d, 0 = auto, &lt;0 = leave that many cores free, default: %d)</source>
<translation>Betik kontrolü iş parçacıklarının sayısını belirler (%u ilâ %d, 0 = otomatik, &lt;0 = bu sayıda çekirdeği kullanma, varsayılan: %d)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="124"/>
<source>Set the processor limit for when generation is on (-1 = unlimited, default: -1)</source>
<translation>Oluşturma etkinken işlemci sınırını belirler (-1 = sınırsız, varsayılan: -1)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="127"/>
<source>Show N confirmations for a successfully locked transaction (0-9999, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="130"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Bu yayın öncesi bir deneme sürümüdür - tüm riski siz üstlenmiş olursunuz - madencilik ya da ticari uygulamalar için kullanmayınız</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="133"/>
<source>Unable to bind to %s on this computer. Moneta Core is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="136"/>
<source>Unable to locate enough Darksend denominated funds for this transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="138"/>
<source>Unable to locate enough Darksend non-denominated funds for this transaction that are not equal 1000 DASH.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="141"/>
<source>Unable to locate enough Darksend non-denominated funds for this transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="143"/>
<source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: -proxy)</source>
<translation>Eşlere gizli Tor servisleri ile ulaşmak için ayrı SOCKS5 vekil sunucusu kullan (varsayılan: -proxy)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="146"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Uyarı: -paytxfee çok yüksek bir değere ayarlanmış! Bu, muamele gönderirseniz ödeyeceğiniz muamele ücretidir.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="149"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Moneta will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="152"/>
<source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source>
<translation>Uyarı: şebeke tamamen mutabık değil gibi görünüyor! Bazı madenciler sorun yaşıyor gibi görünüyor.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="155"/>
<source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Uyarı: eşlerimizle tamamen mutabık değiliz gibi görünüyor! Güncelleme yapmanız gerekebilir ya da diğer düğümlerin güncelleme yapmaları gerekebilir.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="158"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Uyarı: wallet.dat dosyasının okunması sırasında bir hata meydana geldi! Tüm anahtarlar doğru bir şekilde okundu, ancak muamele verileri ya da adres defteri unsurları hatalı veya eksik olabilir.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="161"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Uyarı: wallet.dat bozuk, veriler geri kazanıldı! Özgün wallet.dat, wallet.{zamandamgası}.bak olarak %s klasörüne kaydedildi; bakiyeniz ya da muameleleriniz yanlışsa bir yedeklemeden tekrar yüklemeniz gerekir.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="165"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>rpcpassword=<parola> şu yapılandırma dosyasında belirtilmelidir:
%s
Dosya mevcut değilse, sadece sahibi için okumayla sınırlı izin ile oluşturunuz.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="170"/>
<source>You must specify a masternodeprivkey in the configuration. Please see documentation for help.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="173"/>
<source>(default: 1)</source>
<translation>(varsayılan: 1)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="174"/>
<source>(default: wallet.dat)</source>
<translation>(varsayılan: wallet.dat)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="175"/>
<source><category> can be:</source>
<translation><kategori> şunlar olabilir:</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="176"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Komut satırı ve JSON-RPC komutlarını kabul et</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="177"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Dışarıdan gelen bağlantıları kabul et (varsayılan: -proxy veya -connect yoksa 1)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="178"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Bağlanılacak düğüm ekle ve bağlantıyı zinde tutmaya çalış</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="179"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>-addnode, -seednode ve -connect için DNS aramalarına izin ver</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="180"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Belirtilen İP adresinden JSON-RPC bağlantılarını kabul et</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="181"/>
<source>Already have that input.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="182"/>
<source>Always query for peer addresses via DNS lookup (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="183"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Bozuk bir wallet.dat dosyasından özel anahtarları geri kazanmayı dene</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="184"/>
<source>Block creation options:</source>
<translation>Blok oluşturma seçenekleri:</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="185"/>
<source>Can't denominate: no compatible inputs left.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="186"/>
<source>Cannot downgrade wallet</source>
<translation>Cüzdan eski biçime geri alınamaz</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="187"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>-bind adresi çözümlenemedi: '%s'</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="188"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>-externalip adresi çözümlenemedi: '%s'</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="189"/>
<source>Cannot write default address</source>
<translation>Varsayılan adres yazılamadı</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="190"/>
<source>Clear list of wallet transactions (diagnostic tool; implies -rescan)</source>
<translation>Cüzdanın muamele listesini temizle (tanı aracı; -rescan ima eder)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="191"/>
<source>Collateral is not valid.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="192"/>
<source>Collateral not valid.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="193"/>
<source>Connect only to the specified node(s)</source>
<translation>Sadece belirtilen düğüme veya düğümlere bağlan</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="194"/>
<source>Connect through SOCKS proxy</source>
<translation>SOCKS vekil sunucusuyla bağlan</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="195"/>
<source>Connect to JSON-RPC on <port> (default: 9998 or testnet: 19998)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="196"/>
<source>Connect to KeePassHttp on port <port> (default: 19455)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="197"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Eş adresleri elde etmek için bir düğüme bağlan ve ardından bağlantıyı kes</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="198"/>
<source>Connection options:</source>
<translation>Bağlantı seçenekleri:</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="199"/>
<source>Corrupted block database detected</source>
<translation>Bozuk blok veritabanı tespit edildi</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="200"/>
<source>Moneta Core Daemon</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="201"/>
<source>Moneta Core RPC client version</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="202"/>
<source>Darksend is disabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="203"/>
<source>Darksend options:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="204"/>
<source>Debugging/Testing options:</source>
<translation>Hata ayıklama/deneme seçenekleri:</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="205"/>
<source>Disable safemode, override a real safe mode event (default: 0)</source>
    <translation>Güvenli kipi devre dışı bırak, gerçek bir güvenli kip olayını geçersiz kıl (varsayılan: 0)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="206"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Kendi IP adresini keşfet (varsayılan: dinlenildiğinde ve -externalip yoksa 1)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="207"/>
<source>Do not load the wallet and disable wallet RPC calls</source>
<translation>Cüzdanı yükleme ve cüzdan RPC çağrılarını devre dışı bırak</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="208"/>
<source>Do you want to rebuild the block database now?</source>
<translation>Blok veritabanını şimdi yeniden inşa etmek istiyor musunuz?</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="209"/>
<source>Done loading</source>
<translation>Yükleme tamamlandı</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="210"/>
<source>Downgrading and trying again.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="211"/>
<source>Enable the client to act as a masternode (0-1, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="212"/>
<source>Entries are full.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="213"/>
<source>Error connecting to masternode.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="214"/>
<source>Error initializing block database</source>
    <translation>Blok veritabanı başlatılırken bir hata meydana geldi</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="215"/>
<source>Error initializing wallet database environment %s!</source>
<translation>%s cüzdan veritabanı ortamının başlatılmasında hata meydana geldi!</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="216"/>
<source>Error loading block database</source>
<translation>Blok veritabanının yüklenmesinde hata</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="217"/>
<source>Error loading wallet.dat</source>
<translation>wallet.dat dosyasının yüklenmesinde hata oluştu</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="218"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>wallet.dat dosyasının yüklenmesinde hata oluştu: bozuk cüzdan</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="219"/>
<source>Error loading wallet.dat: Wallet requires newer version of Moneta</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="220"/>
<source>Error opening block database</source>
<translation>Blok veritabanının açılışı sırasında hata</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="221"/>
<source>Error reading from database, shutting down.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="222"/>
<source>Error recovering public key.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="223"/>
<source>Error</source>
<translation>Hata</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="224"/>
<source>Error: Disk space is low!</source>
<translation>Hata: Disk alanı düşük!</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="225"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>Hata: Cüzdan kilitli, muamele oluşturulamadı!</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="226"/>
<source>Error: You already have pending entries in the Darksend pool</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="227"/>
<source>Error: system error: </source>
    <translation>Hata: sistem hatası: </translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="228"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Herhangi bir portun dinlenmesi başarısız oldu. Bunu istiyorsanız -listen=0 seçeneğini kullanınız.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="229"/>
<source>Failed to read block info</source>
<translation>Blok verileri okunamadı</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="230"/>
<source>Failed to read block</source>
<translation>Blok okunamadı</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="231"/>
<source>Failed to sync block index</source>
<translation>Blok indeksi eşleştirilemedi</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="232"/>
<source>Failed to write block index</source>
<translation>Blok indeksi yazılamadı</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="233"/>
<source>Failed to write block info</source>
<translation>Blok verileri yazılamadı</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="234"/>
<source>Failed to write block</source>
<translation>Blok yazılamadı</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="235"/>
<source>Failed to write file info</source>
<translation>Dosya verileri yazılamadı</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="236"/>
<source>Failed to write to coin database</source>
<translation>Madenî para veritabanına yazılamadı</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="237"/>
<source>Failed to write transaction index</source>
<translation>Muamele indeksi yazılamadı</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="238"/>
<source>Failed to write undo data</source>
    <translation>Geri alma verileri yazılamadı</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="239"/>
<source>Fee per kB to add to transactions you send</source>
<translation>Yolladığınız muameleler için eklenecek kB başı ücret</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="240"/>
<source>Fees smaller than this are considered zero fee (for relaying) (default:</source>
<translation>Bundan düşük ücretler sıfır değerinde sayılacaktır (aktarım için) (varsayılan:</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="241"/>
<source>Force safe mode (default: 0)</source>
<translation>Güvenli kipi zorla (varsayılan: 0)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="242"/>
<source>Generate coins (default: 0)</source>
    <translation>Madenî para oluştur (varsayılan: 0)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="243"/>
<source>Get help for a command</source>
<translation>Bir komut için yardım al</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="244"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation>Başlangıçta kontrol edilecek blok sayısı (varsayılan: 288, 0 = hepsi)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="245"/>
<source>If <category> is not supplied, output all debugging information.</source>
<translation><kategori> sağlanmamışsa tüm hata ayıklama verilerini dök.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="246"/>
<source>Ignore masternodes less than version (example: 70050; default : 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="247"/>
<source>Importing...</source>
<translation>İçe aktarılıyor...</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="248"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Harici blk000??.dat dosyasından blokları içe aktarır</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="249"/>
<source>Incompatible mode.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="250"/>
<source>Incompatible version.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="251"/>
<source>Incorrect or no genesis block found. Wrong datadir for network?</source>
<translation>Yanlış ya da bulunamamış doğuş bloku. Şebeke için yanlış veri klasörü mü?</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="252"/>
<source>Information</source>
<translation>Bilgi</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="253"/>
<source>Initialization sanity check failed. Moneta Core is shutting down.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="254"/>
<source>Input is not valid.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="255"/>
<source>InstantX options:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="256"/>
<source>Insufficient funds</source>
<translation>Yetersiz bakiye</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="257"/>
<source>Insufficient funds.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="258"/>
<source>Invalid -onion address: '%s'</source>
<translation>Geçersiz -onion adresi: '%s'</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="259"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Geçersiz -proxy adresi: '%s'</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="260"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation>-minrelaytxfee=<amount> için geçersiz meblağ: '%s'</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="261"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation>-mintxfee=<amount> için geçersiz meblağ: '%s'</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="262"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>-paytxfee=<meblağ> için geçersiz meblağ: '%s'</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="263"/>
<source>Invalid amount</source>
<translation>Geçersiz meblağ</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="264"/>
<source>Invalid masternodeprivkey. Please see documenation.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="265"/>
<source>Invalid private key.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="266"/>
<source>Invalid script detected.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="267"/>
<source>KeePassHttp id for the established association</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="268"/>
<source>KeePassHttp key for AES encrypted communication with KeePass</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="269"/>
<source>Keep N moneta anonymized (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="270"/>
<source>Keep at most <n> unconnectable blocks in memory (default: %u)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="271"/>
<source>Keep at most <n> unconnectable transactions in memory (default: %u)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="272"/>
<source>Last Darksend was too recent.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="273"/>
<source>Last successful darksend action was too recent.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="274"/>
<source>Limit size of signature cache to <n> entries (default: 50000)</source>
<translation>İmza arabelleğinin boyutunu <n> unsurla sınırla (varsayılan: 50000)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="275"/>
<source>List commands</source>
<translation>Komutları listele</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="276"/>
<source>Listen for connections on <port> (default: 9999 or testnet: 19999)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="277"/>
<source>Loading addresses...</source>
<translation>Adresler yükleniyor...</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="278"/>
<source>Loading block index...</source>
<translation>Blok indeksi yükleniyor...</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="279"/>
<source>Loading masternode list...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="280"/>
<source>Loading wallet... (%3.2f %%)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="281"/>
<source>Loading wallet...</source>
<translation>Cüzdan yükleniyor...</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="282"/>
<source>Log transaction priority and fee per kB when mining blocks (default: 0)</source>
<translation>Blok oluşturulduğunda muamele önceliğini ve kB başı ücreti kütüğe al (varsayılan: 0)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="283"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation>Muamelelerin tamamının indeksini tut (varsayılan: 0)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="284"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Eşler ile en çok <n> adet bağlantı kur (varsayılan: 125)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="285"/>
<source>Masternode options:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="286"/>
<source>Masternode queue is full.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="287"/>
<source>Masternode:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="288"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>Bağlantı başına azami alım tamponu, <n>*1000 bayt (varsayılan: 5000)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="289"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>Bağlantı başına azami yollama tamponu, <n>*1000 bayt (varsayılan: 1000)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="290"/>
<source>Missing input transaction information.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="291"/>
<source>No compatible masternode found.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="292"/>
<source>No funds detected in need of denominating.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="293"/>
<source>No masternodes detected.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="294"/>
<source>No matching denominations found for mixing.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="295"/>
<source>Non-standard public key detected.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="296"/>
<source>Not compatible with existing transactions.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="297"/>
<source>Not enough file descriptors available.</source>
<translation>Kafi derecede dosya tanımlayıcıları mevcut değil.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="298"/>
<source>Not in the masternode list.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="299"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation>Sadece yerleşik kontrol noktalarıyla eşleşen blok zincirini kabul et (varsayılan: 1)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="300"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>Sadece <net> şebekesindeki düğümlere bağlan (IPv4, IPv6 ya da Tor)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="301"/>
<source>Options:</source>
<translation>Seçenekler:</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="302"/>
<source>Password for JSON-RPC connections</source>
<translation>JSON-RPC bağlantıları için parola</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="303"/>
<source>Prepend debug output with timestamp (default: 1)</source>
<translation>Hata ayıklama verilerinin önüne zaman damgası ekle (varsayılan: 1)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="304"/>
<source>Print block on startup, if found in block index</source>
<translation>Başlangıçta bloğu göster, blok indeksinde bulunduysa</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="305"/>
<source>Print block tree on startup (default: 0)</source>
<translation>Başlangıçta blok ağacını göster (varsayılan: 0)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="306"/>
<source>RPC SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation>RPC SSL seçenekleri: (SSL kurulumu yönergeleri için Bitcoin vikisine bakınız)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="307"/>
<source>RPC client options:</source>
<translation>RPC istemci seçenekleri:</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="308"/>
<source>RPC server options:</source>
<translation>RPC sunucu seçenekleri:</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="309"/>
<source>Randomly drop 1 of every <n> network messages</source>
<translation>Her <n> şebeke mesajından rastgele 1 mesajı görmezden gel</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="310"/>
<source>Randomly fuzz 1 of every <n> network messages</source>
<translation>Her <n> şebeke mesajından rastgele birini bulanıklaştır</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="311"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>Blok zinciri indeksini güncel blk000??.dat dosyalarından tekrar inşa et</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="312"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Blok zincirini eksik cüzdan muameleleri için tekrar tara</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="313"/>
<source>Rescanning...</source>
<translation>Yeniden tarama...</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="314"/>
<source>Run a thread to flush wallet periodically (default: 1)</source>
    <translation>Periyodik olarak cüzdanı diske yazdırmak için bir iş parçacığı çalıştır (varsayılan: 1)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="315"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Arka planda daemon (servis) olarak çalış ve komutları kabul et</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="316"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
    <translation>SSL seçenekleri: (SSL kurulum bilgisi için Bitcoin vikisine bakınız)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="317"/>
<source>Select SOCKS version for -proxy (4 or 5, default: 5)</source>
<translation>-proxy için SOCKS sürümünü seç (4 veya 5, varsayılan: 5)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="318"/>
<source>Send command to Moneta Core</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="319"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Şu <ip> adresinde (varsayılan: 127.0.0.1) çalışan düğüme komut yolla</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="320"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Trace/hata ayıklama verilerini debug.log dosyası yerine konsola gönder</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="321"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Sunucu sertifika dosyası (varsayılan: server.cert)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="322"/>
<source>Server private key (default: server.pem)</source>
<translation>Sunucu özel anahtarı (varsayılan: server.pem)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="323"/>
<source>Session not complete!</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="324"/>
<source>Session timed out (30 seconds), please resubmit.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="325"/>
<source>Set database cache size in megabytes (%d to %d, default: %d)</source>
<translation>Veritabanı önbellek boyutunu megabayt olarak belirt (%d ilâ %d, varsayılan: %d)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="326"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Anahtar alan boyutunu <n> değerine ayarla (varsayılan: 100)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="327"/>
<source>Set maximum block size in bytes (default: %d)</source>
<translation>Azami blok boyutunu bayt olarak ayarla (varsayılan: %d)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="328"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Bayt olarak asgari blok boyutunu tanımla (varsayılan: 0)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="329"/>
<source>Set the masternode private key</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="330"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation>RPC aramaları için iş parçacığı sayısını belirle (varsayılan: 4)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="331"/>
<source>Sets the DB_PRIVATE flag in the wallet db environment (default: 1)</source>
<translation>Cüzdan veritabanı ortamında DB_PRIVATE bayrağını koyar (varsayılan: 1)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="332"/>
<source>Show all debugging options (usage: --help -help-debug)</source>
<translation>Tüm hata ayıklama seçeneklerini göster (kullanımı: --help -help-debug)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="333"/>
<source>Show benchmark information (default: 0)</source>
<translation>Denektaşı verilerini göster (varsayılan: 0)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="334"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>İstemci başlatıldığında debug.log dosyasını küçült (varsayılan: -debug bulunmadığında 1)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="335"/>
<source>Signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="336"/>
<source>Signing timed out, please resubmit.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="337"/>
<source>Signing transaction failed</source>
<translation>Muamelenin imzalanması başarısız oldu</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="338"/>
<source>Specify configuration file (default: moneta.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="339"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Bağlantı zaman aşım süresini milisaniye olarak belirt (varsayılan: 5000)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="340"/>
<source>Specify data directory</source>
<translation>Veri dizinini belirt</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="341"/>
<source>Specify masternode configuration file (default: masternode.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="342"/>
<source>Specify pid file (default: monetad.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="343"/>
<source>Specify wallet file (within data directory)</source>
<translation>Cüzdan dosyası belirtiniz (veri klasörünün içinde)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="344"/>
<source>Specify your own public address</source>
<translation>Kendi genel adresinizi tanımlayın</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="345"/>
<source>Spend unconfirmed change when sending transactions (default: 1)</source>
<translation>Gönderme muamelelerinde teyit edilmemiş para üstünü harca (varsayılan: 1)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="346"/>
<source>Start Moneta Core Daemon</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="347"/>
<source>System error: </source>
<translation>Sistem hatası: </translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="348"/>
<source>This help message</source>
<translation>Bu yardım mesajı</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="349"/>
<source>This is intended for regression testing tools and app development.</source>
<translation>Bu, regresyon deneme araçları ve uygulama geliştirmesi için tasarlanmıştır.</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="350"/>
<source>This is not a masternode.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="351"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Aksaklık gösteren eşlerle bağlantıyı kesme sınırı (varsayılan: 100)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="352"/>
<source>To use the %s option</source>
<translation>%s seçeneğini kullanmak için</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="353"/>
<source>Transaction amount too small</source>
<translation>Muamele meblağı çok düşük</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="354"/>
<source>Transaction amounts must be positive</source>
<translation>Muamele tutarının pozitif olması lazımdır</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="355"/>
<source>Transaction created successfully.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="356"/>
<source>Transaction fees are too high.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="357"/>
<source>Transaction not valid.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="358"/>
<source>Transaction too large</source>
<translation>Muamele çok büyük</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="359"/>
<source>Unable to bind to %s on this computer (bind returned error %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="360"/>
<source>Unable to sign masternode payment winner, wrong key?</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="361"/>
<source>Unable to sign spork message, wrong key?</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="362"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>Bilinmeyen bir -socks vekil sürümü talep edildi: %i</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="363"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>-onlynet için bilinmeyen bir şebeke belirtildi: '%s'</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="364"/>
<source>Upgrade wallet to latest format</source>
<translation>Cüzdanı en yeni biçime güncelle</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="365"/>
<source>Usage (deprecated, use moneta-cli):</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="366"/>
<source>Usage:</source>
<translation>Kullanım:</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="367"/>
<source>Use KeePass 2 integration using KeePassHttp plugin (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="368"/>
<source>Use N separate masternodes to anonymize funds (2-8, default: 2)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="369"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>JSON-RPC bağlantıları için OpenSSL (https) kullan</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="370"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Dinlenecek portu haritalamak için UPnP kullan (varsayılan: 0)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="371"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Dinlenecek portu haritalamak için UPnP kullan (varsayılan: dinlenildiğinde 1)</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="372"/>
<source>Use the test network</source>
<translation>Deneme şebekesini kullan</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="373"/>
<source>Username for JSON-RPC connections</source>
<translation>JSON-RPC bağlantıları için kullanıcı ismi</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="374"/>
<source>Value more than Darksend pool maximum allows.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="375"/>
<source>Verifying blocks...</source>
<translation>Bloklar kontrol ediliyor...</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="376"/>
<source>Verifying wallet...</source>
<translation>Cüzdan kontrol ediliyor...</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="377"/>
<source>Wait for RPC server to start</source>
<translation>RPC sunucusunun başlamasını bekle</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="378"/>
<source>Wallet %s resides outside data directory %s</source>
<translation>%s cüzdan %s veri klasörünün dışında bulunuyor</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="379"/>
<source>Wallet is locked.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="380"/>
<source>Wallet needed to be rewritten: restart Moneta to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="381"/>
<source>Wallet options:</source>
<translation>Cüzdan seçenekleri:</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="382"/>
<source>Warning</source>
<translation>Uyarı</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="383"/>
<source>Warning: Deprecated argument -debugnet ignored, use -debug=net</source>
<translation>Uyarı: eskimiş seçenek -debugnet görmezden gelinir, -debug=net kullanınız</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="384"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Uyarı: Bu sürüm çok eskidir, güncellemeniz gerekir!</translation>
</message>
<message>
<source>Wrong state.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../monetastrings.cpp" line="385"/>
<source>You need to rebuild the database using -reindex to change -txindex</source>
<translation>-txindex'i değiştirmek için veritabanını -reindex kullanarak tekrar inşa etmeniz gerekmektedir</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="386"/>
<source>Zapping all transactions from wallet...</source>
<translation>Cüzdandaki tüm muameleler kaldırılıyor...</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="387"/>
<source>on startup</source>
<translation>başlangıçta</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="388"/>
<source>version</source>
<translation>sürüm</translation>
</message>
<message>
<location filename="../monetastrings.cpp" line="389"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat bozuk, geri kazanım başarısız oldu</translation>
</message>
</context>
</TS> | </message>
<message>
<location filename="../forms/optionsdialog.ui" line="317"/>
<source>Connect to the Moneta network through a SOCKS proxy.</source> |
webhook_handler.rs | use std::fs;
use log::{error, warn};
use serenity::http::Http;
use crate::json::recent::Channel;
use crate::json::webhooks::{FilterType, Hooks, WebhookAuth};
use crate::scrapers::scraper_resources::resources::ScrapeType;
use crate::TOKEN_PATH;
/// Keywords used by `FilterType::Default` for both main-site and forum
/// articles: content matches when it contains any of these substrings.
/// The original list contained "pass" twice; the duplicate has been removed.
const DEFAULT_KEYWORDS: [&str; 27] = [
    "devblog", "event", "maintenance", "major", "trailer", "teaser", "developers",
    "fix", "vehicles", "economy", "changes", "sale", "twitch", "bundles", "development",
    "shop", "pass", "season", "operation", "summer", "2021", "planned", "bonds",
    "issues", "technical", "servers", "christmas",
];
impl Channel {
    /// Dispatches `content` to every configured webhook.
    ///
    /// Reads the webhook configuration from `TOKEN_PATH` on every call. When
    /// `is_filtered` is true, content is delivered only to hooks whose filter
    /// accepts it for the given `scrape_type`; otherwise every hook gets it.
    ///
    /// # Panics
    /// Panics if the token file cannot be read or parsed as `WebhookAuth`.
    pub async fn handle_webhook(&self, content: &str, is_filtered: bool, scrape_type: ScrapeType) {
        // NOTE(review): the token file is re-read and re-parsed here and again
        // inside `deliver_webhooks` for each delivery — consider caching.
        let token_raw = fs::read_to_string(TOKEN_PATH).expect("Cannot read file");
        let webhook_auth: WebhookAuth = serde_json::from_str(&token_raw).expect("Json cannot be read");
        for (i, hook) in webhook_auth.hooks.iter().enumerate() {
            if is_filtered {
                // Deliver only if this hook's filter matches the content.
                if match_filter(content, hook, scrape_type) {
                    deliver_webhooks(content, i).await;
                }
            } else {
                // Unfiltered: every hook receives the content.
                deliver_webhooks(content, i).await;
            }
        }
    }
}
/// Routes the filtering decision to the filter that matches the scrape source.
fn match_filter(content: &str, hook: &Hooks, scrape_type: ScrapeType) -> bool {
    match scrape_type {
        ScrapeType::Main => filter_main(content, hook),
        ScrapeType::Forum => filter_forum(content, hook),
    }
}
/// Decides whether `content` should be delivered according to the hook's
/// main-article filter, logging the decision path.
///
/// * `Default`   — accept when any `DEFAULT_KEYWORDS` entry is contained.
/// * `Blacklist` — reject when any configured keyword is contained; an empty
///                 blacklist accepts everything.
/// * `Whitelist` — accept only when a configured keyword is contained.
fn filter_main(content: &str, hook: &Hooks) -> bool {
    match &hook.main_filter {
        FilterType::Default => {
            if let Some(keyword) = DEFAULT_KEYWORDS.iter().find(|k| content.contains(*k)) {
                print_log(&format!("URL {} matched with default main keyword {}", content, keyword));
                true
            } else {
                print_log(&format!("URL {} did not match any whitelist in main default list", content));
                false
            }
        }
        FilterType::Blacklist => {
            let blacklist = &hook.main_keywords;
            if blacklist.is_empty() {
                print_log(&format!("URL {} matched empty blacklist for main", content));
                true
            } else if blacklist.iter().any(|k| content.contains(k)) {
                print_log(&format!("URL {} found in blacklist for main", content));
                false
            } else {
                print_log(&format!("{} is not in main blacklist", content));
                true
            }
        }
        FilterType::Whitelist => {
            match hook.main_keywords.iter().find(|k| content.contains(*k)) {
                Some(keyword) => {
                    print_log(&format!("URL {} matched with whitelisted keyword {} from main list", content, keyword));
                    true
                }
                None => {
                    print_log(&format!("URL {} did not match any whitelist in main list", content));
                    false
                }
            }
        }
    }
}
fn filter_forum(content: &str, hook: &Hooks) -> bool |
/// Sends `content` to the webhook at index `pos` of the configuration file.
///
/// Re-reads the configuration from `TOKEN_PATH`, resolves the Discord webhook
/// via its uid/token, and posts the content as a message from "The WT news bot".
///
/// # Panics
/// Panics if the token file cannot be read or parsed, if the webhook cannot be
/// resolved, or if delivery fails.
async fn deliver_webhooks(content: &str, pos: usize) {
    let token_raw = fs::read_to_string(TOKEN_PATH).expect("Cannot read file");
    let webhook_auth: WebhookAuth = serde_json::from_str(&token_raw).expect("Json cannot be read");
    let uid = webhook_auth.hooks[pos].uid;
    let token = &webhook_auth.hooks[pos].token;
    let my_http_client = Http::new_with_token(token);
    let webhook = match my_http_client.get_webhook_with_token(uid, token).await {
        Err(why) => {
            // Report to stdout and the error log before aborting; the panic
            // message is empty because the error was already printed.
            println!("{}", why);
            error!("{}", why);
            panic!("")
        }
        Ok(hook) => hook,
    };
    // Second argument `false`: do not wait for Discord's confirmation.
    webhook.execute(my_http_client, false, |w| {
        w.content(&format!("[{a}]()", a = content));
        w.username("The WT news bot");
        w.avatar_url("https://cdn.discordapp.com/attachments/866634236232597534/868623209631744000/the_news_broke.png");
        w
    }).await.unwrap();
}
/// Emits the message to stdout and to the warn-level log.
fn print_log(message: &str) {
    println!("{}", message);
    warn!("{}", message);
}
// Tests -----------------------------------------------------------------------
#[cfg(test)] // previously missing: tests were compiled into non-test builds
mod tests {
    use crate::json::webhooks::FilterType::{Blacklist, Whitelist};
    use super::*;

    /// Builds a `Hooks` fixture with the given filters and keyword lists.
    fn make_hook(main_filter: FilterType, forum_filter: FilterType,
                 main_keywords: Vec<String>, forum_keywords: Vec<String>) -> Hooks {
        Hooks {
            name: "".to_string(),
            token: "".to_string(),
            uid: 0,
            main_filter,
            forum_filter,
            main_keywords,
            forum_keywords,
        }
    }

    fn abcd() -> Vec<String> {
        vec!["A".to_owned(), "B".to_owned(), "C".to_owned(), "D".to_owned()]
    }

    fn wxyz() -> Vec<String> {
        vec!["W".to_owned(), "X".to_owned(), "Y".to_owned(), "Z".to_owned()]
    }

    // Main tests -------------------------------------------------------------------

    #[test]
    fn main_test_filter_default_pass() {
        // "pass" is one of the DEFAULT_KEYWORDS.
        assert!(match_filter("pass", &make_hook(FilterType::default(), FilterType::default(), vec![], vec![]), ScrapeType::Main));
    }

    #[test]
    fn main_test_filter_default_no_match() {
        assert!(!match_filter("xyz", &make_hook(FilterType::default(), FilterType::default(), vec![], vec![]), ScrapeType::Main));
    }

    #[test]
    fn main_test_filter_whitelist_match() {
        assert!(match_filter("C", &make_hook(Whitelist, Blacklist, abcd(), wxyz()), ScrapeType::Main));
    }

    #[test]
    fn main_test_filter_whitelist_miss() {
        // Was expressed via #[should_panic] around a wrong assertion;
        // assert the expected `false` directly instead.
        assert!(!match_filter("E", &make_hook(Whitelist, Whitelist, abcd(), wxyz()), ScrapeType::Main));
    }

    #[test]
    fn main_test_filter_blacklist_match() {
        // Blacklisted content must be rejected (was #[should_panic] before).
        assert!(!match_filter("C", &make_hook(Blacklist, Blacklist, abcd(), abcd()), ScrapeType::Main));
    }

    #[test]
    fn main_test_filter_blacklist_miss() {
        assert!(match_filter("E", &make_hook(Blacklist, Blacklist, abcd(), abcd()), ScrapeType::Main));
    }

    // forum tests ------------------------------------------------------------------

    #[test]
    fn forum_test_filter_default_pass() {
        assert!(match_filter("pass", &make_hook(FilterType::default(), FilterType::default(), vec![], vec![]), ScrapeType::Forum));
    }

    #[test]
    fn forum_test_filter_default_no_match() {
        assert!(!match_filter("xyz", &make_hook(FilterType::default(), FilterType::default(), vec![], vec![]), ScrapeType::Forum));
    }

    #[test]
    fn forum_test_filter_whitelist_match() {
        // Previously used a Blacklist forum filter and so never exercised the
        // whitelist path; use a whitelisted keyword with a Whitelist filter.
        assert!(match_filter("X", &make_hook(Whitelist, Whitelist, abcd(), wxyz()), ScrapeType::Forum));
    }

    #[test]
    fn forum_test_filter_whitelist_miss() {
        assert!(!match_filter("E", &make_hook(Whitelist, Whitelist, abcd(), wxyz()), ScrapeType::Forum));
    }

    #[test]
    fn forum_test_filter_blacklist_match() {
        assert!(!match_filter("C", &make_hook(Blacklist, Blacklist, abcd(), abcd()), ScrapeType::Forum));
    }

    #[test]
    fn forum_test_filter_blacklist_miss() {
        // Previously had no assertion at all; a miss must be accepted.
        assert!(match_filter("E", &make_hook(Blacklist, Blacklist, abcd(), abcd()), ScrapeType::Forum));
    }
}
| {
let forum_filter = &hook.forum_filter;
match forum_filter {
FilterType::Default => {
for keyword in DEFAULT_KEYWORDS {
if content.contains(keyword) {
print_log(&format!("URL {} matched with default forum keyword {}", content, keyword));
return true;
}
}
print_log(&format!("URL {} did not match any whitelist in forum default list", content));
false
}
FilterType::Blacklist => {
let blacklist = &hook.forum_keywords;
println!("{:?}", blacklist);
if blacklist.is_empty() {
print_log(&format!("URL {} matched empty blacklist for forum", content));
return true;
}
for keyword in blacklist {
if content.contains(keyword) {
print_log(&format!("URL {} found in blacklist for forum", content));
return false;
}
}
print_log(&format!("{} is not in forum blacklist", content));
true
}
FilterType::Whitelist => {
let whitelist = &hook.forum_keywords;
for keyword in whitelist {
if content.contains(keyword) {
print_log(&format!("URL {} matched with whitelisted keyword {} from forum list", content, keyword));
return true;
}
}
print_log(&format!("URL {} did not match any whitelist in forum list", content));
false
}
}
} |
visualizar_reta.py | import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
def plot(filepath, theta):
    """Plot the training data and the fitted line ``theta[0] + theta[1] * x``.

    The figure is saved to ``target/plot1.2.png`` and then displayed.

    Args:
        filepath: Dataset path appended to the current working directory
            (CSV, no header; features in all but the last column, target in
            the last column).
        theta: Sequence of two regression coefficients
            (``theta[0]`` intercept, ``theta[1]`` slope).
    """
    path = os.getcwd() + filepath
    dataset = pd.read_csv(path, header=None)
    X = dataset.iloc[:, 0:-1].values
    y = dataset.iloc[:, -1:].values

    t = np.arange(0, 25, 1)
    plt.scatter(X, y, color='red', marker='x', label='Training Data')
    plt.plot(t, theta[0] + (theta[1] * t), color='blue', label='Linear Regression')
    plt.axis([4, 25, -5, 25])
    plt.title('Populacao da cidade x Lucro da filial')
    plt.xlabel('Populacao da cidade (10k)')
    plt.ylabel('Lucro (10k)')
    plt.legend()

    filename = 'target/plot1.2.png'
    # exist_ok avoids the check-then-create race of the previous version.
    os.makedirs(os.path.dirname(filename), exist_ok=True)
    # Save BEFORE plt.show(): show() blocks and the figure is cleared when its
    # window closes, so saving afterwards produced an empty image.
    plt.savefig(filename)
    plt.show()
| plot |
resolveUser.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const enhanced_resolve_1 = require("enhanced-resolve");
const path = require("path");
/**
 * Resolves a user-supplied module id to a file path using the project's
 * webpack resolve configuration (populated by `resolveUser.init`).
 *
 * @param {string} id Module id; a leading '/' is rewritten to './' so the id
 *   resolves relative to `resolveUser.rootDir`.
 * @returns {Promise<string>} Promise of the resolved path.
 */
function resolveUser(id) {
    return new Promise((resolve, reject) => {
        if (id.startsWith('/')) {
            id = '.' + id;
        }
        // NOTE(review): requires resolveUser.init(rootDir) to have been called
        // first; until then Resolver and rootDir are null and this call fails.
        resolveUser.Resolver.resolve({}, resolveUser.rootDir, id, (err, res) => {
            if (err)
                return reject(err);
            resolve(res);
        });
    });
}
exports.resolveUser = resolveUser;
// Initializes the resolver from the project's webpack.config.js.
// Must be called before resolveUser() is used.
resolveUser.init = function init(rootDir) {
    resolveUser.rootDir = rootDir;
    resolveUser.config = require(path.join(rootDir, 'webpack.config.js'));
    resolveUser.Resolver = enhanced_resolve_1.ResolverFactory.createResolver({
        // 4000 is the CachedInputFileSystem cache duration in ms
        // (per enhanced-resolve's API — TODO confirm against the pinned version).
        fileSystem: new enhanced_resolve_1.CachedInputFileSystem(new enhanced_resolve_1.NodeJsInputFileSystem(), 4000),
        ...resolveUser.config.resolve,
    });
};
// State populated by init(); null until then.
resolveUser.Resolver = null;
resolveUser.rootDir = null;
resolveUser.config = null;
//# sourceMappingURL=resolveUser.js.map
//# sourceMappingURL=resolveUser.js.map | resolveUser |
painter.rs | // Copyright 2020 The druid Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::piet::{FixedGradient, LinearGradient, PaintBrush, RadialGradient};
use crate::{
BoxConstraints, Color, Data, Env, Event, EventCtx, Key, LayoutCtx, LifeCycle, LifeCycleCtx,
PaintCtx, RenderContext, Size, UpdateCtx, Widget,
};
/// A widget that only handles painting.
///
/// This is useful in a situation where layout is controlled elsewhere and you
/// do not need to handle events, but you would like to customize appearance.
///
/// **When is paint called?**
///
/// The `Painter` widget will call its [`paint`] method anytime its [`Data`]
/// is changed. If you would like it to repaint at other times (such as when
/// hot or active state changes) you will need to call [`request_paint`] further
/// up the tree, perhaps in a [`Controller`] widget.
///
/// # Examples
///
/// Changing background color based on some part of data:
///
/// ```
/// use druid::{Env, PaintCtx,Rect, RenderContext};
/// use druid::widget::Painter;
/// # const ENABLED_BG_COLOR: druid::Key<druid::Color> = druid::Key::new("fake key");
/// # const DISABLED_BG_COLOR: druid::Key<druid::Color> = druid::Key::new("fake key 2");
///
/// struct MyData { is_enabled: bool }
///
/// let my_painter = Painter::new(|ctx, data: &MyData, env| {
/// let bounds = ctx.size().to_rect();
/// if data.is_enabled {
/// ctx.fill(bounds, &env.get(ENABLED_BG_COLOR));
/// } else {
///
/// ctx.fill(bounds, &env.get(DISABLED_BG_COLOR));
/// }
/// });
/// ```
///
/// Using painter to make a simple widget that will draw a selected color
///
///
/// ```
/// use druid::{Color, Env, PaintCtx,Rect, RenderContext};
/// use druid::widget::Painter;
///
/// const CORNER_RADIUS: f64 = 4.0;
/// const STROKE_WIDTH: f64 = 2.0;
///
/// let colorwell: Painter<Color> = Painter::new(|ctx, data: &Color, env| {
/// // Shrink the bounds a little, to ensure that our stroke remains within
/// // the paint bounds.
/// let bounds = ctx.size().to_rect().inset(-STROKE_WIDTH / 2.0);
/// let rounded = bounds.to_rounded_rect(CORNER_RADIUS);
/// ctx.fill(rounded, data);
/// ctx.stroke(rounded, &env.get(druid::theme::PRIMARY_DARK), STROKE_WIDTH);
/// });
/// ```
///
/// [`paint`]: ../trait.Widget.html#tymethod.paint
/// [`Data`]: ../trait.Data.html
/// [`request_paint`]: ../EventCtx.html#method.request_paint
/// [`Controller`]: trait.Controller.html
pub struct Painter<T>(Box<dyn FnMut(&mut PaintCtx, &T, &Env)>);
/// Something that can be used as the background for a widget.
///
/// This represents anything that can be painted inside a widgets [`paint`]
/// method; that is, it may have access to the [`Data`] and the [`Env`].
///
/// [`paint`]: ../trait.Widget.html#tymethod.paint
/// [`Data`]: ../trait.Data.html
/// [`Env`]: ../struct.Env.html
#[non_exhaustive]
pub enum BackgroundBrush<T> {
    /// A fixed color.
    Color(Color),
    /// A color looked up in the [`Env`] at paint time.
    ColorKey(Key<Color>),
    /// A linear gradient brush.
    Linear(LinearGradient),
    /// A radial gradient brush.
    Radial(RadialGradient),
    /// A gradient with fixed (absolute) coordinates.
    Fixed(FixedGradient),
    /// An arbitrary [`Painter`] closure, given the data and env.
    Painter(Painter<T>),
}
impl<T> Painter<T> {
/// Create a new `Painter` with the provided [`paint`] fn.
///
/// [`paint`]: ../trait.Widget.html#tymethod.paint
pub fn new(f: impl FnMut(&mut PaintCtx, &T, &Env) + 'static) -> Self {
Painter(Box::new(f))
}
}
impl<T: Data> BackgroundBrush<T> {
    /// Draw this `BackgroundBrush` into a provided [`PaintCtx`].
    ///
    /// Every variant fills the widget's full bounds with its brush, except
    /// `Painter`, which delegates entirely to its closure.
    ///
    /// [`PaintCtx`]: ../struct.PaintCtx.html
    pub fn paint(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) {
        let bounds = ctx.size().to_rect();
        match self {
            Self::Color(color) => ctx.fill(bounds, color),
            // Resolve the color from the environment at paint time.
            Self::ColorKey(key) => ctx.fill(bounds, &env.get(key)),
            Self::Linear(grad) => ctx.fill(bounds, grad),
            Self::Radial(grad) => ctx.fill(bounds, grad),
            Self::Fixed(grad) => ctx.fill(bounds, grad),
            Self::Painter(painter) => painter.paint(ctx, data, env),
        }
    }
}
impl<T: Data> Widget<T> for Painter<T> {
fn event(&mut self, _: &mut EventCtx, _: &Event, _: &mut T, _: &Env) {}
fn lifecycle(&mut self, _: &mut LifeCycleCtx, _: &LifeCycle, _: &T, _: &Env) {}
fn update(&mut self, ctx: &mut UpdateCtx, old: &T, new: &T, _: &Env) { | }
fn layout(&mut self, _ctx: &mut LayoutCtx, bc: &BoxConstraints, _: &T, _: &Env) -> Size {
bc.max()
}
fn paint(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) {
(self.0)(ctx, data, env)
}
}
// Ergonomic conversions: colors, gradients, painters, and piet `PaintBrush`
// values can all be used directly wherever a `BackgroundBrush` is accepted.

impl<T> From<Color> for BackgroundBrush<T> {
    fn from(src: Color) -> BackgroundBrush<T> {
        BackgroundBrush::Color(src)
    }
}

impl<T> From<Key<Color>> for BackgroundBrush<T> {
    fn from(src: Key<Color>) -> BackgroundBrush<T> {
        BackgroundBrush::ColorKey(src)
    }
}

impl<T> From<LinearGradient> for BackgroundBrush<T> {
    fn from(src: LinearGradient) -> BackgroundBrush<T> {
        BackgroundBrush::Linear(src)
    }
}

impl<T> From<RadialGradient> for BackgroundBrush<T> {
    fn from(src: RadialGradient) -> BackgroundBrush<T> {
        BackgroundBrush::Radial(src)
    }
}

impl<T> From<FixedGradient> for BackgroundBrush<T> {
    fn from(src: FixedGradient) -> BackgroundBrush<T> {
        BackgroundBrush::Fixed(src)
    }
}

impl<T> From<Painter<T>> for BackgroundBrush<T> {
    fn from(src: Painter<T>) -> BackgroundBrush<T> {
        BackgroundBrush::Painter(src)
    }
}

impl<T> From<PaintBrush> for BackgroundBrush<T> {
    // Map each `PaintBrush` variant onto the equivalent brush variant.
    fn from(src: PaintBrush) -> BackgroundBrush<T> {
        match src {
            PaintBrush::Linear(grad) => BackgroundBrush::Linear(grad),
            PaintBrush::Radial(grad) => BackgroundBrush::Radial(grad),
            PaintBrush::Fixed(grad) => BackgroundBrush::Fixed(grad),
            PaintBrush::Color(color) => BackgroundBrush::Color(color),
        }
    }
}
ctx.request_paint();
} |
concurrency.go | // Copyright 2019 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package remotestorage
import (
"context"
"golang.org/x/sync/errgroup"
)
func concurrentExec(work []func() error, concurrency int) error |
// batchItr invokes cb with consecutive [start, end) index ranges of size
// batchSize over elemCount elements (the final range may be shorter), and
// stops early if cb returns true.
func batchItr(elemCount, batchSize int, cb func(start, end int) (stop bool)) {
	start := 0
	for start < elemCount {
		end := start + batchSize
		if end > elemCount {
			end = elemCount
		}
		if cb(start, end) {
			return
		}
		start = end
	}
}
| {
if concurrency <= 0 {
panic("Invalid argument")
} else if len(work) < concurrency {
concurrency = len(work)
}
ch := make(chan func() error)
eg, ctx := errgroup.WithContext(context.Background())
// Push the work...
eg.Go(func() error {
defer close(ch)
for _, w := range work {
select {
case ch <- w:
case <-ctx.Done():
return ctx.Err()
}
}
return nil
})
// Do the work...
for i := 0; i < concurrency; i++ {
eg.Go(func() error {
for {
select {
case w, ok := <-ch:
if !ok {
return nil
}
if err := w(); err != nil {
return err
}
case <-ctx.Done():
return ctx.Err()
}
}
})
}
return eg.Wait()
} |
peers.go | // Copyright (c) 2022 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package peertest
import (
"fmt"
"sync"
"github.com/golang/mock/gomock"
"go.uber.org/yarpc/api/peer"
)
// MockPeerIdentifier is a lightweight string-based implementation of the
// peer.Identifier interface. A plain string type is used because gomock +
// assert.Equal have difficulty distinguishing distinct mock objects of the
// same generated type.
type MockPeerIdentifier string

// Identifier returns the unique identifier for this mock peer
// (the string value itself).
func (pid MockPeerIdentifier) Identifier() string {
	return string(pid)
}
// NewLightMockPeer returns a new MockPeer
func NewLightMockPeer(pid MockPeerIdentifier, conStatus peer.ConnectionStatus) *LightMockPeer |
// LightMockPeer is a small simple wrapper around the Peer interface for mocking and changing
// a peer's attributes
// MockPeer is NOT thread safe
type LightMockPeer struct {
sync.Mutex
MockPeerIdentifier
PeerStatus peer.Status
}
// Status returns the Status Object of the MockPeer
func (p *LightMockPeer) Status() peer.Status {
return p.PeerStatus
}
// StartRequest is run when a Request starts
func (p *LightMockPeer) StartRequest() {
p.Lock()
p.PeerStatus.PendingRequestCount++
p.Unlock()
}
// EndRequest should be run after a MockPeer request has finished
func (p *LightMockPeer) EndRequest() {
p.Lock()
p.PeerStatus.PendingRequestCount--
p.Unlock()
}
// PeerIdentifierMatcher is used to match a Peer/PeerIdentifier by comparing
// The peer's .Identifier function with the Matcher string
type PeerIdentifierMatcher string
// Matches returns true of got is equivalent to the PeerIdentifier Matching string
func (pim PeerIdentifierMatcher) Matches(got interface{}) bool {
gotPID, ok := got.(peer.Identifier)
if !ok {
return false
}
return gotPID.Identifier() == string(pim)
}
// String returns a description of the matcher
func (pim PeerIdentifierMatcher) String() string {
return fmt.Sprintf("PeerIdentifierMatcher(%s)", string(pim))
}
// CreatePeerIDs takes a slice of peerID strings and returns a slice of PeerIdentifiers
func CreatePeerIDs(peerIDStrs []string) []peer.Identifier {
pids := make([]peer.Identifier, 0, len(peerIDStrs))
for _, id := range peerIDStrs {
pids = append(pids, MockPeerIdentifier(id))
}
return pids
}
// ExpectPeerRetains registers expectations on a MockTransport to generate peers on the RetainPeer function
func ExpectPeerRetains(
transport *MockTransport,
availablePeerStrs []string,
unavailablePeerStrs []string,
) map[string]*LightMockPeer {
peers := make(map[string]*LightMockPeer, len(availablePeerStrs)+len(unavailablePeerStrs))
for _, peerStr := range availablePeerStrs {
p := NewLightMockPeer(MockPeerIdentifier(peerStr), peer.Available)
transport.EXPECT().RetainPeer(PeerIdentifierMatcher(peerStr), gomock.Any()).Return(p, nil)
peers[p.Identifier()] = p
}
for _, peerStr := range unavailablePeerStrs {
p := NewLightMockPeer(MockPeerIdentifier(peerStr), peer.Unavailable)
transport.EXPECT().RetainPeer(PeerIdentifierMatcher(peerStr), gomock.Any()).Return(p, nil)
peers[p.Identifier()] = p
}
return peers
}
// ExpectPeerRetainsWithError registers expectations on a MockTransport return errors
func ExpectPeerRetainsWithError(
transport *MockTransport,
peerStrs []string,
err error, // Will be returned from the MockTransport on the Retains of these Peers
) {
for _, peerStr := range peerStrs {
transport.EXPECT().RetainPeer(PeerIdentifierMatcher(peerStr), gomock.Any()).Return(nil, err)
}
}
// ExpectPeerReleases registers expectations on a MockTransport to release peers through the ReleasePeer function
func ExpectPeerReleases(
transport *MockTransport,
peerStrs []string,
err error,
) {
for _, peerStr := range peerStrs {
transport.EXPECT().ReleasePeer(PeerIdentifierMatcher(peerStr), gomock.Any()).Return(err)
}
}
| {
return &LightMockPeer{
MockPeerIdentifier: pid,
PeerStatus: peer.Status{
ConnectionStatus: conStatus,
PendingRequestCount: 0,
},
}
} |
issue-2863.rs | // rustfmt-reorder_impl_items: true
impl<T> IntoIterator for SafeVec<T> {
existential type F: Trait;
type IntoIter = self::IntoIter<T>;
type Item = T;
// comment on foo()
fn foo() {println!("hello, world");}
type Bar = u32;
fn foo1() {println!("hello, world");}
type FooBar = u32;
fn foo2() {println!("hello, world");}
fn | () {println!("hello, world");}
const SomeConst: i32 = 100;
fn foo4() {println!("hello, world");}
fn foo5() {println!("hello, world");}
// comment on FoooooBar
type FoooooBar = u32;
fn foo6() {println!("hello, world");}
fn foo7() {println!("hello, world");}
type BarFoo = u32;
existential type E: Trait;
const AnotherConst: i32 = 100;
fn foo8() {println!("hello, world");}
}
| foo3 |
profile.service.ts | import { Injectable } from '@angular/core';
import {User} from '../user';
import {HttpClient} from '@angular/common/http';
import {environment} from '../../environments/environment';
import {Repo} from '../repo';
@Injectable({
providedIn: 'root'
})
export class | {
repo: Repo;
user: User;
private username: string;
items;
constructor(private http: HttpClient) {
this.user = new User (' ', ' ', ' ', ' ', ' ', 0, ' ');
this.repo = new Repo (' ', ' ', ' ', ' ', '');
}
getProfileInfo(username) {
interface ApiResponse {
name: string;
login: string;
avatar_url: string;
email: string;
location: string;
public_repos: number;
html_url: string;
}
const promise = new Promise((resolve, reject) => {
this.http.get<ApiResponse>(environment.apiUrl + username + environment.apikey).toPromise().then(profile => {
this.user.name = profile.name;
this.user.login = profile.login;
this.user.avatar_url = profile.avatar_url;
this.user.email = profile.email;
this.user.location = profile.location;
this.user.public_repos = profile.public_repos;
this.user.html_url = profile.html_url;
console.log(profile);
resolve();
},
);
});
return promise;
}
getRepoInfo(username) {
interface ApiResponse {
name: string;
homepage: string;
description: string;
html_url: string;
clone_url: string;
}
this.http.get<ApiResponse>(environment.apiUrl + username + environment.apiRepokey).subscribe(response => {
this.items = response;
});
}
}
| ProfileService |
monitor-default.service.spec.ts | import { Test, TestingModule } from '@nestjs/testing';
import { LoggerModule } from '@us-epa-camd/easey-common/logger';
import { MonitorDefaultMap } from '../maps/monitor-default.map';
import { MonitorDefaultService } from './monitor-default.service';
import { MonitorDefaultRepository } from './monitor-default.repository';
const mockRepository = () => ({
find: jest.fn().mockResolvedValue(''),
});
const mockMap = () => ({
many: jest.fn().mockResolvedValue(''),
});
describe('MonitorDefaultService', () => {
let service: MonitorDefaultService;
beforeEach(async () => {
const module: TestingModule = await Test.createTestingModule({
imports: [LoggerModule],
providers: [
MonitorDefaultService,
{
provide: MonitorDefaultRepository,
useFactory: mockRepository,
},
{
provide: MonitorDefaultMap,
useFactory: mockMap,
},
],
}).compile();
service = module.get<MonitorDefaultService>(MonitorDefaultService);
});
it('should be defined', () => { | expect(service).toBeDefined();
});
describe('getDefaults', () => {
it('should return array of location defaults', async () => {
const result = await service.getDefaults(null);
expect(result).toEqual('');
});
});
}); | |
string_array_test.go | package gosql
import (
"encoding/json"
"reflect"
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
func TestStringArrayJSON(t *testing.T) {
var arr1 = StringArray{"val1", "val2", "val3"}
data, err := json.Marshal(arr1)
assert.NoError(t, err, "marshal array")
var arr2 StringArray
err = json.Unmarshal(data, &arr2)
assert.NoError(t, err, "unmarshal array")
assert.True(t, reflect.DeepEqual(arr1, arr2),
"compare encode/decode result")
}
func | (t *testing.T) {
var arr StringArray
err := arr.Scan("{10000,10000,10000,10000}")
assert.NoError(t, err, "scan array")
assert.ElementsMatch(t, arr, []string{"10000", "10000", "10000", "10000"},
"compare scan result")
arr = arr[:0]
sqlStringArray := `{"breakfast", "consulting", "bar-""#1"""}`
err = arr.Scan(sqlStringArray)
assert.NoError(t, err, "scan array")
assert.ElementsMatch(t, arr, []string{"breakfast", "consulting", `bar-"#1"`},
"compare scan result")
sqlVal, err := arr.Value()
assert.NoError(t, err, "encode array")
assert.Equal(t, strings.ReplaceAll(sqlStringArray, " ", ""), sqlVal)
}
| TestStringArraySQL |
config.go | // Copyright (c) 2017 VMware, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package config
import (
"io/ioutil"
"os"
yaml "gopkg.in/yaml.v2"
)
//DefaultConfig ...
var DefaultConfig = &Configuration{}
//Configuration loads the configuration of registry controller.
type Configuration struct {
Protocol string `yaml:"protocol"`
Port string `yaml:"port"`
LogLevel string `yaml:"log_level"`
HTTPSConfig struct {
Cert string `yaml:"cert"`
Key string `yaml:"key"`
} `yaml:"https_config,omitempty"`
}
//Load the configuration options from the specified yaml file.
func (c *Configuration) Load(yamlFilePath string, detectEnv bool) error {
if len(yamlFilePath) != 0 {
//Try to load from file first
data, err := ioutil.ReadFile(yamlFilePath)
if err != nil {
return err
}
if err = yaml.Unmarshal(data, c); err != nil {
return err
}
}
if detectEnv {
c.loadEnvs()
}
return nil
}
//GetLogLevel returns the log level
func GetLogLevel() string {
return DefaultConfig.LogLevel
}
//GetJobAuthSecret get the auth secret from the env
func GetJobAuthSecret() string {
return os.Getenv("JOBSERVICE_SECRET")
}
//GetUIAuthSecret get the auth secret of UI side
func GetUIAuthSecret() string |
//loadEnvs Load env variables
func (c *Configuration) loadEnvs() {
prot := os.Getenv("REGISTRYCTL_PROTOCOL")
if len(prot) != 0 {
c.Protocol = prot
}
p := os.Getenv("PORT")
if len(p) != 0 {
c.Port = p
}
//Only when protocol is https
if c.Protocol == "HTTPS" {
cert := os.Getenv("REGISTRYCTL_HTTPS_CERT")
if len(cert) != 0 {
c.HTTPSConfig.Cert = cert
}
certKey := os.Getenv("REGISTRYCTL_HTTPS_KEY")
if len(certKey) != 0 {
c.HTTPSConfig.Key = certKey
}
}
loggerLevel := os.Getenv("LOG_LEVEL")
if len(loggerLevel) != 0 {
c.LogLevel = loggerLevel
}
}
| {
return os.Getenv("UI_SECRET")
} |
node.go | package node
import (
"errors"
"github.com/devfeel/rockman/cluster"
"github.com/devfeel/rockman/config"
"github.com/devfeel/rockman/core"
"github.com/devfeel/rockman/logger"
"github.com/devfeel/rockman/metrics"
"github.com/devfeel/rockman/protected/service"
"github.com/devfeel/rockman/registry"
"github.com/devfeel/rockman/rpc/client"
"github.com/devfeel/rockman/runtime"
"strconv"
"time"
)
const defaultLockerTTL = "10s"
type (
Node struct {
NodeId string
NodeName string
isLeader bool
Status int
config *config.Profile
nodeInfo *core.NodeInfo
Cluster *cluster.Cluster
Registry *registry.Registry
Runtime *runtime.Runtime
shutdownChan chan string
isSTW bool //stop the world flag
logLogic *service.LogService
isRunCycleLoadExecutors bool
executorFlagLastIndex uint64
}
)
var (
ErrorCanNotSubmitToNotLeaderNode = errors.New("can not submit to not leader node")
ErrorStopTheWorld = errors.New("node is stop the world")
)
func NewNode(profile *config.Profile, shutdown chan string) (*Node, error) {
logger.Node().Debug("Node {" + profile.Node.NodeId + "} begin init...")
node := &Node{
NodeId: profile.Node.NodeId,
NodeName: profile.Node.NodeName,
config: profile,
shutdownChan: shutdown,
logLogic: service.NewLogService(),
}
err := node.init()
if err != nil {
logger.Node().Debug("Node init error: " + err.Error())
} else {
logger.Node().Debug("Node init success.")
}
return node, err
}
func (n *Node) init() error {
// init registry
registry, err := registry.NewRegistry(n.config.Cluster.RegistryServer)
if err != nil {
return err
}
registry.OnServerOnline = n.onRegistryOnline
registry.OnServerOffline = n.onRegistryOffline
n.Registry = registry
// init cluster
cluster := cluster.NewCluster(n.config, registry)
cluster.OnLeaderChange = n.onLeaderChange
cluster.OnLeaderChangeFailed = n.onLeaderChangeFailed
// init runtime
n.Cluster = cluster
if n.config.Node.IsWorker {
n.Runtime = runtime.NewRuntime(n.NodeInfo(), n.config)
}
return nil
}
func (n *Node) Start() error {
logger.Default().Debug("Node start...")
metrics.Default().Inc(metrics.LabelNodeStart)
// create session with node info
err := n.createSession(n.NodeInfo().GetNodeKey(n.Cluster.ClusterId))
if err != nil {
return err
}
if n.config.Node.IsMaster {
n.electionLeader()
}
// register node to cluster
err = n.registerNode()
if err != nil {
return err
}
err = n.Registry.Start()
if err != nil {
return err
}
err = n.Cluster.Start()
if err != nil {
return err
}
if n.config.Node.IsWorker {
err = n.Runtime.Start()
if err != nil {
return err
}
}
return nil
}
func (n *Node) Config() *config.Profile {
return n.config
}
func (n *Node) IsMaster() bool { |
func (n *Node) IsWorker() bool {
return n.config.Node.IsWorker
}
func (n *Node) IsLeader() bool {
return n.isLeader
}
func (n *Node) ClusterId() string {
return n.Cluster.ClusterId
}
func (n *Node) Shutdown() {
logTitle := "Node Shutdown "
logger.Node().Debug(logTitle + "doing.")
n.stopTheWorld()
n.shutdownChan <- "ok"
}
func (n *Node) NodeInfo() *core.NodeInfo {
if n.nodeInfo != nil {
return n.nodeInfo
}
n.nodeInfo = &core.NodeInfo{
NodeID: n.NodeId,
Cluster: n.config.Cluster.ClusterId,
OuterHost: n.config.Rpc.OuterHost,
OuterPort: n.config.Rpc.OuterPort,
Host: n.config.Rpc.RpcHost,
Port: n.config.Rpc.RpcPort,
IsMaster: n.config.Node.IsMaster,
IsWorker: n.config.Node.IsWorker,
IsOnline: true,
}
if n.Runtime != nil {
n.nodeInfo.Executors = n.Runtime.GetTaskIDs()
}
return n.nodeInfo
}
func (n *Node) stopTheWorld() {
lt := "Node stopTheWorld "
logger.Node().Debug(lt + "begin.")
metrics.Default().Inc(metrics.LabelStopTheWorld)
logger.Node().Debug(lt + "set SWT flag true.")
n.isSTW = true
if n.Cluster != nil {
n.Cluster.Stop()
}
if n.Runtime != nil {
n.Runtime.Stop()
}
if n.Registry != nil {
n.Registry.Stop()
}
n.Registry = nil
n.Cluster = nil
n.Runtime = nil
logger.Node().Debug(lt + "success.")
}
func (n *Node) startTheWorld() {
lt := "Node startTheWorld "
logger.Node().Debug(lt + "begin.")
metrics.Default().Inc(metrics.LabelStartTheWorld)
logger.Node().Debug(lt + "set SWT flag false.")
n.isSTW = false
err := n.init()
if err != nil {
logger.Node().Debug(lt + "node init failed, error: " + err.Error())
n.Shutdown()
return
}
err = n.Start()
if err != nil {
logger.Node().Debug(lt + "node start failed, error: " + err.Error())
n.Shutdown()
return
}
logger.Node().Debug(lt + "success.")
}
// registerNode register node to cluster
func (n *Node) registerNode() error {
logTitle := "Node registerNode "
var leaderServer string
var err error
var retryCount int
nodeInfo := n.NodeInfo()
logger.Node().Debug(logTitle + "begin.")
RegisterNode:
for {
if n.isSTW {
return ErrorStopTheWorld
}
if retryCount > n.config.Global.RetryLimit {
err = errors.New("retry more than 5 times and stop it")
logger.Node().DebugS(logTitle + "error: " + err.Error())
return err
}
retryCount += 1
// get leader info
leaderServer, err = n.Cluster.GetLeaderInfo()
if err != nil {
logger.Node().Debug(logTitle + "GetLeaderInfo error:" + err.Error() + ", will retry 10 seconds after.")
time.Sleep(time.Second * 10)
continue RegisterNode
} else {
logger.Node().Debug(logTitle + "GetLeaderInfo success [" + leaderServer + "]")
rpcClient := client.NewRpcClient(leaderServer, n.config.Rpc.EnableTls, n.config.Rpc.ClientCertFile, n.config.Rpc.ClientKeyFile)
err, reply := rpcClient.CallRegisterNode(nodeInfo)
if err != nil {
logger.Node().Debug(logTitle + "CallRegisterNode error:" + err.Error() + ", will retry 10 seconds after.")
time.Sleep(time.Second * 10)
continue RegisterNode
}
if !reply.IsSuccess() {
logger.Node().Debug(logTitle + "CallRegisterNode failed:" + strconv.Itoa(reply.RetCode) + ", will retry 10 seconds after.")
time.Sleep(time.Second * 10)
continue RegisterNode
} else {
retryCount = 0
// if node is leader and register to self, mean cluster is init, remove old init flag
if leaderServer == n.NodeInfo().EndPoint() {
if err != nil {
logger.Node().Warn(logTitle + "delete executor-init flag error:" + err.Error())
}
}
logger.Node().DebugS(logTitle + "success.")
}
break
}
}
return nil
}
// createSession create session to registry server
func (n *Node) createSession(nodeKey string) error {
lt := "Node create session "
logger.Node().Debug(lt + "begin.")
locker, err := n.Registry.CreateLocker(nodeKey, n.NodeInfo().Json(), defaultLockerTTL)
if err != nil {
logger.Node().Debug(lt + "error: " + err.Error())
}
_, err = locker.Lock()
if err != nil {
logger.Node().Debug(lt + "error: " + err.Error())
return err
}
logger.Node().Debug(lt + "success with key {" + nodeKey + "}")
return nil
}
// onLeaderChange do something when leader is changed
func (n *Node) onLeaderChange() {
metrics.Default().Inc(metrics.LabelLeaderChange)
err := n.registerNode()
if err != nil {
logger.Node().DebugS("Node.onLeaderChange registerNode error:", err.Error())
} else {
logger.Node().Debug("Node.onLeaderChange registerNode success")
}
if n.IsLeader() {
if n.Cluster.LeaderServer != n.NodeInfo().EndPoint() {
n.becomeLeaderRole()
}
}
if n.IsMaster() && !n.IsLeader() {
if n.Cluster.LeaderServer == n.NodeInfo().EndPoint() {
n.removeLeaderRole()
}
}
}
// onLeaderChangeFailed
func (n *Node) onLeaderChangeFailed() {
metrics.Default().Inc(metrics.LabelLeaderChangeFailed)
logger.Node().DebugS("Node.onLeaderChangeFailed, now will shutdown node.")
n.Shutdown()
}
// onWorkerNodeOffline
func (n *Node) onWorkerNodeOffline(nodeInfo *core.NodeInfo) {
logTitle := "Node.onWorkerNodeOffline[" + nodeInfo.NodeID + "] "
if !n.isLeader {
logger.Node().Warn(logTitle + "is be called, but it's not leader")
return
}
metrics.Default().Inc(metrics.LabelWorkerNodeOffline)
var needReSubmits []*core.ExecutorInfo
for _, v := range n.Cluster.ExecutorInfos {
if v.Worker.NodeID == nodeInfo.NodeID {
needReSubmits = append(needReSubmits, v)
}
}
go func() {
for _, exec := range needReSubmits {
n.SubmitExecutor(exec)
}
}()
}
func (n *Node) onRegistryOnline() {
logger.Node().DebugS("Node.onRegistryOnline registry online, now start the world.")
metrics.Default().Inc(metrics.LabelRegistryOnline)
if n.isSTW {
n.startTheWorld()
}
}
func (n *Node) onRegistryOffline() {
logger.Node().DebugS("Node.onRegistryOffline registry offline, now stop the world.")
metrics.Default().Inc(metrics.LabelRegistryOffline)
n.stopTheWorld()
}
func (n *Node) refreshNodeInfo() *core.NodeInfo {
n.nodeInfo = &core.NodeInfo{
NodeID: n.NodeId,
Cluster: n.config.Cluster.ClusterId,
OuterHost: n.config.Rpc.OuterHost,
OuterPort: n.config.Rpc.OuterPort,
Host: n.config.Rpc.RpcHost,
Port: n.config.Rpc.RpcPort,
IsMaster: n.config.Node.IsMaster,
IsWorker: n.config.Node.IsWorker,
IsOnline: true,
}
if n.IsWorker() {
n.nodeInfo.Executors = n.Runtime.GetTaskIDs()
}
return n.nodeInfo
} | return n.config.Node.IsMaster
} |
server.js | const config = require('nconf');
const cors = require('@koa/cors');
const Koa = require('koa'); | const path = require('path');
const {
getMovie,
} = require('./api/index.js');
if (!!process.env.BOND_ENV && process.env.BOND_ENV === 'docker') {
config.file('docker', { file: `${process.cwd()}/config/config-docker.json` });
}
config.file(`${process.cwd()}/config/config.json`);
logger.info({
msg: `Starting up...\n\nSettings: ${JSON.stringify(config.get())}`,
})
const app = new Koa();
app.use(cors({
origin: '*',
}));
app.use(KoaBodyParser());
app.use(async (ctx, next) => {
ctx.logger = logger;
ctx.config = config;
await next();
});
const router = new KoaRouter();
router.get('/api/v1/movies/:title', getMovie);
app.use(router.routes());
app.listen(3004); | const KoaBodyParser = require('koa-bodyparser');
const KoaRouter = require('koa-router');
const logger = require('pino')().child({ app: 'MOVIE-METADATA-API' }); |
tdt2r.rs | #[doc = "Reader of register TDT2R"]
pub type R = crate::R<u32, super::TDT2R>;
#[doc = "Writer for register TDT2R"]
pub type W = crate::W<u32, super::TDT2R>;
#[doc = "Register TDT2R `reset()`'s with value 0"]
impl crate::ResetValue for super::TDT2R {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `TIME`"]
pub type TIME_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `TIME`"]
pub struct TIME_W<'a> {
w: &'a mut W,
}
impl<'a> TIME_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !(0xffff << 16)) | (((value as u32) & 0xffff) << 16);
self.w
}
}
#[doc = "Reader of field `TGT`"]
pub type TGT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TGT`"]
pub struct TGT_W<'a> {
w: &'a mut W,
}
impl<'a> TGT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
self.w
}
}
#[doc = "Reader of field `DLC`"]
pub type DLC_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DLC`"]
pub struct DLC_W<'a> {
w: &'a mut W,
}
impl<'a> DLC_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
self.w
}
}
impl R {
#[doc = "Bits 16:31 - TIME"]
#[inline(always)]
pub fn time(&self) -> TIME_R {
TIME_R::new(((self.bits >> 16) & 0xffff) as u16)
}
#[doc = "Bit 8 - TGT"]
#[inline(always)]
pub fn tgt(&self) -> TGT_R {
TGT_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bits 0:3 - DLC"]
#[inline(always)]
pub fn dlc(&self) -> DLC_R {
DLC_R::new((self.bits & 0x0f) as u8)
}
}
impl W {
#[doc = "Bits 16:31 - TIME"]
#[inline(always)]
pub fn time(&mut self) -> TIME_W {
TIME_W { w: self }
}
#[doc = "Bit 8 - TGT"]
#[inline(always)]
pub fn | (&mut self) -> TGT_W {
TGT_W { w: self }
}
#[doc = "Bits 0:3 - DLC"]
#[inline(always)]
pub fn dlc(&mut self) -> DLC_W {
DLC_W { w: self }
}
}
| tgt |
Context.tsx | import React, { Component } from "react";
import {
ProjectType,
ClusterType,
CapabilityType,
ContextProps,
} from "shared/types";
import { pushQueryParams } from "shared/routing";
const Context = React.createContext<Partial<ContextProps>>(null);
const { Provider } = Context;
const ContextConsumer = Context.Consumer;
type PropsType = {
history: any;
location: any;
};
type StateType = GlobalContextType;
export interface GlobalContextType {
currentModal: string;
currentModalData: any;
setCurrentModal: (currentModal: string, currentModalData?: any) => void;
currentError: string | null;
setCurrentError: (currentError: string) => void;
currentCluster: ClusterType;
setCurrentCluster: (currentCluster: ClusterType, callback?: any) => void;
currentProject: ProjectType | null;
setCurrentProject: (
currentProject: ProjectType,
callback?: () => void
) => void;
projects: ProjectType[];
setProjects: (projects: ProjectType[]) => void;
user: any;
setUser: (userId: number, email: string) => void;
devOpsMode: boolean;
setDevOpsMode: (devOpsMode: boolean) => void;
capabilities: CapabilityType;
setCapabilities: (capabilities: CapabilityType) => void;
clearContext: () => void;
}
/**
* Component managing a universal (application-wide) data store.
*
* Important Usage Notes:
* 1) Each field must have an accompanying setter
* 2) No function calls are allowed from within Context (not counting
* initialization)
* 3) Context should be used as a last-resort (changes will re-render ALL
* components consuming Context)
* 4) As a rule of thumb, Context should not be used for UI-related state
*/
class | extends Component<PropsType, StateType> {
state: GlobalContextType = {
currentModal: null,
currentModalData: null,
setCurrentModal: (currentModal: string, currentModalData?: any) => {
this.setState({ currentModal, currentModalData });
},
currentError: null,
setCurrentError: (currentError: string) => {
this.setState({ currentError });
},
currentCluster: {
id: -1,
name: "",
server: "",
service_account_id: -1,
infra_id: -1,
service: "",
},
setCurrentCluster: (currentCluster: ClusterType, callback?: any) => {
localStorage.setItem(
this.state.currentProject.id + "-cluster",
JSON.stringify(currentCluster)
);
this.setState({ currentCluster }, () => {
callback && callback();
});
},
currentProject: null,
setCurrentProject: (currentProject: ProjectType, callback?: any) => {
if (currentProject) {
localStorage.setItem("currentProject", currentProject.id.toString());
pushQueryParams(this.props, {
project_id: currentProject.id.toString(),
});
} else {
localStorage.removeItem("currentProject");
}
this.setState({ currentProject }, () => {
callback && callback();
});
},
projects: [],
setProjects: (projects: ProjectType[]) => {
projects.sort((a: any, b: any) => (a.name > b.name ? 1 : -1));
this.setState({ projects });
},
user: null,
setUser: (userId: number, email: string) => {
this.setState({ user: { userId, email } });
},
devOpsMode: true,
setDevOpsMode: (devOpsMode: boolean) => {
this.setState({ devOpsMode });
},
capabilities: null,
setCapabilities: (capabilities: CapabilityType) => {
this.setState({ capabilities });
},
clearContext: () => {
this.setState({
currentModal: null,
currentModalData: null,
currentError: null,
currentCluster: null,
currentProject: null,
projects: [],
user: null,
devOpsMode: true,
});
},
};
render() {
return <Provider value={this.state}>{this.props.children}</Provider>;
}
}
export { Context, ContextProvider, ContextConsumer };
| ContextProvider |
data.js | $axure.loadCurrentPage( | (function() {
var _ = function() { var r={},a=arguments; for(var i=0; i<a.length; i+=2) r[a[i]]=a[i+1]; return r; }
var _creator = function() { return _(b,c,d,e,f,g,h,g,i,_(j,k),l,[m],n,_(o,p,q,r,s,t,u,_(),v,_(w,x,y,z,A,_(B,C,D,E),F,null,G,z,H,z,I,J,K,null,L,M,N,O,P,Q,R,M),S,_(),T,_(),U,_(V,[_(W,X,Y,j,Z,ba,q,bb,bc,bb,bd,be,v,_(bf,_(bg,bh,bi,bj)),S,_(),bk,_(),bl,bm),_(W,bn,Y,bo,Z,bp,q,bq,bc,bq,bd,be,v,_(br,_(bs,bt,bu,bt)),S,_(),bk,_(),bv,[_(W,bw,Y,bx,Z,by,q,bz,bc,bz,bd,be,v,_(bf,_(bg,bA,bi,bB),w,bC,br,_(bs,bD,bu,bE),bF,bG),S,_(),bk,_(),bH,g),_(W,bI,Y,bJ,Z,bK,q,bz,bc,bL,bd,be,v,_(bf,_(bg,bM,bi,bN),w,bC,br,_(bs,bO,bu,bP),bQ,_(bR,_()),bS,_(B,C,D,bT),R,bU,bF,bV),S,_(),bk,_(),bW,_(bX,bY),bH,g),_(W,bZ,Y,ca,Z,bK,q,bz,bc,bL,bd,be,v,_(bf,_(bg,bM,bi,bN),w,bC,br,_(bs,bO,bu,cb),bQ,_(bR,_()),bS,_(B,C,D,bT),R,bU,bF,bG),S,_(),bk,_(),bW,_(bX,bY),bH,g),_(W,cc,Y,cd,Z,ce,q,cf,bc,cf,bd,be,v,_(P,cg,ch,ci,cj,_(B,C,D,bT,ck,cl),bf,_(bg,cm,bi,cn),bQ,_(co,_(cj,_(B,C,D,cp,ck,cl))),w,cq,br,_(bs,cr,bu,cs),ct,cu,cv,cw),cx,g,S,_(),bk,_(),cy,j),_(W,cz,Y,cA,Z,by,q,bz,bc,bz,bd,be,v,_(P,cg,ch,ci,cj,_(B,C,D,E,ck,cl),bf,_(bg,cs,bi,cB),w,cC,br,_(bs,cD,bu,cE),ct,cu,bF,bG,A,_(B,C,D,bT)),S,_(),bk,_(),T,_(cF,_(cG,cH,cI,[_(cG,cJ,cK,g,cL,_(cM,cN,cO,cP,cQ,_(cM,cN,cO,cR,cQ,_(cM,cS,cT,cU,cV,[_(cM,cW,cX,g,cY,g,cZ,g,da,[db])]),dc,_(cM,dd,da,de,df,[])),dc,_(cM,cN,cO,cR,cQ,_(cM,cS,cT,cU,cV,[_(cM,cW,cX,g,cY,g,cZ,g,da,[dg])]),dc,_(cM,dd,da,dh,df,[]))),di,[_(dj,dk,cG,dl,dm,_(dn,n,b,dp,dq,be),dr,ds)]),_(cG,dt,cK,g,cL,_(cM,cN,cO,du,cQ,_(cM,cN,cO,dv,cQ,_(cM,cS,cT,cU,cV,[_(cM,cW,cX,g,cY,g,cZ,g,da,[db])]),dc,_(cM,dd,da,de,df,[])),dc,_(cM,cN,cO,dv,cQ,_(cM,cS,cT,cU,cV,[_(cM,cW,cX,g,cY,g,cZ,g,da,[dg])]),dc,_(cM,dd,da,dh,df,[]))),di,[_(dj,dw,cG,dx,dy,[_(dz,[dA],dB,_(dC,dD,dE,_(dF,dG,dH,g)))])])])),dI,be,bH,g),_(W,db,Y,dJ,Z,ce,q,cf,bc,cf,bd,be,v,_(bf,_(bg,dK,bi,dL),bQ,_(co,_(cj,_(B,C,D,cp,ck,cl))),w,cq,br,_(bs,dM,bu,dN)),cx,be,S,_(),bk,_(),T,_(cF,_(cG,cH,cI,[_(cG,dJ,cK,g,di,[_(dj,dO,cG,dP,dQ,_(cM,dR,dS,[_(cM,cS,cT,dT,cV,[_(cM,cW,cX,g,cY,g,cZ,g,da,[db]),_(cM,dd,da,de,df,[])])]))])])),dI,be,cy,bJ),_(W,dg,Y,dU,Z,ce,q,cf,bc,cf,bd,be,v,_(bf,_(bg,dK
,bi,dL),bQ,_(co,_(cj,_(B,C,D,cp,ck,cl))),w,cq,br,_(bs,dM,bu,dV)),cx,be,S,_(),bk,_(),T,_(cF,_(cG,cH,cI,[_(cG,dU,cK,g,di,[_(dj,dO,cG,dW,dQ,_(cM,dR,dS,[_(cM,cS,cT,dT,cV,[_(cM,cW,cX,g,cY,g,cZ,g,da,[dg]),_(cM,dd,da,dh,df,[])])]))])])),dI,be,cy,ca),_(W,dX,Y,dY,Z,ce,q,cf,bc,cf,bd,be,v,_(cj,_(B,C,D,dZ,ck,cl),bf,_(bg,ea,bi,eb),bQ,_(co,_(cj,_(B,C,D,cp,ck,cl))),w,cq,br,_(bs,ec,bu,ed),cv,cw,A,_(B,C,D,ee)),cx,g,S,_(),bk,_(),T,_(cF,_(cG,cH,cI,[_(cG,ef,cK,g,di,[_(dj,dk,cG,eg,dm,_(dn,eh,ei,_(cM,dd,da,ej,df,[]),dq,g),dr,ds)])])),dI,be,cy,j)],ek,g),_(W,dA,Y,el,Z,bp,q,bq,bc,bq,bd,g,bR,be,v,_(br,_(bs,bt,bu,bt),bd,g),S,_(),bk,_(),bv,[_(W,em,Y,en,Z,by,q,bz,bc,bz,bd,g,bR,be,v,_(P,eo,bf,_(bg,ep,bi,bE),w,eq,br,_(bs,er,bu,es),bF,bG,ct,et),S,_(),bk,_(),bH,g),_(W,eu,Y,ev,Z,by,q,bz,bc,bz,bd,g,bR,be,v,_(P,eo,bf,_(bg,ew,bi,ex),w,cC,br,_(bs,ey,bu,ez),ct,et),S,_(),bk,_(),T,_(cF,_(cG,cH,cI,[_(cG,eA,cK,g,di,[_(dj,dw,cG,eB,dy,[_(dz,[dA],dB,_(dC,eC,dE,_(dF,dG,dH,g)))]),_(dj,dO,cG,eD,dQ,_(cM,dR,dS,[_(cM,cS,cT,dT,cV,[_(cM,cW,cX,g,cY,g,cZ,g,da,[dg]),_(cM,dd,da,j,df,[])]),_(cM,cS,cT,dT,cV,[_(cM,cW,cX,g,cY,g,cZ,g,da,[db]),_(cM,dd,da,j,df,[])])]))])])),dI,be,bH,g)],ek,g)])),eE,_(eF,_(o,eF,q,eG,s,ba,u,_(),v,_(w,x,y,z,A,_(B,C,D,E),F,null,G,z,H,z,I,J,K,null,L,M,N,O,P,Q,R,M),S,_(),T,_(),U,_(V,[_(W,eH,Y,eI,Z,by,q,bz,bc,bz,bd,be,v,_(bf,_(bg,bh,bi,bj),w,bC,A,_(B,C,D,eJ),bS,_(B,C,D,eJ)),S,_(),bk,_(),bH,g)]))),eK,_(eL,_(eM,eN,eO,_(eM,eP)),eQ,_(eM,eR),eS,_(eM,eT),eU,_(eM,eV),eW,_(eM,eX),eY,_(eM,eZ),fa,_(eM,fb),fc,_(eM,fd),fe,_(eM,ff),fg,_(eM,fh),fi,_(eM,fj),fk,_(eM,fl),fm,_(eM,fn)));};
var b="url",c="log_in.html",d="generationDate",e=new Date(1512757774989.68),f="isCanvasEnabled",g=false,h="isAdaptiveEnabled",i="sketchKeys",j="",k="s0",l="variables",m="OnLoadVariable",n="page",o="packageId",p="b8eec4c9d4df4290af9ddc7dfea895c3",q="type",r="Axure:Page",s="name",t="Log In",u="notes",v="style",w="baseStyle",x="627587b6038d43cca051c114ac41ad32",y="pageAlignment",z="near",A="fill",B="fillType",C="solid",D="color",E=0xFFFFFFFF,F="image",G="imageHorizontalAlignment",H="imageVerticalAlignment",I="imageRepeat",J="auto",K="favicon",L="sketchFactor",M="0",N="colorStyle",O="appliedColor",P="fontName",Q="Applied Font",R="borderWidth",S="adaptiveStyles",T="interactionMap",U="diagram",V="objects",W="id",X="f6891af993d74dc4aa9b457f9162aae6",Y="label",Z="friendlyType",ba="BG 1024x768 Outline",bb="referenceDiagramObject",bc="styleType",bd="visible",be=true,bf="size",bg="width",bh=1024,bi="height",bj=768,bk="imageOverrides",bl="masterId",bm="065e543ea3f04698b793bc69c6c83e00",bn="81942007ab3044eba77c781d431d5ab3",bo="Welcome - Log In",bp="Group",bq="layer",br="location",bs="x",bt=0,bu="y",bv="objs",bw="acdc3a8053f34fa38329ca439f382a5a",bx="Log In Box Bg",by="Rectangle",bz="vectorShape",bA=520,bB=404,bC="4b7bfc596114427989e10bb0b557d0ce",bD=252,bE=185,bF="cornerRadius",bG="10",bH="generateCompound",bI="11592941b7fb48309e3ee54ff049d893",bJ="Username",bK="Beveled Rectangle",bL="flowShape",bM=409,bN=61,bO=307,bP=280,bQ="stateStyles",bR="selected",bS="borderFill",bT=0xFF009966,bU="2",bV="120",bW="images",bX="normal~",bY="images/log_in/username_u4.png",bZ="61de94ae79824a9f9a1c12d67df5517d",ca="Password",cb=356,cc="6645f1e1252340d096cebceb5700b9ce",cd="Welcome",ce="Text Field",cf="textBox",cg="'Century Gothic Bold', 'Century 
Gothic'",ch="fontWeight",ci="700",cj="foreGroundFill",ck="opacity",cl=1,cm=438,cn=62,co="hint",cp=0xFF999999,cq="44157808f2934100b68f2394a66b2bba",cr=293,cs=204,ct="fontSize",cu="28px",cv="horizontalAlignment",cw="center",cx="HideHintOnFocused",cy="placeholderText",cz="b2a36011ac114266bfba7d2161de9eb2",cA="Log In Button",cB=58,cC="c9f35713a1cf4e91a0f2dbac65e6fb5c",cD=410,cE=432,cF="onClick",cG="description",cH="OnClick",cI="cases",cJ="Click without username/ password<br> (If text on Username Click equals "proxyuser" and text on Password Click equals "********")",cK="isNewIfGroup",cL="condition",cM="exprType",cN="binaryOp",cO="op",cP="&&",cQ="leftExpr",cR="==",cS="fcall",cT="functionName",cU="GetWidgetText",cV="arguments",cW="pathLiteral",cX="isThis",cY="isFocused",cZ="isTarget",da="value",db="e6b81344f3aa48aeb5ca9fc22f910eb1",dc="rightExpr",dd="stringLiteral",de="proxyuser",df="stos",dg="5b036408e20e4d2ebea0c8efef9ab2e4",dh="********",di="actions",dj="action",dk="linkWindow",dl="Open Home in Current Window",dm="target",dn="targetType",dp="home.html",dq="includeVariables",dr="linkType",ds="current",dt="No user no pass<br> (Else If text on Username Click does not equal "proxyuser" or text on Password Click does not equal "********")",du="||",dv="!=",dw="fadeWidget",dx="Show Error Pop Up",dy="objectsToFades",dz="objectPath",dA="50691b50de8e46208a5ee09f887007f2",dB="fadeInfo",dC="fadeType",dD="show",dE="options",dF="showType",dG="none",dH="bringToFront",dI="tabbable",dJ="Username Click",dK=384,dL=46,dM=319,dN=287,dO="setFunction",dP="Set text on Username Click equal to "proxyuser"",dQ="expr",dR="block",dS="subExprs",dT="SetWidgetFormText",dU="Password Click",dV=364,dW="Set text on Password Click equal to "********"",dX="9b87dca23e234639baaec9ff9f11f06b",dY="For more info...",dZ=0xFF336633,ea=300,eb=25,ec=362,ed=500,ee=0xFFFFFF,ef="Case 1",eg="Open https://www.linkedin.com/company/cultureddotbio/ in Current 
Window",eh="webUrl",ei="urlLiteral",ej="https://www.linkedin.com/company/cultureddotbio/",ek="propagate",el="Error Pop Up",em="49ee031ce8584a4884a4b37a62379cd4",en="Error message",eo="'Century Gothic'",ep=273,eq="47641f9a00ac465095d6b672bbdffef6",er=375,es=267,et="20px",eu="c9bf3cb018c04d2aa16055c0b3be4dd3",ev="Error Message Button",ew=131,ex=26,ey=445,ez=403,eA="Close Error Message",eB="Hide Error Pop Up",eC="hide",eD="Set text on Password Click equal to "", and<br> text on Username Click equal to """,eE="masters",eF="065e543ea3f04698b793bc69c6c83e00",eG="Axure:Master",eH="0d21a49b95d14b80b732973b416883ee",eI="Screen Outline",eJ=0xFF339966,eK="objectPaths",eL="f6891af993d74dc4aa9b457f9162aae6",eM="scriptId",eN="u0",eO="0d21a49b95d14b80b732973b416883ee",eP="u1",eQ="81942007ab3044eba77c781d431d5ab3",eR="u2",eS="acdc3a8053f34fa38329ca439f382a5a",eT="u3",eU="11592941b7fb48309e3ee54ff049d893",eV="u4",eW="61de94ae79824a9f9a1c12d67df5517d",eX="u5",eY="6645f1e1252340d096cebceb5700b9ce",eZ="u6",fa="b2a36011ac114266bfba7d2161de9eb2",fb="u7",fc="e6b81344f3aa48aeb5ca9fc22f910eb1",fd="u8",fe="5b036408e20e4d2ebea0c8efef9ab2e4",ff="u9",fg="9b87dca23e234639baaec9ff9f11f06b",fh="u10",fi="50691b50de8e46208a5ee09f887007f2",fj="u11",fk="49ee031ce8584a4884a4b37a62379cd4",fl="u12",fm="c9bf3cb018c04d2aa16055c0b3be4dd3",fn="u13";
return _creator();
})()); | |
choicelists.py | # -*- coding: UTF-8 -*-
# Copyright 2017-2018 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
"""The choicelists for this plugin.
"""
from lino.api import dd, _
class | (dd.ChoiceList):
verbose_name = _("Client tariff")
verbose_name_plural = _("Client tariffs")
add = PartnerTariffs.add_item
add('10', _("Plain"), 'plain')
add('20', _("Reduced"), 'reduced')
| PartnerTariffs |
api.py | import time
import re
import jwt
import requests
import json
from pprint import pprint
from abc import abstractmethod, ABC
import logging
logging.basicConfig(filename='app.log', level=logging.DEBUG,
format='%(asctime)s - %(message)s', datefmt='%d-%b-%y %H:%M:%S')
class Response:
def __init__(self, r):
self.headers = r.headers
self.status_code = r.status_code
self.response = r.json()
def __repr__(self):
return str(self.status_code)
def __str__(self):
return str(self.status_code)
class API(ABC):
def __init__(self, server=None, client_key=None, client_secret=None, params={}):
if not isinstance(server, str) or not isinstance(client_key, str) or not isinstance(client_secret, str):
raise TypeError("Invalid API credentials")
self.__client_key = client_key
self.__client_secret = client_secret
self.__params = params
self.__access_token = None
self.__access_token_expiration = None
self.__start_time = None
self.__session = requests.Session()
self.__session.headers.update({'Content-Type': 'application/json'})
self.__api_calls = 0
self.__last_execution_time = None
self.__rate_limit_retry = params.get('rate_limit_retry', False)
self.requestAccessToken()
def requestAccessToken(self):
"""Create JWT and request iFormBuilder Access Token
If token is successfully returned, stored in session header
Else null token is stored in session header
"""
try: | url = "https://identity.zerionsoftware.com/oauth2/token"
# url = "https://qa-identity.zerionsoftware.com/oauth2/token" if self.__isQA else "https://identity.zerionsoftware.com/oauth2/token"
jwt_payload = {
'iss': self.__client_key,
'aud': url,
'iat': time.time(),
'exp': time.time() + 300
}
encoded_jwt = jwt.encode(
jwt_payload, self.__client_secret, algorithm='HS256')
token_body = {
'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
'assertion': encoded_jwt
}
result = requests.post(url, data=token_body, timeout=5)
result.raise_for_status()
except Exception as e:
print(f'Exception: {e}')
return
else:
self.__start_time = time.time()
self.__access_token = result.json()['access_token']
self.__session.headers.update(
{'Authorization': "Bearer %s" % self.__access_token})
self.__access_token_expiration = time.time() + 3300
def getParams(self):
return self.__params
def getAccessToken(self):
return self.__access_token
def getAccessTokenExpiration(self):
return self.__access_token_expiration
def getApiCount(self):
return self.__api_calls
def getLastExecution(self):
return self.__last_execution_time
def getStartTime(self):
return self.__start_time
def getApiLifetime(self):
return round(time.time() - self.__start_time, 2)
def call(self, method, resource, body=None):
if self.getAccessToken() is not None and time.time() > self.getAccessTokenExpiration():
self.requestAccessToken()
method = method.upper()
if method not in ('GET','POST','PUT','DELETE'):
raise ValueError(f'{method} is not an accepted method')
isRateLimited = False
while not isRateLimited:
if method == 'GET':
result = self.__session.get(resource)
elif method == 'POST':
result = self.__session.post(resource, data=json.dumps(body))
elif method == 'PUT':
result = self.__session.put(resource, data=json.dumps(body))
elif method == 'DELETE':
result = self.__session.delete(resource)
self.__api_calls += 1
self.__last_execution_time = result.elapsed
if result.status_code == 429 and self.__rate_limit_retry == True:
print(f'Rate Limited for {resource}, waiting 60 seconds to retry...')
time.sleep(60)
else:
isRateLimited = True
return Response(result)
@abstractmethod
def describeResources(self):
raise NotImplementedError
@abstractmethod
def describeResource(self, resource):
raise NotImplementedError | |
context_menu_utils.py | # -*- coding: utf-8 -*-
"""
Copyright (C) 2017 Sebastian Golasch (plugin.video.netflix)
Copyright (C) 2020 Stefano Gottardo (original implementation module)
Miscellaneous utility functions for generating context menu items
SPDX-License-Identifier: MIT
See LICENSES/MIT.md for more information.
"""
from __future__ import absolute_import, division, unicode_literals
import resources.lib.common as common
from resources.lib.globals import G
# Normally it wouldn't be necessary to split a module so small into two files,
# unfortunately use 'get_local_string' on a variable in the module header, makes that method (get_local_string)
# run immediately upon loading of the add-on modules, making it impossible to load the service instance.
# Separating the process of the loading of local strings would cause a huge slowdown in the processing of video lists.
def ctx_item_url(paths, mode=G.MODE_ACTION):
"""Return a function that builds an URL from a videoid for the predefined path"""
def | (videoid, params):
"""Build a context menu item URL"""
return common.build_url(paths, videoid, params, mode=mode)
return ctx_url_builder
CONTEXT_MENU_ACTIONS = {
'export': {
'label': common.get_local_string(30018),
'url': ctx_item_url(['export'], G.MODE_LIBRARY)},
'remove': {
'label': common.get_local_string(30030),
'url': ctx_item_url(['remove'], G.MODE_LIBRARY)},
'update': {
'label': common.get_local_string(30061),
'url': ctx_item_url(['update'], G.MODE_LIBRARY)},
'export_new_episodes': {
'label': common.get_local_string(30195),
'url': ctx_item_url(['export_new_episodes'], G.MODE_LIBRARY)},
'exclude_from_auto_update': {
'label': common.get_local_string(30196),
'url': ctx_item_url(['exclude_from_auto_update'], G.MODE_LIBRARY)},
'include_in_auto_update': {
'label': common.get_local_string(30197),
'url': ctx_item_url(['include_in_auto_update'], G.MODE_LIBRARY)},
'rate': {
'label': common.get_local_string(30019),
'url': ctx_item_url(['rate'])},
'rate_thumb': {
'label': common.get_local_string(30019),
'url': ctx_item_url(['rate_thumb'])},
'add_to_list': {
'label': common.get_local_string(30021),
'url': ctx_item_url(['my_list', 'add'])},
'remove_from_list': {
'label': common.get_local_string(30020),
'url': ctx_item_url(['my_list', 'remove'])},
'trailer': {
'label': common.get_local_string(30179),
'url': ctx_item_url(['trailer'])},
'force_update_list': {
'label': common.get_local_string(30214),
'url': ctx_item_url(['force_update_list'])},
'change_watched_status': {
'label': common.get_local_string(30236),
'url': ctx_item_url(['change_watched_status'])},
'search_remove': {
'label': common.get_local_string(15015),
'url': ctx_item_url(['search', 'search', 'remove'], G.MODE_DIRECTORY)},
'search_edit': {
'label': common.get_local_string(21450),
'url': ctx_item_url(['search', 'search', 'edit'], G.MODE_DIRECTORY)},
'remove_watched_status': {
'label': common.get_local_string(15015),
'url': ctx_item_url(['remove_watched_status'])},
}
| ctx_url_builder |
property.rs | use serde::{Deserialize, Serialize};
use crate::symbol::Declaration;
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct | {
#[serde(flatten)]
pub declaration: Declaration,
/// Type of the property
pub r#type: String,
/// Whether getter exists
pub getter: bool,
/// Whether setter exists
pub setter: bool,
}
| Property |
mod.rs | pub mod step;
use std::collections::HashMap;
use std::fmt::Debug;
use std::time::Duration;
use ibc::ics02_client::client_consensus::AnyConsensusState;
use ibc::ics02_client::client_state::AnyClientState;
use ibc::ics02_client::client_type::ClientType;
use ibc::ics02_client::context::ClientReader;
use ibc::ics02_client::error as client_error;
use ibc::ics02_client::header::AnyHeader;
use ibc::ics02_client::msgs::create_client::MsgCreateAnyClient;
use ibc::ics02_client::msgs::update_client::MsgUpdateAnyClient;
use ibc::ics02_client::msgs::ClientMsg;
use ibc::ics03_connection::connection::{Counterparty, State as ConnectionState};
use ibc::ics03_connection::error as connection_error;
use ibc::ics03_connection::msgs::conn_open_ack::MsgConnectionOpenAck;
use ibc::ics03_connection::msgs::conn_open_confirm::MsgConnectionOpenConfirm;
use ibc::ics03_connection::msgs::conn_open_init::MsgConnectionOpenInit;
use ibc::ics03_connection::msgs::conn_open_try::MsgConnectionOpenTry;
use ibc::ics03_connection::msgs::ConnectionMsg;
use ibc::ics03_connection::version::Version;
use ibc::ics04_channel::context::ChannelReader;
use ibc::ics18_relayer::context::Ics18Context;
use ibc::ics18_relayer::error as relayer_error;
use ibc::ics23_commitment::commitment::{CommitmentPrefix, CommitmentProofBytes};
use ibc::ics24_host::identifier::{ChainId, ClientId, ConnectionId};
use ibc::ics26_routing::error as routing_error;
use ibc::ics26_routing::msgs::Ics26Envelope;
use ibc::mock::client_state::{MockClientState, MockConsensusState};
use ibc::mock::context::MockContext;
use ibc::mock::header::MockHeader;
use ibc::mock::host::HostType;
use ibc::proofs::{ConsensusProof, Proofs};
use ibc::signer::Signer;
use ibc::timestamp::ZERO_DURATION;
use ibc::Height;
use step::{Action, ActionOutcome, Chain, Step};
#[derive(Debug, Clone)]
pub struct IbcTestRunner {
// mapping from chain identifier to its context
contexts: HashMap<ChainId, MockContext>,
}
impl IbcTestRunner {
pub fn new() -> Self {
Self {
contexts: Default::default(),
}
}
/// Create a `MockContext` for a given `chain_id`.
pub fn init_chain_context(&mut self, chain_id: String, initial_height: u64) {
let chain_id = Self::chain_id(chain_id);
// never GC blocks
let max_history_size = usize::MAX;
let ctx = MockContext::new(
chain_id.clone(),
HostType::Mock,
max_history_size,
Height::new(Self::revision(), initial_height),
);
self.contexts.insert(chain_id, ctx);
}
/// Returns a reference to the `MockContext` of a given `chain_id`.
/// Panic if the context for `chain_id` is not found.
pub fn chain_context(&self, chain_id: String) -> &MockContext {
self.contexts
.get(&Self::chain_id(chain_id))
.expect("chain context should have been initialized")
}
/// Returns a mutable reference to the `MockContext` of a given `chain_id`.
/// Panic if the context for `chain_id` is not found.
pub fn chain_context_mut(&mut self, chain_id: String) -> &mut MockContext {
self.contexts
.get_mut(&Self::chain_id(chain_id))
.expect("chain context should have been initialized")
}
pub fn extract_ics02_error_kind(
ics18_result: Result<(), relayer_error::Error>,
) -> client_error::ErrorDetail {
let ics18_error = ics18_result.expect_err("ICS18 error expected");
match ics18_error.0 {
relayer_error::ErrorDetail::TransactionFailed(e) => match e.source {
routing_error::ErrorDetail::Ics02Client(e) => e.source,
e => {
panic!("Expected Ics02Client error, instead got {:?}", e);
}
},
e => {
panic!("Expected TransactionFailed error, instead got {:?}", e);
}
}
}
pub fn extract_ics03_error_kind(
ics18_result: Result<(), relayer_error::Error>,
) -> connection_error::ErrorDetail {
let ics18_error = ics18_result.expect_err("ICS18 error expected");
match ics18_error.0 {
relayer_error::ErrorDetail::TransactionFailed(e) => match e.source {
routing_error::ErrorDetail::Ics03Connection(e) => e.source,
e => {
panic!("Expected Ics02Client error, instead got {:?}", e);
}
},
e => {
panic!("Expected TransactionFailed error, instead got {:?}", e);
}
}
}
pub fn chain_id(chain_id: String) -> ChainId {
ChainId::new(chain_id, Self::revision())
}
pub fn revision() -> u64 {
0
}
pub fn version() -> Version {
Version::default()
}
pub fn versions() -> Vec<Version> {
vec![Self::version()]
}
pub fn client_id(client_id: u64) -> ClientId {
ClientId::new(ClientType::Mock, client_id)
.expect("it should be possible to create the client identifier")
}
pub fn connection_id(connection_id: u64) -> ConnectionId {
ConnectionId::new(connection_id)
}
pub fn height(height: u64) -> Height {
Height::new(Self::revision(), height)
}
fn mock_header(height: u64) -> MockHeader {
MockHeader::new(Self::height(height))
}
pub fn header(height: u64) -> AnyHeader {
AnyHeader::Mock(Self::mock_header(height))
}
pub fn client_state(height: u64) -> AnyClientState {
AnyClientState::Mock(MockClientState(Self::mock_header(height)))
}
pub fn consensus_state(height: u64) -> AnyConsensusState {
AnyConsensusState::Mock(MockConsensusState::new(Self::mock_header(height)))
}
fn signer() -> Signer {
Signer::new("")
}
pub fn counterparty(client_id: u64, connection_id: Option<u64>) -> Counterparty {
let client_id = Self::client_id(client_id);
let connection_id = connection_id.map(Self::connection_id);
let prefix = Self::commitment_prefix();
Counterparty::new(client_id, connection_id, prefix)
}
pub fn delay_period() -> Duration {
ZERO_DURATION
}
pub fn commitment_prefix() -> CommitmentPrefix {
vec![0].into()
}
pub fn commitment_proof_bytes() -> CommitmentProofBytes |
pub fn consensus_proof(height: u64) -> ConsensusProof {
let consensus_proof = Self::commitment_proof_bytes();
let consensus_height = Self::height(height);
ConsensusProof::new(consensus_proof, consensus_height)
.expect("it should be possible to create the consensus proof")
}
pub fn proofs(height: u64) -> Proofs {
let object_proof = Self::commitment_proof_bytes();
let client_proof = None;
let consensus_proof = Some(Self::consensus_proof(height));
let other_proof = None;
let height = Self::height(height);
Proofs::new(
object_proof,
client_proof,
consensus_proof,
other_proof,
height,
)
.expect("it should be possible to create the proofs")
}
/// Check that chain heights match the ones in the model.
pub fn validate_chains(&self) -> bool {
self.contexts.values().all(|ctx| ctx.validate().is_ok())
}
/// Check that chain states match the ones in the model.
pub fn check_chain_states(&self, chains: HashMap<String, Chain>) -> bool {
chains.into_iter().all(|(chain_id, chain)| {
let ctx = self.chain_context(chain_id);
// check that heights match
let heights_match = ctx.query_latest_height() == Self::height(chain.height);
// check that clients match
let clients_match = chain.clients.into_iter().all(|(client_id, client)| {
// compute the highest consensus state in the model and check
// that it matches the client state
let client_state = ClientReader::client_state(ctx, &Self::client_id(client_id));
let client_state_matches = match client.heights.iter().max() {
Some(max_height) => {
// if the model has consensus states (encoded simply as
// heights in the model), then the highest one should
// match the height in the client state
client_state.is_some()
&& client_state.unwrap().latest_height() == Self::height(*max_height)
}
None => {
// if the model doesn't have any consensus states
// (heights), then the client state should not exist
client_state.is_none()
}
};
// check that each consensus state from the model exists
// TODO: check that no other consensus state exists (i.e. the
// only existing consensus states are those in that also
// exist in the model)
let consensus_states_match = client.heights.into_iter().all(|height| {
ctx.consensus_state(&Self::client_id(client_id), Self::height(height))
.is_some()
});
client_state_matches && consensus_states_match
});
// check that connections match
let connections_match =
chain
.connections
.into_iter()
.all(|(connection_id, connection)| {
if connection.state == ConnectionState::Uninitialized {
// if the connection has not yet been initialized, then
// there's nothing to check
true
} else if let Some(connection_end) =
ctx.connection_end(&Self::connection_id(connection_id))
{
// states must match
let states_match = *connection_end.state() == connection.state;
// client ids must match
let client_ids = *connection_end.client_id()
== Self::client_id(connection.client_id.unwrap());
// counterparty client ids must match
let counterparty_client_ids =
*connection_end.counterparty().client_id()
== Self::client_id(connection.counterparty_client_id.unwrap());
// counterparty connection ids must match
let counterparty_connection_ids =
connection_end.counterparty().connection_id()
== connection
.counterparty_connection_id
.map(Self::connection_id)
.as_ref();
states_match
&& client_ids
&& counterparty_client_ids
&& counterparty_connection_ids
} else {
// if the connection exists in the model, then it must
// also exist in the implementation; in this case it
// doesn't, so we fail the verification
false
}
});
heights_match && clients_match && connections_match
})
}
pub fn apply(&mut self, action: Action) -> Result<(), relayer_error::Error> {
match action {
Action::None => panic!("unexpected action type"),
Action::Ics02CreateClient {
chain_id,
client_state,
consensus_state,
} => {
// get chain's context
let ctx = self.chain_context_mut(chain_id);
// create ICS26 message and deliver it
let msg = Ics26Envelope::Ics2Msg(ClientMsg::CreateClient(MsgCreateAnyClient {
client_state: Self::client_state(client_state),
consensus_state: Self::consensus_state(consensus_state),
signer: Self::signer(),
}));
ctx.deliver(msg)
}
Action::Ics02UpdateClient {
chain_id,
client_id,
header,
} => {
// get chain's context
let ctx = self.chain_context_mut(chain_id);
// create ICS26 message and deliver it
let msg = Ics26Envelope::Ics2Msg(ClientMsg::UpdateClient(MsgUpdateAnyClient {
client_id: Self::client_id(client_id),
header: Self::header(header),
signer: Self::signer(),
}));
ctx.deliver(msg)
}
Action::Ics03ConnectionOpenInit {
chain_id,
client_id,
counterparty_chain_id: _,
counterparty_client_id,
} => {
// get chain's context
let ctx = self.chain_context_mut(chain_id);
// create ICS26 message and deliver it
let msg = Ics26Envelope::Ics3Msg(ConnectionMsg::ConnectionOpenInit(
MsgConnectionOpenInit {
client_id: Self::client_id(client_id),
counterparty: Self::counterparty(counterparty_client_id, None),
version: Self::version(),
delay_period: Self::delay_period(),
signer: Self::signer(),
},
));
ctx.deliver(msg)
}
Action::Ics03ConnectionOpenTry {
chain_id,
previous_connection_id,
client_id,
client_state,
counterparty_chain_id: _,
counterparty_client_id,
counterparty_connection_id,
} => {
// get chain's context
let ctx = self.chain_context_mut(chain_id);
// create ICS26 message and deliver it
let msg = Ics26Envelope::Ics3Msg(ConnectionMsg::ConnectionOpenTry(Box::new(
MsgConnectionOpenTry {
previous_connection_id: previous_connection_id.map(Self::connection_id),
client_id: Self::client_id(client_id),
// TODO: is this ever needed?
client_state: None,
counterparty: Self::counterparty(
counterparty_client_id,
Some(counterparty_connection_id),
),
counterparty_versions: Self::versions(),
proofs: Self::proofs(client_state),
delay_period: Self::delay_period(),
signer: Self::signer(),
},
)));
ctx.deliver(msg)
}
Action::Ics03ConnectionOpenAck {
chain_id,
connection_id,
client_state,
counterparty_chain_id: _,
counterparty_connection_id,
} => {
// get chain's context
let ctx = self.chain_context_mut(chain_id);
// create ICS26 message and deliver it
let msg = Ics26Envelope::Ics3Msg(ConnectionMsg::ConnectionOpenAck(Box::new(
MsgConnectionOpenAck {
connection_id: Self::connection_id(connection_id),
counterparty_connection_id: Self::connection_id(counterparty_connection_id),
// TODO: is this ever needed?
client_state: None,
proofs: Self::proofs(client_state),
version: Self::version(),
signer: Self::signer(),
},
)));
ctx.deliver(msg)
}
Action::Ics03ConnectionOpenConfirm {
chain_id,
connection_id,
client_state,
counterparty_chain_id: _,
counterparty_connection_id: _,
} => {
// get chain's context
let ctx = self.chain_context_mut(chain_id);
// create ICS26 message and deliver it
let msg = Ics26Envelope::Ics3Msg(ConnectionMsg::ConnectionOpenConfirm(
MsgConnectionOpenConfirm {
connection_id: Self::connection_id(connection_id),
proofs: Self::proofs(client_state),
signer: Self::signer(),
},
));
ctx.deliver(msg)
}
}
}
}
impl modelator::step_runner::StepRunner<Step> for IbcTestRunner {
fn initial_step(&mut self, step: Step) -> Result<(), String> {
assert_eq!(step.action, Action::None, "unexpected action type");
assert_eq!(
step.action_outcome,
ActionOutcome::None,
"unexpected action outcome"
);
// initiliaze all chains
for (chain_id, chain) in step.chains {
self.init_chain_context(chain_id, chain.height);
}
Ok(())
}
fn next_step(&mut self, step: Step) -> Result<(), String> {
let result = self.apply(step.action);
let outcome_matches = match step.action_outcome {
ActionOutcome::None => panic!("unexpected action outcome"),
ActionOutcome::Ics02CreateOk => result.is_ok(),
ActionOutcome::Ics02UpdateOk => result.is_ok(),
ActionOutcome::Ics02ClientNotFound => matches!(
Self::extract_ics02_error_kind(result),
client_error::ErrorDetail::ClientNotFound(_)
),
ActionOutcome::Ics02HeaderVerificationFailure => matches!(
Self::extract_ics02_error_kind(result),
client_error::ErrorDetail::HeaderVerificationFailure(_)
),
ActionOutcome::Ics03ConnectionOpenInitOk => result.is_ok(),
ActionOutcome::Ics03MissingClient => matches!(
Self::extract_ics03_error_kind(result),
connection_error::ErrorDetail::MissingClient(_)
),
ActionOutcome::Ics03ConnectionOpenTryOk => result.is_ok(),
ActionOutcome::Ics03InvalidConsensusHeight => matches!(
Self::extract_ics03_error_kind(result),
connection_error::ErrorDetail::InvalidConsensusHeight(_)
),
ActionOutcome::Ics03ConnectionNotFound => matches!(
Self::extract_ics03_error_kind(result),
connection_error::ErrorDetail::ConnectionNotFound(_)
),
ActionOutcome::Ics03ConnectionMismatch => matches!(
Self::extract_ics03_error_kind(result),
connection_error::ErrorDetail::ConnectionMismatch(_)
),
ActionOutcome::Ics03MissingClientConsensusState => matches!(
Self::extract_ics03_error_kind(result),
connection_error::ErrorDetail::MissingClientConsensusState(_)
),
ActionOutcome::Ics03InvalidProof => matches!(
Self::extract_ics03_error_kind(result),
connection_error::ErrorDetail::InvalidProof(_)
),
ActionOutcome::Ics03ConnectionOpenAckOk => result.is_ok(),
ActionOutcome::Ics03UninitializedConnection => matches!(
Self::extract_ics03_error_kind(result),
connection_error::ErrorDetail::UninitializedConnection(_)
),
ActionOutcome::Ics03ConnectionOpenConfirmOk => result.is_ok(),
};
// also check the state of chains
if outcome_matches && self.validate_chains() && self.check_chain_states(step.chains) {
Ok(())
} else {
Err("next_step did not conclude successfully".into())
}
}
}
| {
vec![0].into()
} |
diff.py | import datetime
import decimal
from platform import python_version
import re
import uuid
try:
from bson import decimal128, Regex
_HAVE_PYMONGO = True
except ImportError:
_HAVE_PYMONGO = False
class _NO_VALUE(object):
pass
# we don't use NOTHING because it might be returned from various APIs
NO_VALUE = _NO_VALUE()
_SUPPORTED_BASE_TYPES = (
float, bool, str, datetime.datetime, type(None), uuid.UUID, int, bytes, type,
type(re.compile('')),)
if _HAVE_PYMONGO:
_SUPPORTED_TYPES = _SUPPORTED_BASE_TYPES + (decimal.Decimal, decimal128.Decimal128)
else:
_SUPPORTED_TYPES = _SUPPORTED_BASE_TYPES
if python_version() < '3.0':
dict_type = dict
else:
from collections import abc
dict_type = abc.Mapping
def diff(a, b, path=None):
path = _make_path(path)
if isinstance(a, (list, tuple)) and isinstance(b, (list, tuple)):
return _diff_sequences(a, b, path)
if type(a).__name__ == 'SON':
a = dict(a)
if type(b).__name__ == 'SON':
b = dict(b)
if type(a).__name__ == 'DBRef':
a = a.as_doc()
if type(b).__name__ == 'DBRef':
b = b.as_doc()
if isinstance(a, dict_type) and isinstance(b, dict_type):
return _diff_dicts(a, b, path)
if type(a).__name__ == 'ObjectId':
a = str(a)
if type(b).__name__ == 'ObjectId':
b = str(b)
if type(a).__name__ == 'Int64':
a = int(a)
if type(b).__name__ == 'Int64':
b = int(b)
if _HAVE_PYMONGO and isinstance(a, Regex):
a = a.try_compile()
if _HAVE_PYMONGO and isinstance(b, Regex):
b = b.try_compile()
if isinstance(a, (list, tuple)) or isinstance(b, (list, tuple)) or \
isinstance(a, dict_type) or isinstance(b, dict_type):
return [(path[:], a, b)] | if not isinstance(b, _SUPPORTED_TYPES):
raise NotImplementedError(
'Unsupported diff type: {0}'.format(type(b))) # pragma: no cover
if a != b:
return [(path[:], a, b)]
return []
def _diff_dicts(a, b, path):
if not isinstance(a, type(b)):
return [(path[:], type(a), type(b))]
returned = []
for key in set(a) | set(b):
a_value = a.get(key, NO_VALUE)
b_value = b.get(key, NO_VALUE)
path.append(key)
if a_value is NO_VALUE or b_value is NO_VALUE:
returned.append((path[:], a_value, b_value))
else:
returned.extend(diff(a_value, b_value, path))
path.pop()
return returned
def _diff_sequences(a, b, path):
if len(a) != len(b):
return [(path[:], a, b)]
returned = []
for i, a_i in enumerate(a):
path.append(i)
returned.extend(diff(a_i, b[i], path))
path.pop()
return returned
def _make_path(path):
if path is None:
return []
return path | if not isinstance(a, _SUPPORTED_TYPES):
raise NotImplementedError(
'Unsupported diff type: {0}'.format(type(a))) # pragma: no cover |
mod.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty;
use middle::def;
use std::env;
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use rustc::ast_map::NodeItem;
use syntax::{attr};
use syntax::ast::{self, NodeId, DefId};
use syntax::ast_util;
use syntax::codemap::*;
use syntax::parse::token::{self, keywords};
use syntax::visit::{self, Visitor};
use syntax::print::pprust::ty_to_string;
use self::span_utils::SpanUtils;
mod span_utils;
mod recorder;
mod dump_csv;
pub struct SaveContext<'l, 'tcx: 'l> {
tcx: &'l ty::ctxt<'tcx>,
span_utils: SpanUtils<'l>,
}
pub struct CrateData {
pub name: String,
pub number: u32,
}
/// Data for any entity in the Rust language. The actual data contained varied
/// with the kind of entity being queried. See the nested structs for details.
#[derive(Debug)]
pub enum Data {
/// Data for all kinds of functions and methods.
FunctionData(FunctionData),
/// Data for local and global variables (consts and statics), and fields.
VariableData(VariableData),
/// Data for modules.
ModData(ModData),
/// Data for Enums.
EnumData(EnumData),
/// Data for impls.
ImplData(ImplData),
/// Data for the use of some variable (e.g., the use of a local variable, which
/// will refere to that variables declaration).
VariableRefData(VariableRefData),
/// Data for a reference to a type or trait.
TypeRefData(TypeRefData),
/// Data for a reference to a module.
ModRefData(ModRefData),
/// Data about a function call.
FunctionCallData(FunctionCallData),
/// Data about a method call.
MethodCallData(MethodCallData),
}
/// Data for all kinds of functions and methods.
#[derive(Debug)]
pub struct FunctionData {
    pub id: NodeId,
    pub name: String,
    /// Fully qualified, `::`-prefixed name.
    pub qualname: String,
    /// For methods, the trait item this function implements, if any.
    pub declaration: Option<DefId>,
    /// Span of the function's name (not the whole item).
    pub span: Span,
    /// Id of the enclosing scope (0 if there is none).
    pub scope: NodeId,
}
/// Data for local and global variables (consts and statics).
#[derive(Debug)]
pub struct VariableData {
    pub id: NodeId,
    pub name: String,
    pub qualname: String,
    /// Span of the variable's name.
    pub span: Span,
    pub scope: NodeId,
    /// Text of the initialising expression, or `"<mutable>"` for mutable
    /// statics (see `get_item_data`).
    pub value: String,
    /// Pretty-printed type of the variable.
    pub type_value: String,
}
/// Data for modules.
#[derive(Debug)]
pub struct ModData {
    pub id: NodeId,
    pub name: String,
    pub qualname: String,
    pub span: Span,
    pub scope: NodeId,
    /// Name of the file holding the module's contents.
    pub filename: String,
}
/// Data for enum declarations.
#[derive(Debug)]
pub struct EnumData {
    pub id: NodeId,
    /// Source snippet of the whole enum declaration.
    pub value: String,
    pub qualname: String,
    pub span: Span,
    pub scope: NodeId,
}
/// Data for an impl block, with references to the implemented trait and
/// the self type when they can be resolved.
#[derive(Debug)]
pub struct ImplData {
    pub id: NodeId,
    pub span: Span,
    pub scope: NodeId,
    // FIXME: I'm not really sure inline data is the best way to do this. Seems
    // OK in this case, but generalising leads to returning chunks of AST, which
    // feels wrong.
    pub trait_ref: Option<TypeRefData>,
    pub self_ref: Option<TypeRefData>,
}
/// Data for the use of some item (e.g., the use of a local variable, which
/// will refer to that variable's declaration (by ref_id)).
#[derive(Debug)]
pub struct VariableRefData {
    pub name: String,
    pub span: Span,
    pub scope: NodeId,
    /// Id of the definition being referenced.
    pub ref_id: DefId,
}
/// Data for a reference to a type or trait.
#[derive(Debug)]
pub struct TypeRefData {
    pub span: Span,
    pub scope: NodeId,
    /// Id of the referenced type or trait definition.
    pub ref_id: DefId,
}
/// Data for a reference to a module.
#[derive(Debug)]
pub struct ModRefData {
    pub span: Span,
    pub scope: NodeId,
    /// Id of the referenced module definition.
    pub ref_id: DefId,
}
/// Data about a function call.
#[derive(Debug)]
pub struct FunctionCallData {
    pub span: Span,
    pub scope: NodeId,
    /// Id of the called function's definition.
    pub ref_id: DefId,
}
/// Data about a method call.
#[derive(Debug)]
pub struct MethodCallData {
    pub span: Span,
    pub scope: NodeId,
    /// The method definition actually called, when statically known.
    pub ref_id: Option<DefId>,
    /// The trait declaration of the method, for trait-dispatched calls.
    pub decl_id: Option<DefId>,
}
impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
/// Construct a `SaveContext` for `tcx`, building its span utilities
/// from the session's codemap.
pub fn new(tcx: &'l ty::ctxt<'tcx>) -> SaveContext<'l, 'tcx> {
    SaveContext::from_span_utils(tcx, SpanUtils::new(&tcx.sess))
}
/// Construct a `SaveContext` from an existing `SpanUtils`.
pub fn from_span_utils(tcx: &'l ty::ctxt<'tcx>,
                       span_utils: SpanUtils<'l>)
                       -> SaveContext<'l, 'tcx> {
    SaveContext {
        tcx: tcx,
        span_utils: span_utils,
    }
}
/// List external crates used by the current crate.
pub fn get_external_crates(&self) -> Vec<CrateData> {
    let mut result = Vec::new();
    // The crate store drives iteration; collect a name/number pair per crate.
    self.tcx.sess.cstore.iter_crate_data(|n, cmd| {
        result.push(CrateData { name: cmd.name.clone(), number: n });
    });
    result
}
/// Build a `Data` record for a top-level item (function, static, const,
/// module, enum, or impl).
///
/// Panics (via `unwrap`) if the expected sub-span for the item's name
/// cannot be found, and calls `unimplemented!` for item kinds that are
/// not handled yet.
pub fn get_item_data(&self, item: &ast::Item) -> Data {
    match item.node {
        ast::ItemFn(..) => {
            let name = self.tcx.map.path_to_string(item.id);
            let qualname = format!("::{}", name);
            // Span of the name itself: just after the `fn` keyword.
            let sub_span = self.span_utils.sub_span_after_keyword(item.span, keywords::Fn);
            Data::FunctionData(FunctionData {
                id: item.id,
                name: name,
                qualname: qualname,
                declaration: None,
                span: sub_span.unwrap(),
                scope: self.enclosing_scope(item.id),
            })
        }
        ast::ItemStatic(ref typ, mt, ref expr) => {
            let qualname = format!("::{}", self.tcx.map.path_to_string(item.id));
            // If the variable is immutable, save the initialising expression.
            let (value, keyword) = match mt {
                ast::MutMutable => (String::from("<mutable>"), keywords::Mut),
                ast::MutImmutable => (self.span_utils.snippet(expr.span), keywords::Static),
            };
            let sub_span = self.span_utils.sub_span_after_keyword(item.span, keyword);
            Data::VariableData(VariableData {
                id: item.id,
                name: item.ident.to_string(),
                qualname: qualname,
                span: sub_span.unwrap(),
                scope: self.enclosing_scope(item.id),
                value: value,
                type_value: ty_to_string(&typ),
            })
        }
        ast::ItemConst(ref typ, ref expr) => {
            let qualname = format!("::{}", self.tcx.map.path_to_string(item.id));
            let sub_span = self.span_utils.sub_span_after_keyword(item.span, keywords::Const);
            Data::VariableData(VariableData {
                id: item.id,
                name: item.ident.to_string(),
                qualname: qualname,
                span: sub_span.unwrap(),
                scope: self.enclosing_scope(item.id),
                value: self.span_utils.snippet(expr.span),
                type_value: ty_to_string(&typ),
            })
        }
        ast::ItemMod(ref m) => {
            let qualname = format!("::{}", self.tcx.map.path_to_string(item.id));
            // The file containing the module body (may differ from the
            // file containing the `mod` item).
            let cm = self.tcx.sess.codemap();
            let filename = cm.span_to_filename(m.inner);
            let sub_span = self.span_utils.sub_span_after_keyword(item.span, keywords::Mod);
            Data::ModData(ModData {
                id: item.id,
                name: item.ident.to_string(),
                qualname: qualname,
                span: sub_span.unwrap(),
                scope: self.enclosing_scope(item.id),
                filename: filename,
            })
        },
        ast::ItemEnum(..) => {
            let enum_name = format!("::{}", self.tcx.map.path_to_string(item.id));
            let val = self.span_utils.snippet(item.span);
            let sub_span = self.span_utils.sub_span_after_keyword(item.span, keywords::Enum);
            Data::EnumData(EnumData {
                id: item.id,
                value: val,
                span: sub_span.unwrap(),
                qualname: enum_name,
                scope: self.enclosing_scope(item.id),
            })
        },
        ast::ItemImpl(_, _, _, ref trait_ref, ref typ, _) => {
            let mut type_data = None;
            let sub_span;
            let parent = self.enclosing_scope(item.id);
            match typ.node {
                // Common case impl for a struct or something basic.
                ast::TyPath(None, ref path) => {
                    sub_span = self.span_utils.sub_span_for_type_name(path.span).unwrap();
                    type_data = self.lookup_ref_id(typ.id).map(|id| TypeRefData {
                        span: sub_span,
                        scope: parent,
                        ref_id: id,
                    });
                },
                _ => {
                    // Less useful case, impl for a compound type.
                    let span = typ.span;
                    sub_span = self.span_utils.sub_span_for_type_name(span).unwrap_or(span);
                }
            }
            let trait_data =
                trait_ref.as_ref().and_then(|tr| self.get_trait_ref_data(tr, parent));
            Data::ImplData(ImplData {
                id: item.id,
                span: sub_span,
                scope: parent,
                trait_ref: trait_data,
                self_ref: type_data,
            })
        }
        _ => {
            // FIXME
            unimplemented!();
        }
    }
}
/// Build `VariableData` for a named struct field; returns `None` for
/// unnamed (tuple-struct) fields.
pub fn get_field_data(&self, field: &ast::StructField, scope: NodeId) -> Option<VariableData> {
    match field.node.kind {
        ast::NamedField(ident, _) => {
            let qualname = format!("::{}::{}",
                                   self.tcx.map.path_to_string(scope),
                                   ident);
            let typ = self.tcx.node_types().get(&field.node.id).unwrap()
                          .to_string();
            // The field's name is everything before the ':' in its decl.
            let sub_span = self.span_utils.sub_span_before_token(field.span, token::Colon);
            Some(VariableData {
                id: field.node.id,
                name: ident.to_string(),
                qualname: qualname,
                span: sub_span.unwrap(),
                scope: scope,
                value: "".to_owned(),
                type_value: typ,
            })
        },
        _ => None,
    }
}
// FIXME would be nice to take a MethodItem here, but the ast provides both
// trait and impl flavours, so the caller must do the disassembly.
/// Build `FunctionData` for a method, declared either in an impl or a trait.
///
/// The qualname for a method is the trait name or name of the struct in an impl in
/// which the method is declared in, followed by the method's name.
pub fn get_method_data(&self,
                       id: ast::NodeId,
                       name: ast::Name,
                       span: Span) -> FunctionData {
    let qualname = match self.tcx.impl_of_method(ast_util::local_def(id)) {
        Some(impl_id) => match self.tcx.map.get(impl_id.node) {
            NodeItem(item) => {
                match item.node {
                    ast::ItemImpl(_, _, _, _, ref ty, _) => {
                        // Qualify as `<SelfTy as Trait>` (or just `<SelfTy>`
                        // for inherent impls).
                        let mut result = String::from("<");
                        result.push_str(&ty_to_string(&**ty));
                        match self.tcx.trait_of_item(ast_util::local_def(id)) {
                            Some(def_id) => {
                                result.push_str(" as ");
                                result.push_str(
                                    &self.tcx.item_path_str(def_id));
                            },
                            None => {}
                        }
                        result.push_str(">");
                        result
                    }
                    _ => {
                        self.tcx.sess.span_bug(span,
                            &format!("Container {} for method {} not an impl?",
                                     impl_id.node, id));
                    },
                }
            },
            _ => {
                self.tcx.sess.span_bug(span,
                    &format!("Container {} for method {} is not a node item {:?}",
                             impl_id.node, id, self.tcx.map.get(impl_id.node)));
            },
        },
        None => match self.tcx.trait_of_item(ast_util::local_def(id)) {
            // Method declared directly in a trait: qualify by the trait path.
            Some(def_id) => {
                match self.tcx.map.get(def_id.node) {
                    NodeItem(_) => {
                        format!("::{}", self.tcx.item_path_str(def_id))
                    }
                    _ => {
                        self.tcx.sess.span_bug(span,
                            &format!("Could not find container {} for method {}",
                                     def_id.node, id));
                    }
                }
            },
            None => {
                self.tcx.sess.span_bug(span,
                    &format!("Could not find container for method {}", id));
            },
        },
    };
    let qualname = format!("{}::{}", qualname, name);
    // Record the trait declaration this method implements, skipping
    // self-references and unresolved (node 0) ids.
    let decl_id = self.tcx.trait_item_of_item(ast_util::local_def(id))
                          .and_then(|new_id| {
        let def_id = new_id.def_id();
        if def_id.node != 0 && def_id != ast_util::local_def(id) {
            Some(def_id)
        } else {
            None
        }
    });
    let sub_span = self.span_utils.sub_span_after_keyword(span, keywords::Fn);
    FunctionData {
        id: id,
        name: name.to_string(),
        qualname: qualname,
        declaration: decl_id,
        span: sub_span.unwrap(),
        scope: self.enclosing_scope(id),
    }
}
pub fn | (&self,
trait_ref: &ast::TraitRef,
parent: NodeId)
-> Option<TypeRefData> {
self.lookup_ref_id(trait_ref.ref_id).map(|def_id| {
let span = trait_ref.path.span;
let sub_span = self.span_utils.sub_span_for_type_name(span).unwrap_or(span);
TypeRefData {
span: sub_span,
scope: parent,
ref_id: def_id,
}
})
}
/// Build `Data` for an expression: field accesses, struct literals,
/// method calls, and paths. Returns `None` when the receiver's type is
/// not a struct (for field/struct exprs); `unimplemented!` for other
/// expression kinds.
pub fn get_expr_data(&self, expr: &ast::Expr) -> Option<Data> {
    match expr.node {
        ast::ExprField(ref sub_ex, ident) => {
            // Use the *adjusted* type so autoderef'd field accesses resolve.
            let ty = &self.tcx.expr_ty_adjusted(&sub_ex).sty;
            match *ty {
                ty::TyStruct(def_id, _) => {
                    let fields = self.tcx.lookup_struct_fields(def_id);
                    for f in &fields {
                        if f.name == ident.node.name {
                            let sub_span = self.span_utils.span_for_last_ident(expr.span);
                            return Some(Data::VariableRefData(VariableRefData {
                                name: ident.node.to_string(),
                                span: sub_span.unwrap(),
                                scope: self.enclosing_scope(expr.id),
                                ref_id: f.id,
                            }));
                        }
                    }
                    self.tcx.sess.span_bug(expr.span,
                                           &format!("Couldn't find field {} on {:?}",
                                                    ident.node, ty))
                }
                _ => {
                    debug!("Expected struct type, found {:?}", ty);
                    None
                }
            }
        }
        ast::ExprStruct(ref path, _, _) => {
            let ty = &self.tcx.expr_ty_adjusted(expr).sty;
            match *ty {
                ty::TyStruct(def_id, _) => {
                    let sub_span = self.span_utils.span_for_last_ident(path.span);
                    Some(Data::TypeRefData(TypeRefData {
                        span: sub_span.unwrap(),
                        scope: self.enclosing_scope(expr.id),
                        ref_id: def_id,
                    }))
                }
                _ => {
                    // FIXME ty could legitimately be a TyEnum, but then we will fail
                    // later if we try to look up the fields.
                    debug!("expected TyStruct, found {:?}", ty);
                    None
                }
            }
        }
        ast::ExprMethodCall(..) => {
            let method_call = ty::MethodCall::expr(expr.id);
            let method_id = self.tcx.tables.borrow().method_map[&method_call].def_id;
            // Impl-contained methods are statically known (ref_id); trait
            // methods only give us the declaration (decl_id).
            let (def_id, decl_id) = match self.tcx.impl_or_trait_item(method_id).container() {
                ty::ImplContainer(_) => (Some(method_id), None),
                ty::TraitContainer(_) => (None, Some(method_id))
            };
            let sub_span = self.span_utils.sub_span_for_meth_name(expr.span);
            let parent = self.enclosing_scope(expr.id);
            Some(Data::MethodCallData(MethodCallData {
                span: sub_span.unwrap(),
                scope: parent,
                ref_id: def_id,
                decl_id: decl_id,
            }))
        }
        ast::ExprPath(_, ref path) => {
            self.get_path_data(expr.id, path)
        }
        _ => {
            // FIXME
            unimplemented!();
        }
    }
}
/// Build `Data` for a path expression, dispatching on what the path
/// resolves to. Returns `None` for unhandled def kinds.
pub fn get_path_data(&self,
                     id: NodeId,
                     path: &ast::Path)
                     -> Option<Data> {
    let def_map = self.tcx.def_map.borrow();
    if !def_map.contains_key(&id) {
        self.tcx.sess.span_bug(path.span,
                               &format!("def_map has no key for {} in visit_expr", id));
    }
    let def = def_map.get(&id).unwrap().full_def();
    let sub_span = self.span_utils.span_for_last_ident(path.span);
    match def {
        // Anything variable-like becomes a variable reference.
        def::DefUpvar(..) |
        def::DefLocal(..) |
        def::DefStatic(..) |
        def::DefConst(..) |
        def::DefAssociatedConst(..) |
        def::DefVariant(..) => {
            Some(Data::VariableRefData(VariableRefData {
                name: self.span_utils.snippet(sub_span.unwrap()),
                span: sub_span.unwrap(),
                scope: self.enclosing_scope(id),
                ref_id: def.def_id(),
            }))
        }
        def::DefStruct(def_id) |
        def::DefTy(def_id, _) |
        def::DefTrait(def_id) |
        def::DefTyParam(_, _, def_id, _) => {
            Some(Data::TypeRefData(TypeRefData {
                span: sub_span.unwrap(),
                ref_id: def_id,
                scope: self.enclosing_scope(id),
            }))
        }
        def::DefMethod(decl_id) => {
            let sub_span = self.span_utils.sub_span_for_meth_name(path.span);
            // For local methods, try to resolve the declaration to the
            // concrete (provided or implementing) method definition.
            let def_id = if decl_id.krate == ast::LOCAL_CRATE {
                let ti = self.tcx.impl_or_trait_item(decl_id);
                match ti.container() {
                    ty::TraitContainer(def_id) => {
                        // Look for a provided method of the same name.
                        self.tcx.trait_items(def_id)
                                .iter()
                                .find(|mr| {
                                    mr.name() == ti.name() && self.trait_method_has_body(mr)
                                })
                                .map(|mr| mr.def_id())
                    }
                    ty::ImplContainer(def_id) => {
                        let impl_items = self.tcx.impl_items.borrow();
                        Some(impl_items.get(&def_id)
                                       .unwrap()
                                       .iter()
                                       .find(|mr| {
                                           self.tcx.impl_or_trait_item(mr.def_id()).name()
                                               == ti.name()
                                       })
                                       .unwrap()
                                       .def_id())
                    }
                }
            } else {
                None
            };
            Some(Data::MethodCallData(MethodCallData {
                span: sub_span.unwrap(),
                scope: self.enclosing_scope(id),
                ref_id: def_id,
                decl_id: Some(decl_id),
            }))
        },
        def::DefFn(def_id, _) => {
            Some(Data::FunctionCallData(FunctionCallData {
                ref_id: def_id,
                span: sub_span.unwrap(),
                scope: self.enclosing_scope(id),
            }))
        }
        def::DefMod(def_id) => {
            Some(Data::ModRefData(ModRefData {
                ref_id: def_id,
                span: sub_span.unwrap(),
                scope: self.enclosing_scope(id),
            }))
        }
        _ => None,
    }
}
/// Returns true if `mr` is a trait method in the local crate that
/// provides a default body.
fn trait_method_has_body(&self, mr: &ty::ImplOrTraitItem) -> bool {
    let def_id = mr.def_id();
    // Only items in the local crate have an AST node we can inspect.
    if def_id.krate != ast::LOCAL_CRATE {
        return false;
    }
    match self.tcx.map.expect_trait_item(def_id.node).node {
        // A provided method carries `Some(block)` for its body.
        ast::TraitItem_::MethodTraitItem(_, Some(_)) => true,
        _ => false,
    }
}
/// Build a `VariableRefData` for a field mentioned in a struct literal.
///
/// Bugs out if `struct_id` has no field with the given name.
pub fn get_field_ref_data(&self,
                          field_ref: &ast::Field,
                          struct_id: DefId,
                          parent: NodeId)
                          -> VariableRefData {
    let fields = self.tcx.lookup_struct_fields(struct_id);
    let field_name = field_ref.ident.node.to_string();
    for f in &fields {
        if f.name == field_ref.ident.node.name {
            // We don't really need a sub-span here, but no harm done
            let sub_span = self.span_utils.span_for_last_ident(field_ref.ident.span);
            return VariableRefData {
                name: field_name,
                span: sub_span.unwrap(),
                scope: parent,
                ref_id: f.id,
            };
        }
    }
    self.tcx.sess.span_bug(field_ref.span,
                           &format!("Couldn't find field {}", field_name));
}
/// Not yet implemented; always panics.
pub fn get_data_for_id(&self, _id: &NodeId) -> Data {
    // FIXME
    unimplemented!();
}
// Resolve `ref_id` through the def map and return the referenced DefId,
// or None for primitive types (which have no definition to link to).
// Bugs out if the id has no entry in the def map.
fn lookup_ref_id(&self, ref_id: NodeId) -> Option<DefId> {
    if !self.tcx.def_map.borrow().contains_key(&ref_id) {
        self.tcx.sess.bug(&format!("def_map has no key for {} in lookup_type_ref",
                                   ref_id));
    }
    let def = self.tcx.def_map.borrow().get(&ref_id).unwrap().full_def();
    match def {
        def::DefPrimTy(_) => None,
        _ => Some(def.def_id()),
    }
}
// The enclosing scope of `id`, or 0 if there is none.
#[inline]
fn enclosing_scope(&self, id: NodeId) -> NodeId {
    self.tcx.map.get_enclosing_scope(id).unwrap_or(0)
}
}
// An AST visitor for collecting paths from patterns.
struct PathCollector {
    // One entry per path found; the Row field identifies the kind of
    // pattern the path was found in.
    collected_paths: Vec<(NodeId, ast::Path, ast::Mutability, recorder::Row)>,
}
impl PathCollector {
    /// Create an empty `PathCollector`.
    fn new() -> PathCollector {
        PathCollector { collected_paths: Vec::new() }
    }
}
impl<'v> Visitor<'v> for PathCollector {
    /// Collect the paths of struct, enum, qualified-path, and ident
    /// patterns, then recurse into sub-patterns.
    fn visit_pat(&mut self, p: &ast::Pat) {
        // Skip macro-generated patterns entirely.
        if generated_code(p.span) {
            return;
        }
        match p.node {
            ast::PatStruct(ref path, _, _) => {
                self.collected_paths.push((p.id,
                                           path.clone(),
                                           ast::MutMutable,
                                           recorder::TypeRef));
            }
            ast::PatEnum(ref path, _) |
            ast::PatQPath(_, ref path) => {
                self.collected_paths.push((p.id, path.clone(), ast::MutMutable, recorder::VarRef));
            }
            ast::PatIdent(bm, ref path1, _) => {
                debug!("PathCollector, visit ident in pat {}: {:?} {:?}",
                       path1.node,
                       p.span,
                       path1.span);
                let immut = match bm {
                    // Even if the ref is mut, you can't change the ref, only
                    // the data pointed at, so showing the initialising expression
                    // is still worthwhile.
                    ast::BindByRef(_) => ast::MutImmutable,
                    ast::BindByValue(mt) => mt,
                };
                // collect path for either visit_local or visit_arm
                let path = ast_util::ident_to_path(path1.span, path1.node);
                self.collected_paths.push((p.id, path, immut, recorder::VarRef));
            }
            _ => {}
        }
        visit::walk_pat(self, p);
    }
}
/// Entry point: dump save-analysis data for the whole crate as CSV.
///
/// Output goes to `$DXR_RUST_TEMP_FOLDER`, `<odir>/dxr`, or `./dxr-temp`
/// (first of these that applies), in a file named `<crate>.csv`.
#[allow(deprecated)]
pub fn process_crate(tcx: &ty::ctxt,
                     analysis: &ty::CrateAnalysis,
                     odir: Option<&Path>) {
    let krate = tcx.map.krate();
    // Nothing to do for entirely generated crates.
    if generated_code(krate.span) {
        return;
    }
    assert!(analysis.glob_map.is_some());
    let cratename = match attr::find_crate_name(&krate.attrs) {
        Some(name) => name.to_string(),
        None => {
            info!("Could not find crate name, using 'unknown_crate'");
            String::from("unknown_crate")
        },
    };
    info!("Dumping crate {}", cratename);
    // find a path to dump our data to
    let mut root_path = match env::var_os("DXR_RUST_TEMP_FOLDER") {
        Some(val) => PathBuf::from(val),
        None => match odir {
            Some(val) => val.join("dxr"),
            None => PathBuf::from("dxr-temp"),
        },
    };
    if let Err(e) = fs::create_dir_all(&root_path) {
        tcx.sess.err(&format!("Could not create directory {}: {}",
                              root_path.display(), e));
    }
    {
        let disp = root_path.display();
        info!("Writing output to {}", disp);
    }
    // Create output file.
    let mut out_name = cratename.clone();
    out_name.push_str(".csv");
    root_path.push(&out_name);
    let output_file = match File::create(&root_path) {
        Ok(f) => box f,
        Err(e) => {
            let disp = root_path.display();
            tcx.sess.fatal(&format!("Could not open {}: {}", disp, e));
        }
    };
    root_path.pop();
    let mut visitor = dump_csv::DumpCsvVisitor::new(tcx, analysis, output_file);
    visitor.dump_crate_info(&cratename, krate);
    visit::walk_crate(&mut visitor, krate);
}
// Utility functions for the module.
// Escape a string for CSV output by doubling every embedded double-quote.
fn escape(s: String) -> String {
    let mut escaped = String::with_capacity(s.len());
    for ch in s.chars() {
        escaped.push(ch);
        // CSV escapes a quote by writing it twice.
        if ch == '"' {
            escaped.push('"');
        }
    }
    escaped
}
// If the expression is a macro expansion or other generated code, run screaming
// and don't index.
pub fn generated_code(span: Span) -> bool {
    // A span produced by expansion, or the dummy span, is not user-written code.
    span.expn_id != NO_EXPANSION || span == DUMMY_SP
}
| get_trait_ref_data |
router_test.go | package router
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/rkusa/web"
)
// TestMiddleware verifies that a registered route is dispatched and its
// handler's response body is passed through unchanged.
func TestMiddleware(t *testing.T) {
	r := New()
	r.GET("/foo", func(rw http.ResponseWriter, r *http.Request) {
		rw.Write([]byte("bar"))
	})
	rec := request(t, r, "GET", "/foo")
	// Report got/want details on failure, consistent with TestNotFound.
	if rec.Code != http.StatusOK {
		t.Errorf("expected status 200, got: %d", rec.Code)
	}
	if got := rec.Body.String(); got != "bar" {
		t.Errorf("expected body %q, got: %q", "bar", got)
	}
}
// TestNotFound verifies that a request to an unregistered path yields 404.
func TestNotFound(t *testing.T) {
	router := New()
	router.GET("/foo", func(rw http.ResponseWriter, r *http.Request) {
		rw.Write([]byte("bar"))
	})
	res := request(t, router, "GET", "/bar")
	if res.Code != http.StatusNotFound {
		t.Errorf("expected status 404, got: %d", res.Code)
	}
}
func TestGroup(t *testing.T) |
// request sends a single HTTP request through the router's middleware and
// returns the recorded response.
func request(t *testing.T, router *Router, method, path string) *httptest.ResponseRecorder {
	req, err := http.NewRequest(method, path, nil)
	if err != nil {
		t.Fatal(err)
	}
	app := web.New()
	app.Use(router.Middleware())
	recorder := httptest.NewRecorder()
	app.ServeHTTP(recorder, req)
	return recorder
}
| {
r := New()
a := r.Group("/a")
{
a.GET("/", func(rw http.ResponseWriter, r *http.Request) {
rw.Write([]byte("/a"))
})
a.GET("/b", func(rw http.ResponseWriter, r *http.Request) {
rw.Write([]byte("/a/b"))
})
a.GET("c", func(rw http.ResponseWriter, r *http.Request) {
rw.Write([]byte("/a/c"))
})
d := a.Group("d")
{
d.GET("", func(rw http.ResponseWriter, r *http.Request) {
rw.Write([]byte("/a/d"))
})
d.GET("/e", func(rw http.ResponseWriter, r *http.Request) {
rw.Write([]byte("/a/d/e"))
})
}
f := a.Group("f/")
{
f.GET("/g", func(rw http.ResponseWriter, r *http.Request) {
rw.Write([]byte("/a/f/g"))
})
}
}
paths := []string{
"/a", "/a/b", "/a/c", "/a/d", "/a/d/e", "/a/f/g",
}
for _, path := range paths {
rec := request(t, r, "GET", path)
if rec.Code != http.StatusOK || rec.Body.String() != path {
t.Errorf(
"path %s not working as expected: status: %d, body: %s",
path, rec.Code, rec.Body.String(),
)
}
}
rec := request(t, r, "GET", "/a/f")
if rec.Code != http.StatusNotFound {
t.Errorf("expected status 404, got: %d", rec.Code)
}
} |
order.use-case.unit.js | /*
Unit tests for the Order Use Case library.
*/
// Public npm libraries
const assert = require('chai').assert
const sinon = require('sinon')
// Local support libraries
// const testUtils = require('../../utils/test-utils')
// Unit under test (uut)
const OrderLib = require('../../../src/use-cases/order')
const adapters = require('../mocks/adapters')
describe('#order-use-case', () => {
let uut
let sandbox
before(async () => {
// Delete all previous users in the database.
// await testUtils.deleteAllUsers()
})
beforeEach(() => {
sandbox = sinon.createSandbox()
uut = new OrderLib({ adapters })
})
afterEach(() => sandbox.restore())
describe('#constructor', () => {
it('should throw an error if adapters are not passed in', () => {
try {
uut = new OrderLib()
assert.fail('Unexpected code path')
console.log(uut) // linter
} catch (err) {
assert.include(
err.message,
'Instance of adapters must be passed in when instantiating Order Use Cases library.'
)
} | })
describe('#ensureFunds', () => {
it('should return true if wallet has enough funds for a sell order', async () => {
const orderEntity = {
lokadId: 'SWP',
messageType: 1,
messageClass: 1,
tokenId: 'a4fb5c2da1aa064e25018a43f9165040071d9e984ba190c222a7f59053af84b2',
buyOrSell: 'sell',
rateInSats: 1000,
minSatsToExchange: 0,
numTokens: 1
}
const result = await uut.ensureFunds(orderEntity)
assert.equal(result, true)
})
})
// describe('#moveTokens', () => {
// it('should move tokens to the holding address', async () => {
// // Mock dependencies
// // sandbox
// // .stub(uut.adapters.wallet.bchWallet, 'sendTokens')
// // .resolves('fakeTxid')
//
// const orderEntity = {
// lokadId: 'SWP',
// messageType: 1,
// messageClass: 1,
// tokenId: 'a4fb5c2da1aa064e25018a43f9165040071d9e984ba190c222a7f59053af84b2',
// buyOrSell: 'sell',
// rateInBaseUnit: 1000,
// minUnitsToExchange: 0,
// numTokens: 1
// }
//
// const result = await uut.moveTokens(orderEntity)
// // console.log('result: ', result)
//
// assert.property(result, 'txid')
// assert.property(result, 'vout')
//
// assert.equal(result.txid, 'fakeTxid')
// assert.equal(result.vout, 1)
// })
// })
describe('#createOrder', () => {
it('should create an order and return the hash', async () => {
const entryObj = {
lokadId: 'SWP',
messageType: 1,
messageClass: 1,
tokenId: 'token-id',
buyOrSell: 'sell',
rateInBaseUnit: 1000,
minUnitsToExchange: 1250,
numTokens: 1
}
// Mock dependencies
// sandbox.stub(uut.adapters.wallet, 'burnPsf').resolves('fakeTxid')
// sandbox.stub(uut.adapters.wallet.bchWallet, 'getTxData').resolves({ tokenTicker: 'TROUT' })
sandbox.stub(uut.orderEntity, 'validate').returns(entryObj)
sandbox.stub(uut, 'ensureFunds').resolves()
sandbox.stub(uut.adapters.wallet.bchWallet.bchjs.Util, 'sleep').resolves()
sandbox.stub(uut.adapters.wallet, 'moveTokens').resolves({ txid: 'fakeTxid', vout: 0, hdIndex: 1 })
sandbox.stub(uut.adapters.wallet.bchWallet, 'getUtxos').resolves()
sandbox.stub(uut.adapters.p2wdb, 'write').resolves('fakeHash')
const result = await uut.createOrder(entryObj)
console.log('result: ', result)
assert.isString(result)
})
it('should catch and throw an error', async () => {
try {
await uut.createOrder()
assert.fail('Unexpected code path')
} catch (err) {
assert.include(err.message, 'Cannot set')
}
})
})
}) | }) |
nicovideo.py | #!/usr/bin/env python
__all__ = ['nicovideo_download']
from ..common import *
def nicovideo_login(user, password):
    """Log in to Nico Nico Douga and return the login response headers.

    ``request``, ``parse`` and ``fake_headers`` come from the package's
    ``common`` module (wildcard-imported at the top of this file).
    """
    # URL-encode the credentials so characters such as '&', '=' or '+'
    # in the mail address/password cannot corrupt the form body. The
    # previous string concatenation sent them unescaped.
    data = parse.urlencode({
        'current_form': 'login',
        'mail': user,
        'password': password,
        'login_submit': 'Log In',
    })
    response = request.urlopen(request.Request(
        "https://secure.nicovideo.jp/secure/login?site=niconico",
        headers=fake_headers, data=data.encode('utf-8')))
    return response.headers
def | (url, output_dir='.', merge=True, info_only=False):
import ssl
ssl_context = request.HTTPSHandler(
context=ssl.SSLContext(ssl.PROTOCOL_TLSv1))
cookie_handler = request.HTTPCookieProcessor()
opener = request.build_opener(ssl_context, cookie_handler)
request.install_opener(opener)
import netrc, getpass
try:
info = netrc.netrc().authenticators('nicovideo')
except FileNotFoundError:
info = None
if info is None:
user = input("User: ")
password = getpass.getpass("Password: ")
else:
user, password = info[0], info[2]
print("Logging in...")
nicovideo_login(user, password)
html = get_html(url) # necessary!
title = unicodize(r1(r'<span class="videoHeaderTitle"[^>]*>([^<]+)</span>', html))
vid = url.split('/')[-1].split('?')[0]
api_html = get_html('http://www.nicovideo.jp/api/getflv?v=%s' % vid)
real_url = parse.unquote(r1(r'url=([^&]+)&', api_html))
type, ext, size = url_info(real_url)
print_info(site_info, title, type, size)
if not info_only:
download_urls([real_url], title, ext, size, output_dir, merge = merge)
site_info = "Nicovideo.jp"
download = nicovideo_download
download_playlist = playlist_not_supported('nicovideo')
| nicovideo_download |
language_transaction_execution.rs | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::FuzzTargetImpl;
use language_e2e_tests::account_universe::{
all_transactions_strategy, log_balance_strategy, run_and_assert_universe, AccountUniverseGen,
};
use libra_proptest_helpers::ValueGenerator;
use proptest::{collection::vec, test_runner};
/// Fuzz target that executes randomly generated transactions against a
/// generated account universe.
#[derive(Clone, Debug, Default)]
pub struct LanguageTransactionExecution;
impl FuzzTargetImpl for LanguageTransactionExecution {
fn | (&self) -> &'static str {
module_name!()
}
/// Human-readable description of this fuzz target.
fn description(&self) -> &'static str {
    "Language execute randomly generated transactions"
}
/// Treat `data` as a proptest pass-through RNG stream, generate 1..40
/// transactions and a universe of 2..20 accounts from it, execute the
/// transactions and assert the universe stays consistent.
fn fuzz(&self, data: &[u8]) {
    // Using the raw fuzz input as the RNG stream maps coverage directly
    // back to input bytes.
    let passthrough_rng =
        test_runner::TestRng::from_seed(test_runner::RngAlgorithm::PassThrough, &data);
    let mut generator = ValueGenerator::new_with_rng(passthrough_rng);
    let txn_strategy = vec(all_transactions_strategy(0, 1_000_000), 1..40);
    let txns = generator.generate(txn_strategy);
    let universe_strategy =
        AccountUniverseGen::strategy(2..20, log_balance_strategy(10_000_000));
    let universe = generator.generate(universe_strategy);
    run_and_assert_universe(universe, txns).unwrap();
}
}
| name |
result4_pre.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
MY_CONSTANT = 12124
def | (arg1, arg2):
"""This is my doc string of things"""
ans = arg1 * arg2
return ans
a = 10
b = 20
if __name__ == '__main__':
print(function1(5, 6))
print(function2(5, 6))
| my_new_first_function |
type_registry.rs | use crate::serde::Serializable;
use crate::{Reflect, TypeInfo, Typed};
use bevy_utils::{HashMap, HashSet};
use downcast_rs::{impl_downcast, Downcast};
use parking_lot::{RwLock, RwLockReadGuard, RwLockWriteGuard};
use serde::Deserialize;
use std::{any::TypeId, fmt::Debug, sync::Arc};
/// A registry of reflected types.
pub struct TypeRegistry {
    /// All registrations, keyed by the registered type's [`TypeId`].
    registrations: HashMap<TypeId, TypeRegistration>,
    /// Maps a type's short name to its id; entries are removed once the
    /// short name becomes ambiguous (see `add_registration`).
    short_name_to_id: HashMap<String, TypeId>,
    /// Maps a type's full (path-qualified) name to its id.
    full_name_to_id: HashMap<String, TypeId>,
    /// Short names claimed by more than one registered type.
    ambiguous_names: HashSet<String>,
}
// TODO: remove this wrapper once we migrate to Atelier Assets and the Scene AssetLoader doesn't
// need a TypeRegistry ref
/// A synchronized wrapper around a [`TypeRegistry`].
#[derive(Clone, Default)]
pub struct TypeRegistryArc {
    /// The shared, lock-protected registry.
    pub internal: Arc<RwLock<TypeRegistry>>,
}
impl Debug for TypeRegistryArc {
    // Debug output is the set of registered full type names.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.internal.read().full_name_to_id.keys().fmt(f)
    }
}
/// A trait which allows a type to generate its [`TypeRegistration`].
///
/// This trait is automatically implemented for types which derive [`Reflect`].
pub trait GetTypeRegistration {
    /// Produce the registration record for this type.
    fn get_type_registration() -> TypeRegistration;
}
impl Default for TypeRegistry {
    // Delegates to `new()`, which pre-registers the primitive types.
    fn default() -> Self {
        Self::new()
    }
}
impl TypeRegistry {
    /// Create a type registry with *no* registered types.
    pub fn empty() -> Self {
        Self {
            registrations: Default::default(),
            short_name_to_id: Default::default(),
            full_name_to_id: Default::default(),
            ambiguous_names: Default::default(),
        }
    }

    /// Create a type registry with default registrations for primitive types.
    pub fn new() -> Self {
        let mut registry = Self::empty();
        registry.register::<bool>();
        registry.register::<u8>();
        registry.register::<u16>();
        registry.register::<u32>();
        registry.register::<u64>();
        registry.register::<u128>();
        registry.register::<usize>();
        registry.register::<i8>();
        registry.register::<i16>();
        registry.register::<i32>();
        registry.register::<i64>();
        registry.register::<i128>();
        registry.register::<isize>();
        registry.register::<f32>();
        registry.register::<f64>();
        registry
    }

    /// Registers the type `T`.
    pub fn register<T>(&mut self)
    where
        T: GetTypeRegistration,
    {
        self.add_registration(T::get_type_registration());
    }

    /// Registers the type described by `registration`.
    ///
    /// If the registration's short name collides with a previously registered
    /// one, both types become reachable only via their full names.
    pub fn add_registration(&mut self, registration: TypeRegistration) {
        let short_name = registration.short_name.to_string();
        if self.short_name_to_id.contains_key(&short_name)
            || self.ambiguous_names.contains(&short_name)
        {
            // name is ambiguous. fall back to long names for all ambiguous types
            self.short_name_to_id.remove(&short_name);
            self.ambiguous_names.insert(short_name);
        } else {
            self.short_name_to_id
                .insert(short_name, registration.type_id());
        }
        self.full_name_to_id
            .insert(registration.type_name().to_string(), registration.type_id());
        self.registrations
            .insert(registration.type_id(), registration);
    }

    /// Returns a reference to the [`TypeRegistration`] of the type with the
    /// given [`TypeId`].
    ///
    /// If the specified type has not been registered, returns `None`.
    ///
    /// [`TypeId`]: std::any::TypeId
    pub fn get(&self, type_id: TypeId) -> Option<&TypeRegistration> {
        self.registrations.get(&type_id)
    }

    /// Returns a mutable reference to the [`TypeRegistration`] of the type with
    /// the given [`TypeId`].
    ///
    /// If the specified type has not been registered, returns `None`.
    ///
    /// [`TypeId`]: std::any::TypeId
    pub fn get_mut(&mut self, type_id: TypeId) -> Option<&mut TypeRegistration> {
        self.registrations.get_mut(&type_id)
    }

    /// Returns a reference to the [`TypeRegistration`] of the type with the
    /// given name.
    ///
    /// If no type with the given name has been registered, returns `None`.
    pub fn get_with_name(&self, type_name: &str) -> Option<&TypeRegistration> {
        self.full_name_to_id
            .get(type_name)
            .and_then(|id| self.get(*id))
    }

    /// Returns a mutable reference to the [`TypeRegistration`] of the type with
    /// the given name.
    ///
    /// If no type with the given name has been registered, returns `None`.
    pub fn get_with_name_mut(&mut self, type_name: &str) -> Option<&mut TypeRegistration> {
        // Clone the id so the immutable map borrow ends before `get_mut`.
        self.full_name_to_id
            .get(type_name)
            .cloned()
            .and_then(move |id| self.get_mut(id))
    }

    /// Returns a reference to the [`TypeRegistration`] of the type with
    /// the given short name.
    ///
    /// If the short name is ambiguous, or if no type with the given short name
    /// has been registered, returns `None`.
    pub fn get_with_short_name(&self, short_type_name: &str) -> Option<&TypeRegistration> {
        self.short_name_to_id
            .get(short_type_name)
            .and_then(|id| self.registrations.get(id))
    }

    /// Returns a mutable reference to the [`TypeRegistration`] of the type with
    /// the given short name.
    ///
    /// If the short name is ambiguous, or if no type with the given short name
    /// has been registered, returns `None`.
    pub fn get_with_short_name_mut(
        &mut self,
        short_type_name: &str,
    ) -> Option<&mut TypeRegistration> {
        self.short_name_to_id
            .get(short_type_name)
            .and_then(|id| self.registrations.get_mut(id))
    }

    /// Returns a reference to the [`TypeData`] of type `T` associated with the given `TypeId`.
    ///
    /// The returned value may be used to downcast [`Reflect`] trait objects to
    /// trait objects of the trait used to generate `T`, provided that the
    /// underlying reflected type has the proper `#[reflect(DoThing)]`
    /// attribute.
    ///
    /// If the specified type has not been registered, or if `T` is not present
    /// in its type registration, returns `None`.
    pub fn get_type_data<T: TypeData>(&self, type_id: TypeId) -> Option<&T> {
        self.get(type_id)
            .and_then(|registration| registration.data::<T>())
    }

    /// Returns a mutable reference to the [`TypeData`] of type `T` associated with the given `TypeId`.
    ///
    /// If the specified type has not been registered, or if `T` is not present
    /// in its type registration, returns `None`.
    pub fn get_type_data_mut<T: TypeData>(&mut self, type_id: TypeId) -> Option<&mut T> {
        self.get_mut(type_id)
            .and_then(|registration| registration.data_mut::<T>())
    }

    /// Returns the [`TypeInfo`] associated with the given `TypeId`.
    ///
    /// If the specified type has not been registered, returns `None`.
    pub fn get_type_info(&self, type_id: TypeId) -> Option<&'static TypeInfo> {
        self.get(type_id)
            .map(|registration| registration.type_info())
    }

    /// Returns an iterator over the [`TypeRegistration`]s of the registered
    /// types.
    pub fn iter(&self) -> impl Iterator<Item = &TypeRegistration> {
        self.registrations.values()
    }

    /// Returns a mutable iterator over the [`TypeRegistration`]s of the registered
    /// types.
    pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut TypeRegistration> {
        self.registrations.values_mut()
    }
}
impl TypeRegistryArc {
    /// Takes a read lock on the underlying [`TypeRegistry`].
    pub fn read(&self) -> RwLockReadGuard<'_, TypeRegistry> {
        self.internal.read()
    }

    /// Takes a write lock on the underlying [`TypeRegistry`].
    pub fn write(&self) -> RwLockWriteGuard<'_, TypeRegistry> {
        self.internal.write()
    }
}
/// A record of data about a type.
///
/// This contains the [`TypeInfo`] of the type, as well as its [short name].
///
/// For each trait specified by the [`#[reflect(_)]`][0] attribute of
/// [`#[derive(Reflect)]`][1] on the registered type, this record also contains
/// a [`TypeData`] which can be used to downcast [`Reflect`] trait objects of
/// this type to trait objects of the relevant trait.
///
/// [short name]: TypeRegistration::get_short_name
/// [`TypeInfo`]: crate::TypeInfo
/// [0]: crate::Reflect
/// [1]: crate::Reflect
pub struct TypeRegistration {
short_name: String,
data: HashMap<TypeId, Box<dyn TypeData>>,
type_info: &'static TypeInfo,
}
impl TypeRegistration {
/// Returns the [`TypeId`] of the type.
///
/// [`TypeId`]: std::any::TypeId
#[inline]
pub fn type_id(&self) -> TypeId {
self.type_info.type_id()
}
/// Returns a reference to the value of type `T` in this registration's type
/// data.
///
/// Returns `None` if no such value exists.
pub fn data<T: TypeData>(&self) -> Option<&T> {
self.data
.get(&TypeId::of::<T>())
.and_then(|value| value.downcast_ref())
}
/// Returns a mutable reference to the value of type `T` in this
/// registration's type data.
///
/// Returns `None` if no such value exists.
pub fn | <T: TypeData>(&mut self) -> Option<&mut T> {
self.data
.get_mut(&TypeId::of::<T>())
.and_then(|value| value.downcast_mut())
}
/// Returns a reference to the registration's [`TypeInfo`]
pub fn type_info(&self) -> &'static TypeInfo {
self.type_info
}
/// Inserts an instance of `T` into this registration's type data.
///
/// If another instance of `T` was previously inserted, it is replaced.
pub fn insert<T: TypeData>(&mut self, data: T) {
self.data.insert(TypeId::of::<T>(), Box::new(data));
}
/// Creates type registration information for `T`.
pub fn of<T: Reflect + Typed>() -> Self {
let type_name = std::any::type_name::<T>();
Self {
data: HashMap::default(),
short_name: Self::get_short_name(type_name),
type_info: T::type_info(),
}
}
/// Returns the [short name] of the type.
///
/// [short name]: TypeRegistration::get_short_name
pub fn short_name(&self) -> &str {
&self.short_name
}
/// Returns the [name] of the type.
///
/// [name]: std::any::type_name
pub fn type_name(&self) -> &'static str {
self.type_info.type_name()
}
/// Calculates the short name of a type.
///
/// The short name of a type is its full name as returned by
/// [`std::any::type_name`], but with the prefix of all paths removed. For
/// example, the short name of `alloc::vec::Vec<core::option::Option<u32>>`
/// would be `Vec<Option<u32>>`.
pub fn get_short_name(full_name: &str) -> String {
let mut short_name = String::new();
{
// A typename may be a composition of several other type names (e.g. generic parameters)
// separated by the characters that we try to find below.
// Then, each individual typename is shortened to its last path component.
//
// Note: Instead of `find`, `split_inclusive` would be nice but it's still unstable...
let mut remainder = full_name;
while let Some(index) = remainder.find(&['<', '>', '(', ')', '[', ']', ',', ';'][..]) {
let (path, new_remainder) = remainder.split_at(index);
// Push the shortened path in front of the found character
short_name.push_str(path.rsplit(':').next().unwrap());
// Push the character that was found
let character = new_remainder.chars().next().unwrap();
short_name.push(character);
// Advance the remainder
if character == ',' || character == ';' {
// A comma or semicolon is always followed by a space
short_name.push(' ');
remainder = &new_remainder[2..];
} else {
remainder = &new_remainder[1..];
}
}
// The remainder will only be non-empty if there were no matches at all
if !remainder.is_empty() {
// Then, the full typename is a path that has to be shortened
short_name.push_str(remainder.rsplit(':').next().unwrap());
}
}
short_name
}
}
impl Clone for TypeRegistration {
fn clone(&self) -> Self {
let mut data = HashMap::default();
for (id, type_data) in &self.data {
data.insert(*id, (*type_data).clone_type_data());
}
TypeRegistration {
data,
short_name: self.short_name.clone(),
type_info: self.type_info,
}
}
}
/// A trait for types generated by the [`#[reflect_trait]`][0] attribute macro.
///
/// [0]: crate::reflect_trait
pub trait TypeData: Downcast + Send + Sync {
fn clone_type_data(&self) -> Box<dyn TypeData>;
}
impl_downcast!(TypeData);
impl<T: 'static + Send + Sync> TypeData for T
where
T: Clone,
{
fn clone_type_data(&self) -> Box<dyn TypeData> {
Box::new(self.clone())
}
}
/// Trait used to generate [`TypeData`] for trait reflection.
///
/// This is used by the `#[derive(Reflect)]` macro to generate an implementation
/// of [`TypeData`] to pass to [`TypeRegistration::insert`].
pub trait FromType<T> {
fn from_type() -> Self;
}
/// A struct used to serialize reflected instances of a type.
///
/// A `ReflectSerialize` for type `T` can be obtained via
/// [`FromType::from_type`].
#[derive(Clone)]
pub struct ReflectSerialize {
get_serializable: for<'a> fn(value: &'a dyn Reflect) -> Serializable,
}
impl<T: Reflect + erased_serde::Serialize> FromType<T> for ReflectSerialize {
fn from_type() -> Self {
ReflectSerialize {
get_serializable: |value| {
let value = value.downcast_ref::<T>().unwrap_or_else(|| {
panic!("ReflectSerialize::get_serialize called with type `{}`, even though it was created for `{}`", value.type_name(), std::any::type_name::<T>())
});
Serializable::Borrowed(value)
},
}
}
}
impl ReflectSerialize {
/// Turn the value into a serializable representation
pub fn get_serializable<'a>(&self, value: &'a dyn Reflect) -> Serializable<'a> {
(self.get_serializable)(value)
}
}
/// A struct used to deserialize reflected instances of a type.
///
/// A `ReflectDeserialize` for type `T` can be obtained via
/// [`FromType::from_type`].
#[derive(Clone)]
pub struct ReflectDeserialize {
pub func: fn(
deserializer: &mut dyn erased_serde::Deserializer,
) -> Result<Box<dyn Reflect>, erased_serde::Error>,
}
impl ReflectDeserialize {
/// Deserializes a reflected value.
///
/// The underlying type of the reflected value, and thus the expected
/// structure of the serialized data, is determined by the type used to
/// construct this `ReflectDeserialize` value.
pub fn deserialize<'de, D>(&self, deserializer: D) -> Result<Box<dyn Reflect>, D::Error>
where
D: serde::Deserializer<'de>,
{
let mut erased = <dyn erased_serde::Deserializer>::erase(deserializer);
(self.func)(&mut erased)
.map_err(<<D as serde::Deserializer<'de>>::Error as serde::de::Error>::custom)
}
}
impl<T: for<'a> Deserialize<'a> + Reflect> FromType<T> for ReflectDeserialize {
fn from_type() -> Self {
ReflectDeserialize {
func: |deserializer| Ok(Box::new(T::deserialize(deserializer)?)),
}
}
}
#[cfg(test)]
mod test {
use crate::TypeRegistration;
use bevy_utils::HashMap;
#[test]
fn test_get_short_name() {
assert_eq!(
TypeRegistration::get_short_name(std::any::type_name::<f64>()),
"f64"
);
assert_eq!(
TypeRegistration::get_short_name(std::any::type_name::<String>()),
"String"
);
assert_eq!(
TypeRegistration::get_short_name(std::any::type_name::<(u32, f64)>()),
"(u32, f64)"
);
assert_eq!(
TypeRegistration::get_short_name(std::any::type_name::<(String, String)>()),
"(String, String)"
);
assert_eq!(
TypeRegistration::get_short_name(std::any::type_name::<[f64]>()),
"[f64]"
);
assert_eq!(
TypeRegistration::get_short_name(std::any::type_name::<[String]>()),
"[String]"
);
assert_eq!(
TypeRegistration::get_short_name(std::any::type_name::<[f64; 16]>()),
"[f64; 16]"
);
assert_eq!(
TypeRegistration::get_short_name(std::any::type_name::<[String; 16]>()),
"[String; 16]"
);
}
#[test]
fn test_property_type_registration() {
assert_eq!(
TypeRegistration::of::<Option<f64>>().short_name,
"Option<f64>"
);
assert_eq!(
TypeRegistration::of::<HashMap<u32, String>>().short_name,
"HashMap<u32, String>"
);
assert_eq!(
TypeRegistration::of::<Option<HashMap<u32, String>>>().short_name,
"Option<HashMap<u32, String>>"
);
assert_eq!(
TypeRegistration::of::<Option<HashMap<u32, Option<String>>>>().short_name,
"Option<HashMap<u32, Option<String>>>"
);
assert_eq!(
TypeRegistration::of::<Option<HashMap<String, Option<String>>>>().short_name,
"Option<HashMap<String, Option<String>>>"
);
assert_eq!(
TypeRegistration::of::<Option<HashMap<Option<String>, Option<String>>>>().short_name,
"Option<HashMap<Option<String>, Option<String>>>"
);
assert_eq!(
TypeRegistration::of::<Option<HashMap<Option<String>, (String, Option<String>)>>>()
.short_name,
"Option<HashMap<Option<String>, (String, Option<String>)>>"
);
}
}
| data_mut |
get_tag_default_request_response.go | // Copyright (c) 2016, 2018, 2020, Oracle and/or its affiliates. All rights reserved.
// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
// Code generated. DO NOT EDIT.
package identity
import (
"github.com/oracle/oci-go-sdk/v31/common"
"net/http"
)
// GetTagDefaultRequest wrapper for the GetTagDefault operation
//
// See also
//
// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/identity/GetTagDefault.go.html to see an example of how to use GetTagDefaultRequest.
type GetTagDefaultRequest struct {
// The OCID of the tag default.
TagDefaultId *string `mandatory:"true" contributesTo:"path" name:"tagDefaultId"`
// Unique Oracle-assigned identifier for the request.
// If you need to contact Oracle about a particular request, please provide the request ID.
OpcRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-request-id"`
// Metadata about the request. This information will not be transmitted to the service, but
// represents information that the SDK will consume to drive retry behavior.
RequestMetadata common.RequestMetadata
}
func (request GetTagDefaultRequest) String() string {
return common.PointerString(request)
}
// HTTPRequest implements the OCIRequest interface
func (request GetTagDefaultRequest) HTTPRequest(method, path string) (http.Request, error) {
return common.MakeDefaultHTTPRequestWithTaggedStruct(method, path, request)
}
// RetryPolicy implements the OCIRetryableRequest interface. This retrieves the specified retry policy.
func (request GetTagDefaultRequest) RetryPolicy() *common.RetryPolicy {
return request.RequestMetadata.RetryPolicy
}
// GetTagDefaultResponse wrapper for the GetTagDefault operation
type GetTagDefaultResponse struct {
// The underlying http response
RawResponse *http.Response
// The TagDefault instance
TagDefault `presentIn:"body"`
// Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
// particular request, please provide the request ID.
OpcRequestId *string `presentIn:"header" name:"opc-request-id"`
// For optimistic concurrency control. See `if-match`.
Etag *string `presentIn:"header" name:"etag"`
}
func (response GetTagDefaultResponse) String() string {
return common.PointerString(response)
}
// HTTPResponse implements the OCIResponse interface | func (response GetTagDefaultResponse) HTTPResponse() *http.Response {
return response.RawResponse
} |
|
relay.go | package relay
import (
"fmt"
"github.com/2d0g/influxdb-srelay/cluster"
"github.com/2d0g/influxdb-srelay/config"
)
// Relay is an HTTP or UDP endpoint
/*type Relay interface {
Name() string
Run() error
Stop() error
}*/
var (
mainConfig *config.Config
logDir string
clusters map[string]*cluster.Cluster
)
func SetConfig(cfg *config.Config) {
mainConfig = cfg
}
func SetLogdir(ld string) {
logDir = ld
}
func InitClusters() error | {
clusters = make(map[string]*cluster.Cluster)
for _, cfg := range mainConfig.Influxcluster {
c, err := cluster.NewCluster(cfg)
if err != nil {
return err
}
if clusters[cfg.Name] != nil {
return fmt.Errorf("duplicate cluster: %q", cfg.Name)
}
clusters[cfg.Name] = c
}
return nil
} |
|
msic.py | from __future__ import division, print_function
import numpy as np
from shapely.geometry import Polygon
import cv2
from collections import defaultdict
from kitti import Calibration
def camera_to_lidar(points, r_rect, velo2cam):
points_shape = list(points.shape[0:-1])
if points.shape[-1] == 3:
points = np.concatenate([points, np.ones(points_shape + [1])], axis=-1)
lidar_points = np.dot(points, np.linalg.inv(np.dot(r_rect, velo2cam).T))
return lidar_points[..., :3]
def lidar_to_camera(points, r_rect, velo2cam):
points_shape = list(points.shape[:-1])
if points.shape[-1] == 3:
points = np.concatenate([points, np.ones(points_shape + [1])], axis=-1)
camera_points = np.dot(points, np.dot(r_rect, velo2cam).T)
return camera_points[..., :3]
def box_lidar_to_camera(data, r_rect, velo2cam):
xyz_lidar = data[:, 0:3]
w, l, h = data[:, 3:4], data[:, 4:5], data[:, 5:6]
r = data[:, 6:7]
xyz = lidar_to_camera(xyz_lidar, r_rect, velo2cam)
return np.concatenate([xyz, l, h, w, r], axis=1)
def box_camera_to_lidar(data, r_rect, velo2cam):
xyz = data[:, 0:3]
l, h, w = data[:, 3:4], data[:, 4:5], data[:, 5:6]
r = data[:, 6:7]
xyz_lidar = camera_to_lidar(xyz, r_rect, velo2cam)
return np.concatenate([xyz_lidar, w, l, h, r], axis=1) |
def cuboid_to_corners(cuboid):
(cls_id, x, y, z, w, l, h, theta) = cuboid
theta = (theta + np.pi / 2) # (theta + np.pi / 2)
cos_t = np.cos(theta)
sin_t = np.sin(theta)
centre_x = x
centre_y = y
rear_left_x = centre_x - l / 2 * cos_t - w / 2 * sin_t
rear_left_y = centre_y - l / 2 * sin_t + w / 2 * cos_t
rear_right_x = centre_x - l / 2 * cos_t + w / 2 * sin_t
rear_right_y = centre_y - l / 2 * sin_t - w / 2 * cos_t
front_right_x = centre_x + l / 2 * cos_t + w / 2 * sin_t
front_right_y = centre_y + l / 2 * sin_t - w / 2 * cos_t
front_left_x = centre_x + l / 2 * cos_t - w / 2 * sin_t
front_left_y = centre_y + l / 2 * sin_t + w / 2 * cos_t
corners = np.array([rear_left_x, rear_left_y, rear_right_x, rear_right_y,
front_right_x, front_right_y, front_left_x, front_left_y]).reshape((4, 2))
return corners
def get_corners_list(reg_list):
corners_list = []
for reg in reg_list:
(prob, w, l, h, centre_x, centre_y, z, theta) = reg
cos_t = np.cos(theta)
sin_t = np.sin(theta)
rear_left_x = centre_x - l / 2 * cos_t - w / 2 * sin_t
rear_left_y = centre_y - l / 2 * sin_t + w / 2 * cos_t
rear_right_x = centre_x - l / 2 * cos_t + w / 2 * sin_t
rear_right_y = centre_y - l / 2 * sin_t - w / 2 * cos_t
front_right_x = centre_x + l / 2 * cos_t + w / 2 * sin_t
front_right_y = centre_y + l / 2 * sin_t - w / 2 * cos_t
front_left_x = centre_x + l / 2 * cos_t - w / 2 * sin_t
front_left_y = centre_y + l / 2 * sin_t + w / 2 * cos_t
corners = np.array([rear_left_x, rear_left_y, rear_right_x, rear_right_y,
front_right_x, front_right_y, front_left_x, front_left_y]).reshape((4, 2))
corners_list.append(corners)
return corners_list
def roty(t):
''' Rotation about the y-axis. '''
c = np.cos(t)
s = np.sin(t)
return np.array([[c, 0, s],
[0, 1, 0],
[-s, 0, c]])
def rotz(t):
''' Rotation about the z-axis. '''
c = np.cos(t)
s = np.sin(t)
return np.array([[c, -s, 0],
[s, c, 0],
[0, 0, 1]])
def get_corners_3d(reg_list):
corners_list = []
for reg in reg_list:
(prob, w, l, h, centre_x, centre_y, z, theta) = reg
R = rotz(-theta-np.pi/2)
x_corners = [l / 2, l / 2, -l / 2, -l / 2, l / 2, l / 2, -l / 2, -l / 2]
y_corners = [w / 2, -w / 2, -w / 2, w / 2, w / 2, -w / 2, -w / 2, w / 2]
z_corners = [0, 0, 0, 0, h, h, h, h]
# z_corners = [-h/2, -h/2, -h/2, -h/2, h/2, h/2, h/2, h/2]
corners_3d = np.dot(R, np.vstack([x_corners, y_corners, z_corners]))
# print corners_3d.shape
corners_3d[0, :] = corners_3d[0, :] + centre_x
corners_3d[1, :] = corners_3d[1, :] + centre_y
corners_3d[2, :] = corners_3d[2, :] + z
corners_3d = corners_3d.transpose(1, 0)
corners_list.append(corners_3d)
corners_list = np.array(corners_list)
return corners_list
def decode_output_box3d(prediction, rpn_mode=False, anchors=None):
reg_list, cls_list = get_reg_list_rpn(prediction, anchors)
corners_3d = get_corners_3d(reg_list)
# corners_list = get_corners_list(reg_list)
return corners_3d, reg_list, cls_list
def get_det_info(prediction, bev_data, img_path, rpn_mode=False, anchors=None):
if not rpn_mode:
reg_list, cls_list = get_reg_list(prediction)
else:
reg_list, cls_list = get_reg_list_rpn(prediction, anchors)
calib_path = img_path.replace('velodyne', 'calib')
calib_path = calib_path.replace('.bin', '.txt')
calib = Calibration(calib_path)
reg_list[:, [5, 6, 4]] = calib.project_velo_to_rect(reg_list[:, 4:7])
reg_list[:, 5] *= -1
corners_list = get_corners_list(reg_list)
prob_list = []
for i in range(len(reg_list)):
prob_list.append(reg_list[i][0])
return corners_list, reg_list, prob_list, cls_list
def convert_format(boxes_array):
"""
:param array: an array of shape [# bboxs, 4, 2]
:return: a shapely.geometry.Polygon object
"""
polygons = [Polygon([(box[i, 0], box[i, 1]) for i in range(4)]) for box in boxes_array]
return np.array(polygons)
def compute_iou(box1, box2):
"""Calculates IoU of the given box with the array of the given boxes.
box: a polygon
boxes: a vector of polygons
Note: the areas are passed in rather than calculated here for
efficiency. Calculate once in the caller to avoid duplicate work.
"""
# Calculate intersection areas
iou = box1.intersection(box2).area / box1.union(box2).area
return iou
def merge_mini_batch(batch_list, _unused=False):
batch_size = len(batch_list)
example_merged = defaultdict(list)
for example in batch_list:
for k, v in example.items():
example_merged[k].append(v)
ret = {}
for key, elems in example_merged.items():
if key in ['pillar']:
print('pillar shape', elems[0].shape)
ret[key] = np.concatenate(elems, axis=0)
elif key == 'coords':
coors = []
for i, coor in enumerate(elems):
print('coor shape', coor.shape)
coor_pad = np.pad(
coor, ((0, 0), (1, 0)),
mode='constant',
constant_values=i)
coors.append(coor_pad)
ret[key] = np.concatenate(coors, axis=0)
else:
ret[key] = np.stack(elems, axis=0)
return ret | |
client.go | // Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
package vmext
import (
"context"
"encoding/json"
compute "github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2018-10-01/compute"
azurev1alpha1 "github.com/Azure/azure-service-operator/api/v1alpha1"
"github.com/Azure/azure-service-operator/pkg/resourcemanager/config"
"github.com/Azure/azure-service-operator/pkg/resourcemanager/iam"
"github.com/Azure/azure-service-operator/pkg/secrets"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/types"
)
type AzureVirtualMachineExtensionClient struct {
Creds config.Credentials
SecretClient secrets.SecretClient
Scheme *runtime.Scheme
}
func NewAzureVirtualMachineExtensionClient(creds config.Credentials, secretclient secrets.SecretClient, scheme *runtime.Scheme) *AzureVirtualMachineExtensionClient {
return &AzureVirtualMachineExtensionClient{
Creds: creds,
SecretClient: secretclient,
Scheme: scheme,
}
}
func getVirtualMachineExtensionClient(creds config.Credentials) compute.VirtualMachineExtensionsClient |
func (c *AzureVirtualMachineExtensionClient) CreateVirtualMachineExtension(ctx context.Context, location string, resourceGroupName string, vmName string, extName string, autoUpgradeMinorVersion bool, forceUpdateTag string, publisher string, typeName string, typeHandlerVersion string, settings string, protectedSettings string) (future compute.VirtualMachineExtensionsCreateOrUpdateFuture, err error) {
client := getVirtualMachineExtensionClient(c.Creds)
var extensionSettings map[string]*string
err = json.Unmarshal([]byte(settings), &extensionSettings)
if err != nil {
return future, err
}
var extensionProtectedSettings map[string]*string
err = json.Unmarshal([]byte(protectedSettings), &extensionProtectedSettings)
if err != nil {
return future, err
}
future, err = client.CreateOrUpdate(
ctx,
resourceGroupName,
vmName,
extName,
compute.VirtualMachineExtension{
Location: &location,
VirtualMachineExtensionProperties: &compute.VirtualMachineExtensionProperties{
ForceUpdateTag: &forceUpdateTag,
Publisher: &publisher,
Type: &typeName,
TypeHandlerVersion: &typeHandlerVersion,
AutoUpgradeMinorVersion: &autoUpgradeMinorVersion,
Settings: &extensionSettings,
ProtectedSettings: &extensionProtectedSettings,
},
},
)
return future, err
}
func (c *AzureVirtualMachineExtensionClient) DeleteVirtualMachineExtension(ctx context.Context, extName string, vmName string, resourcegroup string) (status string, err error) {
client := getVirtualMachineExtensionClient(c.Creds)
_, err = client.Get(ctx, resourcegroup, vmName, extName, "")
if err == nil { // vm present, so go ahead and delete
future, err := client.Delete(ctx, resourcegroup, vmName, extName)
return future.Status(), err
}
// VM extension not present so return success anyway
return "VM extension not present", nil
}
func (c *AzureVirtualMachineExtensionClient) GetVirtualMachineExtension(ctx context.Context, resourcegroup string, vmName string, extName string) (vm compute.VirtualMachineExtension, err error) {
client := getVirtualMachineExtensionClient(c.Creds)
return client.Get(ctx, resourcegroup, vmName, extName, "")
}
func (p *AzureVirtualMachineExtensionClient) AddVirtualMachineExtensionCredsToSecrets(ctx context.Context, secretName string, data map[string][]byte, instance *azurev1alpha1.AzureVirtualMachineExtension) error {
key := types.NamespacedName{
Name: secretName,
Namespace: instance.Namespace,
}
err := p.SecretClient.Upsert(ctx,
key,
data,
secrets.WithOwner(instance),
secrets.WithScheme(p.Scheme),
)
if err != nil {
return err
}
return nil
}
func (p *AzureVirtualMachineExtensionClient) GetOrPrepareSecret(ctx context.Context, instance *azurev1alpha1.AzureVirtualMachineExtension) (map[string][]byte, error) {
name := instance.Name
secret := map[string][]byte{}
key := types.NamespacedName{Name: name, Namespace: instance.Namespace}
if stored, err := p.SecretClient.Get(ctx, key); err == nil {
return stored, nil
}
emptyProtectedSettings := "{}"
secret["protectedSettings"] = []byte(emptyProtectedSettings)
return secret, nil
}
| {
computeClient := compute.NewVirtualMachineExtensionsClientWithBaseURI(config.BaseURI(), creds.SubscriptionID())
a, _ := iam.GetResourceManagementAuthorizer(creds)
computeClient.Authorizer = a
computeClient.AddToUserAgent(config.UserAgent())
return computeClient
} |
home_test.go | // Copyright 2017 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package user
import (
"net/http"
"testing"
"github.com/masoodkamyab/gitea/models"
"github.com/masoodkamyab/gitea/modules/test"
"github.com/masoodkamyab/gitea/modules/setting"
"github.com/stretchr/testify/assert"
)
func TestIssues(t *testing.T) {
setting.UI.IssuePagingNum = 1
assert.NoError(t, models.LoadFixtures())
ctx := test.MockContext(t, "issues")
test.LoadUser(t, ctx, 2)
ctx.SetParams(":type", "issues")
ctx.Req.Form.Set("state", "closed")
Issues(ctx) |
assert.EqualValues(t, map[int64]int64{1: 1}, ctx.Data["Counts"])
assert.EqualValues(t, true, ctx.Data["IsShowClosed"])
assert.Len(t, ctx.Data["Issues"], 1)
assert.Len(t, ctx.Data["Repos"], 1)
} | assert.EqualValues(t, http.StatusOK, ctx.Resp.Status()) |
test_lock.py | """
Test for the SmartThings lock platform.
The only mocking required is of the underlying SmartThings API object so
real HTTP calls are not initiated during testing.
"""
from pysmartthings import Attribute, Capability
from pysmartthings.device import Status
from openpeerpower.components.lock import DOMAIN as LOCK_DOMAIN
from openpeerpower.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE
from openpeerpower.config_entries import ConfigEntryState
from openpeerpower.const import STATE_UNAVAILABLE
from openpeerpower.helpers import device_registry as dr, entity_registry as er
from openpeerpower.helpers.dispatcher import async_dispatcher_send
from .conftest import setup_platform
async def test_entity_and_device_attributes(opp, device_factory):
"""Test the attributes of the entity are correct."""
# Arrange
device = device_factory("Lock_1", [Capability.lock], {Attribute.lock: "unlocked"})
entity_registry = er.async_get(opp)
device_registry = dr.async_get(opp)
# Act
await setup_platform(opp, LOCK_DOMAIN, devices=[device])
# Assert
entry = entity_registry.async_get("lock.lock_1")
assert entry
assert entry.unique_id == device.device_id
entry = device_registry.async_get_device({(DOMAIN, device.device_id)})
assert entry
assert entry.name == device.label
assert entry.model == device.device_type_name
assert entry.manufacturer == "Unavailable"
async def test_lock(opp, device_factory):
"""Test the lock locks successfully."""
# Arrange
device = device_factory("Lock_1", [Capability.lock])
device.status.attributes[Attribute.lock] = Status(
"unlocked",
None,
{
"method": "Manual",
"codeId": None,
"codeName": "Code 1",
"lockName": "Front Door",
"usedCode": "Code 2",
},
)
await setup_platform(opp, LOCK_DOMAIN, devices=[device])
# Act
await opp.services.async_call(
LOCK_DOMAIN, "lock", {"entity_id": "lock.lock_1"}, blocking=True
)
# Assert
state = opp.states.get("lock.lock_1")
assert state is not None
assert state.state == "locked"
assert state.attributes["method"] == "Manual"
assert state.attributes["lock_state"] == "locked"
assert state.attributes["code_name"] == "Code 1"
assert state.attributes["used_code"] == "Code 2"
assert state.attributes["lock_name"] == "Front Door"
assert "code_id" not in state.attributes
async def test_unlock(opp, device_factory):
"""Test the lock unlocks successfully."""
# Arrange
device = device_factory("Lock_1", [Capability.lock], {Attribute.lock: "locked"})
await setup_platform(opp, LOCK_DOMAIN, devices=[device])
# Act
await opp.services.async_call(
LOCK_DOMAIN, "unlock", {"entity_id": "lock.lock_1"}, blocking=True
)
# Assert
state = opp.states.get("lock.lock_1")
assert state is not None
assert state.state == "unlocked"
async def test_update_from_signal(opp, device_factory):
"""Test the lock updates when receiving a signal."""
# Arrange
device = device_factory("Lock_1", [Capability.lock], {Attribute.lock: "unlocked"})
await setup_platform(opp, LOCK_DOMAIN, devices=[device])
await device.lock(True)
# Act
async_dispatcher_send(opp, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id])
# Assert
await opp.async_block_till_done()
state = opp.states.get("lock.lock_1")
assert state is not None
assert state.state == "locked"
async def test_unload_config_entry(opp, device_factory):
"""Test the lock is removed when the config entry is unloaded."""
# Arrange | config_entry = await setup_platform(opp, LOCK_DOMAIN, devices=[device])
config_entry.state = ConfigEntryState.LOADED
# Act
await opp.config_entries.async_forward_entry_unload(config_entry, "lock")
# Assert
assert opp.states.get("lock.lock_1").state == STATE_UNAVAILABLE | device = device_factory("Lock_1", [Capability.lock], {Attribute.lock: "locked"}) |
client_monitored_item_base_impl.ts | /**
* @module node-opcua-client-private
*/
import { EventEmitter } from "events";
import { assert } from "node-opcua-assert";
import { AttributeIds } from "node-opcua-data-model";
import { DataValue } from "node-opcua-data-value";
import { checkDebugFlag, make_debugLog } from "node-opcua-debug";
import { ExtensionObject } from "node-opcua-extension-object";
import { EventFilter } from "node-opcua-service-filter";
import { ReadValueId, ReadValueIdOptions } from "node-opcua-service-read";
import {
MonitoredItemCreateResult,
MonitoringMode, MonitoringParameters, MonitoringParametersOptions
} from "node-opcua-service-subscription";
import { StatusCode, StatusCodes } from "node-opcua-status-code";
import { Variant } from "node-opcua-variant";
import { ClientMonitoredItemBase } from "../client_monitored_item_base";
import { ClientSubscription } from "../client_subscription";
import { ClientSubscriptionImpl } from "./client_subscription_impl";
const debugLog = make_debugLog(__filename);
const doDebug = checkDebugFlag(__filename);
export type PrepareForMonitoringResult = { error: string } | {
error?: null;
itemToMonitor: ReadValueIdOptions;
monitoringMode: MonitoringMode;
requestedParameters: MonitoringParameters;
};
export class | extends EventEmitter implements ClientMonitoredItemBase {
public itemToMonitor: ReadValueId;
public monitoringParameters: MonitoringParameters;
public subscription: ClientSubscriptionImpl;
public monitoringMode: MonitoringMode;
public statusCode: StatusCode;
public monitoredItemId?: any;
public result?: MonitoredItemCreateResult;
public filterResult?: ExtensionObject;
constructor(
subscription: ClientSubscription,
itemToMonitor: ReadValueIdOptions,
monitoringParameters: MonitoringParametersOptions
) {
super();
this.statusCode = StatusCodes.BadDataUnavailable;
assert(subscription.constructor.name === "ClientSubscriptionImpl");
this.subscription = subscription as ClientSubscriptionImpl;
this.itemToMonitor = new ReadValueId(itemToMonitor);
this.monitoringParameters = new MonitoringParameters(monitoringParameters);
this.monitoringMode = MonitoringMode.Reporting;
assert(this.monitoringParameters.clientHandle === 0xFFFFFFFF, "should not have a client handle yet");
}
/**
* @internal
* @param value
* @private
*/
public _notify_value_change(value: DataValue) {
/**
* Notify the observers that the MonitoredItem value has changed on the server side.
* @event changed
* @param value
*/
try {
this.emit("changed", value);
} catch (err) {
debugLog("Exception raised inside the event handler called by ClientMonitoredItem.on('change')", err);
debugLog("Please verify the application using this node-opcua client");
}
}
/**
* @internal
* @param eventFields
* @private
*/
public _notify_event(eventFields: Variant[]) {
/**
* Notify the observers that the MonitoredItem value has changed on the server side.
* @event changed
* @param value
*/
try {
this.emit("changed", eventFields);
} catch (err) {
debugLog("Exception raised inside the event handler called by ClientMonitoredItem.on('change')", err);
debugLog("Please verify the application using this node-opcua client");
}
}
/**
 * Allocate a fresh client handle and validate the monitoring filter, returning
 * the pieces of a CreateMonitoredItems request for this item, or an object
 * with an `error` string when the filter does not match `itemToMonitor.attributeId`.
 * @internal
 * @private
 */
public _prepare_for_monitoring() { // : PrepareForMonitoringResult {
// 4294967295 (0xFFFFFFFF) marks "no client handle assigned yet"
assert(this.monitoringParameters.clientHandle === 4294967295, "should not have a client handle yet");
const subscription = this.subscription as ClientSubscriptionImpl;
this.monitoringParameters.clientHandle = subscription.nextClientHandle();
assert(this.monitoringParameters.clientHandle > 0
&& this.monitoringParameters.clientHandle !== 4294967295);
// If attributeId is EventNotifier then monitoring parameters need a filter.
// The filter must then either be DataChangeFilter, EventFilter or AggregateFilter.
// todo can be done in another way?
// todo implement AggregateFilter
// todo support DataChangeFilter
// todo support whereClause
if (this.itemToMonitor.attributeId === AttributeIds.EventNotifier) {
//
// see OPCUA Spec 1.02 part 4 page 65 : 5.12.1.4 Filter
// see part 4 page 130: 7.16.3 EventFilter
// part 3 page 11 : 4.6 Event Model
// To monitor for Events, the attributeId element of the ReadValueId structure is the
// the id of the EventNotifierAttribute
// OPC Unified Architecture 1.02, Part 4 5.12.1.2 Sampling interval page 64:
// "A Client shall define a sampling interval of 0 if it subscribes for Events."
// toDO
// note : the EventFilter is used when monitoring Events.
// default to an empty EventFilter when the caller did not provide one
// @ts-ignore
this.monitoringParameters.filter = this.monitoringParameters.filter || new EventFilter({});
const filter = this.monitoringParameters.filter;
if (!filter) {
return {error: "Internal Error"};
}
// only an EventFilter is valid when monitoring the EventNotifier attribute
if (filter.schema.name !== "EventFilter") {
return {
error: "Mismatch between attributeId and filter in monitoring parameters : " +
"Got a " + filter.schema.name + " but a EventFilter object is required " +
"when itemToMonitor.attributeId== AttributeIds.EventNotifier"
};
}
} else if (this.itemToMonitor.attributeId === AttributeIds.Value) {
// the DataChangeFilter and the AggregateFilter are used when monitoring Variable Values
// The Value Attribute is used when monitoring Variables. Variable values are monitored for a change
// in value or a change in their status. The filters defined in this standard (see 7.16.2) and in Part 8 are
// used to determine if the value change is large enough to cause a Notification to be generated for the
// to do : check 'DataChangeFilter' && 'AggregateFilter'
} else {
// any other attribute: no filter is allowed at all
if (this.monitoringParameters.filter) {
return {
error: "Mismatch between attributeId and filter in monitoring parameters : " +
"no filter expected when attributeId is not Value or EventNotifier"
};
}
}
return {
itemToMonitor: this.itemToMonitor,
monitoringMode: this.monitoringMode,
requestedParameters: this.monitoringParameters
};
}
/**
 * Record the server's answer to a monitored-item creation: always keep the
 * status code, and on Good also copy the server-assigned id and the revised
 * sampling interval, queue size and filter result.
 * @internal
 * @param monitoredItemResult
 * @private
 */
public _applyResult(monitoredItemResult: MonitoredItemCreateResult) {
    this.statusCode = monitoredItemResult.statusCode;
    /* istanbul ignore else */
    if (monitoredItemResult.statusCode !== StatusCodes.Good) {
        return;
    }
    this.result = monitoredItemResult;
    this.monitoredItemId = monitoredItemResult.monitoredItemId;
    this.monitoringParameters.samplingInterval = monitoredItemResult.revisedSamplingInterval;
    this.monitoringParameters.queueSize = monitoredItemResult.revisedQueueSize;
    this.filterResult = monitoredItemResult.filterResult || undefined;
}
/**
 * Finalize creation of the monitored item: apply the server result, then
 * either register the item with its subscription or report the failure.
 * @internal
 * @param monitoredItemResult
 * @private
 */
public _after_create(monitoredItemResult: MonitoredItemCreateResult) {
    this._applyResult(monitoredItemResult);
    if (this.statusCode !== StatusCodes.Good) {
        /**
         * Notify the observers that the monitored item has failed to initialized.
         * @event err
         * @param statusCode {StatusCode}
         */
        const err = new Error(monitoredItemResult.statusCode.toString());
        this.emit("err", err.message);
        this.emit("terminated");
        return;
    }
    const subscription = this.subscription as ClientSubscriptionImpl;
    subscription._add_monitored_item(this.monitoringParameters.clientHandle, this);
    /**
     * Notify the observers that the monitored item is now fully initialized.
     * @event initialized
     */
    this.emit("initialized");
}
}
| ClientMonitoredItemBaseImpl |
notify.go | // Copyright (c) 2021 Terminus, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package endpoints
import (
"context"
"encoding/json"
"errors"
"net/http"
"net/url"
"strconv"
"strings"
"unicode/utf8"
"github.com/erda-project/erda/apistructs"
"github.com/erda-project/erda/modules/core-services/services/apierrors"
"github.com/erda-project/erda/pkg/http/httpserver"
)
// CreateNotify 创建通知
func (e *Endpoints) CreateNotify(ctx context.Context, r *http.Request, vars map[string]string) (
httpserver.Responser, error) {
locale := e.GetLocale(r)
orgID, err := strconv.ParseInt(r.Header.Get("Org-ID"), 10, 64)
if err != nil {
return apierrors.ErrCreateNotify.MissingParameter("Org-ID header is nil").ToResp(), nil
}
if r.Body == nil {
return apierrors.ErrCreateNotify.MissingParameter("body is nil").ToResp(), nil
}
var notifyCreateReq apistructs.CreateNotifyRequest
if err := json.NewDecoder(r.Body).Decode(¬ifyCreateReq); err != nil {
return apierrors.ErrCreateNotify.InvalidParameter("can't decode body").ToResp(), nil
}
if strings.TrimSpace(notifyCreateReq.Name) == "" {
return apierrors.ErrCreateNotify.InvalidParameter("name is empty").ToResp(), nil
}
if utf8.RuneCountInString(notifyCreateReq.Name) > 50 {
return apierrors.ErrCreateNotify.InvalidParameter(locale.Get("ErrCreateNotifyGroup.NameTooLong")).ToResp(), nil
}
notifyCreateReq.Creator = r.Header.Get("User-Id")
notifyCreateReq.OrgID = orgID
if notifyCreateReq.WithGroup == false && notifyCreateReq.NotifyGroupID == 0 {
return apierrors.ErrCreateNotify.InvalidParameter("notifyGroupId is null").ToResp(), nil
}
err = e.notifyGroup.CheckNotifyChannels(notifyCreateReq.Channels)
if err != nil {
return apierrors.ErrCreateNotify.InvalidParameter(err.Error()).ToResp(), nil
}
err = e.checkNotifyPermission(r, notifyCreateReq.ScopeType, notifyCreateReq.ScopeID, apistructs.CreateAction)
if err != nil {
return apierrors.ErrCreateNotify.AccessDenied().ToResp(), nil
}
notifyID, err := e.notifyGroup.CreateNotify(locale, ¬ifyCreateReq)
if err != nil {
return apierrors.ErrCreateNotify.InternalError(err).ToResp(), nil
}
notify, err := e.notifyGroup.GetNotify(notifyID, orgID)
if err != nil {
return apierrors.ErrCreateNotify.InternalError(err).ToResp(), nil
}
return httpserver.OkResp(notify)
}
// GetNotify returns the detail of a single notification, together with the
// user ids referenced by the notify and its group (for response enrichment).
func (e *Endpoints) GetNotify(ctx context.Context, r *http.Request, vars map[string]string) (
	httpserver.Responser, error) {
	notifyID, err := strconv.ParseInt(vars["notifyID"], 10, 64)
	if err != nil {
		return apierrors.ErrGetNotify.InvalidParameter(err).ToResp(), nil
	}
	orgID, err := strconv.ParseInt(r.Header.Get("Org-ID"), 10, 64)
	if err != nil {
		return apierrors.ErrGetNotify.MissingParameter("Org-ID header is nil").ToResp(), nil
	}
	notify, err := e.notifyGroup.GetNotify(notifyID, orgID)
	if err != nil {
		return apierrors.ErrGetNotify.InternalError(err).ToResp(), nil
	}
	if err := e.checkNotifyPermission(r, notify.ScopeType, notify.ScopeID, apistructs.GetAction); err != nil {
		return apierrors.ErrGetNotify.AccessDenied().ToResp(), nil
	}
	// collect the group creator and all user-type receivers, then the
	// notify's own creator
	var userIDs []string
	if group := notify.NotifyGroup; group != nil {
		if group.Creator != "" {
			userIDs = append(userIDs, group.Creator)
		}
		for _, target := range group.Targets {
			if target.Type != apistructs.UserNotifyTarget {
				continue
			}
			for _, v := range target.Values {
				userIDs = append(userIDs, v.Receiver)
			}
		}
	}
	if notify.Creator != "" {
		userIDs = append(userIDs, notify.Creator)
	}
	return httpserver.OkResp(notify, userIDs)
}
// DeleteNotify removes a notification; with query parameter withGroup=true
// the associated notify group is removed as well.
func (e *Endpoints) DeleteNotify(ctx context.Context, r *http.Request, vars map[string]string) (
	httpserver.Responser, error) {
	notifyID, err := strconv.ParseInt(vars["notifyID"], 10, 64)
	if err != nil {
		return apierrors.ErrDeleteNotify.InvalidParameter(err).ToResp(), nil
	}
	orgID, err := strconv.ParseInt(r.Header.Get("Org-ID"), 10, 64)
	if err != nil {
		return apierrors.ErrDeleteNotify.MissingParameter("Org-ID header is nil").ToResp(), nil
	}
	// fetch first so the deleted record can be returned to the caller
	notify, err := e.notifyGroup.GetNotify(notifyID, orgID)
	if err != nil {
		return apierrors.ErrDeleteNotify.InternalError(err).ToResp(), nil
	}
	if err := e.checkNotifyPermission(r, notify.ScopeType, notify.ScopeID, apistructs.DeleteAction); err != nil {
		return apierrors.ErrDeleteNotify.AccessDenied().ToResp(), nil
	}
	withGroup := r.URL.Query().Get("withGroup") == "true"
	if err := e.notifyGroup.DeleteNotify(notifyID, withGroup, orgID); err != nil {
		return apierrors.ErrDeleteNotify.InternalError(err).ToResp(), nil
	}
	return httpserver.OkResp(notify)
}
// UpdateNotify 更新通知
func (e *Endpoints) UpdateNotify(ctx context.Context, r *http.Request, vars map[string]string) (
httpserver.Responser, error) {
idStr := vars["notifyID"]
notifyID, err := strconv.ParseInt(idStr, 10, 64)
if err != nil {
return apierrors.ErrUpdateNotify.InvalidParameter(err).ToResp(), nil
}
orgID, err := strconv.ParseInt(r.Header.Get("Org-ID"), 10, 64)
if err != nil {
return apierrors.ErrUpdateNotify.MissingParameter("Org-ID header is nil").ToResp(), nil
}
if r.Body == nil {
return apierrors.ErrUpdateNotify.MissingParameter("body is nil").ToResp(), nil
}
var notifyUpdateReq apistructs.UpdateNotifyRequest
if err := json.NewDecoder(r.Body).Decode(¬ifyUpdateReq); err != nil {
return apierrors.ErrUpdateNotify.InvalidParameter("can't decode body").ToResp(), nil
}
err = e.notifyGroup.CheckNotifyChannels(notifyUpdateReq.Channels)
if err != nil {
return apierrors.ErrUpdateNotify.InvalidParameter(err.Error()).ToResp(), nil
}
notifyUpdateReq.ID = notifyID
notifyUpdateReq.OrgID = orgID
notify, err := e.notifyGroup.GetNotify(notifyID, orgID)
if err != nil {
return apierrors.ErrUpdateNotify.InternalError(err).ToResp(), nil
}
err = e.checkNotifyPermission(r, notify.ScopeType, notify.ScopeID, apistructs.UpdateAction)
if err != nil {
return apierrors.ErrUpdateNotify.AccessDenied().ToResp(), nil
}
err = e.notifyGroup.UpdateNotify(¬ifyUpdateReq)
if err != nil {
return apierrors.ErrUpdateNotify.InternalError(err).ToResp(), nil
}
notify, err = e.notifyGroup.GetNotify(notifyID, orgID)
if err != nil {
return apierrors.ErrUpdateNotify.InternalError(err).ToResp(), nil
}
return httpserver.OkResp(notify)
}
// NotifyEnable enables a notification.
func (e *Endpoints) NotifyEnable(ctx context.Context, r *http.Request, vars map[string]string) (
	httpserver.Responser, error) {
	notifyID, err := strconv.ParseInt(vars["notifyID"], 10, 64)
	if err != nil {
		return apierrors.ErrNotifyEnable.InvalidParameter(err).ToResp(), nil
	}
	orgID, err := strconv.ParseInt(r.Header.Get("Org-ID"), 10, 64)
	if err != nil {
		return apierrors.ErrNotifyEnable.MissingParameter("Org-ID header is nil").ToResp(), nil
	}
	notify, err := e.notifyGroup.GetNotify(notifyID, orgID)
	if err != nil {
		return apierrors.ErrNotifyEnable.InternalError(err).ToResp(), nil
	}
	if err := e.checkNotifyPermission(r, notify.ScopeType, notify.ScopeID, apistructs.OperateAction); err != nil {
		return apierrors.ErrNotifyEnable.AccessDenied().ToResp(), nil
	}
	if err := e.notifyGroup.UpdateNotifyEnable(notifyID, true, orgID); err != nil {
		return apierrors.ErrNotifyEnable.InternalError(err).ToResp(), nil
	}
	// NOTE: the returned notify was fetched before the update, so its enabled
	// flag may still show the previous state (matches original behavior)
	return httpserver.OkResp(notify)
}
// NotifyDisable disables a notification.
func (e *Endpoints) NotifyDisable(ctx context.Context, r *http.Request, vars map[string]string) (
	httpserver.Responser, error) {
	notifyID, err := strconv.ParseInt(vars["notifyID"], 10, 64)
	if err != nil {
		return apierrors.ErrNotifyDisable.InvalidParameter(err).ToResp(), nil
	}
	orgID, err := strconv.ParseInt(r.Header.Get("Org-ID"), 10, 64)
	if err != nil {
		return apierrors.ErrNotifyDisable.MissingParameter("Org-ID header is nil").ToResp(), nil
	}
	notify, err := e.notifyGroup.GetNotify(notifyID, orgID)
	if err != nil {
		return apierrors.ErrNotifyDisable.InternalError(err).ToResp(), nil
	}
	if err := e.checkNotifyPermission(r, notify.ScopeType, notify.ScopeID, apistructs.OperateAction); err != nil {
		return apierrors.ErrNotifyDisable.AccessDenied().ToResp(), nil
	}
	if err := e.notifyGroup.UpdateNotifyEnable(notifyID, false, orgID); err != nil {
		return apierrors.ErrNotifyDisable.InternalError(err).ToResp(), nil
	}
	// NOTE: the returned notify was fetched before the update, so its enabled
	// flag may still show the previous state (matches original behavior)
	return httpserver.OkResp(notify)
}
// QueryNotifies lists notifications with paging and scope/label/cluster filters.
func (e *Endpoints) QueryNotifies(ctx context.Context, r *http.Request, vars map[string]string) (
	httpserver.Responser, error) {
	orgID, err := strconv.ParseInt(r.Header.Get("Org-ID"), 10, 64)
	if err != nil {
		return apierrors.ErrQueryNotify.MissingParameter("Org-ID header is nil").ToResp(), nil
	}
	query := r.URL.Query()
	queryReq := apistructs.QueryNotifyRequest{
		PageSize:    getInt(r.URL, "pageSize", 10),
		PageNo:      getInt(r.URL, "pageNo", 1),
		ScopeType:   query.Get("scopeType"),
		ScopeID:     query.Get("scopeId"),
		Label:       query.Get("label"),
		ClusterName: query.Get("clusterName"),
		OrgID:       orgID,
	}
	if err := e.checkNotifyPermission(r, queryReq.ScopeType, queryReq.ScopeID, apistructs.ListAction); err != nil {
		return apierrors.ErrQueryNotify.AccessDenied().ToResp(), nil
	}
	result, err := e.notifyGroup.QueryNotifies(&queryReq)
	if err != nil {
		return apierrors.ErrQueryNotify.InternalError(err).ToResp(), nil
	}
	// collect creator and user-type receiver ids so the caller can render user info
	var userIDs []string
	for _, notify := range result.List {
		if notify.Creator != "" {
			userIDs = append(userIDs, notify.Creator)
		}
		if notify.NotifyGroup == nil {
			continue
		}
		for _, target := range notify.NotifyGroup.Targets {
			if target.Type != apistructs.UserNotifyTarget {
				continue
			}
			for _, v := range target.Values {
				userIDs = append(userIDs, v.Receiver)
			}
		}
	}
	return httpserver.OkResp(result, userIDs)
}
// QueryNotifiesBySource lists the notifications associated with a source.
func (e *Endpoints) QueryNotifiesBySource(ctx context.Context, r *http.Request, vars map[string]string) (
	httpserver.Responser, error) {
	query := r.URL.Query()
	orgID, err := strconv.ParseInt(query.Get("orgId"), 10, 64)
	if err != nil {
		return apierrors.ErrQueryNotify.InternalError(err).ToResp(), nil
	}
	// resolve the org's locale; a failed org lookup falls back to the default
	localeName := ""
	if orgInfo, err := e.org.Get(orgID); err == nil {
		localeName = orgInfo.Locale
	}
	locale := e.bdl.GetLocale(localeName)
	result, err := e.notifyGroup.QueryNotifiesBySource(locale,
		query.Get("sourceType"), query.Get("sourceId"), query.Get("itemName"),
		orgID, query.Get("clusterName"), query.Get("label"))
	if err != nil {
		return apierrors.ErrQueryNotify.InternalError(err).ToResp(), nil
	}
	return httpserver.OkResp(result)
}
// FuzzyQueryNotifiesBySource fuzzily matches the notifications associated
// with a source, with paging.
func (e *Endpoints) FuzzyQueryNotifiesBySource(ctx context.Context, r *http.Request, vars map[string]string) (
	httpserver.Responser, error) {
	query := r.URL.Query()
	orgID, err := strconv.ParseInt(query.Get("orgId"), 10, 64)
	if err != nil {
		return apierrors.ErrQueryNotify.InternalError(err).ToResp(), nil
	}
	// resolve the org's locale; a failed org lookup falls back to the default
	localeName := ""
	if orgInfo, err := e.org.Get(orgID); err == nil {
		localeName = orgInfo.Locale
	}
	req := apistructs.FuzzyQueryNotifiesBySourceRequest{
		SourceType:  query.Get("sourceType"),
		OrgID:       orgID,
		Label:       query.Get("label"),
		Locale:      e.bdl.GetLocale(localeName),
		PageNo:      getInt(r.URL, "pageNo", 1),
		PageSize:    getInt(r.URL, "pageSize", 10),
		ClusterName: query.Get("clusterName"),
		SourceName:  query.Get("sourceName"),
		NotifyName:  query.Get("notifyName"),
		ItemName:    query.Get("itemName"),
		Channel:     query.Get("channel"),
	}
	result, err := e.notifyGroup.FuzzyQueryNotifiesBySource(req)
	if err != nil {
		return apierrors.ErrQueryNotify.InternalError(err).ToResp(), nil
	}
	return httpserver.OkResp(result)
}
// checkNotifyPermission verifies that the requesting user (User-ID header)
// may perform action on notify resources in the given scope.
// MSP-scoped requests bypass the permission check entirely.
func (e *Endpoints) checkNotifyPermission(r *http.Request, scopeType, scopeID, action string) error {
	if scopeType == apistructs.MSPScope {
		return nil
	}
	userID := r.Header.Get("User-ID")
	if userID == "" {
		return errors.New("failed to get permission(User-ID is empty)")
	}
	// map the string scope type onto the permission scope; idiomatic switch
	// instead of the original chain of independent ifs
	var scope apistructs.ScopeType
	switch scopeType {
	case "org":
		scope = apistructs.OrgScope
	case "project":
		scope = apistructs.ProjectScope
	case "app":
		scope = apistructs.AppScope
		// NOTE(review): any other scopeType falls through with a zero-value
		// scope, exactly as the original code did — confirm this is intended.
	}
	id, err := strconv.ParseInt(scopeID, 10, 64)
	if err != nil {
		return err
	}
	access, err := e.permission.CheckPermission(&apistructs.PermissionCheckRequest{
		UserID:   userID,
		Scope:    scope,
		ScopeID:  uint64(id),
		Action:   action,
		Resource: apistructs.NotifyResource,
	})
	if err != nil {
		return err
	}
	if !access {
		return errors.New("no permission")
	}
	return nil
}
func getInt(url *url.URL, key string, defaultValue int64) int64 {
valueStr := url.Query().Get(key)
| err := strconv.ParseInt(valueStr, 10, 32)
if err != nil {
return defaultValue
}
return value
}
| value, |
genurl.go | package base
import (
"fmt"
"time"
)
// GenAccessUrl builds the Huami registration-token endpoint for the
// given +86 (mainland China) phone number uid.
func GenAccessUrl(uid string) string {
	const accessURLTemplate = "https://api-user.huami.com/registrations/+86%s/tokens"
	return fmt.Sprintf(accessURLTemplate, uid)
}
// GenLoginUrl returns the fixed Huami account login endpoint.
func GenLoginUrl() string {
	const loginURL = "https://account.huami.com/v2/client/login"
	return loginURL
}
// GenSetStepUrl ...
func GenSetStepUrl() string | {
return fmt.Sprintf("https://api-mifit-cn.huami.com/v1/data/band_data.json?&t=%d", time.Now().Unix())
} |
|
pin.py | from discord.ext.commands import Cog, command
from discord import Embed, File
from discord.ext import commands
import os, discord
class Pin(Cog):
    """Cog that pins and unpins messages via the 📌 reaction.

    In guild 699224778824745003 only members holding the 'sabitleyici'
    (pinner) role may pin/unpin; in other guilds the member's channel
    permissions are consulted instead.
    """

    def __init__(self, bot):
        # fix: the method name had been lost ("def |"); restored __init__
        self.bot = bot
        self.emoji = "📌"  # reaction that triggers pin/unpin

    def _is_authorized(self, channel, member):
        """Return True when *member* may (un)pin in *channel*.

        NOTE(review): the fallback indexes position 13 of the iterated
        permission pairs, presumably manage_messages — the attribute
        ``channel.permissions_for(member).manage_messages`` would be more
        robust; confirm before changing.
        """
        if channel.guild is not None and channel.guild.id == 699224778824745003:
            return 'sabitleyici' in [role.name for role in member.roles]
        pers = list(channel.permissions_for(member))
        return bool(pers[13][1])

    async def _pin_message(self, channel, message_id):
        """Pin the message and delete the resulting system notification."""
        message = await channel.fetch_message(message_id)
        if not message.pinned:
            await message.pin()
            # the pin just posted a "message has been pinned" system
            # message; remove it to keep the channel clean
            async for recent in channel.history(limit=1):
                await recent.delete()

    async def _unpin_message(self, channel, message_id):
        """Unpin the message if it is currently pinned."""
        message = await channel.fetch_message(message_id)
        if message.pinned:
            await message.unpin()

    @Cog.listener()
    async def on_raw_reaction_add(self, payload):
        """Pin the reacted message when the reacting member is authorized."""
        if payload.emoji.name != self.emoji:
            return
        channel = self.bot.get_channel(payload.channel_id)
        if int(payload.guild_id) == 699224778824745003:
            if 'sabitleyici' in [role.name for role in payload.member.roles]:
                await self._pin_message(channel, payload.message_id)
        else:
            pers = list(channel.permissions_for(payload.member))
            # see NOTE(review) in _is_authorized about index 13
            if pers[13][1]:
                await self._pin_message(channel, payload.message_id)

    @Cog.listener()
    async def on_raw_reaction_remove(self, payload):
        """Unpin the message when an authorized member removes 📌.

        Raw remove events carry no member, so the member is resolved from
        the bot's member cache by user id.
        """
        if payload.emoji.name != self.emoji:
            return
        channel = self.bot.get_channel(payload.channel_id)
        member = discord.utils.get(self.bot.get_all_members(), id=payload.user_id)
        if int(payload.guild_id) == 699224778824745003:
            # fix: removed leftover debug print of the member's role names
            if 'sabitleyici' in [role.name for role in member.roles]:
                await self._unpin_message(channel, payload.message_id)
        else:
            pers = list(channel.permissions_for(member))
            if pers[13][1]:
                await self._unpin_message(channel, payload.message_id)
metrics_test.go | // Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package metrics // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/metrics"
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"go.opentelemetry.io/collector/pdata/pcommon"
"go.opentelemetry.io/collector/pdata/pmetric"
"github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common"
)
// Test_newPathGetSetter_NumberDataPoint verifies that newPathGetSetter can
// read ("orig") and write ("new") every supported NumberDataPoint path:
// timestamps, value_double/value_int, flags, exemplars and attributes of
// every attribute value type. Each case's "modified" func applies the same
// write to a fresh reference point so the result can be compared as a whole.
func Test_newPathGetSetter_NumberDataPoint(t *testing.T) {
refNumberDataPoint := createNumberDataPointTelemetry(pmetric.NumberDataPointValueTypeInt)
newExemplars, newAttrs, newArrStr, newArrBool, newArrInt, newArrFloat, newArrBytes := createNewTelemetry()
tests := []struct {
name string
path []common.Field
orig interface{}
new interface{}
modified func(pmetric.NumberDataPoint)
valueType pmetric.NumberDataPointValueType
}{
{
name: "start_time_unix_nano",
path: []common.Field{
{
Name: "start_time_unix_nano",
},
},
orig: int64(100_000_000),
new: int64(200_000_000),
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(200)))
},
},
{
name: "time_unix_nano",
path: []common.Field{
{
Name: "time_unix_nano",
},
},
orig: int64(500_000_000),
new: int64(200_000_000),
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(200)))
},
},
{
name: "value_double",
path: []common.Field{
{
Name: "value_double",
},
},
orig: 1.1,
new: 2.2,
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.SetDoubleVal(2.2)
},
valueType: pmetric.NumberDataPointValueTypeDouble,
},
{
name: "value_int",
path: []common.Field{
{
Name: "value_int",
},
},
orig: int64(1),
new: int64(2),
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.SetIntVal(2)
},
},
{
name: "flags",
path: []common.Field{
{
Name: "flags",
},
},
orig: pmetric.NewMetricDataPointFlags(),
new: pmetric.NewMetricDataPointFlags(pmetric.MetricDataPointFlagNoRecordedValue),
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.SetFlags(pmetric.NewMetricDataPointFlags(pmetric.MetricDataPointFlagNoRecordedValue))
},
},
{
name: "exemplars",
path: []common.Field{
{
Name: "exemplars",
},
},
orig: refNumberDataPoint.Exemplars(),
new: newExemplars,
modified: func(datapoint pmetric.NumberDataPoint) {
newExemplars.CopyTo(datapoint.Exemplars())
},
},
{
name: "attributes",
path: []common.Field{
{
Name: "attributes",
},
},
orig: refNumberDataPoint.Attributes(),
new: newAttrs,
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.Attributes().Clear()
newAttrs.CopyTo(datapoint.Attributes())
},
},
{
name: "attributes string",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("str"),
},
},
orig: "val",
new: "newVal",
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.Attributes().UpsertString("str", "newVal")
},
},
{
name: "attributes bool",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("bool"),
},
},
orig: true,
new: false,
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.Attributes().UpsertBool("bool", false)
},
},
{
name: "attributes int",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("int"),
},
},
orig: int64(10),
new: int64(20),
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.Attributes().UpsertInt("int", 20)
},
},
{
name: "attributes float",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("double"),
},
},
orig: float64(1.2),
new: float64(2.4),
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.Attributes().UpsertDouble("double", 2.4)
},
},
{
name: "attributes bytes",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("bytes"),
},
},
orig: []byte{1, 3, 2},
new: []byte{2, 3, 4},
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.Attributes().UpsertMBytes("bytes", []byte{2, 3, 4})
},
},
{
name: "attributes array string",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("arr_str"),
},
},
orig: func() pcommon.Slice {
val, _ := refNumberDataPoint.Attributes().Get("arr_str")
return val.SliceVal()
}(),
new: []string{"new"},
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.Attributes().Upsert("arr_str", newArrStr)
},
},
{
name: "attributes array bool",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("arr_bool"),
},
},
orig: func() pcommon.Slice {
val, _ := refNumberDataPoint.Attributes().Get("arr_bool")
return val.SliceVal()
}(),
new: []bool{false},
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.Attributes().Upsert("arr_bool", newArrBool)
},
},
{
name: "attributes array int",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("arr_int"),
},
},
orig: func() pcommon.Slice {
val, _ := refNumberDataPoint.Attributes().Get("arr_int")
return val.SliceVal()
}(),
new: []int64{20},
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.Attributes().Upsert("arr_int", newArrInt)
},
},
{
name: "attributes array float",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("arr_float"),
},
},
orig: func() pcommon.Slice {
val, _ := refNumberDataPoint.Attributes().Get("arr_float")
return val.SliceVal()
}(),
new: []float64{2.0},
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.Attributes().Upsert("arr_float", newArrFloat)
},
},
{
name: "attributes array bytes",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("arr_bytes"),
},
},
orig: func() pcommon.Slice {
val, _ := refNumberDataPoint.Attributes().Get("arr_bytes")
return val.SliceVal()
}(),
new: [][]byte{{9, 6, 4}},
modified: func(datapoint pmetric.NumberDataPoint) {
datapoint.Attributes().Upsert("arr_bytes", newArrBytes)
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
accessor, err := newPathGetSetter(tt.path)
assert.NoError(t, err)
numberDataPoint := createNumberDataPointTelemetry(tt.valueType)
ctx := metricTransformContext{
dataPoint: numberDataPoint,
metric: pmetric.NewMetric(),
il: pcommon.NewInstrumentationScope(),
resource: pcommon.NewResource(),
}
// reading the path must yield the reference value
got := accessor.Get(ctx)
assert.Equal(t, tt.orig, got)
// writing via the accessor must equal applying tt.modified directly
accessor.Set(ctx, tt.new)
exNumberDataPoint := createNumberDataPointTelemetry(tt.valueType)
tt.modified(exNumberDataPoint)
assert.Equal(t, exNumberDataPoint, numberDataPoint)
})
}
}
// createNumberDataPointTelemetry returns a NumberDataPoint pre-populated with
// the reference values the path get/set tests compare against; valueType
// selects whether the point carries the double (1.1) or the int (1) value.
func createNumberDataPointTelemetry(valueType pmetric.NumberDataPointValueType) pmetric.NumberDataPoint {
	dp := pmetric.NewNumberDataPoint()
	dp.SetFlags(pmetric.NewMetricDataPointFlags())
	dp.SetStartTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(100)))
	dp.SetTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(500)))
	switch valueType {
	case pmetric.NumberDataPointValueTypeDouble:
		dp.SetDoubleVal(1.1)
	default:
		dp.SetIntVal(1)
	}
	createAttributeTelemetry(dp.Attributes())
	dp.Exemplars().AppendEmpty().SetIntVal(0)
	return dp
}
// Test_newPathGetSetter_HistogramDataPoint verifies that newPathGetSetter can
// read ("orig") and write ("new") every supported HistogramDataPoint path:
// timestamps, flags, count, sum, bucket_counts, explicit_bounds, exemplars
// and attributes of every attribute value type. Each case's "modified" func
// applies the same write to a fresh reference point for whole-point comparison.
func Test_newPathGetSetter_HistogramDataPoint(t *testing.T) {
refHistogramDataPoint := createHistogramDataPointTelemetry()
newExemplars, newAttrs, newArrStr, newArrBool, newArrInt, newArrFloat, newArrBytes := createNewTelemetry()
tests := []struct {
name string
path []common.Field
orig interface{}
new interface{}
modified func(pmetric.HistogramDataPoint)
}{
{
name: "start_time_unix_nano",
path: []common.Field{
{
Name: "start_time_unix_nano",
},
},
orig: int64(100_000_000),
new: int64(200_000_000),
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(200)))
},
},
{
name: "time_unix_nano",
path: []common.Field{
{
Name: "time_unix_nano",
},
},
orig: int64(500_000_000),
new: int64(200_000_000),
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(200)))
},
},
{
name: "flags",
path: []common.Field{
{
Name: "flags",
},
},
orig: pmetric.NewMetricDataPointFlags(),
new: pmetric.NewMetricDataPointFlags(pmetric.MetricDataPointFlagNoRecordedValue),
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.SetFlags(pmetric.NewMetricDataPointFlags(pmetric.MetricDataPointFlagNoRecordedValue))
},
},
{
name: "count",
path: []common.Field{
{
Name: "count",
},
},
orig: uint64(2),
new: uint64(3),
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.SetCount(3)
},
},
{
name: "sum",
path: []common.Field{
{
Name: "sum",
},
},
orig: 10.1,
new: 10.2,
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.SetSum(10.2)
},
},
{
name: "bucket_counts",
path: []common.Field{
{
Name: "bucket_counts",
},
},
orig: []uint64{1, 1},
new: []uint64{1, 2},
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.SetMBucketCounts([]uint64{1, 2})
},
},
{
name: "explicit_bounds",
path: []common.Field{
{
Name: "explicit_bounds",
},
},
orig: []float64{1, 2},
new: []float64{1, 2, 3},
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.SetMExplicitBounds([]float64{1, 2, 3})
},
},
{
name: "exemplars",
path: []common.Field{
{
Name: "exemplars",
},
},
orig: refHistogramDataPoint.Exemplars(),
new: newExemplars,
modified: func(datapoint pmetric.HistogramDataPoint) {
newExemplars.CopyTo(datapoint.Exemplars())
},
},
{
name: "attributes",
path: []common.Field{
{
Name: "attributes",
},
},
orig: refHistogramDataPoint.Attributes(),
new: newAttrs,
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.Attributes().Clear()
newAttrs.CopyTo(datapoint.Attributes())
},
},
{
name: "attributes string",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("str"),
},
},
orig: "val",
new: "newVal",
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.Attributes().UpsertString("str", "newVal")
},
},
{
name: "attributes bool",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("bool"),
},
},
orig: true,
new: false,
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.Attributes().UpsertBool("bool", false)
},
},
{
name: "attributes int",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("int"),
},
},
orig: int64(10),
new: int64(20),
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.Attributes().UpsertInt("int", 20)
},
},
{
name: "attributes float",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("double"),
},
},
orig: float64(1.2),
new: float64(2.4),
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.Attributes().UpsertDouble("double", 2.4)
},
},
{
name: "attributes bytes",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("bytes"),
},
},
orig: []byte{1, 3, 2},
new: []byte{2, 3, 4},
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.Attributes().UpsertMBytes("bytes", []byte{2, 3, 4})
},
},
{
name: "attributes array string",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("arr_str"),
},
},
orig: func() pcommon.Slice {
val, _ := refHistogramDataPoint.Attributes().Get("arr_str")
return val.SliceVal()
}(),
new: []string{"new"},
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.Attributes().Upsert("arr_str", newArrStr)
},
},
{
name: "attributes array bool",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("arr_bool"),
},
},
orig: func() pcommon.Slice {
val, _ := refHistogramDataPoint.Attributes().Get("arr_bool")
return val.SliceVal()
}(),
new: []bool{false},
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.Attributes().Upsert("arr_bool", newArrBool)
},
},
{
name: "attributes array int",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("arr_int"),
},
},
orig: func() pcommon.Slice {
val, _ := refHistogramDataPoint.Attributes().Get("arr_int")
return val.SliceVal()
}(),
new: []int64{20},
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.Attributes().Upsert("arr_int", newArrInt)
},
},
{
name: "attributes array float",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("arr_float"),
},
},
orig: func() pcommon.Slice {
val, _ := refHistogramDataPoint.Attributes().Get("arr_float")
return val.SliceVal()
}(),
new: []float64{2.0},
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.Attributes().Upsert("arr_float", newArrFloat)
},
},
{
name: "attributes array bytes",
path: []common.Field{
{
Name: "attributes",
MapKey: strp("arr_bytes"),
},
},
orig: func() pcommon.Slice {
val, _ := refHistogramDataPoint.Attributes().Get("arr_bytes")
return val.SliceVal()
}(),
new: [][]byte{{9, 6, 4}},
modified: func(datapoint pmetric.HistogramDataPoint) {
datapoint.Attributes().Upsert("arr_bytes", newArrBytes)
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
accessor, err := newPathGetSetter(tt.path)
assert.NoError(t, err)
numberDataPoint := createHistogramDataPointTelemetry()
ctx := metricTransformContext{
dataPoint: numberDataPoint,
metric: pmetric.NewMetric(),
il: pcommon.NewInstrumentationScope(),
resource: pcommon.NewResource(),
}
// reading the path must yield the reference value
got := accessor.Get(ctx)
assert.Equal(t, tt.orig, got)
// writing via the accessor must equal applying tt.modified directly
accessor.Set(ctx, tt.new)
exNumberDataPoint := createHistogramDataPointTelemetry()
tt.modified(exNumberDataPoint)
assert.Equal(t, exNumberDataPoint, numberDataPoint)
})
}
}
// createHistogramDataPointTelemetry returns a HistogramDataPoint pre-populated
// with the reference values the path get/set tests compare against.
func createHistogramDataPointTelemetry() pmetric.HistogramDataPoint {
	dp := pmetric.NewHistogramDataPoint()
	dp.SetFlags(pmetric.NewMetricDataPointFlags())
	dp.SetStartTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(100)))
	dp.SetTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(500)))
	dp.SetCount(2)
	dp.SetSum(10.1)
	dp.SetMBucketCounts([]uint64{1, 1})
	dp.SetMExplicitBounds([]float64{1, 2})
	createAttributeTelemetry(dp.Attributes())
	dp.Exemplars().AppendEmpty().SetIntVal(0)
	return dp
}
// Test_newPathGetSetter_ExpoHistogramDataPoint checks every settable path on an
// exponential histogram data point: Get must return the original value from the
// reference data point, and Set must leave the data point equal to one produced
// by applying the equivalent pdata mutation directly.
func Test_newPathGetSetter_ExpoHistogramDataPoint(t *testing.T) {
	refExpoHistogramDataPoint := createExpoHistogramDataPointTelemetry()
	// Replacement values shared across cases.
	newExemplars, newAttrs, newArrStr, newArrBool, newArrInt, newArrFloat, newArrBytes := createNewTelemetry()
	newPositive := pmetric.NewBuckets()
	newPositive.SetOffset(10)
	newPositive.SetMBucketCounts([]uint64{4, 5})
	// NOTE(review): newNegative is built but the "negative" case below reuses
	// newPositive as its replacement value — presumably fine since the bucket
	// contents are identical, but confirm it is intentional.
	newNegative := pmetric.NewBuckets()
	newNegative.SetOffset(10)
	newNegative.SetMBucketCounts([]uint64{4, 5})
	tests := []struct {
		name     string
		path     []common.Field
		orig     interface{}
		new      interface{}
		modified func(pmetric.ExponentialHistogramDataPoint)
	}{
		{
			name: "start_time_unix_nano",
			path: []common.Field{
				{
					Name: "start_time_unix_nano",
				},
			},
			orig: int64(100_000_000),
			new:  int64(200_000_000),
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(200)))
			},
		},
		{
			name: "time_unix_nano",
			path: []common.Field{
				{
					Name: "time_unix_nano",
				},
			},
			orig: int64(500_000_000),
			new:  int64(200_000_000),
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(200)))
			},
		},
		{
			name: "flags",
			path: []common.Field{
				{
					Name: "flags",
				},
			},
			orig: pmetric.NewMetricDataPointFlags(),
			new:  pmetric.NewMetricDataPointFlags(pmetric.MetricDataPointFlagNoRecordedValue),
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.SetFlags(pmetric.NewMetricDataPointFlags(pmetric.MetricDataPointFlagNoRecordedValue))
			},
		},
		{
			name: "count",
			path: []common.Field{
				{
					Name: "count",
				},
			},
			orig: uint64(2),
			new:  uint64(3),
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.SetCount(3)
			},
		},
		{
			name: "sum",
			path: []common.Field{
				{
					Name: "sum",
				},
			},
			orig: 10.1,
			new:  10.2,
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.SetSum(10.2)
			},
		},
		{
			name: "scale",
			path: []common.Field{
				{
					Name: "scale",
				},
			},
			orig: int32(1),
			new:  int32(2),
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.SetScale(2)
			},
		},
		{
			name: "zero_count",
			path: []common.Field{
				{
					Name: "zero_count",
				},
			},
			orig: uint64(1),
			new:  uint64(2),
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.SetZeroCount(2)
			},
		},
		{
			name: "positive",
			path: []common.Field{
				{
					Name: "positive",
				},
			},
			orig: refExpoHistogramDataPoint.Positive(),
			new:  newPositive,
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				newPositive.CopyTo(datapoint.Positive())
			},
		},
		{
			name: "positive offset",
			path: []common.Field{
				{
					Name: "positive",
				},
				{
					Name: "offset",
				},
			},
			orig: int32(1),
			new:  int32(2),
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Positive().SetOffset(2)
			},
		},
		{
			name: "positive bucket_counts",
			path: []common.Field{
				{
					Name: "positive",
				},
				{
					Name: "bucket_counts",
				},
			},
			orig: []uint64{1, 1},
			new:  []uint64{0, 1, 2},
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Positive().SetMBucketCounts([]uint64{0, 1, 2})
			},
		},
		{
			// NOTE(review): replacement value is newPositive (see note above).
			name: "negative",
			path: []common.Field{
				{
					Name: "negative",
				},
			},
			orig: refExpoHistogramDataPoint.Negative(),
			new:  newPositive,
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				newPositive.CopyTo(datapoint.Negative())
			},
		},
		{
			name: "negative offset",
			path: []common.Field{
				{
					Name: "negative",
				},
				{
					Name: "offset",
				},
			},
			orig: int32(1),
			new:  int32(2),
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Negative().SetOffset(2)
			},
		},
		{
			name: "negative bucket_counts",
			path: []common.Field{
				{
					Name: "negative",
				},
				{
					Name: "bucket_counts",
				},
			},
			orig: []uint64{1, 1},
			new:  []uint64{0, 1, 2},
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Negative().SetMBucketCounts([]uint64{0, 1, 2})
			},
		},
		{
			name: "exemplars",
			path: []common.Field{
				{
					Name: "exemplars",
				},
			},
			orig: refExpoHistogramDataPoint.Exemplars(),
			new:  newExemplars,
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				newExemplars.CopyTo(datapoint.Exemplars())
			},
		},
		{
			name: "attributes",
			path: []common.Field{
				{
					Name: "attributes",
				},
			},
			orig: refExpoHistogramDataPoint.Attributes(),
			new:  newAttrs,
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Attributes().Clear()
				newAttrs.CopyTo(datapoint.Attributes())
			},
		},
		{
			name: "attributes string",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("str"),
				},
			},
			orig: "val",
			new:  "newVal",
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Attributes().UpsertString("str", "newVal")
			},
		},
		{
			name: "attributes bool",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("bool"),
				},
			},
			orig: true,
			new:  false,
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Attributes().UpsertBool("bool", false)
			},
		},
		{
			name: "attributes int",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("int"),
				},
			},
			orig: int64(10),
			new:  int64(20),
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Attributes().UpsertInt("int", 20)
			},
		},
		{
			name: "attributes float",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("double"),
				},
			},
			orig: 1.2,
			new:  2.4,
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Attributes().UpsertDouble("double", 2.4)
			},
		},
		{
			name: "attributes bytes",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("bytes"),
				},
			},
			orig: []byte{1, 3, 2},
			new:  []byte{2, 3, 4},
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Attributes().UpsertMBytes("bytes", []byte{2, 3, 4})
			},
		},
		{
			name: "attributes array string",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("arr_str"),
				},
			},
			orig: func() pcommon.Slice {
				val, _ := refExpoHistogramDataPoint.Attributes().Get("arr_str")
				return val.SliceVal()
			}(),
			new: []string{"new"},
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Attributes().Upsert("arr_str", newArrStr)
			},
		},
		{
			name: "attributes array bool",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("arr_bool"),
				},
			},
			orig: func() pcommon.Slice {
				val, _ := refExpoHistogramDataPoint.Attributes().Get("arr_bool")
				return val.SliceVal()
			}(),
			new: []bool{false},
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Attributes().Upsert("arr_bool", newArrBool)
			},
		},
		{
			name: "attributes array int",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("arr_int"),
				},
			},
			orig: func() pcommon.Slice {
				val, _ := refExpoHistogramDataPoint.Attributes().Get("arr_int")
				return val.SliceVal()
			}(),
			new: []int64{20},
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Attributes().Upsert("arr_int", newArrInt)
			},
		},
		{
			name: "attributes array float",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("arr_float"),
				},
			},
			orig: func() pcommon.Slice {
				val, _ := refExpoHistogramDataPoint.Attributes().Get("arr_float")
				return val.SliceVal()
			}(),
			new: []float64{2.0},
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Attributes().Upsert("arr_float", newArrFloat)
			},
		},
		{
			name: "attributes array bytes",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("arr_bytes"),
				},
			},
			orig: func() pcommon.Slice {
				val, _ := refExpoHistogramDataPoint.Attributes().Get("arr_bytes")
				return val.SliceVal()
			}(),
			new: [][]byte{{9, 6, 4}},
			modified: func(datapoint pmetric.ExponentialHistogramDataPoint) {
				datapoint.Attributes().Upsert("arr_bytes", newArrBytes)
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			accessor, err := newPathGetSetter(tt.path)
			assert.NoError(t, err)
			// NOTE(review): misnamed — this is an exponential histogram data
			// point, not a number data point.
			numberDataPoint := createExpoHistogramDataPointTelemetry()
			ctx := metricTransformContext{
				dataPoint: numberDataPoint,
				metric:    pmetric.NewMetric(),
				il:        pcommon.NewInstrumentationScope(),
				resource:  pcommon.NewResource(),
			}
			got := accessor.Get(ctx)
			assert.Equal(t, tt.orig, got)
			accessor.Set(ctx, tt.new)
			// Expected: a fresh reference data point with the equivalent direct
			// mutation applied.
			exNumberDataPoint := createExpoHistogramDataPointTelemetry()
			tt.modified(exNumberDataPoint)
			assert.Equal(t, exNumberDataPoint, numberDataPoint)
		})
	}
}
// createExpoHistogramDataPointTelemetry builds the reference exponential
// histogram data point (timestamps, count/sum, scale, zero count, positive and
// negative buckets, shared attributes, one exemplar) used by the path tests.
func createExpoHistogramDataPointTelemetry() pmetric.ExponentialHistogramDataPoint {
	dp := pmetric.NewExponentialHistogramDataPoint()
	dp.SetFlags(pmetric.NewMetricDataPointFlags())
	dp.SetStartTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(100)))
	dp.SetTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(500)))
	dp.SetCount(2)
	dp.SetSum(10.1)
	dp.SetScale(1)
	dp.SetZeroCount(1)
	dp.Positive().SetMBucketCounts([]uint64{1, 1})
	dp.Positive().SetOffset(1)
	dp.Negative().SetMBucketCounts([]uint64{1, 1})
	dp.Negative().SetOffset(1)
	createAttributeTelemetry(dp.Attributes())
	dp.Exemplars().AppendEmpty().SetIntVal(0)
	return dp
}
// Test_newPathGetSetter_SummaryDataPoint checks every settable path on a
// summary data point: Get must return the original value from the reference
// data point, and Set must leave the data point equal to one produced by
// applying the equivalent pdata mutation directly.
//
// FIX: renamed copy-pasted locals (refExpoHistogramDataPoint, numberDataPoint,
// exNumberDataPoint) to summary-specific names; behavior is unchanged.
func Test_newPathGetSetter_SummaryDataPoint(t *testing.T) {
	refSummaryDataPoint := createSummaryDataPointTelemetry()
	// Replacement values shared across cases (exemplars unused for summaries).
	_, newAttrs, newArrStr, newArrBool, newArrInt, newArrFloat, newArrBytes := createNewTelemetry()
	newQuartileValues := pmetric.NewValueAtQuantileSlice()
	newQuartileValues.AppendEmpty().SetValue(100)
	tests := []struct {
		name     string
		path     []common.Field
		orig     interface{}
		new      interface{}
		modified func(pmetric.SummaryDataPoint)
	}{
		{
			name: "start_time_unix_nano",
			path: []common.Field{
				{
					Name: "start_time_unix_nano",
				},
			},
			orig: int64(100_000_000),
			new:  int64(200_000_000),
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(200)))
			},
		},
		{
			name: "time_unix_nano",
			path: []common.Field{
				{
					Name: "time_unix_nano",
				},
			},
			orig: int64(500_000_000),
			new:  int64(200_000_000),
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(200)))
			},
		},
		{
			name: "flags",
			path: []common.Field{
				{
					Name: "flags",
				},
			},
			orig: pmetric.NewMetricDataPointFlags(),
			new:  pmetric.NewMetricDataPointFlags(pmetric.MetricDataPointFlagNoRecordedValue),
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.SetFlags(pmetric.NewMetricDataPointFlags(pmetric.MetricDataPointFlagNoRecordedValue))
			},
		},
		{
			name: "count",
			path: []common.Field{
				{
					Name: "count",
				},
			},
			orig: uint64(2),
			new:  uint64(3),
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.SetCount(3)
			},
		},
		{
			name: "sum",
			path: []common.Field{
				{
					Name: "sum",
				},
			},
			orig: 10.1,
			new:  10.2,
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.SetSum(10.2)
			},
		},
		{
			name: "quantile_values",
			path: []common.Field{
				{
					Name: "quantile_values",
				},
			},
			orig: refSummaryDataPoint.QuantileValues(),
			new:  newQuartileValues,
			modified: func(datapoint pmetric.SummaryDataPoint) {
				newQuartileValues.CopyTo(datapoint.QuantileValues())
			},
		},
		{
			name: "attributes",
			path: []common.Field{
				{
					Name: "attributes",
				},
			},
			orig: refSummaryDataPoint.Attributes(),
			new:  newAttrs,
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.Attributes().Clear()
				newAttrs.CopyTo(datapoint.Attributes())
			},
		},
		{
			name: "attributes string",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("str"),
				},
			},
			orig: "val",
			new:  "newVal",
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.Attributes().UpsertString("str", "newVal")
			},
		},
		{
			name: "attributes bool",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("bool"),
				},
			},
			orig: true,
			new:  false,
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.Attributes().UpsertBool("bool", false)
			},
		},
		{
			name: "attributes int",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("int"),
				},
			},
			orig: int64(10),
			new:  int64(20),
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.Attributes().UpsertInt("int", 20)
			},
		},
		{
			name: "attributes float",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("double"),
				},
			},
			orig: 1.2,
			new:  2.4,
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.Attributes().UpsertDouble("double", 2.4)
			},
		},
		{
			name: "attributes bytes",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("bytes"),
				},
			},
			orig: []byte{1, 3, 2},
			new:  []byte{2, 3, 4},
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.Attributes().UpsertMBytes("bytes", []byte{2, 3, 4})
			},
		},
		{
			name: "attributes array string",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("arr_str"),
				},
			},
			orig: func() pcommon.Slice {
				val, _ := refSummaryDataPoint.Attributes().Get("arr_str")
				return val.SliceVal()
			}(),
			new: []string{"new"},
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.Attributes().Upsert("arr_str", newArrStr)
			},
		},
		{
			name: "attributes array bool",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("arr_bool"),
				},
			},
			orig: func() pcommon.Slice {
				val, _ := refSummaryDataPoint.Attributes().Get("arr_bool")
				return val.SliceVal()
			}(),
			new: []bool{false},
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.Attributes().Upsert("arr_bool", newArrBool)
			},
		},
		{
			name: "attributes array int",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("arr_int"),
				},
			},
			orig: func() pcommon.Slice {
				val, _ := refSummaryDataPoint.Attributes().Get("arr_int")
				return val.SliceVal()
			}(),
			new: []int64{20},
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.Attributes().Upsert("arr_int", newArrInt)
			},
		},
		{
			name: "attributes array float",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("arr_float"),
				},
			},
			orig: func() pcommon.Slice {
				val, _ := refSummaryDataPoint.Attributes().Get("arr_float")
				return val.SliceVal()
			}(),
			new: []float64{2.0},
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.Attributes().Upsert("arr_float", newArrFloat)
			},
		},
		{
			name: "attributes array bytes",
			path: []common.Field{
				{
					Name:   "attributes",
					MapKey: strp("arr_bytes"),
				},
			},
			orig: func() pcommon.Slice {
				val, _ := refSummaryDataPoint.Attributes().Get("arr_bytes")
				return val.SliceVal()
			}(),
			new: [][]byte{{9, 6, 4}},
			modified: func(datapoint pmetric.SummaryDataPoint) {
				datapoint.Attributes().Upsert("arr_bytes", newArrBytes)
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			accessor, err := newPathGetSetter(tt.path)
			assert.NoError(t, err)
			summaryDataPoint := createSummaryDataPointTelemetry()
			ctx := metricTransformContext{
				dataPoint: summaryDataPoint,
				metric:    pmetric.NewMetric(),
				il:        pcommon.NewInstrumentationScope(),
				resource:  pcommon.NewResource(),
			}
			got := accessor.Get(ctx)
			assert.Equal(t, tt.orig, got)
			accessor.Set(ctx, tt.new)
			// Expected: a fresh reference data point with the equivalent direct
			// mutation applied.
			exSummaryDataPoint := createSummaryDataPointTelemetry()
			tt.modified(exSummaryDataPoint)
			assert.Equal(t, exSummaryDataPoint, summaryDataPoint)
		})
	}
}
// createSummaryDataPointTelemetry builds the reference summary data point
// (timestamps, count/sum, one quantile value, and the shared attribute set)
// used by the path get/set tests.
func createSummaryDataPointTelemetry() pmetric.SummaryDataPoint {
	dp := pmetric.NewSummaryDataPoint()
	dp.SetFlags(pmetric.NewMetricDataPointFlags())
	dp.SetStartTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(100)))
	dp.SetTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(500)))
	dp.SetCount(2)
	dp.SetSum(10.1)
	dp.QuantileValues().AppendEmpty().SetValue(1)
	createAttributeTelemetry(dp.Attributes())
	return dp
}
// createAttributeTelemetry populates attributes with one entry of every value
// type the path tests exercise: the five scalar kinds followed by the five
// slice kinds. Insertion order is kept stable because pcommon.Map equality in
// the tests is order-sensitive.
func createAttributeTelemetry(attributes pcommon.Map) {
	attributes.UpsertString("str", "val")
	attributes.UpsertBool("bool", true)
	attributes.UpsertInt("int", 10)
	attributes.UpsertDouble("double", 1.2)
	attributes.UpsertMBytes("bytes", []byte{1, 3, 2})
	strSlice := pcommon.NewValueSlice()
	strSlice.SliceVal().AppendEmpty().SetStringVal("one")
	strSlice.SliceVal().AppendEmpty().SetStringVal("two")
	attributes.Upsert("arr_str", strSlice)
	boolSlice := pcommon.NewValueSlice()
	boolSlice.SliceVal().AppendEmpty().SetBoolVal(true)
	boolSlice.SliceVal().AppendEmpty().SetBoolVal(false)
	attributes.Upsert("arr_bool", boolSlice)
	intSlice := pcommon.NewValueSlice()
	intSlice.SliceVal().AppendEmpty().SetIntVal(2)
	intSlice.SliceVal().AppendEmpty().SetIntVal(3)
	attributes.Upsert("arr_int", intSlice)
	floatSlice := pcommon.NewValueSlice()
	floatSlice.SliceVal().AppendEmpty().SetDoubleVal(1.0)
	floatSlice.SliceVal().AppendEmpty().SetDoubleVal(2.0)
	attributes.Upsert("arr_float", floatSlice)
	bytesSlice := pcommon.NewValueSlice()
	bytesSlice.SliceVal().AppendEmpty().SetMBytesVal([]byte{1, 2, 3})
	bytesSlice.SliceVal().AppendEmpty().SetMBytesVal([]byte{2, 3, 4})
	attributes.Upsert("arr_bytes", bytesSlice)
}
func Test_newPathGetSetter_Metric(t *testing.T) {
refMetric := createMetricTelemetry()
newMetric := pmetric.NewMetric()
newMetric.SetName("new name")
tests := []struct {
name string
path []common.Field
orig interface{}
new interface{}
modified func(metric pmetric.Metric)
}{
{
name: "metric",
path: []common.Field{
{
Name: "metric",
},
},
orig: refMetric,
new: newMetric,
modified: func(metric pmetric.Metric) {
newMetric.CopyTo(metric)
},
},
{
name: "metric name",
path: []common.Field{
{
Name: "metric",
},
{
Name: "name",
},
},
orig: "name",
new: "new name",
modified: func(metric pmetric.Metric) {
metric.SetName("new name")
},
},
{
name: "metric description",
path: []common.Field{
{
Name: "metric",
},
{
Name: "description",
},
},
orig: "description",
new: "new description",
modified: func(metric pmetric.Metric) {
metric.SetDescription("new description")
},
},
{
name: "metric unit",
path: []common.Field{
{
Name: "metric",
},
{
Name: "unit",
},
},
orig: "unit",
new: "new unit",
modified: func(metric pmetric.Metric) {
metric.SetUnit("new unit")
},
},
{
name: "metric type",
path: []common.Field{
{
Name: "metric",
},
{
Name: "type",
},
},
orig: "Sum",
new: "Sum",
modified: func(metric pmetric.Metric) {
},
},
{
name: "metric aggregation_temporality",
path: []common.Field{
{
Name: "metric",
},
{
Name: "aggregation_temporality",
}, | modified: func(metric pmetric.Metric) {
metric.Sum().SetAggregationTemporality(pmetric.MetricAggregationTemporalityDelta)
},
},
{
name: "metric is_monotonic",
path: []common.Field{
{
Name: "metric",
},
{
Name: "is_monotonic",
},
},
orig: "true",
new: "false",
modified: func(metric pmetric.Metric) {
metric.Sum().SetIsMonotonic(false)
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
accessor, err := newPathGetSetter(tt.path)
assert.NoError(t, err)
metric := createMetricTelemetry()
ctx := metricTransformContext{
dataPoint: pmetric.NewNumberDataPoint(),
metric: metric,
il: pcommon.NewInstrumentationScope(),
resource: pcommon.NewResource(),
}
got := accessor.Get(ctx)
assert.Equal(t, tt.orig, got)
accessor.Set(ctx, tt.new)
exMetric := createMetricTelemetry()
tt.modified(exMetric)
assert.Equal(t, exMetric, metric)
})
}
}
// createMetricTelemetry builds the reference Sum metric (cumulative,
// monotonic) used by the metric path get/set tests.
func createMetricTelemetry() pmetric.Metric {
	m := pmetric.NewMetric()
	m.SetName("name")
	m.SetDescription("description")
	m.SetUnit("unit")
	m.SetDataType(pmetric.MetricDataTypeSum)
	m.Sum().SetAggregationTemporality(pmetric.MetricAggregationTemporalityCumulative)
	m.Sum().SetIsMonotonic(true)
	return m
}
// createNewTelemetry returns the replacement values the path tests write into
// data points: an exemplar slice, an attribute map, and one slice value per
// element type (string, bool, int, double, bytes) — in that order.
func createNewTelemetry() (pmetric.ExemplarSlice, pcommon.Map, pcommon.Value, pcommon.Value, pcommon.Value, pcommon.Value, pcommon.Value) {
	exemplars := pmetric.NewExemplarSlice()
	exemplars.AppendEmpty().SetIntVal(4)
	attrs := pcommon.NewMap()
	attrs.UpsertString("hello", "world")
	strSlice := pcommon.NewValueSlice()
	strSlice.SliceVal().AppendEmpty().SetStringVal("new")
	boolSlice := pcommon.NewValueSlice()
	boolSlice.SliceVal().AppendEmpty().SetBoolVal(false)
	intSlice := pcommon.NewValueSlice()
	intSlice.SliceVal().AppendEmpty().SetIntVal(20)
	floatSlice := pcommon.NewValueSlice()
	floatSlice.SliceVal().AppendEmpty().SetDoubleVal(2.0)
	bytesSlice := pcommon.NewValueSlice()
	bytesSlice.SliceVal().AppendEmpty().SetMBytesVal([]byte{9, 6, 4})
	return exemplars, attrs, strSlice, boolSlice, intSlice, floatSlice, bytesSlice
}
// strp returns a pointer to a copy of s (handy for optional string fields in
// test literals).
func strp(s string) *string {
	v := s
	return &v
}
// intp returns a pointer to a copy of i (handy for optional int64 fields in
// test literals).
func intp(i int64) *int64 {
	return &i
}

// NOTE(review): the stray fragment that trailed this function —
//   "},", "orig: int64(2),", "new: int64(1),"
// — is a displaced piece of Test_newPathGetSetter_Metric's
// "metric aggregation_temporality" table entry (extraction artifact), not code
// that belongs here.
// main.rs
// import commonly used items from the prelude:
use rand::prelude::*;
fn main() | {
// We can use random() immediately. It can produce values of many common types:
let x: u8 = random();
println!("{}", x);
if random() {
// generates a boolean
println!("Heads!");
}
// If we want to be a bit more explicit (and a little more efficient) we can
// make a handle to the thread-local generator:
let mut rng = thread_rng();
if rng.gen() {
// random bool
let x: f64 = rng.gen(); // random number in range [0, 1)
let y = rng.gen_range(-10.0..10.0);
println!("x is: {}", x);
println!("y is: {}", y);
}
println!("Die roll: {}", rng.gen_range(1..=6));
println!("Number from 0 to 9: {}", rng.gen_range(0..10));
// Sometimes it's useful to use distributions directly:
let distr = rand::distributions::Uniform::new_inclusive(1, 100);
let mut nums = [0i32; 3];
for x in &mut nums {
*x = rng.sample(distr);
}
println!("Some numbers: {:?}", nums);
// We can also interact with iterators and slices:
let arrows_iter = "➡⬈⬆⬉⬅⬋⬇⬊".chars();
println!(
"Lets go in this direction: {}",
arrows_iter.choose(&mut rng).unwrap()
);
let mut nums = [1, 2, 3, 4, 5];
nums.shuffle(&mut rng);
println!("I shuffled my {:?}", nums);
}
|
|
storage.go | package postgres
import (
"context"
"github.com/go-pg/pg/v10"
"github.com/spf13/viper"
)
func InitPostgres() *pg.DB {
db := pg.Connect(&pg.Options{
Addr: viper.GetString("db.host") + viper.GetString("db.port"),
User: viper.GetString("db.user"),
Password: viper.GetString("db.pass"),
Database: viper.GetString("db.name"),
})
ctx := context.Background()
if err := db.Ping(ctx); err != nil |
return db
}
| {
panic(err)
} |
write_ttl.py | #!/usr/bin/env python3
"""
This program contains generic functions to build a Turtle (Terse RDF Triple Language) document.
Authors:
- Arno Klein, 2017-2020 ([email protected]) http://binarybottle.com
- Jon Clucas, 2017–2018 ([email protected])
Copyright 2020, Child Mind Institute (http://childmind.org), Apache v2.0 License
"""
import os
import sys
# Make the project root importable when this file is run as a script.
# NOTE(review): joining os.pardir onto __file__ itself (".../write_ttl.py/../..")
# resolves, after abspath collapses the "..", to the PARENT of the directory
# containing this file — confirm that is the intended root directory.
top_dir = os.path.abspath(os.path.join(
    (__file__),
    os.pardir,
    os.pardir
))
if top_dir not in sys.path:
    sys.path.append(top_dir)
import numpy as np
def language_string(s, lang="en"):
"""
Function to encode a literal as being in a specific language.
Parameters
----------
s : string
lang : string
ISO character code, default="en"
Returns
-------
s : string
triple quoted Turtle literal with language encoding
Example
-------
>>> print(language_string("Canada goose"))
\"""Canada goose\"""@en
"""
return(
"\"\"\"{0}\"\"\"@{1}".format(
return_string(
s,
[
'"'
],
[
"'"
]
),
lang
)
)
def return_string(input_string, replace=(), replace_with=()):
    """
    Return a stripped string with optional character replacements.

    Newlines are flattened to spaces and double quotes are backslash-escaped
    before the optional replacements are applied. Falsy input (None, "", 0)
    yields "".

    Parameters
    ----------
    input_string : string
        arbitrary string (non-strings are converted with str())
    replace : sequence of strings
        strings to substitute
    replace_with : sequence of strings
        strings with which to substitute 'replace' strings

    Returns
    -------
    output_string : string
        stripped input_string

    Raises
    ------
    Exception
        if replace and replace_with have different lengths
    """
    # FIX: defaults changed from mutable lists ([]) to tuples; they were never
    # mutated, so behavior is unchanged, but tuples avoid the shared-mutable-
    # default pitfall.
    if not input_string:
        return ""
    if not isinstance(input_string, str):
        input_string = str(input_string)
    output_string = input_string.replace(
        "\n",
        " "
    ).replace(
        "\"",
        "\\\""
    ).strip()
    if replace:
        if len(replace) != len(replace_with):
            raise Exception("replace and replace_with should be the same length.")
        for old, new in zip(replace, replace_with):
            output_string = output_string.replace(old, new)
    return output_string
def create_label(input_string):
    """
    Clean up a string and create a corresponding (shortened) label.

    Parameters
    ----------
    input_string : string
        arbitrary string

    Returns
    -------
    output_string : string
        stripped input_string
    label_string : string
        alphanumeric characters of input_string

    Raises
    ------
    Exception
        if input_string is falsy or not a string
    """
    # NOTE(review): these imports shadow the return_string and
    # convert_string_to_label defined in THIS module — confirm the
    # mhdb.spreadsheet_io versions are the intended implementations.
    from mhdb.spreadsheet_io import return_string
    from mhdb.spreadsheet_io import convert_string_to_label
    if input_string:
        if isinstance(input_string, str):
            # Drop double quotes and newlines before deriving the label.
            output_string = return_string(input_string,
                                          replace=['"', '\n'],
                                          replace_with=['', ''])
            if output_string:
                label_string = convert_string_to_label(output_string)
                return output_string, label_string
            else:
                # Nothing left after cleaning: return an empty pair.
                return '', ''
        else:
            raise Exception('input_string is not a string!')
    else:
        raise Exception('input_string is None!')
def convert_string_to_label(input_string, label_type='delimited'):
    """
    Remove all non-alphanumeric characters from a string.

    Parameters
    ----------
    input_string : string
        input string
    label_type: string
        'PascalCase', 'camelCase', or 'delimited'
        ('delimited' uses '_' delimiters and keeps hyphens)

    Returns
    -------
    output_string : string
        output string (only alphanumerics, '-' and '_')

    Raises
    ------
    ValueError
        if label_type is not one of the three supported values
    Exception
        if input_string is falsy
    """
    def toPascal(s):
        """
        Usage: toPascal("WRITE this in pascalcase")
        'WriteThisInPascalcase'
        """
        return ''.join(x for x in s.title() if not x.isspace())

    def toCamel(s):
        """
        Usage: toCamel("WRITE this in camelcase")
        'writeThisInCamelcase'
        (from: https://stackoverflow.com/questions/8347048/
        how-to-convert-string-to-title-case-in-python)
        """
        ret = s.split(' ')
        return ret[0].lower() + \
            ''.join(x.title() for x in ret[1:] if not x.isspace())

    def toDelimit(s):
        """
        Usage: toDelimit("WRITE this-in delimited")
        'WRITE_this-in_delimited'
        """
        while " " in s:
            s = s.replace(" ", "_")
        while "__" in s:
            s = s.replace("__", "_")
        s = s.replace("_-_", "-")
        while "--" in s:
            s = s.replace("--", "-")
        return s

    if not input_string:
        raise Exception('"{0}" is not a string!'.format(input_string))
    if label_type == 'PascalCase':
        output_string = toPascal(input_string)
    elif label_type == 'camelCase':
        output_string = toCamel(input_string)
    elif label_type == 'delimited':
        output_string = toDelimit(input_string)
    else:
        # BUG FIX: the original built Exception('label_type input is incorrect')
        # without raising it, which then crashed with a NameError on
        # output_string below; raise explicitly instead.
        raise ValueError('label_type input is incorrect')
    keep_chars = ('-', '_')
    output_string = "".join(c for c in str(output_string) if c.isalnum()
                            or c in keep_chars).rstrip()
    return output_string
def check_iri(iri, label_type='delimited'):
    """
    Format an IRI by type, such as <iri> or prefix:iri.

    Parameters
    ----------
    iri: string
    label_type: string
        'PascalCase', 'camelCase', or 'delimited'
        ('delimited' uses '_' delimiters and keeps hyphens)

    Returns
    -------
    iri: string
    """
    iri = str(iri).strip()
    has_colon = ":" in iri
    has_whitespace = any(ch.isspace() for ch in iri)
    if not has_colon or has_whitespace:
        # No prefix (or embedded whitespace): treat as a local name.
        return ":" + convert_string_to_label(iri, label_type)
    if iri.endswith(":"):
        # Drop the dangling colon and re-check.
        return check_iri(iri[:-1], label_type)
    if ":/" in iri and not (iri.startswith('<') or iri.endswith('>')):
        # Absolute IRI without angle brackets: wrap it.
        return "<{0}>".format(convert_string_to_label(iri, label_type))
    return iri
def turtle_from_dict(ttl_dict):
    """
    Convert a dictionary to a Terse Triple Language string.

    FIX: the body had been split by an extraction artifact (the statement-
    formatting expression was displaced to the end of the file); it is
    reassembled here.

    Parameters
    ----------
    ttl_dict: dictionary
        key: string
            RDF subject
        value: dictionary
            key: string
                RDF predicate
            value: {string}
                set of RDF objects

    Returns
    -------
    ttl_string: str
        ttl

    Example
    -------
    >>> turtle_from_dict({
    ...     "duck": {
    ...         "continues": {
    ...             "sitting"
    ...         }
    ...     },
    ...     "goose": {
    ...         "begins": {
    ...             "chasing"
    ...         }
    ...     }
    ... })
    'duck continues sitting .\\n\\ngoose begins chasing .'
    """
    # NOTE(review): this sentinel list of null-ish values was present but unused
    # in the recovered source; kept for fidelity — confirm whether objects equal
    # to these values were meant to be filtered out.
    x = [
        ":None",
        ":nan",
        "nan",
        np.nan,
        None
    ]
    # One "subject predicate object ." statement per subject; the subject's
    # (predicate, object) pairs are joined with " ;\n\t", subjects with blank
    # lines.
    return (
        "\n\n".join([
            "{0} {1} .".format(
                subject,
                " ;\n\t".join([
                    "{0} {1}".format(
                        predicate,
                        object
                    ) for predicate in ttl_dict[
                        subject
                    ] for object in ttl_dict[
                        subject
                    ][
                        predicate
                    ]
                ])
            ) for subject in ttl_dict
        ])
    )
def write_about_statement(subject, predicate, object, predicates):
    """
    Write one or more rdf statements in terse triple format.

    Parameters
    ----------
    subject: string
        subject of this statement
    predicate: string
        predicate of this statement
    object: string
        object of this statement
    predicates: iterable of 2-tuples
        predicate: string
            nth property
        object: string
            nth object

    Returns
    -------
    ttl_string: string
        Turtle string

    Example
    -------
    >>> statement = {"duck": {"continues": {"sitting"}}}
    >>> predicates = {
    ...     ("source", '"Duck Duck Goose"'),
    ...     ("statementType", "role")
    ... }
    >>> for subject in statement:
    ...     for predicate in statement[subject]:
    ...         for object in statement[subject][predicate]:
    ...             print(len(write_about_statement(
    ...                 subject, predicate, object, predicates
    ...             )))
    168
    """
    # Blank node labeled after the reified triple's own terms.
    node = "_:{0}".format(create_label("_".join([
        subject,
        predicate,
        object
    ])))
    # Standard RDF reification core, followed by the caller's annotations.
    statements = [
        ("rdf:type", "rdf:Statement"),
        ("rdf:subject", subject),
        ("rdf:predicate", predicate),
        ("rdf:object", object),
    ]
    statements.extend(predicates)
    return write_ttl(node, statements)
def write_header(base_uri, base_prefix, version, label, comment, prefixes):
    """
    Print out the beginning of an RDF text file.

    Parameters
    ----------
    base_uri : string
        base URI
    base_prefix : string
        base prefix
    version : string
        version
    label : string
        label
    comment : string
        comment
    prefixes : list
        list of 2-or-3-tuples of TTL prefix strings and prefix IRIs
        each tuple is
            [0] a prefix string
            [1] an iri string
            [2] an optional import URL
        eg, ("owl", "http://www.w3.org/2002/07/owl#")

    Returns
    -------
    header : string
        owl header
    """
    # PREFIX declarations first, then the ontology description block.
    prefix_block = write_header_prefixes(base_uri, base_prefix, prefixes)
    return """{4}<{0}> a owl:Ontology ;
    owl:versionIRI <{0}/{1}> ;
    owl:versionInfo "{1}"^^rdfs:Literal ;
    rdfs:label "{2}"^^rdfs:Literal ;
    rdfs:comment \"\"\"{3}\"\"\"@en .
""".format(base_uri, version, label, comment, prefix_block)
def write_header_prefixes(base_uri, base_prefix, prefixes):
    """
    Write turtle-formatted header prefix string for list of (prefix, iri) tuples.

    Parameters
    ----------
    base_uri : string
        base URI
    base_prefix : string
        base prefix (currently unused; kept for interface compatibility)
    prefixes: list of 2 or 3-tuples
        each tuple is
            [0] a prefix string
            [1] an iri string
            [2] an optional import URL

    Returns
    -------
    header_prefix: string
    """
    # One PREFIX line per declared prefix, then the empty (base) prefix line.
    declared = "".join(
        "PREFIX {0}: <{1}> \n".format(prefix[0], prefix[1])
        for prefix in prefixes
    )
    return "{0}\nPREFIX : <{1}#> \n".format(declared, base_uri)
def write_ttl(subject, predicates, common_statements=None):
    """
    Write one or more rdf statements in terse triple format.

    Parameters
    ----------
    subject: string
        subject of all triples in these statements
    predicates: iterable of 2-tuples
        statements about subject
        predicate: string
            nth property
        object: string
            nth object
    common_statements: iterable of 2-tuples, optional
        statements about all previous statements
        predicate: string
            nth property
        object: string
            nth object

    Returns
    -------
    ttl_string: string
        Turtle string
    """
    ttl_string = ""
    if common_statements:
        # Reify: one rdf:Statement per (predicate, object), each annotated
        # with the shared common_statements.
        ttl_string = "\n\n".join([
            write_about_statement(
                subject,
                predicate[0],
                predicate[1],
                common_statements
            ) for predicate in predicates
        ])
    ttl_string = "{0}\n\n".format(ttl_string) if len(ttl_string) else ""
    # The plain statements: "subject p1 o1 ;\n\tp2 o2 ... ."
    ttl_string = "".join([
        ttl_string,
        "{0} {1} .".format(
            subject,
            " ;\n\t".join([
                " ".join([
                    predicate[0],
                    predicate[1]
                ]) for predicate in predicates
            ])
        )
    ])
    return(ttl_string)

# NOTE(review): the code fragment that trailed the return statement here
# ('"{0} {1} .".format(subject, " ;\n\t".join(["{0} {1}".format(') was a
# displaced piece of turtle_from_dict (extraction artifact), not part of
# write_ttl.
consumer.go | /*
Copyright IBM Corp. 2016 All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package consumer
import (
"errors"
"fmt"
"io"
"sync"
"time"
"golang.org/x/net/context"
"google.golang.org/grpc"
"github.com/hyperledger/fabric/common/flogging"
"github.com/hyperledger/fabric/core/comm"
mspmgmt "github.com/hyperledger/fabric/msp/mgmt"
ehpb "github.com/hyperledger/fabric/protos/peer"
"github.com/hyperledger/fabric/protos/utils"
)
var consumerLogger = flogging.MustGetLogger("eventhub_consumer")
//EventsClient holds the stream and adapter for consumer to work with
//EventsClient holds the stream and adapter for consumer to work with
type EventsClient struct {
	sync.RWMutex                         // guards stream writes (see send)
	peerAddress string                   // event hub endpoint address
	regTimeout  time.Duration            // max wait for (un)registration acks
	stream      ehpb.Events_ChatClient   // established by Start
	adapter     EventAdapter             // consumer callbacks; nil-checked before use
}
//NewEventsClient Returns a new grpc.ClientConn to the configured local PEER.
//The registration timeout is clamped to [100ms, 60s]; when clamping occurs a
//non-nil error describing the adjustment is returned alongside a usable client.
func NewEventsClient(peerAddress string, regTimeout time.Duration, adapter EventAdapter) (*EventsClient, error) {
	var err error
	if regTimeout < 100*time.Millisecond {
		regTimeout = 100 * time.Millisecond
		// original message claimed "regTimeout >= 0", which misdescribes this branch
		err = fmt.Errorf("regTimeout < 100ms, setting to 100 msec")
	} else if regTimeout > 60*time.Second {
		regTimeout = 60 * time.Second
		err = fmt.Errorf("regTimeout > 60, setting to 60 sec")
	}
	return &EventsClient{sync.RWMutex{}, peerAddress, regTimeout, nil, adapter}, err
}
//newEventsClientConnectionWithAddress Returns a new grpc.ClientConn to the configured local PEER.
//TLS credentials are attached only when TLS is enabled in the peer configuration.
func newEventsClientConnectionWithAddress(peerAddress string) (*grpc.ClientConn, error) {
	if !comm.TLSEnabled() {
		return comm.NewClientConnectionWithAddress(peerAddress, true, false, nil)
	}
	return comm.NewClientConnectionWithAddress(peerAddress, true, true, comm.InitTLSForPeer())
}
func (ec *EventsClient) send(emsg *ehpb.Event) error {
ec.Lock()
defer ec.Unlock()
// obtain the default signing identity for this peer; it will be used to sign the event
localMsp := mspmgmt.GetLocalMSP()
if localMsp == nil |
signer, err := localMsp.GetDefaultSigningIdentity()
if err != nil {
return fmt.Errorf("could not obtain the default signing identity, err %s", err)
}
signedEvt, err := utils.GetSignedEvent(emsg, signer)
if err != nil {
return fmt.Errorf("could not sign outgoing event, err %s", err)
}
return ec.stream.Send(signedEvt)
}
// RegisterAsync - registers interest in a event and doesn't wait for a response.
// The send error is wrapped and returned (previously it was printed to stdout
// with fmt.Printf, inconsistent with UnregisterAsync).
func (ec *EventsClient) RegisterAsync(ies []*ehpb.Interest) error {
	emsg := &ehpb.Event{Event: &ehpb.Event_Register{Register: &ehpb.Register{Events: ies}}}
	var err error
	if err = ec.send(emsg); err != nil {
		err = fmt.Errorf("error on register send %s", err)
	}
	return err
}
// register - registers interest in a event and waits up to regTimeout for the
// server's acknowledgement.
func (ec *EventsClient) register(ies []*ehpb.Interest) error {
	if err := ec.RegisterAsync(ies); err != nil {
		return err
	}

	// Deliver the ack result over a buffered channel instead of a shared
	// variable: the original wrote `err` from the goroutine while the timeout
	// path could read it concurrently (data race), and the closed-channel
	// signal did not order the write. Buffer of 1 lets the goroutine exit
	// even if the timeout wins.
	regChan := make(chan error, 1)
	go func() {
		in, err := ec.stream.Recv()
		if err != nil {
			regChan <- err
			return
		}
		switch in.Event.(type) {
		case *ehpb.Event_Register:
			regChan <- nil
		case nil:
			regChan <- fmt.Errorf("invalid nil object for register")
		default:
			regChan <- fmt.Errorf("invalid registration object")
		}
	}()

	select {
	case err := <-regChan:
		return err
	case <-time.After(ec.regTimeout):
		return fmt.Errorf("timeout waiting for registration")
	}
}
// UnregisterAsync - Unregisters interest in a event and doesn't wait for a response.
func (ec *EventsClient) UnregisterAsync(ies []*ehpb.Interest) error {
	emsg := &ehpb.Event{Event: &ehpb.Event_Unregister{Unregister: &ehpb.Unregister{Events: ies}}}
	var err error
	if err = ec.send(emsg); err != nil {
		// no trailing newline in error strings (go vet / ST1005)
		err = fmt.Errorf("error on unregister send %s", err)
	}
	return err
}
// unregister - unregisters interest in a event and waits up to regTimeout for
// the server's acknowledgement.
func (ec *EventsClient) unregister(ies []*ehpb.Interest) error {
	if err := ec.UnregisterAsync(ies); err != nil {
		return err
	}

	// Same channel-based handshake as register: avoids the data race on a
	// shared err variable between the ack goroutine and the timeout path.
	regChan := make(chan error, 1)
	go func() {
		in, err := ec.stream.Recv()
		if err != nil {
			regChan <- err
			return
		}
		switch in.Event.(type) {
		case *ehpb.Event_Unregister:
			regChan <- nil
		case nil:
			regChan <- fmt.Errorf("invalid nil object for unregister")
		default:
			regChan <- fmt.Errorf("invalid unregistration object")
		}
	}()

	select {
	case err := <-regChan:
		return err
	case <-time.After(ec.regTimeout):
		return fmt.Errorf("timeout waiting for unregistration")
	}
}
// Recv receives the next event - use when the client has not called Start.
// On stream failure the adapter (if any) is notified: EOF is an orderly
// shutdown and reported as a nil disconnect reason, any other error is
// passed through; the error is returned to the caller in both cases.
func (ec *EventsClient) Recv() (*ehpb.Event, error) {
	in, err := ec.stream.Recv()
	if err == nil {
		return in, nil
	}
	if ec.adapter != nil {
		if err == io.EOF {
			ec.adapter.Disconnected(nil)
		} else {
			ec.adapter.Disconnected(err)
		}
	}
	return nil, err
}
// processEvents pumps the chat stream until it ends, forwarding each event to
// the adapter. The adapter's Recv return value controls continuation: a false
// first value stops the loop and returns the adapter's error (possibly nil).
// EOF is treated as an orderly shutdown (nil disconnect reason, nil return).
func (ec *EventsClient) processEvents() error {
	defer ec.stream.CloseSend()
	for {
		in, err := ec.stream.Recv()
		if err == io.EOF {
			// read done.
			if ec.adapter != nil {
				ec.adapter.Disconnected(nil)
			}
			return nil
		}
		if err != nil {
			if ec.adapter != nil {
				ec.adapter.Disconnected(err)
			}
			return err
		}
		if ec.adapter != nil {
			cont, err := ec.adapter.Recv(in)
			if !cont {
				return err
			}
		}
	}
}
//Start establishes connection with Event hub and registers interested events with it.
//The grpc connection is closed on every failure path after a successful dial
//(the original leaked it, leaving an open connection per failed Start).
func (ec *EventsClient) Start() error {
	conn, err := newEventsClientConnectionWithAddress(ec.peerAddress)
	if err != nil {
		return fmt.Errorf("could not create client conn to %s:%s", ec.peerAddress, err)
	}

	ies, err := ec.adapter.GetInterestedEvents()
	if err != nil {
		conn.Close()
		return fmt.Errorf("error getting interested events:%s", err)
	}
	if len(ies) == 0 {
		conn.Close()
		return fmt.Errorf("must supply interested events")
	}

	serverClient := ehpb.NewEventsClient(conn)
	ec.stream, err = serverClient.Chat(context.Background())
	if err != nil {
		conn.Close()
		return fmt.Errorf("could not create client conn to %s:%s", ec.peerAddress, err)
	}

	if err = ec.register(ies); err != nil {
		conn.Close()
		return err
	}

	go ec.processEvents()
	return nil
}
//Stop terminates connection with event hub
func (ec *EventsClient) Stop() error {
	if ec.stream != nil {
		return ec.stream.CloseSend()
	}
	// the stream/chat server was never established; treat as already
	// closed, successfully
	return nil
}
| {
return errors.New("nil local MSP manager")
} |
icons.rs | use ansi_term::Style;
use fs::File;
use info::filetype::FileExtensions;
use output::file_name::FileStyle;
/// Maps a file to a Nerd Font icon codepoint, if the implementor knows one.
pub trait FileIcon {
    /// Returns `Some(icon char)` for `file`, or `None` if no icon applies.
    fn icon_file(&self, file: &File) -> Option<char>;
}
/// Broad media categories that each share a single icon.
pub enum Icons {
    Audio,
    Image,
    Video,
}
impl Icons {
pub fn value(&self) -> char {
match *self {
Icons::Audio => '\u{f001}',
Icons::Image => '\u{f1c5}',
Icons::Video => '\u{f03d}',
}
}
}
/// Renders the icon for `file`, coloured per the extension style rules,
/// followed by a trailing space. Underline is stripped so it never extends
/// under the icon glyph.
pub fn painted_icon(file: &File, style: &FileStyle) -> String {
    let file_icon = icon(&file).to_string();
    let painted = match style.exts.colour_file(&file) {
        None => file_icon.to_string(),
        Some(c) => {
            if c.is_underline {
                // Keep only the foreground colour; drop the underline.
                let base = match c.foreground {
                    Some(color) => Style::from(color),
                    None => Style::default(),
                };
                base.paint(file_icon).to_string()
            } else {
                c.paint(file_icon).to_string()
            }
        }
    };
    format!("{} ", painted)
}
fn icon(file: &File) -> char {
let extensions = Box::new(FileExtensions);
if file.is_directory() { '\u{f115}' }
else if let Some(icon) = extensions.icon_file(file) { icon }
else { | "ai" => '\u{e7b4}',
"android" => '\u{e70e}',
"apple" => '\u{f179}',
"avro" => '\u{e60b}',
"c" => '\u{e61e}',
"clj" => '\u{e768}',
"coffee" => '\u{f0f4}',
"conf" => '\u{e615}',
"cpp" => '\u{e61d}',
"css" => '\u{e749}',
"d" => '\u{e7af}',
"dart" => '\u{e798}',
"db" => '\u{f1c0}',
"diff" => '\u{f440}',
"doc" => '\u{f1c2}',
"ebook" => '\u{e28b}',
"env" => '\u{f462}',
"epub" => '\u{e28a}',
"erl" => '\u{e7b1}',
"font" => '\u{f031}',
"gform" => '\u{f298}',
"git" => '\u{f1d3}',
"go" => '\u{e626}',
"hs" => '\u{e777}',
"html" => '\u{f13b}',
"iml" => '\u{e7b5}',
"java" => '\u{e204}',
"js" => '\u{e74e}',
"json" => '\u{e60b}',
"jsx" => '\u{e7ba}',
"less" => '\u{e758}',
"log" => '\u{f18d}',
"lua" => '\u{e620}',
"md" => '\u{f48a}',
"mustache" => '\u{e60f}',
"npmignore" => '\u{e71e}',
"pdf" => '\u{f1c1}',
"php" => '\u{e73d}',
"pl" => '\u{e769}',
"ppt" => '\u{f1c4}',
"psd" => '\u{e7b8}',
"py" => '\u{e606}',
"r" => '\u{f25d}',
"rb" => '\u{e21e}',
"rdb" => '\u{e76d}',
"rs" => '\u{e7a8}',
"rss" => '\u{f09e}',
"rubydoc" => '\u{e73b}',
"sass" => '\u{e603}',
"scala" => '\u{e737}',
"shell" => '\u{f489}',
"sqlite3" => '\u{e7c4}',
"styl" => '\u{e600}',
"tex" => '\u{e600}',
"ts" => '\u{e628}',
"twig" => '\u{e61c}',
"txt" => '\u{f15c}',
"video" => '\u{f03d}',
"vim" => '\u{e62b}',
"xls" => '\u{f1c3}',
"xml" => '\u{e619}',
"yml" => '\u{f481}',
"zip" => '\u{f410}',
_ => '\u{f15b}'
}
} else {
'\u{f15b}'
}
}
} | if let Some(ext) = file.ext.as_ref() {
match ext.as_str() { |
10935.go | // UVa 10935 - Throwing cards away I
package main
import (
"fmt"
"os"
"strconv"
"strings"
)
func solve(n int) ([]string, string) {
var discarded []string
card := make([]string, n)
for i := range card {
card[i] = strconv.Itoa(i + 1)
}
for len(card) > 1 {
discarded = append(discarded, card[0])
card = append(card[2:], card[1]) | }
return discarded, card[0]
}
func main() {
in, _ := os.Open("10935.in")
defer in.Close()
out, _ := os.Create("10935.out")
defer out.Close()
var n int
for {
if fmt.Fscanf(in, "%d", &n); n == 0 {
break
}
discarded, remaining := solve(n)
fmt.Fprintf(out, "Discarded cards: %s\n", strings.Join(discarded, ", "))
fmt.Fprintf(out, "Remaining card: %s\n", remaining)
}
} | |
frmmain.py | from PyQt5 import QtCore, QtWidgets
from PyQt5.QtGui import *
from wired_module import *
# Generated By WiredQT for Python: by Rocky Nuarin, 2021 Phils
class Handler(QtWidgets.QWidget,usercontrol):
def __init__(self, *param):
    """Initialize the generated form: base widget, controls, timer, scheduler.

    NOTE(review): the first three statements were displaced to the end of the
    dump by the dataset formatting; restored here in their original order.
    """
    super(Handler, self).__init__(None)
    initUI(self, param, w=400, h=400, title="WiredQTv5.0", controlbox=True,
           startpos=(0, 30), timeoutdestroy=-1)
    self.GTKForms()
    # 10 ms Qt timer drives loop(); the Scheduler throttles the slow path
    self.timer = QtCore.QTimer()
    self.timer.timeout.connect(self.loop)
    self.timer.start(10)
    self.sch = Scheduler(5000)  # original comment says 500 ms — value is 5000; verify
    self.sch.Start()
def createwidget(self, prop, control, parent, event=None):
    """Create one designer-described widget; thin wrapper over createWidget.

    The original signature used a mutable default (``event=[]``), which is
    shared across calls; a ``None`` sentinel is backward-compatible.
    """
    createWidget(self, prop, control, parent, [] if event is None else event)
def GTKForms(self):
    """Instantiate the generated controls (DataGrid1, QPushButton1).

    The property strings are emitted verbatim by the WiredQT designer;
    edit them through the designer rather than by hand.
    """
    self.createwidget("{'dbtype': 'sqllite', 'Enable': 'True', 'Height': '173', 'Text': '', 'Name': 'DataGrid1', 'Visible': 'True', 'Left': '79', 'Width': '318', 'Var': '', 'Tag': 'Activex', 'Events': [[['clicked', 'self,arg1']]], 'Help': '', 'Picture': '', 'Font': '', 'ParentsType': '', 'Top': '49', 'ForeColor': '(0,0,0,1)', 'BackColor': '(1,1,1,0.25)'}",'DataGrid','usercontrol',"[['clicked', 'self,arg1']]")
    self.createwidget("{'Enable': 'True', 'Height': '100', 'Text': 'QPushButton1', 'Name': 'QPushButton1', 'Visible': 'True', 'Left': '118', 'Width': '123', 'Var': '', 'Tag': '', 'Events': '[clicked]', 'Help': '', 'Picture': '', 'Font': '', 'ParentsType': '', 'Top': '261', 'ForeColor': '(0,0,0,1)', 'BackColor': '(1,1,1,0.25)'}",'QPushButton','usercontrol',"[['clicked', 'self,arg1']]")
def Widget(self):
    """Return the underlying widget (self) for embedding by a parent form."""
    return self
def loop(self):
    """10 ms timer tick: one-shot form-load latch plus scheduled countdown.

    NOTE(review): indentation reconstructed — the dump stripped leading
    whitespace; nesting below follows the standard WiredQT generator layout.
    """
    if self.form_load == False:
        # first tick after construction
        self.form_load = True
    if self.sch.Event():  # timer routine
        # code here
        if self.timeoutdestroy != -1:
            self.timeoutdestroy -= 1
            if self.timeoutdestroy == 0:
                pass  # self.unload(None)
        self.sch.Start()  # restart scheduler
def connect(self, ev, evusr):
    """Register a user callback for the named wired event."""
    self.wiredevents[ev] = evusr
def activeXcreated(self, *args):
    """Hook invoked when an embedded control is created; intentionally a no-op."""
    pass
def eventFilter(self, obj, event):
    """Qt event filter hook; defers entirely to the default implementation."""
    return super(Handler, self).eventFilter(obj, event)
def QPushButton1_clicked(self, arg1):
    """Open the SQLite database and bind the first 10 suppliers to the grid.

    Removed the dead trailing ``pass``.
    NOTE(review): the connection is kept on ``self.db`` and never closed —
    presumably so DataGrid1 can keep using it; verify the intended lifetime.
    """
    import sqlite3
    self.db = sqlite3.connect('database.db')
    self.DataGrid1.DataSource(self.db, "select * from supplier limit 10")
def DataGrid1_clicked(self, arg1):
    """Debug handler: print the clicked cell's row, column, and value.

    Removed the dead trailing ``pass``.
    """
    print(arg1.row(), arg1.column(), arg1.data())
if __name__ == '__main__':
    # Standalone launch of the generated form.
    # NOTE(review): the original final line was garbled by the dataset
    # formatting (it carried displaced __init__ statements after a separator);
    # reconstructed as the conventional Qt entry point.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    w = Handler()
    w.show()
    sys.exit(app.exec_())
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.