prompt: large_string (lengths 70 to 991k)
completion: large_string (lengths 0 to 1.02k)
<|file_name|>maintenanceconfigurations.go<|end_file_name|><|fim▁begin|>package containerservice // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. import ( "context" "github.com/Azure/go-autorest/autorest" "github.com/Azure/go-autorest/autorest/azure" "github.com/Azure/go-autorest/autorest/validation" "github.com/Azure/go-autorest/tracing" "net/http" ) // MaintenanceConfigurationsClient is the the Container Service Client. type MaintenanceConfigurationsClient struct { BaseClient } // NewMaintenanceConfigurationsClient creates an instance of the MaintenanceConfigurationsClient client. func NewMaintenanceConfigurationsClient(subscriptionID string) MaintenanceConfigurationsClient { return NewMaintenanceConfigurationsClientWithBaseURI(DefaultBaseURI, subscriptionID) } // NewMaintenanceConfigurationsClientWithBaseURI creates an instance of the MaintenanceConfigurationsClient client // using a custom endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign // clouds, Azure stack). func NewMaintenanceConfigurationsClientWithBaseURI(baseURI string, subscriptionID string) MaintenanceConfigurationsClient { return MaintenanceConfigurationsClient{NewWithBaseURI(baseURI, subscriptionID)} } // CreateOrUpdate sends the create or update request. // Parameters: // resourceGroupName - the name of the resource group. // resourceName - the name of the managed cluster resource. // configName - the name of the maintenance configuration. // parameters - the maintenance configuration to create or update. 
func (client MaintenanceConfigurationsClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, resourceName string, configName string, parameters MaintenanceConfiguration) (result MaintenanceConfiguration, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/MaintenanceConfigurationsClient.CreateOrUpdate") defer func() { sc := -1 if result.Response.Response != nil { sc = result.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } if err := validation.Validate([]validation.Validation{ {TargetValue: resourceGroupName, Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}}}, {TargetValue: resourceName, Constraints: []validation.Constraint{{Target: "resourceName", Name: validation.MaxLength, Rule: 63, Chain: nil}, {Target: "resourceName", Name: validation.MinLength, Rule: 1, Chain: nil}, {Target: "resourceName", Name: validation.Pattern, Rule: `^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$`, Chain: nil}}}}); err != nil { return result, validation.NewError("containerservice.MaintenanceConfigurationsClient", "CreateOrUpdate", err.Error()) } req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, resourceName, configName, parameters) if err != nil { err = autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "CreateOrUpdate", nil, "Failure preparing request") return } resp, err := client.CreateOrUpdateSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "CreateOrUpdate", resp, "Failure sending request") return } result, err = client.CreateOrUpdateResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "CreateOrUpdate", resp, "Failure responding to request") return } return } // CreateOrUpdatePreparer prepares the CreateOrUpdate request. func (client MaintenanceConfigurationsClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, resourceName string, configName string, parameters MaintenanceConfiguration) (*http.Request, error) { pathParameters := map[string]interface{}{ "configName": autorest.Encode("path", configName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "resourceName": autorest.Encode("path", resourceName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2021-10-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } parameters.SystemData = nil preparer := autorest.CreatePreparer( autorest.AsContentType("application/json; charset=utf-8"), autorest.AsPut(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations/{configName}", pathParameters), autorest.WithJSON(parameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the // http.Response Body if it receives an error. func (client MaintenanceConfigurationsClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // CreateOrUpdateResponder handles the response to the CreateOrUpdate request. 
The method always // closes the http.Response Body. func (client MaintenanceConfigurationsClient) CreateOrUpdateResponder(resp *http.Response) (result MaintenanceConfiguration, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // Delete sends the delete request. // Parameters: // resourceGroupName - the name of the resource group. // resourceName - the name of the managed cluster resource. // configName - the name of the maintenance configuration. func (client MaintenanceConfigurationsClient) Delete(ctx context.Context, resourceGroupName string, resourceName string, configName string) (result autorest.Response, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/MaintenanceConfigurationsClient.Delete") defer func() { sc := -1 if result.Response != nil { sc = result.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } if err := validation.Validate([]validation.Validation{ {TargetValue: resourceGroupName, Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}}}, {TargetValue: resourceName, Constraints: []validation.Constraint{{Target: "resourceName", Name: validation.MaxLength, Rule: 63, Chain: nil}, {Target: "resourceName", Name: validation.MinLength, Rule: 1, Chain: nil}, {Target: "resourceName", Name: validation.Pattern, Rule: `^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$`, Chain: nil}}}}); err != nil { return result, validation.NewError("containerservice.MaintenanceConfigurationsClient", "Delete", err.Error()) } req, err := client.DeletePreparer(ctx, resourceGroupName, resourceName, configName) if err != nil { err = autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "Delete", nil, "Failure preparing request") return } resp, err := client.DeleteSender(req) if err != nil { result.Response = resp err = autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "Delete", resp, "Failure sending request") return } result, err = client.DeleteResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "Delete", resp, "Failure responding to request") return } return } // DeletePreparer prepares the Delete request. func (client MaintenanceConfigurationsClient) DeletePreparer(ctx context.Context, resourceGroupName string, resourceName string, configName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "configName": autorest.Encode("path", configName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "resourceName": autorest.Encode("path", resourceName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2021-10-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsDelete(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations/{configName}", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // DeleteSender sends the Delete request. The method will close the // http.Response Body if it receives an error. 
func (client MaintenanceConfigurationsClient) DeleteSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // DeleteResponder handles the response to the Delete request. The method always // closes the http.Response Body. func (client MaintenanceConfigurationsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), autorest.ByClosing()) result.Response = resp return } // Get sends the get request. // Parameters: // resourceGroupName - the name of the resource group. // resourceName - the name of the managed cluster resource. // configName - the name of the maintenance configuration. func (client MaintenanceConfigurationsClient) Get(ctx context.Context, resourceGroupName string, resourceName string, configName string) (result MaintenanceConfiguration, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/MaintenanceConfigurationsClient.Get") defer func() { sc := -1 if result.Response.Response != nil { sc = result.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } if err := validation.Validate([]validation.Validation{ {TargetValue: resourceGroupName, Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}}}, {TargetValue: resourceName, Constraints: []validation.Constraint{{Target: "resourceName", Name: validation.MaxLength, Rule: 63, Chain: nil}, {Target: "resourceName", Name: validation.MinLength, Rule: 1, Chain: nil}, {Target: "resourceName", Name: validation.Pattern, Rule: `^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$`, Chain: nil}}}}); err != nil { return result, validation.NewError("containerservice.MaintenanceConfigurationsClient", "Get", err.Error()) } req, err := client.GetPreparer(ctx, resourceGroupName, resourceName, configName) if err != nil { err = autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "Get", nil, "Failure preparing request") return } resp, err := client.GetSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "Get", resp, "Failure sending request") return } result, err = client.GetResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "Get", resp, "Failure responding to request") return } return } // GetPreparer prepares the Get request. 
func (client MaintenanceConfigurationsClient) GetPreparer(ctx context.Context, resourceGroupName string, resourceName string, configName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "configName": autorest.Encode("path", configName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "resourceName": autorest.Encode("path", resourceName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2021-10-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations/{configName}", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // GetSender sends the Get request. The method will close the // http.Response Body if it receives an error. func (client MaintenanceConfigurationsClient) GetSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // GetResponder handles the response to the Get request. The method always // closes the http.Response Body. func (client MaintenanceConfigurationsClient) GetResponder(resp *http.Response) (result MaintenanceConfiguration, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListByManagedCluster sends the list by managed cluster request. // Parameters: // resourceGroupName - the name of the resource group. // resourceName - the name of the managed cluster resource. 
func (client MaintenanceConfigurationsClient) ListByManagedCluster(ctx context.Context, resourceGroupName string, resourceName string) (result MaintenanceConfigurationListResultPage, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/MaintenanceConfigurationsClient.ListByManagedCluster") defer func() { sc := -1 if result.mclr.Response.Response != nil { sc = result.mclr.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } if err := validation.Validate([]validation.Validation{ {TargetValue: resourceGroupName, Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}}}, {TargetValue: resourceName, Constraints: []validation.Constraint{{Target: "resourceName", Name: validation.MaxLength, Rule: 63, Chain: nil}, {Target: "resourceName", Name: validation.MinLength, Rule: 1, Chain: nil}, {Target: "resourceName", Name: validation.Pattern, Rule: `^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$`, Chain: nil}}}}); err != nil { return result, validation.NewError("containerservice.MaintenanceConfigurationsClient", "ListByManagedCluster", err.Error()) } result.fn = client.listByManagedClusterNextResults req, err := client.ListByManagedClusterPreparer(ctx, resourceGroupName, resourceName) if err != nil { err = autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "ListByManagedCluster", nil, "Failure preparing request") return } resp, err := client.ListByManagedClusterSender(req) if err != nil { result.mclr.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "ListByManagedCluster", resp, "Failure sending request") return } result.mclr, err = client.ListByManagedClusterResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "ListByManagedCluster", resp, "Failure responding to request") return } if result.mclr.hasNextLink() && result.mclr.IsEmpty() { err = result.NextWithContext(ctx) return } return } // ListByManagedClusterPreparer prepares the ListByManagedCluster request. func (client MaintenanceConfigurationsClient) ListByManagedClusterPreparer(ctx context.Context, resourceGroupName string, resourceName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "resourceGroupName": autorest.Encode("path", resourceGroupName), "resourceName": autorest.Encode("path", resourceName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2021-10-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListByManagedClusterSender sends the ListByManagedCluster request. The method will close the // http.Response Body if it receives an error. func (client MaintenanceConfigurationsClient) ListByManagedClusterSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // ListByManagedClusterResponder handles the response to the ListByManagedCluster request. 
The method always // closes the http.Response Body. func (client MaintenanceConfigurationsClient) ListByManagedClusterResponder(resp *http.Response) (result MaintenanceConfigurationListResult, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp}<|fim▁hole|>// listByManagedClusterNextResults retrieves the next set of results, if any. func (client MaintenanceConfigurationsClient) listByManagedClusterNextResults(ctx context.Context, lastResults MaintenanceConfigurationListResult) (result MaintenanceConfigurationListResult, err error) { req, err := lastResults.maintenanceConfigurationListResultPreparer(ctx) if err != nil { return result, autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "listByManagedClusterNextResults", nil, "Failure preparing next results request") } if req == nil { return } resp, err := client.ListByManagedClusterSender(req) if err != nil { result.Response = autorest.Response{Response: resp} return result, autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "listByManagedClusterNextResults", resp, "Failure sending next results request") } result, err = client.ListByManagedClusterResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "containerservice.MaintenanceConfigurationsClient", "listByManagedClusterNextResults", resp, "Failure responding to next results request") } return } // ListByManagedClusterComplete enumerates all values, automatically crossing page boundaries as required. func (client MaintenanceConfigurationsClient) ListByManagedClusterComplete(ctx context.Context, resourceGroupName string, resourceName string) (result MaintenanceConfigurationListResultIterator, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/MaintenanceConfigurationsClient.ListByManagedCluster") defer func() { sc := -1 if result.Response().Response.Response != nil { sc = result.page.Response().Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } result.page, err = client.ListByManagedCluster(ctx, resourceGroupName, resourceName) return }<|fim▁end|>
return }
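For orientation, here is a minimal sketch of how a consumer might call the generated MaintenanceConfigurationsClient shown in the sample above. The SDK import path (the API-version folder), the subscription and resource names, and the use of environment-based authentication are illustrative assumptions, not part of the sample:

package main

import (
	"context"
	"log"

	// Import path is an assumption; the actual folder depends on which
	// API-version package this generated file belongs to.
	"github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2021-10-01/containerservice"
	"github.com/Azure/go-autorest/autorest/azure/auth"
)

func main() {
	// Reads AZURE_CLIENT_ID, AZURE_CLIENT_SECRET, AZURE_TENANT_ID, etc.
	authorizer, err := auth.NewAuthorizerFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}

	client := containerservice.NewMaintenanceConfigurationsClient("<subscription-id>")
	client.Authorizer = authorizer

	// Fetch one maintenance configuration from a managed cluster; the
	// resource group, cluster, and config names are placeholders.
	cfg, err := client.Get(context.Background(), "my-resource-group", "my-aks-cluster", "default")
	if err != nil {
		log.Fatal(err)
	}
	if cfg.Name != nil {
		log.Println("found maintenance configuration:", *cfg.Name)
	}
}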
<|file_name|>code.py<|end_file_name|><|fim▁begin|><|fim▁hole|> print "Reach for the sky but don't burn your wings!"# this will make it much easier in future problems to see that something is actually happening<|fim▁end|>
def Woody(): # complete
<|file_name|>FilePicker.js.uncompressed.js<|end_file_name|><|fim▁begin|>define( "dojox/widget/nls/he/FilePicker", ({ name: "שם", <|fim▁hole|> path: "נתיב", size: "גודל (בבתים)" }) );<|fim▁end|>
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT. // Package sqs provides the client and types for making API // requests to Amazon Simple Queue Service. // // Welcome to the Amazon Simple Queue Service API Reference. // // Amazon Simple Queue Service (Amazon SQS) is a reliable, highly-scalable hosted // queue for storing messages as they travel between applications or microservices. // Amazon SQS moves data between distributed application components and helps // you decouple these components. // // Standard queues (http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/standard-queues.html) // are available in all regions. FIFO queues (http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/FIFO-queues.html) // are available in US West (Oregon) and US East (Ohio). // // You can use AWS SDKs (http://aws.amazon.com/tools/#sdk) to access Amazon // SQS using your favorite programming language. The SDKs perform tasks such // as the following automatically: // // * Cryptographically sign your service requests // // * Retry requests // // * Handle error responses // // Additional Information // // * Amazon SQS Product Page (http://aws.amazon.com/sqs/) // // * Amazon SQS Developer Guide // // Making API Requests (http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/MakingRequestsArticle.html) // // Using Amazon SQS Message Attributes (http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-message-attributes.html) // // Using Amazon SQS Dead Letter Queues (http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-dead-letter-queues.html) // // * Amazon Web Services General Reference // // Regions and Endpoints (http://docs.aws.amazon.com/general/latest/gr/rande.html#sqs_region)<|fim▁hole|>// See sqs package documentation for more information. // https://docs.aws.amazon.com/sdk-for-go/api/service/sqs/ // // Using the Client // // To Amazon Simple Queue Service with the SDK use the New function to create // a new service client. With that client you can make API requests to the service. // These clients are safe to use concurrently. // // See the SDK's documentation for more information on how to use the SDK. // https://docs.aws.amazon.com/sdk-for-go/api/ // // See aws.Config documentation for more information on configuring SDK clients. // https://docs.aws.amazon.com/sdk-for-go/api/aws/#Config // // See the Amazon Simple Queue Service client SQS for more // information on creating client for this service. // https://docs.aws.amazon.com/sdk-for-go/api/service/sqs/#New package sqs //Added a line for testing //Adding another line for Git event testing part 2 //Adding another line for Git event testing part 2.1 //Adding another line for Git event testing part 2.2<|fim▁end|>
// // See https://docs.aws.amazon.com/goto/WebAPI/sqs-2012-11-05 for more information on this service. //
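The doc.go sample above states that clients are created with the package's New function and are safe for concurrent use. A minimal sketch following that pattern with aws-sdk-go v1; the region and the ListQueues call are chosen purely for illustration:

package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/sqs"
)

func main() {
	// session.Must panics if the shared config/credentials cannot be loaded.
	sess := session.Must(session.NewSession(&aws.Config{
		Region: aws.String("us-east-1"),
	}))

	// Create the SQS service client; it is safe to share across goroutines.
	svc := sqs.New(sess)

	// List the queues visible to these credentials.
	out, err := svc.ListQueues(&sqs.ListQueuesInput{})
	if err != nil {
		log.Fatal(err)
	}
	for _, url := range out.QueueUrls {
		fmt.Println(aws.StringValue(url))
	}
}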
<|file_name|>MulticastGroup.cpp<|end_file_name|><|fim▁begin|>/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #include <aws/iotwireless/model/MulticastGroup.h> #include <aws/core/utils/json/JsonSerializer.h> #include <utility> using namespace Aws::Utils::Json; using namespace Aws::Utils; namespace Aws {<|fim▁hole|>namespace Model { MulticastGroup::MulticastGroup() : m_idHasBeenSet(false), m_arnHasBeenSet(false), m_nameHasBeenSet(false) { } MulticastGroup::MulticastGroup(JsonView jsonValue) : m_idHasBeenSet(false), m_arnHasBeenSet(false), m_nameHasBeenSet(false) { *this = jsonValue; } MulticastGroup& MulticastGroup::operator =(JsonView jsonValue) { if(jsonValue.ValueExists("Id")) { m_id = jsonValue.GetString("Id"); m_idHasBeenSet = true; } if(jsonValue.ValueExists("Arn")) { m_arn = jsonValue.GetString("Arn"); m_arnHasBeenSet = true; } if(jsonValue.ValueExists("Name")) { m_name = jsonValue.GetString("Name"); m_nameHasBeenSet = true; } return *this; } JsonValue MulticastGroup::Jsonize() const { JsonValue payload; if(m_idHasBeenSet) { payload.WithString("Id", m_id); } if(m_arnHasBeenSet) { payload.WithString("Arn", m_arn); } if(m_nameHasBeenSet) { payload.WithString("Name", m_name); } return payload; } } // namespace Model } // namespace IoTWireless } // namespace Aws<|fim▁end|>
namespace IoTWireless {
<|file_name|>dump_csv.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Output a CSV file containing the output from rustc's analysis. The data is //! primarily designed to be used as input to the DXR tool, specifically its //! Rust plugin. It could also be used by IDEs or other code browsing, search, or //! cross-referencing tools. //! //! Dumping the analysis is implemented by walking the AST and getting a bunch of //! info out from all over the place. We use Def IDs to identify objects. The //! tricky part is getting syntactic (span, source text) and semantic (reference //! Def IDs) information for parts of expressions which the compiler has discarded. //! E.g., in a path `foo::bar::baz`, the compiler only keeps a span for the whole //! path and a reference to `baz`, but we want spans and references for all three //! idents. //! //! SpanUtils is used to manipulate spans. In particular, to extract sub-spans //! from spans (e.g., the span for `bar` from the above example path). //! Recorder is used for recording the output in csv format. FmtStrs separates //! the format of the output away from extracting it from the compiler. //! DumpCsvVisitor walks the AST and processes it. use super::{escape, generated_code, recorder, SaveContext, PathCollector}; use session::Session; use middle::def; use middle::ty::{self, Ty}; use std::cell::Cell; use std::fs::File; use std::path::Path; use syntax::ast_util; use syntax::ast::{self, NodeId, DefId}; use syntax::ast_map::NodeItem; use syntax::codemap::*; use syntax::parse::token::{self, get_ident, keywords}; use syntax::owned_slice::OwnedSlice; use syntax::visit::{self, Visitor}; use syntax::print::pprust::{path_to_string, ty_to_string}; use syntax::ptr::P; use super::span_utils::SpanUtils; use super::recorder::{Recorder, FmtStrs}; use util::ppaux; pub struct DumpCsvVisitor<'l, 'tcx: 'l> { save_ctxt: SaveContext<'l, 'tcx>, sess: &'l Session, analysis: &'l ty::CrateAnalysis<'tcx>, span: SpanUtils<'l>, fmt: FmtStrs<'l>, cur_scope: NodeId } impl <'l, 'tcx> DumpCsvVisitor<'l, 'tcx> { pub fn new(sess: &'l Session, analysis: &'l ty::CrateAnalysis<'tcx>, output_file: Box<File>) -> DumpCsvVisitor<'l, 'tcx> { DumpCsvVisitor { sess: sess, save_ctxt: SaveContext::new(sess, analysis, SpanUtils { sess: sess, err_count: Cell::new(0) }), analysis: analysis, span: SpanUtils { sess: sess, err_count: Cell::new(0) }, fmt: FmtStrs::new(box Recorder { out: output_file, dump_spans: false, }, SpanUtils { sess: sess, err_count: Cell::new(0) }), cur_scope: 0 } } fn nest<F>(&mut self, scope_id: NodeId, f: F) where F: FnOnce(&mut DumpCsvVisitor<'l, 'tcx>), { let parent_scope = self.cur_scope; self.cur_scope = scope_id; f(self); self.cur_scope = parent_scope; } pub fn dump_crate_info(&mut self, name: &str, krate: &ast::Crate) { // The current crate. self.fmt.crate_str(krate.span, name); // Dump info about all the external crates referenced from this crate. 
for c in &self.save_ctxt.get_external_crates() { self.fmt.external_crate_str(krate.span, &c.name, c.number); } self.fmt.recorder.record("end_external_crates\n"); } // Return all non-empty prefixes of a path. // For each prefix, we return the span for the last segment in the prefix and // a str representation of the entire prefix. fn process_path_prefixes(&self, path: &ast::Path) -> Vec<(Span, String)> { let spans = self.span.spans_for_path_segments(path); // Paths to enums seem to not match their spans - the span includes all the // variants too. But they seem to always be at the end, so I hope we can cope with // always using the first ones. So, only error out if we don't have enough spans. // What could go wrong...? if spans.len() < path.segments.len() { error!("Mis-calculated spans for path '{}'. \ Found {} spans, expected {}. Found spans:", path_to_string(path), spans.len(), path.segments.len()); for s in &spans { let loc = self.sess.codemap().lookup_char_pos(s.lo); error!(" '{}' in {}, line {}", self.span.snippet(*s), loc.file.name, loc.line); } return vec!(); } let mut result: Vec<(Span, String)> = vec!(); let mut segs = vec!(); for (i, (seg, span)) in path.segments.iter().zip(spans.iter()).enumerate() { segs.push(seg.clone()); let sub_path = ast::Path{span: *span, // span for the last segment global: path.global, segments: segs}; let qualname = if i == 0 && path.global { format!("::{}", path_to_string(&sub_path)) } else { path_to_string(&sub_path) }; result.push((*span, qualname)); segs = sub_path.segments; } result } // The global arg allows us to override the global-ness of the path (which // actually means 'does the path start with `::`', rather than 'is the path // semantically global). We use the override for `use` imports (etc.) where // the syntax is non-global, but the semantics are global. fn write_sub_paths(&mut self, path: &ast::Path, global: bool) { let sub_paths = self.process_path_prefixes(path); for (i, &(ref span, ref qualname)) in sub_paths.iter().enumerate() { let qualname = if i == 0 && global && !path.global { format!("::{}", qualname) } else { qualname.clone() }; self.fmt.sub_mod_ref_str(path.span, *span, &qualname[..], self.cur_scope); } } // As write_sub_paths, but does not process the last ident in the path (assuming it // will be processed elsewhere). See note on write_sub_paths about global. fn write_sub_paths_truncated(&mut self, path: &ast::Path, global: bool) { let sub_paths = self.process_path_prefixes(path); let len = sub_paths.len(); if len <= 1 { return; } let sub_paths = &sub_paths[..len-1]; for (i, &(ref span, ref qualname)) in sub_paths.iter().enumerate() { let qualname = if i == 0 && global && !path.global { format!("::{}", qualname) } else { qualname.clone() }; self.fmt.sub_mod_ref_str(path.span, *span, &qualname[..], self.cur_scope); } } // As write_sub_paths, but expects a path of the form module_path::trait::method // Where trait could actually be a struct too. fn write_sub_path_trait_truncated(&mut self, path: &ast::Path) { let sub_paths = self.process_path_prefixes(path); let len = sub_paths.len(); if len <= 1 { return; } let sub_paths = &sub_paths[.. 
(len-1)]; // write the trait part of the sub-path let (ref span, ref qualname) = sub_paths[len-2]; self.fmt.sub_type_ref_str(path.span, *span, &qualname[..]); // write the other sub-paths if len <= 2 { return; } let sub_paths = &sub_paths[..len-2]; for &(ref span, ref qualname) in sub_paths { self.fmt.sub_mod_ref_str(path.span, *span, &qualname[..], self.cur_scope); } } // looks up anything, not just a type fn lookup_type_ref(&self, ref_id: NodeId) -> Option<DefId> { if !self.analysis.ty_cx.def_map.borrow().contains_key(&ref_id) { self.sess.bug(&format!("def_map has no key for {} in lookup_type_ref", ref_id)); } let def = self.analysis.ty_cx.def_map.borrow().get(&ref_id).unwrap().full_def(); match def { def::DefPrimTy(_) => None, _ => Some(def.def_id()), } } fn lookup_def_kind(&self, ref_id: NodeId, span: Span) -> Option<recorder::Row> { let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&ref_id) { self.sess.span_bug(span, &format!("def_map has no key for {} in lookup_def_kind", ref_id)); } let def = def_map.get(&ref_id).unwrap().full_def(); match def { def::DefMod(_) | def::DefForeignMod(_) => Some(recorder::ModRef), def::DefStruct(_) => Some(recorder::StructRef), def::DefTy(..) | def::DefAssociatedTy(..) | def::DefTrait(_) => Some(recorder::TypeRef), def::DefStatic(_, _) | def::DefConst(_) | def::DefAssociatedConst(..) | def::DefLocal(_) | def::DefVariant(_, _, _) | def::DefUpvar(..) => Some(recorder::VarRef), def::DefFn(..) => Some(recorder::FnRef), def::DefSelfTy(..) | def::DefRegion(_) | def::DefLabel(_) | def::DefTyParam(..) | def::DefUse(_) | def::DefMethod(..) | def::DefPrimTy(_) => { self.sess.span_bug(span, &format!("lookup_def_kind for unexpected item: {:?}", def)); }, } } fn process_formals(&mut self, formals: &Vec<ast::Arg>, qualname: &str) { for arg in formals { self.visit_pat(&arg.pat); let mut collector = PathCollector::new(); collector.visit_pat(&arg.pat); let span_utils = self.span.clone(); for &(id, ref p, _, _) in &collector.collected_paths { let typ = ppaux::ty_to_string( &self.analysis.ty_cx, *self.analysis.ty_cx.node_types().get(&id).unwrap()); // get the span only for the name of the variable (I hope the path is only ever a // variable name, but who knows?) self.fmt.formal_str(p.span, span_utils.span_for_last_ident(p.span), id, qualname, &path_to_string(p), &typ[..]); } } } fn process_method(&mut self, sig: &ast::MethodSig, body: Option<&ast::Block>, id: ast::NodeId, name: ast::Name, span: Span) { if generated_code(span) { return; } debug!("process_method: {}:{}", id, token::get_name(name)); let mut scope_id; // The qualname for a method is the trait name or name of the struct in an impl in // which the method is declared in, followed by the method's name. 
let qualname = match ty::impl_of_method(&self.analysis.ty_cx, ast_util::local_def(id)) { Some(impl_id) => match self.analysis.ty_cx.map.get(impl_id.node) { NodeItem(item) => { scope_id = item.id; match item.node { ast::ItemImpl(_, _, _, _, ref ty, _) => { let mut result = String::from_str("<"); result.push_str(&ty_to_string(&**ty)); match ty::trait_of_item(&self.analysis.ty_cx, ast_util::local_def(id)) { Some(def_id) => { result.push_str(" as "); result.push_str( &ty::item_path_str(&self.analysis.ty_cx, def_id)); }, None => {} } result.push_str(">"); result } _ => { self.sess.span_bug(span, &format!("Container {} for method {} not an impl?", impl_id.node, id)); }, } }, _ => { self.sess.span_bug(span, &format!("Container {} for method {} is not a node item {:?}", impl_id.node, id, self.analysis.ty_cx.map.get(impl_id.node))); }, }, None => match ty::trait_of_item(&self.analysis.ty_cx, ast_util::local_def(id)) { Some(def_id) => { scope_id = def_id.node; match self.analysis.ty_cx.map.get(def_id.node) { NodeItem(_) => { format!("::{}", ty::item_path_str(&self.analysis.ty_cx, def_id)) } _ => { self.sess.span_bug(span, &format!("Could not find container {} for method {}", def_id.node, id)); } } }, None => { self.sess.span_bug(span, &format!("Could not find container for method {}", id)); }, }, }; let qualname = &format!("{}::{}", qualname, &token::get_name(name)); // record the decl for this def (if it has one) let decl_id = ty::trait_item_of_item(&self.analysis.ty_cx, ast_util::local_def(id)) .and_then(|new_id| { let def_id = new_id.def_id(); if def_id.node != 0 && def_id != ast_util::local_def(id) { Some(def_id) } else { None } }); let sub_span = self.span.sub_span_after_keyword(span, keywords::Fn); if body.is_some() { self.fmt.method_str(span, sub_span, id, qualname, decl_id, scope_id); self.process_formals(&sig.decl.inputs, qualname); } else { self.fmt.method_decl_str(span, sub_span, id, qualname, scope_id); } // walk arg and return types for arg in &sig.decl.inputs { self.visit_ty(&arg.ty); } if let ast::Return(ref ret_ty) = sig.decl.output { self.visit_ty(ret_ty); } // walk the fn body if let Some(body) = body { self.nest(id, |v| v.visit_block(body)); } self.process_generic_params(&sig.generics, span, qualname, id); } fn process_trait_ref(&mut self, trait_ref: &ast::TraitRef) { match self.lookup_type_ref(trait_ref.ref_id) { Some(id) => { let sub_span = self.span.sub_span_for_type_name(trait_ref.path.span); self.fmt.ref_str(recorder::TypeRef, trait_ref.path.span, sub_span, id, self.cur_scope); visit::walk_path(self, &trait_ref.path); }, None => () } } fn process_struct_field_def(&mut self, field: &ast::StructField, qualname: &str, scope_id: NodeId) { match field.node.kind { ast::NamedField(ident, _) => { let name = get_ident(ident); let qualname = format!("{}::{}", qualname, name); let typ = ppaux::ty_to_string( &self.analysis.ty_cx, *self.analysis.ty_cx.node_types().get(&field.node.id).unwrap()); match self.span.sub_span_before_token(field.span, token::Colon) { Some(sub_span) => self.fmt.field_str(field.span, Some(sub_span), field.node.id, &name[..], &qualname[..], &typ[..], scope_id), None => self.sess.span_bug(field.span, &format!("Could not find sub-span for field {}", qualname)), } }, _ => (), } } // Dump generic params bindings, then visit_generics fn process_generic_params(&mut self, generics:&ast::Generics, full_span: Span, prefix: &str, id: NodeId) { // We can't only use visit_generics since we don't have spans for param // bindings, so we reparse the full_span to get those sub 
spans. // However full span is the entire enum/fn/struct block, so we only want // the first few to match the number of generics we're looking for. let param_sub_spans = self.span.spans_for_ty_params(full_span, (generics.ty_params.len() as isize)); for (param, param_ss) in generics.ty_params.iter().zip(param_sub_spans.iter()) { // Append $id to name to make sure each one is unique let name = format!("{}::{}${}", prefix, escape(self.span.snippet(*param_ss)), id); self.fmt.typedef_str(full_span, Some(*param_ss), param.id, &name[..], ""); } self.visit_generics(generics); } fn process_fn(&mut self, item: &ast::Item, decl: &ast::FnDecl, ty_params: &ast::Generics, body: &ast::Block) { let fn_data = self.save_ctxt.get_item_data(item); if let super::Data::FunctionData(fn_data) = fn_data { self.fmt.fn_str(item.span, Some(fn_data.span), fn_data.id, &fn_data.qualname, fn_data.scope); self.process_formals(&decl.inputs, &fn_data.qualname); self.process_generic_params(ty_params, item.span, &fn_data.qualname, item.id); } else { unreachable!(); } for arg in &decl.inputs { self.visit_ty(&arg.ty); } if let ast::Return(ref ret_ty) = decl.output { self.visit_ty(&ret_ty); } self.nest(item.id, |v| v.visit_block(&body)); } fn process_static_or_const_item(&mut self, item: &ast::Item, typ: &ast::Ty, expr: &ast::Expr) { let var_data = self.save_ctxt.get_item_data(item); if let super::Data::VariableData(var_data) = var_data { self.fmt.static_str(item.span, Some(var_data.span), var_data.id, &var_data.name, &var_data.qualname, &var_data.value, &var_data.type_value, var_data.scope); } else { unreachable!(); } self.visit_ty(&typ); self.visit_expr(expr); } fn process_const(&mut self, id: ast::NodeId, ident: &ast::Ident, span: Span, typ: &ast::Ty, expr: &ast::Expr) { let qualname = format!("::{}", self.analysis.ty_cx.map.path_to_string(id)); let sub_span = self.span.sub_span_after_keyword(span, keywords::Const); self.fmt.static_str(span, sub_span, id, &get_ident((*ident).clone()), &qualname[..], &self.span.snippet(expr.span), &ty_to_string(&*typ), self.cur_scope); // walk type and init value self.visit_ty(typ); self.visit_expr(expr); } fn process_struct(&mut self, item: &ast::Item, def: &ast::StructDef, ty_params: &ast::Generics) { let qualname = format!("::{}", self.analysis.ty_cx.map.path_to_string(item.id)); let ctor_id = match def.ctor_id { Some(node_id) => node_id, None => -1, }; let val = self.span.snippet(item.span); let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Struct); self.fmt.struct_str(item.span, sub_span, item.id, ctor_id, &qualname[..], self.cur_scope, &val[..]); // fields for field in &def.fields { self.process_struct_field_def(field, &qualname[..], item.id); self.visit_ty(&*field.node.ty); } self.process_generic_params(ty_params, item.span, &qualname[..], item.id); } fn process_enum(&mut self, item: &ast::Item, enum_definition: &ast::EnumDef, ty_params: &ast::Generics) { let enum_name = format!("::{}", self.analysis.ty_cx.map.path_to_string(item.id)); let val = self.span.snippet(item.span); match self.span.sub_span_after_keyword(item.span, keywords::Enum) { Some(sub_span) => self.fmt.enum_str(item.span, Some(sub_span), item.id, &enum_name[..], self.cur_scope, &val[..]), None => self.sess.span_bug(item.span, &format!("Could not find subspan for enum {}", enum_name)), } for variant in &enum_definition.variants { let name = get_ident(variant.node.name); let name = &name; let mut qualname = enum_name.clone(); qualname.push_str("::"); qualname.push_str(name); let val = 
self.span.snippet(variant.span); match variant.node.kind { ast::TupleVariantKind(ref args) => { // first ident in span is the variant's name self.fmt.tuple_variant_str(variant.span, self.span.span_for_first_ident(variant.span), variant.node.id, name, &qualname[..], &enum_name[..], &val[..], item.id); for arg in args { self.visit_ty(&*arg.ty); } } ast::StructVariantKind(ref struct_def) => { let ctor_id = match struct_def.ctor_id { Some(node_id) => node_id, None => -1, }; self.fmt.struct_variant_str( variant.span, self.span.span_for_first_ident(variant.span), variant.node.id, ctor_id, &qualname[..], &enum_name[..], &val[..], item.id); for field in &struct_def.fields { self.process_struct_field_def(field, &qualname, variant.node.id); self.visit_ty(&*field.node.ty); } } } } self.process_generic_params(ty_params, item.span, &enum_name[..], item.id); } fn process_impl(&mut self, item: &ast::Item, type_parameters: &ast::Generics, trait_ref: &Option<ast::TraitRef>, typ: &ast::Ty, impl_items: &[P<ast::ImplItem>]) { let trait_id = trait_ref.as_ref().and_then(|tr| self.lookup_type_ref(tr.ref_id)); match typ.node { // Common case impl for a struct or something basic. ast::TyPath(None, ref path) => { let sub_span = self.span.sub_span_for_type_name(path.span); let self_id = self.lookup_type_ref(typ.id).map(|id| { self.fmt.ref_str(recorder::TypeRef, path.span, sub_span, id, self.cur_scope); id }); self.fmt.impl_str(path.span, sub_span, item.id, self_id, trait_id, self.cur_scope); }, _ => { // Less useful case, impl for a compound type. self.visit_ty(&*typ); let sub_span = self.span.sub_span_for_type_name(typ.span); self.fmt.impl_str(typ.span, sub_span, item.id, None, trait_id, self.cur_scope); } } match *trait_ref { Some(ref trait_ref) => self.process_trait_ref(trait_ref), None => (), } self.process_generic_params(type_parameters, item.span, "", item.id); for impl_item in impl_items { self.visit_impl_item(impl_item); } } fn process_trait(&mut self, item: &ast::Item, generics: &ast::Generics, trait_refs: &OwnedSlice<ast::TyParamBound>, methods: &[P<ast::TraitItem>]) { let qualname = format!("::{}", self.analysis.ty_cx.map.path_to_string(item.id)); let val = self.span.snippet(item.span); let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Trait); self.fmt.trait_str(item.span, sub_span, item.id, &qualname[..], self.cur_scope, &val[..]); // super-traits for super_bound in &**trait_refs { let trait_ref = match *super_bound { ast::TraitTyParamBound(ref trait_ref, _) => { trait_ref } ast::RegionTyParamBound(..) => { continue; } }; let trait_ref = &trait_ref.trait_ref; match self.lookup_type_ref(trait_ref.ref_id) { Some(id) => { let sub_span = self.span.sub_span_for_type_name(trait_ref.path.span); self.fmt.ref_str(recorder::TypeRef, trait_ref.path.span, sub_span, id, self.cur_scope); self.fmt.inherit_str(trait_ref.path.span, sub_span, id, item.id); }, None => () } } // walk generics and methods self.process_generic_params(generics, item.span, &qualname[..], item.id); for method in methods { self.visit_trait_item(method) } } fn process_mod(&mut self, item: &ast::Item, // The module in question, represented as an item. 
m: &ast::Mod) { let qualname = format!("::{}", self.analysis.ty_cx.map.path_to_string(item.id)); let cm = self.sess.codemap(); let filename = cm.span_to_filename(m.inner); let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Mod); self.fmt.mod_str(item.span, sub_span, item.id, &qualname[..], self.cur_scope, &filename[..]); self.nest(item.id, |v| visit::walk_mod(v, m)); } fn process_path(&mut self, id: NodeId, span: Span, path: &ast::Path, ref_kind: Option<recorder::Row>) { if generated_code(span) { return } let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&id) { self.sess.span_bug(span, &format!("def_map has no key for {} in visit_expr", id)); } let def = def_map.get(&id).unwrap().full_def(); let sub_span = self.span.span_for_last_ident(span); match def { def::DefUpvar(..) | def::DefLocal(..) | def::DefStatic(..) | def::DefConst(..) | def::DefAssociatedConst(..) | def::DefVariant(..) => self.fmt.ref_str(ref_kind.unwrap_or(recorder::VarRef), span, sub_span, def.def_id(), self.cur_scope), def::DefStruct(def_id) => self.fmt.ref_str(recorder::StructRef, span, sub_span, def_id, self.cur_scope), def::DefTy(def_id, _) => self.fmt.ref_str(recorder::TypeRef, span, sub_span, def_id, self.cur_scope), def::DefMethod(declid, provenence) => { let sub_span = self.span.sub_span_for_meth_name(span); let defid = if declid.krate == ast::LOCAL_CRATE { let ti = ty::impl_or_trait_item(&self.analysis.ty_cx, declid); match provenence { def::FromTrait(def_id) => { Some(ty::trait_items(&self.analysis.ty_cx, def_id) .iter() .find(|mr| { mr.name() == ti.name() }) .unwrap() .def_id()) } def::FromImpl(def_id) => { let impl_items = self.analysis .ty_cx .impl_items .borrow(); Some(impl_items.get(&def_id) .unwrap() .iter() .find(|mr| { ty::impl_or_trait_item( &self.analysis.ty_cx, mr.def_id() ).name() == ti.name() }) .unwrap() .def_id()) } } } else { None }; self.fmt.meth_call_str(span, sub_span, defid, Some(declid), self.cur_scope); }, def::DefFn(def_id, _) => { self.fmt.fn_call_str(span, sub_span, def_id, self.cur_scope) } _ => self.sess.span_bug(span, &format!("Unexpected def kind while looking \ up path in `{}`: `{:?}`", self.span.snippet(span), def)), } // modules or types in the path prefix match def { def::DefMethod(did, _) => { let ti = ty::impl_or_trait_item(&self.analysis.ty_cx, did); if let ty::MethodTraitItem(m) = ti { if m.explicit_self == ty::StaticExplicitSelfCategory { self.write_sub_path_trait_truncated(path); } } } def::DefLocal(_) | def::DefStatic(_,_) | def::DefConst(..) | def::DefAssociatedConst(..) | def::DefStruct(_) | def::DefVariant(..) | def::DefFn(..) 
=> self.write_sub_paths_truncated(path, false), _ => {}, } } fn process_struct_lit(&mut self, ex: &ast::Expr, path: &ast::Path, fields: &Vec<ast::Field>, base: &Option<P<ast::Expr>>) { if generated_code(path.span) { return } self.write_sub_paths_truncated(path, false); let ty = &ty::expr_ty_adjusted(&self.analysis.ty_cx, ex).sty; let struct_def = match *ty { ty::ty_struct(def_id, _) => { let sub_span = self.span.span_for_last_ident(path.span); self.fmt.ref_str(recorder::StructRef, path.span, sub_span, def_id, self.cur_scope); Some(def_id) } _ => None }; for field in fields { match struct_def { Some(struct_def) => { let fields = ty::lookup_struct_fields(&self.analysis.ty_cx, struct_def); for f in &fields { if generated_code(field.ident.span) { continue; } if f.name == field.ident.node.name { // We don't really need a sub-span here, but no harm done let sub_span = self.span.span_for_last_ident(field.ident.span); self.fmt.ref_str(recorder::VarRef, field.ident.span, sub_span, f.id, self.cur_scope); } } } None => {} } self.visit_expr(&*field.expr) } visit::walk_expr_opt(self, base) } fn process_method_call(&mut self, ex: &ast::Expr, args: &Vec<P<ast::Expr>>) { let method_map = self.analysis.ty_cx.method_map.borrow(); let method_callee = method_map.get(&ty::MethodCall::expr(ex.id)).unwrap(); let (def_id, decl_id) = match method_callee.origin { ty::MethodStatic(def_id) | ty::MethodStaticClosure(def_id) => { // method invoked on an object with a concrete type (not a static method) let decl_id = match ty::trait_item_of_item(&self.analysis.ty_cx, def_id) { None => None, Some(decl_id) => Some(decl_id.def_id()), }; // This incantation is required if the method referenced is a // trait's default implementation. let def_id = match ty::impl_or_trait_item(&self.analysis .ty_cx, def_id) { ty::MethodTraitItem(method) => { method.provided_source.unwrap_or(def_id) } _ => self.sess .span_bug(ex.span, "save::process_method_call: non-method \ DefId in MethodStatic or MethodStaticClosure"), }; (Some(def_id), decl_id) } ty::MethodTypeParam(ref mp) => { // method invoked on a type parameter let trait_item = ty::trait_item(&self.analysis.ty_cx, mp.trait_ref.def_id, mp.method_num); (None, Some(trait_item.def_id())) } ty::MethodTraitObject(ref mo) => { // method invoked on a trait instance let trait_item = ty::trait_item(&self.analysis.ty_cx, mo.trait_ref.def_id, mo.method_num); (None, Some(trait_item.def_id())) } }; let sub_span = self.span.sub_span_for_meth_name(ex.span); self.fmt.meth_call_str(ex.span, sub_span, def_id, decl_id, self.cur_scope); // walk receiver and args visit::walk_exprs(self, &args[..]); } fn process_pat(&mut self, p:&ast::Pat) { if generated_code(p.span) { return } match p.node { ast::PatStruct(ref path, ref fields, _) => { visit::walk_path(self, path); let def = self.analysis.ty_cx.def_map.borrow().get(&p.id).unwrap().full_def(); let struct_def = match def { def::DefConst(..) | def::DefAssociatedConst(..) 
=> None, def::DefVariant(_, variant_id, _) => Some(variant_id), _ => { match ty::ty_to_def_id(ty::node_id_to_type(&self.analysis.ty_cx, p.id)) { None => { self.sess.span_bug(p.span, &format!("Could not find struct_def for `{}`", self.span.snippet(p.span))); } Some(def_id) => Some(def_id), } } }; if let Some(struct_def) = struct_def { let struct_fields = ty::lookup_struct_fields(&self.analysis.ty_cx, struct_def); for &Spanned { node: ref field, span } in fields { let sub_span = self.span.span_for_first_ident(span); for f in &struct_fields { if f.name == field.ident.name { self.fmt.ref_str(recorder::VarRef, span, sub_span, f.id, self.cur_scope); break; } } self.visit_pat(&*field.pat); } } } _ => visit::walk_pat(self, p) } } } impl<'l, 'tcx, 'v> Visitor<'v> for DumpCsvVisitor<'l, 'tcx> { fn visit_item(&mut self, item: &ast::Item) { if generated_code(item.span) { return } match item.node { ast::ItemUse(ref use_item) => { match use_item.node { ast::ViewPathSimple(ident, ref path) => { let sub_span = self.span.span_for_last_ident(path.span); let mod_id = match self.lookup_type_ref(item.id) { Some(def_id) => { match self.lookup_def_kind(item.id, path.span) { Some(kind) => self.fmt.ref_str(kind, path.span, sub_span, def_id, self.cur_scope), None => {}, } Some(def_id) }, None => None, }; // 'use' always introduces an alias, if there is not an explicit // one, there is an implicit one. let sub_span = match self.span.sub_span_after_keyword(use_item.span, keywords::As) { Some(sub_span) => Some(sub_span), None => sub_span, }; self.fmt.use_alias_str(path.span, sub_span, item.id, mod_id, &get_ident(ident), self.cur_scope); self.write_sub_paths_truncated(path, true); } ast::ViewPathGlob(ref path) => { // Make a comma-separated list of names of imported modules. let mut name_string = String::new(); let glob_map = &self.analysis.glob_map; let glob_map = glob_map.as_ref().unwrap(); if glob_map.contains_key(&item.id) { for n in glob_map.get(&item.id).unwrap() { if !name_string.is_empty() { name_string.push_str(", "); } name_string.push_str(n.as_str()); } } let sub_span = self.span.sub_span_of_token(path.span, token::BinOp(token::Star)); self.fmt.use_glob_str(path.span, sub_span, item.id, &name_string, self.cur_scope); self.write_sub_paths(path, true); } ast::ViewPathList(ref path, ref list) => { for plid in list { match plid.node { ast::PathListIdent { id, .. } => { match self.lookup_type_ref(id) { Some(def_id) => match self.lookup_def_kind(id, plid.span) { Some(kind) => { self.fmt.ref_str( kind, plid.span, Some(plid.span), def_id, self.cur_scope); } None => () }, None => () } }, ast::PathListMod { .. 
} => () } } self.write_sub_paths(path, true); } } } ast::ItemExternCrate(ref s) => { let name = get_ident(item.ident); let name = &name; let location = match *s { Some(s) => s.to_string(), None => name.to_string(), }; let alias_span = self.span.span_for_last_ident(item.span); let cnum = match self.sess.cstore.find_extern_mod_stmt_cnum(item.id) { Some(cnum) => cnum, None => 0, }; self.fmt.extern_crate_str(item.span, alias_span, item.id, cnum, name, &location[..], self.cur_scope); } ast::ItemFn(ref decl, _, _, ref ty_params, ref body) => self.process_fn(item, &**decl, ty_params, &**body), ast::ItemStatic(ref typ, _, ref expr) => self.process_static_or_const_item(item, typ, expr), ast::ItemConst(ref typ, ref expr) => self.process_static_or_const_item(item, &typ, &expr), ast::ItemStruct(ref def, ref ty_params) => self.process_struct(item, &**def, ty_params), ast::ItemEnum(ref def, ref ty_params) => self.process_enum(item, def, ty_params), ast::ItemImpl(_, _, ref ty_params, ref trait_ref, ref typ, ref impl_items) => { self.process_impl(item, ty_params, trait_ref, &**typ, impl_items) } ast::ItemTrait(_, ref generics, ref trait_refs, ref methods) => self.process_trait(item, generics, trait_refs, methods), ast::ItemMod(ref m) => self.process_mod(item, m), ast::ItemTy(ref ty, ref ty_params) => { let qualname = format!("::{}", self.analysis.ty_cx.map.path_to_string(item.id)); let value = ty_to_string(&**ty); let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Type); self.fmt.typedef_str(item.span, sub_span, item.id, &qualname[..], &value[..]); self.visit_ty(&**ty); self.process_generic_params(ty_params, item.span, &qualname, item.id); }, ast::ItemMac(_) => (), _ => visit::walk_item(self, item), } } fn visit_generics(&mut self, generics: &ast::Generics) { for param in &*generics.ty_params { for bound in &*param.bounds { if let ast::TraitTyParamBound(ref trait_ref, _) = *bound { self.process_trait_ref(&trait_ref.trait_ref); } } if let Some(ref ty) = param.default { self.visit_ty(&**ty); } } } fn visit_trait_item(&mut self, trait_item: &ast::TraitItem) { match trait_item.node { ast::ConstTraitItem(ref ty, Some(ref expr)) => { self.process_const(trait_item.id, &trait_item.ident, trait_item.span, &*ty, &*expr); } ast::MethodTraitItem(ref sig, ref body) => { self.process_method(sig, body.as_ref().map(|x| &**x), trait_item.id, trait_item.ident.name, trait_item.span); } ast::ConstTraitItem(_, None) | ast::TypeTraitItem(..) => {} } } fn visit_impl_item(&mut self, impl_item: &ast::ImplItem) { match impl_item.node { ast::ConstImplItem(ref ty, ref expr) => { self.process_const(impl_item.id, &impl_item.ident, impl_item.span, &ty, &expr); } ast::MethodImplItem(ref sig, ref body) => { self.process_method(sig, Some(body), impl_item.id, impl_item.ident.name, impl_item.span); } ast::TypeImplItem(_) | ast::MacImplItem(_) => {} } } fn visit_ty(&mut self, t: &ast::Ty) { if generated_code(t.span) { return } match t.node { ast::TyPath(_, ref path) => { match self.lookup_type_ref(t.id) { Some(id) => { let sub_span = self.span.sub_span_for_type_name(t.span); self.fmt.ref_str(recorder::TypeRef, t.span, sub_span, id, self.cur_scope); }, None => () } self.write_sub_paths_truncated(path, false); visit::walk_path(self, path); }, _ => visit::walk_ty(self, t), } } fn visit_expr(&mut self, ex: &ast::Expr) { if generated_code(ex.span) { return } match ex.node { ast::ExprCall(ref _f, ref _args) => { // Don't need to do anything for function calls, // because just walking the callee path does what we want. 
visit::walk_expr(self, ex); } ast::ExprPath(_, ref path) => { self.process_path(ex.id, path.span, path, None); visit::walk_expr(self, ex); } ast::ExprStruct(ref path, ref fields, ref base) => self.process_struct_lit(ex, path, fields, base), ast::ExprMethodCall(_, _, ref args) => self.process_method_call(ex, args), ast::ExprField(ref sub_ex, ident) => { if generated_code(sub_ex.span) { return } self.visit_expr(&**sub_ex); let ty = &ty::expr_ty_adjusted(&self.analysis.ty_cx, &**sub_ex).sty; match *ty { ty::ty_struct(def_id, _) => { let fields = ty::lookup_struct_fields(&self.analysis.ty_cx, def_id); for f in &fields { if f.name == ident.node.name { let sub_span = self.span.span_for_last_ident(ex.span); self.fmt.ref_str(recorder::VarRef, ex.span, sub_span, f.id, self.cur_scope); break; } } } _ => self.sess.span_bug(ex.span, &format!("Expected struct type, found {:?}", ty)), } }, ast::ExprTupField(ref sub_ex, idx) => { if generated_code(sub_ex.span) { return } self.visit_expr(&**sub_ex); let ty = &ty::expr_ty_adjusted(&self.analysis.ty_cx, &**sub_ex).sty; match *ty { ty::ty_struct(def_id, _) => { let fields = ty::lookup_struct_fields(&self.analysis.ty_cx, def_id); for (i, f) in fields.iter().enumerate() { if i == idx.node { let sub_span = self.span.sub_span_after_token(ex.span, token::Dot); self.fmt.ref_str(recorder::VarRef, ex.span, sub_span, f.id, self.cur_scope); break; } } } ty::ty_tup(_) => {} _ => self.sess.span_bug(ex.span, &format!("Expected struct or tuple \ type, found {:?}", ty)), } }, ast::ExprClosure(_, ref decl, ref body) => { if generated_code(body.span) { return } let mut id = String::from_str("$"); id.push_str(&ex.id.to_string()); self.process_formals(&decl.inputs, &id[..]); // walk arg and return types for arg in &decl.inputs { self.visit_ty(&*arg.ty); } if let ast::Return(ref ret_ty) = decl.output { self.visit_ty(&**ret_ty); } // walk the body self.nest(ex.id, |v| v.visit_block(&**body)); }, _ => { visit::walk_expr(self, ex) } } } fn visit_mac(&mut self, _: &ast::Mac) { // Just stop, macros are poison to us. } fn visit_pat(&mut self, p: &ast::Pat) { self.process_pat(p); } fn visit_arm(&mut self, arm: &ast::Arm) { let mut collector = PathCollector::new(); for pattern in &arm.pats { // collect paths from the arm's patterns collector.visit_pat(&pattern); self.visit_pat(&pattern); } // This is to get around borrow checking, because we need mut self to call process_path. let mut paths_to_process = vec![]; // process collected paths for &(id, ref p, immut, ref_kind) in &collector.collected_paths { let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&id) { self.sess.span_bug(p.span, &format!("def_map has no key for {} in visit_arm", id)); } let def = def_map.get(&id).unwrap().full_def(); match def { def::DefLocal(id) => { let value = if immut == ast::MutImmutable { self.span.snippet(p.span).to_string() } else { "<mutable>".to_string() }; assert!(p.segments.len() == 1, "qualified path for local variable def in arm"); self.fmt.variable_str(p.span, Some(p.span), id, &path_to_string(p), &value[..], "") } def::DefVariant(..) | def::DefTy(..) | def::DefStruct(..) => { paths_to_process.push((id, p.clone(), Some(ref_kind))) } // FIXME(nrc) what are these doing here? def::DefStatic(_, _) | def::DefConst(..) | def::DefAssociatedConst(..) 
=> {} _ => error!("unexpected definition kind when processing collected paths: {:?}", def) } }<|fim▁hole|> } visit::walk_expr_opt(self, &arm.guard); self.visit_expr(&*arm.body); } fn visit_stmt(&mut self, s: &ast::Stmt) { if generated_code(s.span) { return } visit::walk_stmt(self, s) } fn visit_local(&mut self, l: &ast::Local) { if generated_code(l.span) { return } // The local could declare multiple new vars, we must walk the // pattern and collect them all. let mut collector = PathCollector::new(); collector.visit_pat(&l.pat); self.visit_pat(&l.pat); let value = self.span.snippet(l.span); for &(id, ref p, immut, _) in &collector.collected_paths { let value = if immut == ast::MutImmutable { value.to_string() } else { "<mutable>".to_string() }; let types = self.analysis.ty_cx.node_types(); let typ = ppaux::ty_to_string(&self.analysis.ty_cx, *types.get(&id).unwrap()); // Get the span only for the name of the variable (I hope the path // is only ever a variable name, but who knows?). let sub_span = self.span.span_for_last_ident(p.span); // Rust uses the id of the pattern for var lookups, so we'll use it too. self.fmt.variable_str(p.span, sub_span, id, &path_to_string(p), &value[..], &typ[..]); } // Just walk the initialiser and type (don't want to walk the pattern again). visit::walk_ty_opt(self, &l.ty); visit::walk_expr_opt(self, &l.init); } }<|fim▁end|>
for &(id, ref path, ref_kind) in &paths_to_process { self.process_path(id, path.span, path, ref_kind);
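The completion above is easier to follow when spliced back into the `visit_arm` suffix of the preceding prompt. A best-effort reassembly follows; the indentation, line breaks, and the leading comment are editorial, since the dump stores the code flattened, and the brace nesting is inferred from the surrounding match and loop:

```rust
    // ... end of the loop over collector.collected_paths shown in the prompt ...
    }

    for &(id, ref path, ref_kind) in &paths_to_process {
        self.process_path(id, path.span, path, ref_kind);
    }
    visit::walk_expr_opt(self, &arm.guard);
    self.visit_expr(&*arm.body);
}
```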
<|file_name|>Layer.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /****************************************************************************** ** Edit History * **---------------------------------------------------------------------------* ** DATE Module DESCRIPTION * ** 16/08/2013 Hardware Composer Add a new feature to Harware composer, * ** verlayComposer use GPU to do the * ** Hardware layer blending on Overlay * ** buffer, and then post the OVerlay * ** buffer to Display * ****************************************************************************** ** Author: [email protected] * *****************************************************************************/ #include "Layer.h" #include "GLErro.h" #include "OverlayComposer.h" namespace android { //#define _DEBUG #ifdef _DEBUG #define GL_CHECK(x) \ x; \ { \ GLenum err = glGetError(); \ if(err != GL_NO_ERROR) { \ ALOGE("glGetError() = %i (0x%.8x) at line %i\n", err, err, __LINE__); \ } \ } #else #define GL_CHECK(x) x #endif static GLfloat vertices[] = { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f }; static GLfloat texcoords[] = { 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f }; static float mtxFlipH[16] = { -1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, }; static float mtxFlipV[16] = { 1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, }; static float mtxRot90[16] = { 0, 1, 0, 0, -1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, }; static float mtxRot180[16] = { -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, }; static float mtxRot270[16] = { 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, }; static float mtxIdentity[16] = { 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, }; struct TexCoords { GLfloat u; GLfloat v; }; GLfloat mVertices[4][2]; struct TexCoords texCoord[4]; Layer::Layer(OverlayComposer* composer, struct private_handle_t *h) : mComposer(composer), mPrivH(h), mImage(EGL_NO_IMAGE_KHR), mTexTarget(GL_TEXTURE_EXTERNAL_OES), mTexName(-1U), mTransform(0), mAlpha(0.0), mSkipFlag(false) { bool ret = init(); if (!ret) { ALOGE("Layer Init failed"); return; } } bool Layer::init() { if (!wrapGraphicBuffer()) { ALOGE("wrap GraphicBuffer failed"); return false; } if (!createTextureImage()) { ALOGE("createEGLImage failed"); return false; } /* Initialize Premultiplied Alpha */ mPremultipliedAlpha = true; mNumVertices = 4; mFilteringEnabled = true; return true; } Layer::~Layer() { unWrapGraphicBuffer(); destroyTextureImage(); } bool Layer::wrapGraphicBuffer() { uint32_t size; uint32_t stride; getSizeStride(mPrivH->width, mPrivH->height, mPrivH->format, size, stride); mGFXBuffer = new GraphicBuffer(mPrivH->width, mPrivH->height, mPrivH->format, GraphicBuffer::USAGE_HW_TEXTURE, stride, (native_handle_t*)mPrivH, false); if (mGFXBuffer->initCheck() != NO_ERROR) { ALOGE("buf_src create fail"); return false; } return true; } void Layer::unWrapGraphicBuffer() { return; } bool Layer::createTextureImage() { GLint error; static EGLint attribs[] = { EGL_IMAGE_PRESERVED_KHR, 
EGL_TRUE, EGL_NONE }; EGLDisplay mDisplay = eglGetCurrentDisplay(); mImage = eglCreateImageKHR(mDisplay, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID, (EGLClientBuffer)mGFXBuffer->getNativeBuffer(), attribs); checkEGLErrors("eglCreateImageKHR"); if (mImage == EGL_NO_IMAGE_KHR) { ALOGE("Create EGL Image failed, error = %x", eglGetError()); return false; } glGenTextures(1, &mTexName); checkGLErrors(); glBindTexture(mTexTarget, mTexName); checkGLErrors(); glEGLImageTargetTexture2DOES(mTexTarget, (GLeglImageOES)mImage); checkGLErrors(); while ((error = glGetError()) != GL_NO_ERROR) { ALOGE("createTextureImage error binding external texture image %p, (slot %p): %#04x", mImage, mGFXBuffer.get(), error); return false; } return true; } void Layer::destroyTextureImage() { EGLDisplay mDisplay = eglGetCurrentDisplay(); eglDestroyImageKHR(mDisplay, mImage); checkEGLErrors("eglDestroyImageKHR"); glDeleteTextures(1, &mTexName); } void Layer::setLayerAlpha(float alpha) { mAlpha = alpha; } bool Layer::setLayerTransform(uint32_t transform) { mTransform = transform; return true; } bool Layer::setLayerRect(struct LayerRect *rect, struct LayerRect *rV) { if (rect == NULL && rV == NULL) { ALOGE("The rectangle is NULL"); return false; } mRect = rect; mRV = rV; return true; } void Layer::mtxMul(float out[16], const float a[16], const float b[16]) { out[0] = a[0]*b[0] + a[4]*b[1] + a[8]*b[2] + a[12]*b[3]; out[1] = a[1]*b[0] + a[5]*b[1] + a[9]*b[2] + a[13]*b[3]; out[2] = a[2]*b[0] + a[6]*b[1] + a[10]*b[2] + a[14]*b[3]; out[3] = a[3]*b[0] + a[7]*b[1] + a[11]*b[2] + a[15]*b[3]; out[4] = a[0]*b[4] + a[4]*b[5] + a[8]*b[6] + a[12]*b[7]; out[5] = a[1]*b[4] + a[5]*b[5] + a[9]*b[6] + a[13]*b[7]; out[6] = a[2]*b[4] + a[6]*b[5] + a[10]*b[6] + a[14]*b[7]; out[7] = a[3]*b[4] + a[7]*b[5] + a[11]*b[6] + a[15]*b[7]; out[8] = a[0]*b[8] + a[4]*b[9] + a[8]*b[10] + a[12]*b[11]; out[9] = a[1]*b[8] + a[5]*b[9] + a[9]*b[10] + a[13]*b[11]; out[10] = a[2]*b[8] + a[6]*b[9] + a[10]*b[10] + a[14]*b[11]; out[11] = a[3]*b[8] + a[7]*b[9] + a[11]*b[10] + a[15]*b[11]; out[12] = a[0]*b[12] + a[4]*b[13] + a[8]*b[14] + a[12]*b[15]; out[13] = a[1]*b[12] + a[5]*b[13] + a[9]*b[14] + a[13]*b[15]; out[14] = a[2]*b[12] + a[6]*b[13] + a[10]*b[14] + a[14]*b[15]; out[15] = a[3]*b[12] + a[7]*b[13] + a[11]*b[14] + a[15]*b[15]; } void Layer::computeTransformMatrix() { float xform[16]; bool mFilteringEnabled = true; float tx = 0.0f, ty = 0.0f, sx = 1.0f, sy = 1.0f; sp<GraphicBuffer>& buf(mGFXBuffer); float bufferWidth = buf->getWidth(); float bufferHeight = buf->getHeight(); memcpy(xform, mtxIdentity, sizeof(xform)); if (mTransform & NATIVE_WINDOW_TRANSFORM_FLIP_H) { float result[16]; mtxMul(result, xform, mtxFlipH); memcpy(xform, result, sizeof(xform)); } if (mTransform & NATIVE_WINDOW_TRANSFORM_FLIP_V) { float result[16]; mtxMul(result, xform, mtxFlipV); memcpy(xform, result, sizeof(xform)); } if (mTransform & NATIVE_WINDOW_TRANSFORM_ROT_90) { float result[16]; mtxMul(result, xform, mtxRot90); memcpy(xform, result, sizeof(xform)); } if (mRect) { float shrinkAmount = 0.0f; if (mFilteringEnabled) { /* In order to prevent bilinear sampling beyond the edge of the * crop rectangle we may need to shrink it by 2 texels in each * dimension. Normally this would just need to take 1/2 a texel * off each end, but because the chroma channels of YUV420 images * are subsampled we may need to shrink the crop region by a whole * texel on each side. 
* */ switch (buf->getPixelFormat()) { case PIXEL_FORMAT_RGBA_8888: case PIXEL_FORMAT_RGBX_8888: case PIXEL_FORMAT_RGB_888: case PIXEL_FORMAT_RGB_565: case PIXEL_FORMAT_BGRA_8888: case PIXEL_FORMAT_RGBA_5551: case PIXEL_FORMAT_RGBA_4444: // We know there's no subsampling of any channels, so we // only need to shrink by a half a pixel. shrinkAmount = 0.5; default: // If we don't recognize the format, we must assume the // worst case (that we care about), which is YUV420. shrinkAmount = 1.0; } } // Only shrink the dimensions that are not the size of the buffer. int width = mRect->right - mRect->left; int height = mRect->bottom - mRect->top; if ( width < bufferWidth) { tx = ((float)(mRect->left) + shrinkAmount) / bufferWidth; sx = ((float)(width) - (2.0f * shrinkAmount)) / bufferWidth; } if (height < bufferHeight) { ty = ((float)(bufferHeight - mRect->bottom) + shrinkAmount) / bufferHeight; sy = ((float)(height) - (2.0f * shrinkAmount)) / bufferHeight; } } float crop[16] = { sx, 0, 0, 0, 0, sy, 0, 0, 0, 0, 1, 0, tx, ty, 0, 1, }; float mtxBeforeFlipV[16]; mtxMul(mtxBeforeFlipV, crop, xform); // We expects the top of its window textures to be at a Y // coordinate of 0, so SurfaceTexture must behave the same way. We don't // want to expose this to applications, however, so we must add an // additional vertical flip to the transform after all the other transforms. mtxMul(mCurrentTransformMatrix, mtxFlipV, mtxBeforeFlipV); } bool Layer::prepareDrawData() { sp<GraphicBuffer>& buf(mGFXBuffer); GLfloat left = GLfloat(mRect->left) / GLfloat(mRect->right); GLfloat top = GLfloat(mRect->top) / GLfloat(mRect->bottom); GLfloat right = GLfloat(mRect->right) / GLfloat(mRect->right); GLfloat bottom = GLfloat(mRect->bottom) / GLfloat(mRect->bottom); /* * The video layer height maybe loss some accuracy * when GPU transform float number into int number. * Here, just Compensate for the loss. * */ int format = buf->getPixelFormat(); if ((mTransform == 0 ) && (format == HAL_PIXEL_FORMAT_YCbCr_420_SP || format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12)) { float height = float(mRect->bottom - mRect->top); float pixelOffset = 1.0 / height; top -= pixelOffset; bottom += pixelOffset; } /* * Some RGB layer is cropped, it will cause RGB layer display abnormal. * Here, just correct the RGB layer to right region. * */ if ((mRect->top > 0) && (format == HAL_PIXEL_FORMAT_RGBA_8888 || format == HAL_PIXEL_FORMAT_RGBX_8888 || format == HAL_PIXEL_FORMAT_RGB_565)) { float pixelOffset = 1.0 / float(mRect->bottom); top -= float(mRect->top) * pixelOffset; } texCoord[0].u = texCoord[1].u = left; texCoord[0].v = texCoord[3].v = top; texCoord[1].v = texCoord[2].v = bottom; texCoord[2].u = texCoord[3].u = right; for (int i = 0; i < 4; i++) { texCoord[i].v = 1.0f - texCoord[i].v; } /* * Caculate the vertex coordinate * */ vertices[0] = (GLfloat)mRV->left; vertices[1] = (GLfloat)mRV->top; vertices[2] = (GLfloat)mRV->left; vertices[3] = (GLfloat)mRV->bottom; vertices[4] = (GLfloat)mRV->right; vertices[5] = (GLfloat)mRV->bottom; vertices[6] = (GLfloat)mRV->right; vertices[7] = (GLfloat)mRV->top; unsigned int fb_height = mComposer->getDisplayPlane()->getHeight(); vertices[1] = (GLfloat)fb_height - vertices[1]; vertices[3] = (GLfloat)fb_height - vertices[3]; vertices[5] = (GLfloat)fb_height - vertices[5]; vertices[7] = (GLfloat)fb_height - vertices[7]; /* * Here, some region from SurfacFlinger have exceeded the screen * size. So we remove these abnormal region, it will reduce some * garbage when rotating the phone. 
* Temporary disable this parameters check * */ /*if (mRV->left < 0 || mRV->left > mFBWidth || mRV->right == mRV->bottom || mRV->top < 0 || mRV->top > mFBHeight || mRV->right > mFBWidth || mRV->bottom > mFBHeight) { mSkipFlag = true; memset(vertices, 0, sizeof(vertices)); }*/ return true; } int Layer::draw() { int status = -1; /* mSkipFlag = false; */ prepareDrawData(); /* if (mSkipFlag) { ALOGD("Skip this frame"); mSkipFlag = false; return 0; }*/ glTexParameterx(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameterx(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameterx(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameterx(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); computeTransformMatrix(); glMatrixMode(GL_TEXTURE); glLoadMatrixf(mCurrentTransformMatrix); glMatrixMode(GL_MODELVIEW); glLoadIdentity(); glDisable(GL_TEXTURE_2D); glEnable(GL_TEXTURE_EXTERNAL_OES); #ifdef PRIMARYPLANE_USE_RGB565 glEnable(GL_DITHER); #endif /* * Start call openGLES Draw list here * By default, we use Premultiplied Alpha * */ GLenum src = mPremultipliedAlpha ? GL_ONE : GL_SRC_ALPHA; //if (mAlpha < 0xFF) //{ // const GLfloat alpha = (GLfloat)mAlpha * (1.0f/255.0f); // if (mPremultipliedAlpha) // { // glColor4f(alpha, alpha, alpha, alpha); // } // else // { // glColor4f(1, 1, 1, alpha); // } // glEnable(GL_BLEND); // glBlendFunc(src, GL_ONE_MINUS_SRC_ALPHA); // glTexEnvx(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE); //} //else { glColor4f(1, 1, 1, 1); glTexEnvx(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE); glEnable(GL_BLEND); glBlendFunc(src, GL_ONE_MINUS_SRC_ALPHA); } glEnableClientState(GL_TEXTURE_COORD_ARRAY); glEnableClientState(GL_VERTEX_ARRAY); glVertexPointer(2, GL_FLOAT, 0, vertices); glTexCoordPointer(2, GL_FLOAT, 0, texCoord); GL_CHECK(glDrawArrays(GL_TRIANGLE_FAN, 0, mNumVertices));<|fim▁hole|> #ifdef PRIMARYPLANE_USE_RGB565 glDisable(GL_DITHER); #endif glDisable(GL_TEXTURE_EXTERNAL_OES); glDisable(GL_TEXTURE_2D); status = 0; return status; } };<|fim▁end|>
glDisableClientState(GL_TEXTURE_COORD_ARRAY); glDisable(GL_BLEND);
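Spliced into the `Layer::draw()` suffix of the Layer.cpp prompt, the two GL calls from this completion sit between the draw call and the remaining state teardown. The code text is taken from the example as given; only layout is reconstructed:

```cpp
    GL_CHECK(glDrawArrays(GL_TRIANGLE_FAN, 0, mNumVertices));

    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
    glDisable(GL_BLEND);

#ifdef PRIMARYPLANE_USE_RGB565
    glDisable(GL_DITHER);
#endif
    glDisable(GL_TEXTURE_EXTERNAL_OES);
    glDisable(GL_TEXTURE_2D);

    status = 0;
    return status;
}
```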
<|file_name|>robot.rs<|end_file_name|><|fim▁begin|>use wpilib::wpilib_hal::*; /// The base class from which all robots should be derived. /// /// # Usage /// /// ``` /// struct TestRobot {}; /// /// impl Robot for TestRobot { /// fn new() -> TestRobot { /// TestRobot{} /// } /// /// fn run(self) { /// // Do something... /// } /// } /// /// fn main() { /// TestRobot::main(); /// } /// ``` pub trait Robot: Sized { /// Run the robot class. This will be called once, at the beginning of the program, after /// initialization. fn run(self); /// Create an instance of the robot class. fn new() -> Self; /// Run the robot statically. fn main() {<|fim▁hole|> let status = HAL_Initialize(0); if status != 1 { panic!("WPILib HAL failed to initialize!"); } } let robot = Self::new(); robot.run(); } }<|fim▁end|>
// Initialize HAL unsafe {
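Putting this completion back into `fn main()` of the robot.rs prompt gives the trait's default method below; only whitespace and line breaks are reconstructed:

```rust
    /// Run the robot statically.
    fn main() {
        // Initialize HAL
        unsafe {
            let status = HAL_Initialize(0);
            if status != 1 {
                panic!("WPILib HAL failed to initialize!");
            }
        }

        let robot = Self::new();
        robot.run();
    }
```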
<|file_name|>metainfo_movie.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals, division, absolute_import import logging from builtins import * # noqa pylint: disable=unused-import, redefined-builtin from flexget import plugin from flexget.event import event try: # NOTE: Importing other plugins is discouraged! from flexget.components.parsing.parsers import parser_common as plugin_parser_common except ImportError: raise plugin.DependencyError(issued_by=__name__, missing='parser_common') log = logging.getLogger('metainfo_movie') class MetainfoMovie(object): """ Check if entry appears to be a movie, and populate movie info if so. """ schema = {'type': 'boolean'} def on_task_metainfo(self, task, config): # Don't run if we are disabled if config is False: return for entry in task.entries: # If movie parser already parsed this, don't touch it. if entry.get('id'): continue self.guess_entry(entry) @staticmethod def guess_entry(entry): """ Populates movie_* fields for entries that are successfully parsed. :param entry: Entry that's being processed :return: True for successful parse """ if entry.get('movie_guessed'): # Return true if we already parsed this return True parser = plugin.get('parsing', 'metainfo_movie').parse_movie(data=entry['title']) if parser and parser.valid: parser.name = plugin_parser_common.normalize_name( plugin_parser_common.remove_dirt(parser.name) ) for field, value in parser.fields.items(): if not entry.is_lazy(field) and not entry.get(field): entry[field] = value return True return False @event('plugin.register') def register_plugin():<|fim▁hole|><|fim▁end|>
plugin.register(MetainfoMovie, 'metainfo_movie', api_ver=2)
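Here the hole resolves to a single registration call. Assembled with the decorator and function header from the prompt, the end of metainfo_movie.py reads:

```python
@event('plugin.register')
def register_plugin():
    plugin.register(MetainfoMovie, 'metainfo_movie', api_ver=2)
```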
<|file_name|>metadata.rs<|end_file_name|><|fim▁begin|>use ape; use id3; use lewton::inside_ogg::OggStreamReader; use metaflac; use mp3_duration; use regex::Regex; use std::fs; use std::path::Path; use errors::*; use utils; use utils::AudioFormat; #[derive(Debug, Clone, PartialEq)] pub struct SongTags { pub disc_number: Option<u32>, pub track_number: Option<u32>, pub title: Option<String>, pub duration: Option<u32>, pub artist: Option<String>, pub album_artist: Option<String>, pub album: Option<String>, pub year: Option<i32>, } pub fn read(path: &Path) -> Result<SongTags> { match utils::get_audio_format(path) { Some(AudioFormat::FLAC) => read_flac(path), Some(AudioFormat::MP3) => read_id3(path), Some(AudioFormat::MPC) => read_ape(path), Some(AudioFormat::OGG) => read_vorbis(path), _ => bail!("Unsupported file format for reading metadata"), } } fn read_id3(path: &Path) -> Result<SongTags> { let tag = id3::Tag::read_from_path(&path)?; let duration = mp3_duration::from_path(&path).map(|d| d.as_secs() as u32).ok(); let artist = tag.artist().map(|s| s.to_string()); let album_artist = tag.album_artist().map(|s| s.to_string()); let album = tag.album().map(|s| s.to_string()); let title = tag.title().map(|s| s.to_string()); let disc_number = tag.disc(); let track_number = tag.track(); let year = tag.year() .map(|y| y as i32) .or(tag.date_released().and_then(|d| Some(d.year))) .or(tag.date_recorded().and_then(|d| Some(d.year))); Ok(SongTags { artist: artist, album_artist: album_artist, album: album, title: title, duration: duration, disc_number: disc_number, track_number: track_number, year: year, }) } fn read_ape_string(item: &ape::Item) -> Option<String> { match item.value { ape::ItemValue::Text(ref s) => Some(s.clone()), _ => None, } } fn read_ape_i32(item: &ape::Item) -> Option<i32> { match item.value { ape::ItemValue::Text(ref s) => s.parse::<i32>().ok(), _ => None, } } fn read_ape_x_of_y(item: &ape::Item) -> Option<u32> { match item.value { ape::ItemValue::Text(ref s) => { let format = Regex::new(r#"^\d+"#).unwrap(); if let Some(m) = format.find(s) { s[m.start()..m.end()].parse().ok() } else { None } } _ => None, } } fn read_ape(path: &Path) -> Result<SongTags> { let tag = ape::read(path)?; let artist = tag.item("Artist").and_then(read_ape_string); let album = tag.item("Album").and_then(read_ape_string); let album_artist = tag.item("Album artist").and_then(read_ape_string); let title = tag.item("Title").and_then(read_ape_string); let year = tag.item("Year").and_then(read_ape_i32); let disc_number = tag.item("Disc").and_then(read_ape_x_of_y); let track_number = tag.item("Track").and_then(read_ape_x_of_y); Ok(SongTags { artist: artist, album_artist: album_artist, album: album, title: title, duration: None, disc_number: disc_number, track_number: track_number, year: year, }) } fn read_vorbis(path: &Path) -> Result<SongTags> { let file = fs::File::open(path)?; let source = OggStreamReader::new(file)?; let mut tags = SongTags { artist: None, album_artist: None, album: None, title: None, duration:None, disc_number: None, track_number: None, year: None, }; for (key, value) in source.comment_hdr.comment_list { match key.as_str() { "TITLE" => tags.title = Some(value), "ALBUM" => tags.album = Some(value), "ARTIST" => tags.artist = Some(value), "ALBUMARTIST" => tags.album_artist = Some(value), "TRACKNUMBER" => tags.track_number = value.parse::<u32>().ok(), "DISCNUMBER" => tags.disc_number = value.parse::<u32>().ok(), "DATE" => tags.year = value.parse::<i32>().ok(), _ => (), } } Ok(tags) } fn 
read_flac(path: &Path) -> Result<SongTags> {<|fim▁hole|> .get("DISCNUMBER") .and_then(|d| d[0].parse::<u32>().ok()); let year = vorbis.get("DATE").and_then(|d| d[0].parse::<i32>().ok()); let streaminfo = tag.get_blocks(metaflac::BlockType::StreamInfo); let duration = match streaminfo.first() { Some(&&metaflac::Block::StreamInfo(ref s)) => Some((s.total_samples as u32 / s.sample_rate) as u32), _ => None }; Ok(SongTags { artist: vorbis.artist().map(|v| v[0].clone()), album_artist: vorbis.album_artist().map(|v| v[0].clone()), album: vorbis.album().map(|v| v[0].clone()), title: vorbis.title().map(|v| v[0].clone()), duration: duration, disc_number: disc_number, track_number: vorbis.track(), year: year, }) } #[test] fn test_read_metadata() { let sample_tags = SongTags { disc_number: Some(3), track_number: Some(1), title: Some("TEST TITLE".into()), artist: Some("TEST ARTIST".into()), album_artist: Some("TEST ALBUM ARTIST".into()), album: Some("TEST ALBUM".into()), duration: None, year: Some(2016), }; let flac_sample_tag = SongTags {duration: Some(0), ..sample_tags.clone()}; let mp3_sample_tag = SongTags {duration: Some(0), ..sample_tags.clone()}; assert_eq!(read(Path::new("test/sample.mp3")).unwrap(), mp3_sample_tag); assert_eq!(read(Path::new("test/sample.ogg")).unwrap(), sample_tags); assert_eq!(read(Path::new("test/sample.flac")).unwrap(), flac_sample_tag); }<|fim▁end|>
let tag = metaflac::Tag::read_from_path(path)?; let vorbis = tag.vorbis_comments().ok_or("Missing Vorbis comments")?; let disc_number = vorbis
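Joining this completion with the prompt's prefix and suffix yields the opening of `read_flac`; the remainder of the function is abbreviated with a comment here, and the layout is editorial:

```rust
fn read_flac(path: &Path) -> Result<SongTags> {
    let tag = metaflac::Tag::read_from_path(path)?;
    let vorbis = tag.vorbis_comments().ok_or("Missing Vorbis comments")?;
    let disc_number = vorbis
        .get("DISCNUMBER")
        .and_then(|d| d[0].parse::<u32>().ok());
    let year = vorbis.get("DATE").and_then(|d| d[0].parse::<i32>().ok());
    // ... stream-info duration lookup and the SongTags construction continue as in the prompt's suffix
}
```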
<|file_name|>forms.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json import logging import django from django.conf import settings from django.utils import html from django.utils.translation import ugettext_lazy as _ from django.views.decorators.debug import sensitive_variables # noqa from oslo_utils import strutils import six from horizon import exceptions from horizon import forms from horizon import messages from openstack_dashboard import api from openstack_dashboard.dashboards.project.images \ import utils as image_utils from openstack_dashboard.dashboards.project.instances \ import utils as instance_utils LOG = logging.getLogger(__name__) def create_upload_form_attributes(prefix, input_type, name): """Creates attribute dicts for the switchable upload form :type prefix: str :param prefix: prefix (environment, template) of field :type input_type: str :param input_type: field type (file, raw, url) :type name: str :param name: translated text label to display to user :rtype: dict :return: an attribute set to pass to form build """ attributes = {'class': 'switched', 'data-switch-on': prefix + 'source'} attributes['data-' + prefix + 'source-' + input_type] = name return attributes class TemplateForm(forms.SelfHandlingForm): class Meta(object): name = _('Select Template') help_text = _('Select a template to launch a stack.') # TODO(jomara) - update URL choice for template & environment files<|fim▁hole|> base_choices = [('file', _('File')), ('raw', _('Direct Input'))] url_choice = [('url', _('URL'))] attributes = {'class': 'switchable', 'data-slug': 'templatesource'} template_source = forms.ChoiceField(label=_('Template Source'), choices=base_choices + url_choice, widget=forms.Select(attrs=attributes)) attributes = create_upload_form_attributes( 'template', 'file', _('Template File')) template_upload = forms.FileField( label=_('Template File'), help_text=_('A local template to upload.'), widget=forms.FileInput(attrs=attributes), required=False) attributes = create_upload_form_attributes( 'template', 'url', _('Template URL')) template_url = forms.URLField( label=_('Template URL'), help_text=_('An external (HTTP) URL to load the template from.'), widget=forms.TextInput(attrs=attributes), required=False) attributes = create_upload_form_attributes( 'template', 'raw', _('Template Data')) template_data = forms.CharField( label=_('Template Data'), help_text=_('The raw contents of the template.'), widget=forms.widgets.Textarea(attrs=attributes), required=False) attributes = {'data-slug': 'envsource', 'class': 'switchable'} environment_source = forms.ChoiceField( label=_('Environment Source'), choices=base_choices, widget=forms.Select(attrs=attributes), required=False) attributes = create_upload_form_attributes( 'env', 'file', _('Environment File')) environment_upload = forms.FileField( label=_('Environment File'), help_text=_('A local environment to upload.'), widget=forms.FileInput(attrs=attributes), required=False) attributes = create_upload_form_attributes( 'env', 'raw', 
_('Environment Data')) environment_data = forms.CharField( label=_('Environment Data'), help_text=_('The raw contents of the environment file.'), widget=forms.widgets.Textarea(attrs=attributes), required=False) if django.VERSION >= (1, 9): # Note(Itxaka): On django>=1.9 Charfield has an strip option that # we need to set to False as to not hit # https://bugs.launchpad.net/python-heatclient/+bug/1546166 environment_data.strip = False template_data.strip = False def __init__(self, *args, **kwargs): self.next_view = kwargs.pop('next_view') super(TemplateForm, self).__init__(*args, **kwargs) def clean(self): cleaned = super(TemplateForm, self).clean() files = self.request.FILES self.clean_uploaded_files('template', _('template'), cleaned, files) self.clean_uploaded_files('environment', _('environment'), cleaned, files) # Validate the template and get back the params. kwargs = {} if cleaned['environment_data']: kwargs['environment'] = cleaned['environment_data'] try: files, tpl =\ api.heat.get_template_files(cleaned.get('template_data'), cleaned.get('template_url')) kwargs['files'] = files kwargs['template'] = tpl validated = api.heat.template_validate(self.request, **kwargs) cleaned['template_validate'] = validated cleaned['template_validate']['files'] = files cleaned['template_validate']['template'] = tpl except Exception as e: raise forms.ValidationError(six.text_type(e)) return cleaned def clean_uploaded_files(self, prefix, field_label, cleaned, files): """Cleans Template & Environment data from form upload. Does some of the crunchy bits for processing uploads vs raw data depending on what the user specified. Identical process for environment data & template data. :type prefix: str :param prefix: prefix (environment, template) of field :type field_label: str :param field_label: translated prefix str for messages :type input_type: dict :param prefix: existing cleaned fields from form :rtype: dict :return: cleaned dict including environment & template data """ upload_str = prefix + "_upload" data_str = prefix + "_data" url = cleaned.get(prefix + '_url') data = cleaned.get(prefix + '_data') has_upload = upload_str in files # Uploaded file handler if has_upload and not url: log_template_name = files[upload_str].name LOG.info('got upload %s' % log_template_name) tpl = files[upload_str].read() if tpl.startswith('{'): try: json.loads(tpl) except Exception as e: msg = _('There was a problem parsing the' ' %(prefix)s: %(error)s') msg = msg % {'prefix': prefix, 'error': six.text_type(e)} raise forms.ValidationError(msg) cleaned[data_str] = tpl # URL handler elif url and (has_upload or data): msg = _('Please specify a %s using only one source method.') msg = msg % field_label raise forms.ValidationError(msg) elif prefix == 'template': # Check for raw template input - blank environment allowed if not url and not data: msg = _('You must specify a template via one of the ' 'available sources.') raise forms.ValidationError(msg) def create_kwargs(self, data): kwargs = {'parameters': data['template_validate'], 'environment_data': data['environment_data']} if data.get('stack_id'): kwargs['stack_id'] = data['stack_id'] return kwargs def handle(self, request, data): kwargs = self.create_kwargs(data) # NOTE (gabriel): This is a bit of a hack, essentially rewriting this # request so that we can chain it as an input to the next view... # but hey, it totally works. 
request.method = 'GET' return self.next_view.as_view()(request, **kwargs) class ChangeTemplateForm(TemplateForm): class Meta(object): name = _('Edit Template') help_text = _('Select a new template to re-launch a stack.') stack_id = forms.CharField(label=_('Stack ID'), widget=forms.widgets.HiddenInput) stack_name = forms.CharField(label=_('Stack Name'), widget=forms.TextInput(attrs={'readonly': 'readonly'})) class PreviewTemplateForm(TemplateForm): class Meta(object): name = _('Preview Template') help_text = _('Select a new template to preview a stack.') class CreateStackForm(forms.SelfHandlingForm): param_prefix = '__param_' class Meta(object): name = _('Create Stack') environment_data = forms.CharField( widget=forms.widgets.HiddenInput, required=False) if django.VERSION >= (1, 9): # Note(Itxaka): On django>=1.9 Charfield has an strip option that # we need to set to False as to not hit # https://bugs.launchpad.net/python-heatclient/+bug/1546166 environment_data.strip = False parameters = forms.CharField( widget=forms.widgets.HiddenInput) stack_name = forms.RegexField( max_length=255, label=_('Stack Name'), help_text=_('Name of the stack to create.'), regex=r"^[a-zA-Z][a-zA-Z0-9_.-]*$", error_messages={'invalid': _('Name must start with a letter and may ' 'only contain letters, numbers, underscores, ' 'periods and hyphens.')}) timeout_mins = forms.IntegerField( initial=60, label=_('Creation Timeout (minutes)'), help_text=_('Stack creation timeout in minutes.')) enable_rollback = forms.BooleanField( label=_('Rollback On Failure'), help_text=_('Enable rollback on create/update failure.'), required=False) def __init__(self, *args, **kwargs): parameters = kwargs.pop('parameters') # special case: load template data from API, not passed in params if kwargs.get('validate_me'): parameters = kwargs.pop('validate_me') super(CreateStackForm, self).__init__(*args, **kwargs) if self._stack_password_enabled(): self.fields['password'] = forms.CharField( label=_('Password for user "%s"') % self.request.user.username, help_text=_('This is required for operations to be performed ' 'throughout the lifecycle of the stack'), widget=forms.PasswordInput()) self._build_parameter_fields(parameters) def _stack_password_enabled(self): stack_settings = getattr(settings, 'OPENSTACK_HEAT_STACK', {}) return stack_settings.get('enable_user_pass', True) def _build_parameter_fields(self, template_validate): self.help_text = template_validate['Description'] params = template_validate.get('Parameters', {}) if template_validate.get('ParameterGroups'): params_in_order = [] for group in template_validate['ParameterGroups']: for param in group.get('parameters', []): if param in params: params_in_order.append((param, params[param])) else: # no parameter groups, simply sorted to make the order fixed params_in_order = sorted(params.items()) for param_key, param in params_in_order: field = None field_key = self.param_prefix + param_key field_args = { 'initial': param.get('Default', None), 'label': param.get('Label', param_key), 'help_text': html.escape(param.get('Description', '')), 'required': param.get('Default', None) is None } param_type = param.get('Type', None) hidden = strutils.bool_from_string(param.get('NoEcho', 'false')) if 'CustomConstraint' in param: choices = self._populate_custom_choices( param['CustomConstraint']) field_args['choices'] = choices field = forms.ChoiceField(**field_args) elif 'AllowedValues' in param: choices = map(lambda x: (x, x), param['AllowedValues']) field_args['choices'] = choices field = 
forms.ChoiceField(**field_args) elif param_type == 'Json' and 'Default' in param: field_args['initial'] = json.dumps(param['Default']) field = forms.CharField(**field_args) elif param_type in ('CommaDelimitedList', 'String', 'Json'): if 'MinLength' in param: field_args['min_length'] = int(param['MinLength']) field_args['required'] = field_args['min_length'] > 0 if 'MaxLength' in param: field_args['max_length'] = int(param['MaxLength']) if hidden: field_args['widget'] = forms.PasswordInput( render_value=True) field = forms.CharField(**field_args) elif param_type == 'Number': if 'MinValue' in param: field_args['min_value'] = int(param['MinValue']) if 'MaxValue' in param: field_args['max_value'] = int(param['MaxValue']) field = forms.IntegerField(**field_args) # heat-api currently returns the boolean type in lowercase # (see https://bugs.launchpad.net/heat/+bug/1361448) # so for better compatibility both are checked here elif param_type in ('Boolean', 'boolean'): field_args['required'] = False field = forms.BooleanField(**field_args) if field: self.fields[field_key] = field @sensitive_variables('password') def handle(self, request, data): prefix_length = len(self.param_prefix) params_list = [(k[prefix_length:], v) for (k, v) in six.iteritems(data) if k.startswith(self.param_prefix)] fields = { 'stack_name': data.get('stack_name'), 'timeout_mins': data.get('timeout_mins'), 'disable_rollback': not(data.get('enable_rollback')), 'parameters': dict(params_list), 'files': json.loads(data.get('parameters')).get('files'), 'template': json.loads(data.get('parameters')).get('template') } if data.get('password'): fields['password'] = data.get('password') if data.get('environment_data'): fields['environment'] = data.get('environment_data') try: api.heat.stack_create(self.request, **fields) messages.info(request, _("Stack creation started.")) return True except Exception: exceptions.handle(request) def _populate_custom_choices(self, custom_type): if custom_type == 'neutron.network': return instance_utils.network_field_data(self.request, True) if custom_type == 'nova.keypair': return instance_utils.keypair_field_data(self.request, True) if custom_type == 'glance.image': return image_utils.image_field_data(self.request, True) if custom_type == 'nova.flavor': return instance_utils.flavor_field_data(self.request, True) return [] class EditStackForm(CreateStackForm): class Meta(object): name = _('Update Stack Parameters') stack_id = forms.CharField( label=_('Stack ID'), widget=forms.widgets.HiddenInput) stack_name = forms.CharField( label=_('Stack Name'), widget=forms.TextInput(attrs={'readonly': 'readonly'})) @sensitive_variables('password') def handle(self, request, data): prefix_length = len(self.param_prefix) params_list = [(k[prefix_length:], v) for (k, v) in six.iteritems(data) if k.startswith(self.param_prefix)] stack_id = data.get('stack_id') fields = { 'stack_name': data.get('stack_name'), 'timeout_mins': data.get('timeout_mins'), 'disable_rollback': not(data.get('enable_rollback')), 'parameters': dict(params_list), 'files': json.loads(data.get('parameters')).get('files'), 'template': json.loads(data.get('parameters')).get('template') } if data.get('password'): fields['password'] = data.get('password') if data.get('environment_data'): fields['environment'] = data.get('environment_data') try: api.heat.stack_update(self.request, stack_id=stack_id, **fields) messages.info(request, _("Stack update started.")) return True except Exception: exceptions.handle(request) class PreviewStackForm(CreateStackForm): 
class Meta(object): name = _('Preview Stack Parameters') def __init__(self, *args, **kwargs): self.next_view = kwargs.pop('next_view') super(CreateStackForm, self).__init__(*args, **kwargs) def handle(self, request, data): prefix_length = len(self.param_prefix) params_list = [(k[prefix_length:], v) for (k, v) in six.iteritems(data) if k.startswith(self.param_prefix)] fields = { 'stack_name': data.get('stack_name'), 'timeout_mins': data.get('timeout_mins'), 'disable_rollback': not(data.get('enable_rollback')), 'parameters': dict(params_list), 'files': json.loads(data.get('parameters')).get('files'), 'template': json.loads(data.get('parameters')).get('template') } if data.get('environment_data'): fields['environment'] = data.get('environment_data') try: stack_preview = api.heat.stack_preview(self.request, **fields) request.method = 'GET' return self.next_view.as_view()(request, stack_preview=stack_preview) except Exception: exceptions.handle(request)<|fim▁end|>
# w/ client side download when applicable
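In this example the completion is just the second half of a wrapped TODO comment. In context, the top of `TemplateForm` from the forms.py prompt reads approximately as follows, with blank lines and indentation reconstructed:

```python
class TemplateForm(forms.SelfHandlingForm):

    class Meta(object):
        name = _('Select Template')
        help_text = _('Select a template to launch a stack.')

    # TODO(jomara) - update URL choice for template & environment files
    # w/ client side download when applicable
    base_choices = [('file', _('File')),
                    ('raw', _('Direct Input'))]
    url_choice = [('url', _('URL'))]
```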
<|file_name|>credentials.go<|end_file_name|><|fim▁begin|>package awsutil import ( "fmt" "os" "sort" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/arn" "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/sts" "path" ) type sessionResult struct { accountName string accountId string awsSession *session.Session regions []string err error } func getCredentialsPath() string { return getAwsPath("AWS_CREDENTIAL_FILE", "credentials") } func getConfigPath() string { return getAwsPath("AWS_CONFIG_FILE", "config") } func getAwsPath(environ, fileName string) string { value := os.Getenv(environ) if value != "" { return value } home := os.Getenv("HOME") return path.Join(home, ".aws", fileName) } func (c *CredentialsOptions) setDefaults() *CredentialsOptions { if c.CredentialsPath == "" { c.CredentialsPath = *awsCredentialsFile } if c.ConfigPath == "" { c.ConfigPath = *awsConfigFile } return c } func tryLoadCredentialsWithOptions( options *CredentialsOptions) (*CredentialsStore, map[string]error, error) { accountNames, err := listAccountNames(options) if err != nil { return nil, nil, err } cs, unloadableAccounts := createCredentials(accountNames, options) return cs, unloadableAccounts, nil } func loadCredentials() (*CredentialsStore, error) { var options CredentialsOptions cs, unloadableAccounts, err := tryLoadCredentialsWithOptions( options.setDefaults()) if err != nil { return nil, err } for _, err := range unloadableAccounts { return nil, err } return cs, nil } func createCredentials( accountNames []string, options *CredentialsOptions) ( *CredentialsStore, map[string]error) { cs := &CredentialsStore{ sessionMap: make(map[string]*session.Session), accountIdToName: make(map[string]string), accountNameToId: make(map[string]string), accountRegions: make(map[string][]string), } resultsChannel := make(chan sessionResult, len(accountNames)) for _, accountName := range accountNames { go func(accountName string) { resultsChannel <- createSession(accountName, options) }(accountName) } unloadableAccounts := make(map[string]error) for range accountNames { result := <-resultsChannel if result.err != nil { unloadableAccounts[result.accountName] = result.err } else { cs.accountNames = append(cs.accountNames, result.accountName) cs.sessionMap[result.accountName] = result.awsSession cs.accountIdToName[result.accountId] = result.accountName cs.accountNameToId[result.accountName] = result.accountId cs.accountRegions[result.accountName] = result.regions } } close(resultsChannel) sort.Strings(cs.accountNames) return cs, unloadableAccounts } func createSession( accountName string, options *CredentialsOptions) sessionResult { awsSession, err := session.NewSessionWithOptions(session.Options{ Profile: accountName, SharedConfigState: session.SharedConfigEnable, SharedConfigFiles: []string{ options.CredentialsPath, options.ConfigPath, }, }) if err != nil { return sessionResult{ err: fmt.Errorf("session.NewSessionWithOptions: %s", err), accountName: accountName, } } stsService := sts.New(awsSession) inp := &sts.GetCallerIdentityInput{} var accountId string if out, err := stsService.GetCallerIdentity(inp); err != nil { return sessionResult{ err: fmt.Errorf("sts.GetCallerIdentity: %s", err), accountName: accountName, } } else { if arnV, err := arn.Parse(aws.StringValue(out.Arn)); err != nil { return sessionResult{err: err, accountName: accountName}<|fim▁hole|> } } regions, err := listRegions(CreateService(awsSession, "us-east-1")) if err != nil { // Try the ec2::DescribeRegions call in other 
regions before giving // up and reporting the error. We may need to add to this list. otherRegions := []string{"cn-north-1"} for _, otherRegion := range otherRegions { regions, err := listRegions(CreateService(awsSession, otherRegion)) if err == nil { return sessionResult{ accountName: accountName, accountId: accountId, awsSession: awsSession, regions: regions, } } } // If no success with other regions return the original error return sessionResult{err: err, accountName: accountName} } return sessionResult{ accountName: accountName, accountId: accountId, awsSession: awsSession, regions: regions, } }<|fim▁end|>
} else { accountId = arnV.AccountID
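Reinserted into `createSession` from the credentials.go prompt, the completion supplies the else-branch that pulls the account ID out of the parsed ARN. Field alignment and indentation below are reconstructed, not taken verbatim from the flattened row:

```go
	if out, err := stsService.GetCallerIdentity(inp); err != nil {
		return sessionResult{
			err:         fmt.Errorf("sts.GetCallerIdentity: %s", err),
			accountName: accountName,
		}
	} else {
		if arnV, err := arn.Parse(aws.StringValue(out.Arn)); err != nil {
			return sessionResult{err: err, accountName: accountName}
		} else {
			accountId = arnV.AccountID
		}
	}
```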
<|file_name|>issue-64559.rs<|end_file_name|><|fim▁begin|>fn main() { let orig = vec![true]; for _val in orig {} let _closure = || orig;<|fim▁hole|><|fim▁end|>
//~^ ERROR use of moved value: `orig` }
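With the completion in place, the whole compile-fail test is only a few lines; the `//~^ ERROR` annotation points at the closure capture on the line above it:

```rust
fn main() {
    let orig = vec![true];
    for _val in orig {}
    let _closure = || orig;
    //~^ ERROR use of moved value: `orig`
}
```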
<|file_name|>color.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use azure::AzFloat; use azure::azure::AzColor; #[inline]<|fim▁hole|>pub fn new(r: AzFloat, g: AzFloat, b: AzFloat, a: AzFloat) -> AzColor { AzColor { r: r, g: g, b: b, a: a } } #[inline] pub fn rgb(r: u8, g: u8, b: u8) -> AzColor { AzColor { r: (r as AzFloat) / (255.0 as AzFloat), g: (g as AzFloat) / (255.0 as AzFloat), b: (b as AzFloat) / (255.0 as AzFloat), a: 1.0 as AzFloat } } #[inline] pub fn rgba(r: AzFloat, g: AzFloat, b: AzFloat, a: AzFloat) -> AzColor { AzColor { r: r, g: g, b: b, a: a } } #[inline] pub fn black() -> AzColor { AzColor { r: 0.0, g: 0.0, b: 0.0, a: 1.0 } } #[inline] pub fn transparent() -> AzColor { AzColor { r: 0.0, g: 0.0, b: 0.0, a: 0.0 } } #[inline] pub fn white() -> AzColor { AzColor { r: 1.0, g: 1.0, b: 1.0, a: 1.0 } }<|fim▁end|>
<|file_name|>base.py<|end_file_name|><|fim▁begin|># Copyright (C) 2014 Andrey Antukh <[email protected]> # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import abc import importlib from django.core.exceptions import ImproperlyConfigured from django.conf import settings class BaseEventsPushBackend(object, metaclass=abc.ABCMeta): @abc.abstractmethod def emit_event(self, message:str, *, routing_key:str, channel:str="events"): pass def load_class(path): """ Load class from path. """ mod_name, klass_name = path.rsplit('.', 1) try: mod = importlib.import_module(mod_name) except AttributeError as e: raise ImproperlyConfigured('Error importing {0}: "{1}"'.format(mod_name, e)) try: klass = getattr(mod, klass_name) except AttributeError: raise ImproperlyConfigured('Module "{0}" does not define a "{1}" class'.format(mod_name, klass_name)) return klass<|fim▁hole|> if path is None: path = getattr(settings, "EVENTS_PUSH_BACKEND", None) if path is None: raise ImproperlyConfigured("Events push system not configured") if options is None: options = getattr(settings, "EVENTS_PUSH_BACKEND_OPTIONS", {}) cls = load_class(path) return cls(**options)<|fim▁end|>
def get_events_backend(path:str=None, options:dict=None):
<|file_name|>regular-modules.rs<|end_file_name|><|fim▁begin|>// We specify -C incremental here because we want to test the partitioning for // incremental compilation // compile-flags:-Zprint-mono-items=eager -Cincremental=tmp/partitioning-tests/regular-modules #![allow(dead_code)] #![crate_type="lib"]<|fim▁hole|> //~ MONO_ITEM fn bar @@ regular_modules[Internal] fn bar() {} //~ MONO_ITEM static BAZ @@ regular_modules[Internal] static BAZ: u64 = 0; mod mod1 { //~ MONO_ITEM fn mod1::foo @@ regular_modules-mod1[Internal] fn foo() {} //~ MONO_ITEM fn mod1::bar @@ regular_modules-mod1[Internal] fn bar() {} //~ MONO_ITEM static mod1::BAZ @@ regular_modules-mod1[Internal] static BAZ: u64 = 0; mod mod1 { //~ MONO_ITEM fn mod1::mod1::foo @@ regular_modules-mod1-mod1[Internal] fn foo() {} //~ MONO_ITEM fn mod1::mod1::bar @@ regular_modules-mod1-mod1[Internal] fn bar() {} //~ MONO_ITEM static mod1::mod1::BAZ @@ regular_modules-mod1-mod1[Internal] static BAZ: u64 = 0; } mod mod2 { //~ MONO_ITEM fn mod1::mod2::foo @@ regular_modules-mod1-mod2[Internal] fn foo() {} //~ MONO_ITEM fn mod1::mod2::bar @@ regular_modules-mod1-mod2[Internal] fn bar() {} //~ MONO_ITEM static mod1::mod2::BAZ @@ regular_modules-mod1-mod2[Internal] static BAZ: u64 = 0; } } mod mod2 { //~ MONO_ITEM fn mod2::foo @@ regular_modules-mod2[Internal] fn foo() {} //~ MONO_ITEM fn mod2::bar @@ regular_modules-mod2[Internal] fn bar() {} //~ MONO_ITEM static mod2::BAZ @@ regular_modules-mod2[Internal] static BAZ: u64 = 0; mod mod1 { //~ MONO_ITEM fn mod2::mod1::foo @@ regular_modules-mod2-mod1[Internal] fn foo() {} //~ MONO_ITEM fn mod2::mod1::bar @@ regular_modules-mod2-mod1[Internal] fn bar() {} //~ MONO_ITEM static mod2::mod1::BAZ @@ regular_modules-mod2-mod1[Internal] static BAZ: u64 = 0; } mod mod2 { //~ MONO_ITEM fn mod2::mod2::foo @@ regular_modules-mod2-mod2[Internal] fn foo() {} //~ MONO_ITEM fn mod2::mod2::bar @@ regular_modules-mod2-mod2[Internal] fn bar() {} //~ MONO_ITEM static mod2::mod2::BAZ @@ regular_modules-mod2-mod2[Internal] static BAZ: u64 = 0; } }<|fim▁end|>
//~ MONO_ITEM fn foo @@ regular_modules[Internal] fn foo() {}
<|file_name|>Box.stories.tsx<|end_file_name|><|fim▁begin|>/* eslint-disable import/no-extraneous-dependencies */ import React from 'react'; import { storiesOf } from '@storybook/react-native';<|fim▁hole|> import CenteredContent from '$shared/CenteredContent'; import { Box } from '$shared/primitives'; storiesOf('Box', module) .add('With a size, color and radiuses', () => ( <CenteredContent> <Box size={100} bg="grey" borderRadius="medium" /> </CenteredContent> )) .add('With border props and full width', () => ( <CenteredContent> <Box width="100%" height={50} bg="grey" borderBottomColor="green" borderBottomWidth={4} /> </CenteredContent> )) .add('With position props and odd size', () => ( <CenteredContent> <Box width={100} height={50} bg="grey" top={125} left={20} mb={50} /> </CenteredContent> ));<|fim▁end|>
<|file_name|>0002_auto_20170919_0319.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.11.4 on 2017-09-19 03:19 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion<|fim▁hole|> class Migration(migrations.Migration): dependencies = [ ('library', '0001_initial'), ] operations = [ migrations.CreateModel( name='AuthorRating', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('rating', models.CharField(choices=[('1', '1 Star'), ('2', '2 Stars'), ('3', '3 Stars'), ('4', '4 Stars')], max_length=1, verbose_name='Rating')), ], options={ 'ordering': ('author',), 'verbose_name': 'Author Ratings', }, ), migrations.CreateModel( name='BookRating', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('rating', models.CharField(choices=[('1', '1 Star'), ('2', '2 Stars'), ('3', '3 Stars'), ('4', '4 Stars')], max_length=1, verbose_name='Rating')), ], options={ 'ordering': ('book',), 'verbose_name': 'Book Ratings', }, ), migrations.AlterModelOptions( name='author', options={'ordering': ('name',), 'verbose_name': 'Author', 'verbose_name_plural': 'Authors'}, ), migrations.AlterModelOptions( name='book', options={'ordering': ('title',), 'verbose_name': 'Book', 'verbose_name_plural': 'Books'}, ), migrations.AlterField( model_name='author', name='age', field=models.SmallIntegerField(blank=True, null=True, verbose_name='Age'), ), migrations.AlterField( model_name='author', name='name', field=models.CharField(max_length=128, verbose_name='Name'), ), migrations.AlterField( model_name='author', name='penname', field=models.CharField(max_length=128, verbose_name='Pen Name'), ), migrations.AlterField( model_name='book', name='title', field=models.CharField(max_length=128, verbose_name='Title'), ), migrations.AddField( model_name='bookrating', name='book', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='library.Book'), ), migrations.AddField( model_name='authorrating', name='author', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='library.Author'), ), ]<|fim▁end|>
<|file_name|>qt.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # vim: set fileencoding=utf-8 : #! This file is a literate Python program. You can compile the documentation #! using mylit (http://pypi.python.org/pypi/mylit/). ## title = "glitter Example: Qt" ## stylesheet = "pygments_style.css" # <h1><i>glitter</i> Example: Qt</h1> # <h2>Summary</h2> # This program will show a Qt application to display meshes. # <img src="qt.png"> # <h2>Front matter</h2> # <h3>Module docstring</h3> # The module docstring is used as a description of this example in the # generated documentation: """Example for Qt (PySide) interaction. @author: Stephan Wenger @date: 2012-03-16 """ # <h3>Imports</h3> from __future__ import unicode_literals # Some math functions are required for mouse interaction: from math import sqrt, exp # We use <a href="http://www.pyside.org/">PySide</a> for <a # href="http://qt.nokia.com/">Qt</a> interaction: from PySide import QtCore, QtGui # Note that we did not import the <code>QtOpenGL</code> package. Instead, we # will use a Qt OpenGL widget that doubles as a <i>glitter</i> # <code>Context</code>: from glitter.contexts.qt import QtWidget # If you'd rather use an existing Qt OpenGL context, # <code>QtContextWrapper</code> provides an alternative solution. # Since there are no more build-in matrices in the OpenGL core profile, we use # <i>glitter</i>'s matrix constructors. <i>glitter</i> also provides a method # for loading meshes from files that we will use: from glitter import rotation_matrix, scale_matrix, identity_matrix, load_mesh # <h2>Qt GUI</h2> # <h3>Qt OpenGL Widget</h3> <|fim▁hole|># This not only provides us with a convenient interface for changing OpenGL # state, it also ensures that the correct context is made active whenever # OpenGL commands are issued. class Canvas(QtWidget): # <h4>Initialization</h4> # The canvas will store a mesh to display, a modelview matrix that can be # changed by moving the mouse, and a helper variable for the mouse # interaction: def __init__(self, parent=None): super(Canvas, self).__init__(parent) self.mesh = None self.modelview_matrix = identity_matrix() self.lastPos = QtCore.QPoint() # The size of the canvas is specified by overriding <code>sizeHint()</code>: def sizeHint(self): return QtCore.QSize(512, 512) # When the user requests loading a mesh via the menu system, we create a # <code>Pipeline</code> containing the mesh vertices and an appropriate # shader, all with a single call to <code>load_mesh()</code> from the # <code>glitter.convenience</code> module. Also, we can pass additional # parameters to the <code>Pipeline</code> constructor. In this case, we # want depth testing to be enabled whenever the pipeline is executed, so we # pass <code>depth_text=True</code>: def loadMesh(self, filename): self.mesh = load_mesh(filename, context=self, depth_test=True) # Note that we pass the canvas as the <code>context</code> parameter to # make sure the pipeline is created in the correct context. This is # necessary because in a real-world application, we cannot be sure that # the canvas context is currently active. # When the mesh is loaded, we ask Qt to redraw the OpenGL screen: self.updateGL() # <h4>Mouse interaction</h4> # The viewpoint can be changed by moving the mouse with a button pressed: # left button rotates, right button zooms. 
# When a mouse button is pressed, we store the position where it was # pressed: def mousePressEvent(self, event): self.lastPos = QtCore.QPoint(event.pos()) # When the mouse is moved, we take action according to the type of the # pressed button: def mouseMoveEvent(self, event): # First, we compute the mouse movement since the last time we processed # a mouse event. If there was no movement (which does happen from time # to time), we exit early to avoid division by zero later on: dx, dy = event.x() - self.lastPos.x(), event.y() - self.lastPos.y() if dx == dy == 0: return # If the left mouse button is down, we rotate the modelview matrix # about an axis within the image plane that is perpendicular to the # direction of mouse movement. The amount of rotation is proportional # to the distance travelled. Then we cause a screen redraw by calling # <code>updateGL()</code>. elif event.buttons() & QtCore.Qt.LeftButton: self.modelview_matrix *= rotation_matrix(-sqrt(dx ** 2 + dy ** 2), (dy, dx, 0.0), degrees=True) self.updateGL() # If the right mouse button is down, we scale the modelview matrix # dependent on the vertical mouse movement. Again, a screen redraw is # triggered by calling <code>updateGL()</code>. elif event.buttons() & QtCore.Qt.RightButton: self.modelview_matrix *= scale_matrix(exp(-0.01 * dy)) self.updateGL() # Finally, the mouse position is stored for processing of the following # mouse event. self.lastPos = QtCore.QPoint(event.pos()) # <h4>OpenGL interaction</h4> # Whenever the canvas is resized (also on creation), we set the viewport # (which is actually a <i>glitter</i> <code>Context</code> property) to the # full window and change the projection matrix such that it encompasses the # whole range from -1 to +1: def resizeGL(self, w, h): self.viewport = (0, 0, w, h) self.projection_matrix = scale_matrix(h / float(w) if w > h else 1.0, w / float(h) if h > w else 1.0, 0.4) # The <code>paintGL()</code> method is called by Qt whenever the canvas # needs to be redrawn: def paintGL(self): # First, we clear the canvas using <code>Context.clear()</code>. self.clear() # Then, if a mesh pipeline has been loaded, we draw it with the # <code>modelview_matrix</code> uniform set. 
The pipeline binds the # vertex array and the shader, sets the depth test we requested # earlier, draws the vertex array, and resets all modified state: if self.mesh is not None: self.mesh.draw_with(modelview_matrix=self.projection_matrix * self.modelview_matrix) # Finally, when the user requests resetting the view via the menu system, # we create a clean modelview matrix and update the screen: def resetView(self): self.modelview_matrix = identity_matrix() self.updateGL() # <h3>Qt Main Window</h3> # The OpenGL widget will be displayed within a <code>QMainWindow</code> that # provides menus, keyboard shortcuts, and many other amenities: class MainWindow(QtGui.QMainWindow): # <h4>Initialization</h4> def __init__(self, parent=None): super(MainWindow, self).__init__(parent) # First, we create an instance of the <code>Canvas</code> class we # defined previously and make it the main widget in the window: self.canvas = Canvas() self.setCentralWidget(self.canvas) # Then, we create menus and keyboard shortcuts for opening meshes, # resetting the view, and exiting the program: self.fileMenu = self.menuBar().addMenu("&File") self.fileOpenMeshAction = QtGui.QAction("&Open Mesh\u2026", self) self.fileOpenMeshAction.setShortcut(QtGui.QKeySequence(QtCore.Qt.Key_O)) self.fileOpenMeshAction.triggered.connect(self.fileOpenMesh) self.fileMenu.addAction(self.fileOpenMeshAction) self.fileMenu.addSeparator() self.fileQuitAction = QtGui.QAction("&Quit", self) self.fileQuitAction.setShortcut(QtGui.QKeySequence(QtCore.Qt.Key_Escape)) self.fileQuitAction.triggered.connect(self.close) self.fileMenu.addAction(self.fileQuitAction) self.viewMenu = self.menuBar().addMenu("&View") self.viewResetAction = QtGui.QAction("&Reset", self) self.viewResetAction.setShortcut(QtGui.QKeySequence(QtCore.Qt.Key_R)) self.viewResetAction.triggered.connect(self.canvas.resetView) self.viewMenu.addAction(self.viewResetAction) # <h4>File loading</h4> # When the file open action is triggered via the menu or a keyboard # shortcut, we display a dialog for selecting a file. If a file is # selected, we tell the canvas to load a mesh from it. def fileOpenMesh(self, filename=None): if filename is None: dialog = QtGui.QFileDialog(self, "Open Mesh") dialog.setNameFilters(["%s files (*.%s)" % (x.upper(), x) for x in load_mesh.supported_formats]) dialog.setViewMode(QtGui.QFileDialog.Detail) dialog.setAcceptMode(QtGui.QFileDialog.AcceptOpen) dialog.setFileMode(QtGui.QFileDialog.ExistingFile) if dialog.exec_(): filename = dialog.selectedFiles()[0] if filename is not None: self.canvas.loadMesh(filename) # <h2>Main section</h2> # Finally, if this program is being run from the command line, we create a # <code>QApplication</code>, show a <code>MainWindow</code> instance and run # the Qt main loop. if __name__ == "__main__": import sys app = QtGui.QApplication(sys.argv) mainWindow = MainWindow() mainWindow.show() sys.exit(app.exec_()) # When the main window is closed, the application will exit cleanly.<|fim▁end|>
# OpenGL rendering in Qt is done through an OpenGL widget. Instead of subclassing # <code>QGLWidget</code> directly, we inherit from <i>glitter</i>'s # <code>QtWidget</code>, which acts as a <i>glitter</i> <code>Context</code>.
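The mouse handling in the viewer above maps a left-button drag to a rotation about an in-plane axis perpendicular to the drag direction, with the angle proportional to the drag distance, and maps vertical right-button drags to an exponential zoom. The rotation_matrix/scale_matrix helpers it multiplies into the modelview matrix come from glitter's own transformation utilities; the NumPy sketch below is only an illustrative stand-in for that math, not the glitter API.

import numpy as np

def rotation_about(axis, degrees):
    # 4x4 rotation about an arbitrary axis (Rodrigues' formula).
    axis = np.asarray(axis, dtype=float)
    axis = axis / np.linalg.norm(axis)
    a = np.radians(degrees)
    k = np.array([[0.0, -axis[2], axis[1]],
                  [axis[2], 0.0, -axis[0]],
                  [-axis[1], axis[0], 0.0]])
    m = np.eye(4)
    m[:3, :3] = np.eye(3) + np.sin(a) * k + (1.0 - np.cos(a)) * (k @ k)
    return m

def drag_rotate(modelview, dx, dy):
    # Left-button drag: rotate about (dy, dx, 0), the in-plane axis
    # perpendicular to the drag, by an angle proportional to its length.
    if dx == dy == 0:
        return modelview
    return modelview @ rotation_about((dy, dx, 0.0), -np.hypot(dx, dy))

def drag_zoom(modelview, dy):
    # Right-button drag: uniform scale by exp(-0.01 * dy).
    s = np.exp(-0.01 * dy)
    return modelview @ np.diag([s, s, s, 1.0])

mv = drag_zoom(drag_rotate(np.eye(4), 3, 4), 10)
print(np.round(mv, 3))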
<|file_name|>CursorUtils.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2013 nohana, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.amalgam.database; import android.annotation.TargetApi; import android.database.Cursor; import android.os.Build; /** * Utility for the {@link android.database.Cursor} */ @SuppressWarnings("unused") // public APIs public final class CursorUtils { private static final int TRUE = 1; private CursorUtils() { throw new AssertionError(); } /** * Close with null checks. * @param cursor to close. */ public static void close(Cursor cursor) { if (cursor == null) { return; } cursor.close();<|fim▁hole|> /** * Read the boolean data for the column. * @see android.database.Cursor#getColumnIndex(String). * @param cursor the cursor. * @param columnName the column name. * @return the boolean value. */ public static boolean getBoolean(Cursor cursor, String columnName) { return cursor != null && cursor.getInt(cursor.getColumnIndex(columnName)) == TRUE; } /** * Read the int data for the column. * @see android.database.Cursor#getInt(int). * @see android.database.Cursor#getColumnIndex(String). * @param cursor the cursor. * @param columnName the column name. * @return the int value. */ public static int getInt(Cursor cursor, String columnName) { if (cursor == null) { return -1; } return cursor.getInt(cursor.getColumnIndex(columnName)); } /** * Read the String data for the column. * @see android.database.Cursor#getString(int). * @see android.database.Cursor#getColumnIndex(String). * @param cursor the cursor. * @param columnName the column name. * @return the String value. */ public static String getString(Cursor cursor, String columnName) { if (cursor == null) { return null; } return cursor.getString(cursor.getColumnIndex(columnName)); } /** * Read the short data for the column. * @see android.database.Cursor#getShort(int). * @see android.database.Cursor#getColumnIndex(String). * @param cursor the cursor. * @param columnName the column name. * @return the short value. */ public static short getShort(Cursor cursor, String columnName) { if (cursor == null) { return -1; } return cursor.getShort(cursor.getColumnIndex(columnName)); } /** * Read the long data for the column. * @see android.database.Cursor#getLong(int). * @see android.database.Cursor#getColumnIndex(String). * @param cursor the cursor. * @param columnName the column name. * @return the long value. */ public static long getLong(Cursor cursor, String columnName) { if (cursor == null) { return -1; } return cursor.getLong(cursor.getColumnIndex(columnName)); } /** * Read the double data for the column. * @see android.database.Cursor#getDouble(int). * @see android.database.Cursor#getColumnIndex(String). * @param cursor the cursor. * @param columnName the column name. * @return the double value. */ public static double getDouble(Cursor cursor, String columnName) { if (cursor == null) { return -1; } return cursor.getDouble(cursor.getColumnIndex(columnName)); } /** * Read the float data for the column. 
* @see android.database.Cursor#getFloat(int). * @see android.database.Cursor#getColumnIndex(String). * @param cursor the cursor. * @param columnName the column name. * @return the float value. */ public static float getFloat(Cursor cursor, String columnName) { if (cursor == null) { return -1; } return cursor.getFloat(cursor.getColumnIndex(columnName)); } /** * Read the blob data for the column. * @see android.database.Cursor#getBlob(int). * @see android.database.Cursor#getColumnIndex(String). * @param cursor the cursor. * @param columnName the column name. * @return the blob value. */ public static byte[] getBlob(Cursor cursor, String columnName) { if (cursor == null) { return null; } return cursor.getBlob(cursor.getColumnIndex(columnName)); } /** * Checks the type of the column. * @see android.database.Cursor#getType(int). * @see android.database.Cursor#getColumnIndex(String). * @param cursor the cursor. * @param columnName the column name. * @return the type of the column. */ @TargetApi(Build.VERSION_CODES.HONEYCOMB) public static int getType(Cursor cursor, String columnName) { if (cursor == null) { return Cursor.FIELD_TYPE_NULL; } return cursor.getType(cursor.getColumnIndex(columnName)); } /** * Checks if the column value is null or not. * @see android.database.Cursor#isNull(int). * @see android.database.Cursor#getColumnIndex(String). * @param cursor the cursor. * @param columnName the column name. * @return true if the column value is null. */ public static boolean isNull(Cursor cursor, String columnName) { return cursor != null && cursor.isNull(cursor.getColumnIndex(columnName)); } }<|fim▁end|>
}
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup, Extension, Command from distutils.command.build import build from distutils.command.build_ext import build_ext from distutils.command.config import config from distutils.msvccompiler import MSVCCompiler from distutils import sysconfig import string import sys mkobjs = ['column', 'custom', 'derived', 'fileio', 'field', 'format', 'handler', 'persist', 'remap', 'std', 'store', 'string', 'table', 'univ', 'view', 'viewx'] class config_mk(config): def run(self): # work around bug in Python 2.2-supplied check_header, fixed # in Python 2.3; body needs to be a valid, non-zero-length string if self.try_cpp(body="/* body */", headers=['unicodeobject.h'], include_dirs=[sysconfig.get_python_inc()]): build = self.distribution.reinitialize_command('build_ext') build.define = 'HAVE_UNICODEOBJECT_H' # trust that mk4.h provides the correct HAVE_LONG_LONG value, # since Mk4py doesn't #include "config.h" class build_mk(build): def initialize_options(self): # build in builds directory by default, unless specified otherwise build.initialize_options(self) self.build_base = '../builds' class build_mkext(build_ext): def finalize_options(self): self.run_command('config') # force use of C++ compiler (helps on some platforms) import os cc = os.environ.get('CXX', sysconfig.get_config_var('CXX')) if not cc: cc = sysconfig.get_config_var('CCC') # Python 1.5.2 if cc: os.environ['CC'] = cc build_ext.finalize_options(self) def build_extension(self, ext): # work around linker problem with MacPython 2.3 if sys.platform == 'darwin': try: self.compiler.linker_so.remove("-Wl,-x") except: pass # work around linker problem with Linux, Python 2.2 and earlier: # despite setting $CC above, still uses Python compiler if sys.platform == 'linux2': try: ext.libraries.append("stdc++") except: pass if ext.name == "Mk4py": if isinstance(self.compiler, MSVCCompiler): suffix = '.obj' if self.debug: prefix = '../builds/msvc60/mklib/Debug/' else: prefix = '../builds/msvc60/mklib/Release/' else: suffix = '.o' prefix = '../builds/' for i in range(len(ext.extra_objects)): nm = ext.extra_objects[i] if nm in mkobjs: if string.find(nm, '.') == -1: nm = nm + suffix nm = prefix + nm ext.extra_objects[i] = nm build_ext.build_extension(self, ext) class test_regrtest(Command): # Original version of this class posted # by Berthold Hoellmann to [email protected] description = "test the distribution prior to install" user_options = [ ('build-base=', 'b', "base build directory (default: 'build.build-base')"), ('build-purelib=', None, "build directory for platform-neutral distributions"), ('build-platlib=', None, "build directory for platform-specific distributions"), ('build-lib=', None, "build directory for all distribution (defaults to either " + "build-purelib or build-platlib"), ('test-dir=', None, "directory that contains the test definitions"), ('test-options=', None, "command-line options to pass to test.regrtest") ] def initialize_options(self): self.build_base = None # these are decided only after 'build_base' has its final value # (unless overridden by the user or client) self.build_purelib = None self.build_platlib = None self.test_dir = 'test' self.test_options = None def finalize_options(self): build = self.distribution.get_command_obj('build') build_options = ('build_base', 'build_purelib', 'build_platlib') for option in build_options: val = getattr(self, option) if val: setattr(build, option, getattr(self, option)) build.ensure_finalized() for option in 
build_options: setattr(self, option, getattr(build, option)) def run(self): # Invoke the 'build' command to "build" pure Python modules # (ie. copy 'em into the build tree) self.run_command('build') # remember old sys.path to restore it afterwards old_path = sys.path[:] # extend sys.path sys.path.insert(0, self.build_purelib) sys.path.insert(0, self.build_platlib) sys.path.insert(0, self.test_dir) # Use test.regrtest, unlike the original version of this class import test.regrtest # jcw 2004-04-26 - why do I need to add these here to find the tests? #import leaktest - not very portable import test_inttypes import test_stringtype #import test_hash - doesn't work # jcw end test.regrtest.STDTESTS = [] test.regrtest.NOTTESTS = [] if self.test_options: sys.argv[1:] = string.split(self.test_options, ' ') else: del sys.argv[1:] # remove stale modules del sys.modules['metakit'] try: del sys.modules['Mk4py'] except: pass self.announce("running tests") test.regrtest.main(testdir=self.test_dir) # restore sys.path sys.path = old_path[:] #try: # import metakit #except: # metakit = sys.modules['metakit'] setup(name = "metakit", version = "2.4.9.7", description = "Python bindings to the Metakit database library", #long_description = metakit.__doc__,<|fim▁hole|> author_email = "[email protected]", url = "http://www.equi4.com/metakit/python.html", maintainer = "Jean-Claude Wippler", maintainer_email = "[email protected]", license = "X/MIT style, see: http://www.equi4.com/mklicense.html", keywords = ['database'], py_modules = ['metakit'], cmdclass = {'build': build_mk, 'build_ext': build_mkext, 'test': test_regrtest, 'config': config_mk}, ext_modules = [Extension("Mk4py", sources=["PyProperty.cpp", "PyRowRef.cpp", "PyStorage.cpp", "PyView.cpp", "scxx/PWOImp.cpp", ], include_dirs=["scxx", "../include"], extra_objects=mkobjs, )] ) ## Local Variables: ## compile-command: "python setup.py build -b ../builds" ## End:<|fim▁end|>
author = "Gordon McMillan / Jean-Claude Wippler",
<|file_name|>common.rs<|end_file_name|><|fim▁begin|>use std::{ borrow::Cow, collections::HashMap, }; use parse_wiki_text::Node; use crate::{Parameters, OtherTemplate}; // Why? Because MediaWiki (and thus Wikipedia) titles are case-sensitive in all // but the first character. pub fn uppercase_first_letter(string: &str) -> Cow<'_, str> { if let Some(first_char) = string.chars().nth(0) { if first_char.is_ascii_lowercase() { Cow::Owned(format!("{}{}", first_char.to_ascii_uppercase(), &string[1..])) } else { Cow::Borrowed(string) } } else { Cow::Borrowed(string) } } fn remove_final_utc(timestamp: &str) -> &str { if timestamp.ends_with(" (UTC)") { &timestamp[..timestamp.len() - 6] } else { timestamp } } pub fn fuzzy_parse_timestamp(timestamp: &str) -> Result<chrono::naive::NaiveDateTime, dtparse::ParseError> { dtparse::parse(remove_final_utc(timestamp)).map(|(date, _time)| date) } pub fn make_map(params: &[(&str, &str)]) -> HashMap<String, String> { params.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect() } pub fn get_template_param_2<'a, 'b>(template: &'a OtherTemplate, key1: impl Into<Cow<'b, str>>, key2: impl Into<Cow<'b, str>>) -> &'a str { //template.named.get(key1.into().as_ref()).map_or_else(|| template.named.get(key2.into().as_ref()).map_or("", Cow::as_ref), Cow::as_ref).trim() template .named .get(key1.into().as_ref()) .map_or_else( || template.named.get(key2.into().as_ref()).map_or("", String::as_ref), String::as_ref, ) .trim() } pub fn get_template_param<'a, 'b>(template: &'a OtherTemplate, key1: impl Into<Cow<'b, str>>) -> &'a str { template.named.get(key1.into().as_ref()).map_or("", String::as_ref).trim() } #[derive(Clone, Copy)] pub enum WikitextTransform { RequirePureText, GetTextContent, KeepMarkup } pub fn nodes_to_text<'a>(nodes: &[Node<'a>], transform: WikitextTransform) -> Result<Cow<'a, str>, String> { let error_msg = format!("unknown node type encountered: {:?}", &nodes); use Node::*; use WikitextTransform::*; use Cow::*; nodes.iter().map(|node| match (node, transform) { (Text { value, .. }, _) => Ok(Borrowed(*value)), (_, RequirePureText) => Err(()), (Bold { .. }, KeepMarkup) => Ok(Borrowed("'''")), (Italic { .. }, KeepMarkup) => Ok(Borrowed("''")),<|fim▁hole|> (BoldItalic { .. }, KeepMarkup) => Ok(Borrowed("'''''")), (Bold { .. }, GetTextContent) | (Italic { .. }, GetTextContent) | (BoldItalic { .. }, GetTextContent) => Ok(Borrowed("")), (Link { text, .. }, GetTextContent) => nodes_to_text(text, GetTextContent).map_err(|_| ()), (Link { target, text, .. }, KeepMarkup) => nodes_to_text(text, KeepMarkup).map_or_else(|_| Err(()), |wikitext| Ok(Owned(format!("[[{}|{}]]", target, wikitext)))), _ => Err(()), }).collect::<Result<Vec<_>, _>>().map_or_else( |()| Err(error_msg), |mut values| if values.len() == 1 { Ok(values.remove(0)) } else { Ok(Cow::Owned(values.join(""))) } ) } pub trait ToParams<'a> { const PREFIX: &'static str; type Iter: Iterator<Item = (&'static str, Cow<'a, str>)>; // (key suffix (like "date"), value (like "2020-09-13")) fn to_params(self) -> Self::Iter; } /// Counts how many "date" params are specified for the given prefix in the /// given {{article history}} transclusion. fn count_existing_entries<'a, T: ToParams<'a>>(params: &Parameters) -> usize { if params.contains_key(&format!("{}{}", T::PREFIX, "date")) { (2..) .into_iter() .find(|idx| !params.contains_key(&format!("{}{}date", T::PREFIX, idx))) .unwrap() // if this doesn't work, then there were an infinite number of parameters! 
- 1 // the find will return the first number WITHOUT an entry, // but the numbers are 1-indexed so the count will be 1 less } else { 0 } } pub fn update_article_history<'a, T: ToParams<'a>>(entries: Vec<T>, params: &mut Parameters) { let num_existing_entries = count_existing_entries::<T>(params); fn get_param_prefix<'a, T: ToParams<'a>>(idx: usize) -> String { match idx { 0 => T::PREFIX.into(), _ => format!("{}{}", T::PREFIX, idx + 1), } } params.extend(entries .into_iter() .map(ToParams::to_params) .zip(num_existing_entries..) // i.e. if there are 2 existing entries, the first new index will be 3 .flat_map(|(params, idx): (T::Iter, usize)| { params.map(move |(suffix, value)| (get_param_prefix::<T>(idx) + suffix, value.into_owned())) }), ); } pub enum PageExistenceResult { PrimaryExists, BackupExists, NeitherExist, } pub async fn try_page_existence(api: &mediawiki::api::Api, primary_title: &str, backup_title: &str) -> Result<PageExistenceResult, String> { let res = api.get_query_api_json(&make_map(&[ ("action", "query"), ("titles", &format!("{}|{}", primary_title, backup_title)), ("formatversion", "2"), ])).await.map_err(|e| format!("API error: {:?}", e))?; let mut does_primary_exist = true; let mut does_backup_exist = true; for page in res["query"]["pages"].as_array().ok_or(format!("no pages in res: {:?}", res))? { let page_title = page["title"].as_str().ok_or(format!("no title for page: full res {:?}", res))?; let is_page_missing = page["missing"].as_bool().unwrap_or(false); if page_title == primary_title && is_page_missing { does_primary_exist = false; } else if page_title == backup_title && is_page_missing { does_backup_exist = false; } else { return Err(format!("unrecognized title {}: full response {:?}", page_title, res)); } } Ok(match (does_primary_exist, does_backup_exist) { (true, _) => PageExistenceResult::PrimaryExists, (false, true) => PageExistenceResult::BackupExists, (false, false) => PageExistenceResult::NeitherExist, }) }<|fim▁end|>
<|file_name|>reporter.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use cssparser::{Parser, SourcePosition}; use log; use style_traits::ParseErrorReporter; #[derive(JSTraceable, HeapSizeOf)] pub struct CSSErrorReporter; impl ParseErrorReporter for CSSErrorReporter { fn report_error(&self, input: &mut Parser, position: SourcePosition, message: &str) {<|fim▁hole|> if log_enabled!(log::LogLevel::Info) { let location = input.source_location(position); // TODO eventually this will got into a "web console" or something. info!("{}:{} {}", location.line, location.column, message) } } fn clone(&self) -> Box<ParseErrorReporter + Send + Sync> { let error_reporter = box CSSErrorReporter; return error_reporter; } }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*-coding:Utf-8 -* # Copyright (c) 2012 NOEL-BARON Léo # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT # OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """Package contenant la commande 'chercherbois'.""" from random import random, randint, choice from math import sqrt from primaires.interpreteur.commande.commande import Commande from primaires.perso.exceptions.stat import DepassementStat class CmdChercherBois(Commande): """Commande 'chercherbois'""" def __init__(self): """Constructeur de la commande""" Commande.__init__(self, "chercherbois", "gatherwood") self.nom_categorie = "objets" self.aide_courte = "permet de chercher du bois" self.aide_longue = \ "Cette commande permet de chercher du combustible dans la salle " \ "où vous vous trouvez." 
def interpreter(self, personnage, dic_masques): """Méthode d'interprétation de commande""" salle = personnage.salle if salle.interieur: personnage << "|err|Vous ne pouvez chercher du combustible " \ "ici.|ff|" return personnage.agir("chercherbois") prototypes = importeur.objet.prototypes.values()<|fim▁hole|> if personnage.salle.terrain.nom in proto.terrains: combustibles.append((proto.rarete, proto)) combustibles = sorted(combustibles, key=lambda combu: combu[0]) if not combustibles: personnage << "|err|Il n'y a rien qui puisse brûler par ici.|ff|" else: niveau = sqrt(personnage.get_talent("collecte_bois") / 100) if not niveau: niveau = 0.1 proba_trouver = round(random(), 1) if proba_trouver <= niveau: # on trouve du bois possibles = [] for proba, combustible in combustibles: if 2 * proba_trouver >= (proba - 1) / 10: for i in range(int(10 / proba)): possibles.append(combustible) nb_obj = randint(int(proba_trouver * 10), int(niveau * 10)) + 1 if possibles: choix = choice(possibles) somme_qualites = 0 end = int(choix.poids_unitaire * nb_obj / 2) try: personnage.stats.endurance -= end except DepassementStat: personnage << "|err|Vous êtes trop fatigué pour " \ "cela.|ff|" return try: personnage.stats.endurance -= 3 except DepassementStat: personnage << "|err|Vous êtes trop fatigué pour cela.|ff|" return # On cherche le bois personnage.etats.ajouter("collecte_bois") personnage << "Vous vous penchez et commencez à chercher du bois." personnage.salle.envoyer( "{} se met à chercher quelque chose par terre.", personnage) yield 5 if "collecte_bois" not in personnage.etats: return if choix: for i in range(nb_obj): objet = importeur.objet.creer_objet(choix) personnage.salle.objets_sol.ajouter(objet) somme_qualites += objet.qualite personnage << "Vous trouvez {} " \ "et vous relevez.".format(choix.get_nom(nb_obj)) personnage.salle.envoyer("{} se relève, l'air satisfait.", personnage) personnage.pratiquer_talent("collecte_bois") personnage.gagner_xp("survie", somme_qualites * 2) else: personnage << "Vous vous redressez sans avoir rien trouvé." personnage.salle.envoyer("{} se relève, l'air dépité.", personnage) personnage.pratiquer_talent("collecte_bois", 4) personnage.etats.retirer("collecte_bois")<|fim▁end|>
prototypes = [p for p in prototypes if p.est_de_type("combustible")] combustibles = [] choix = None for proto in prototypes:
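The collection roll above works by building a weighted candidate pool: every fuel whose rarity clears the roll contributes int(10 / rarity) copies, so common fuels dominate and rare ones only appear on good rolls. The standalone sketch below mirrors that weighting with English names and invented rarity values; the rarity, terrain and prototype attributes of the game objects are assumptions, not the game engine's real API.

from math import sqrt
from random import random, randint, choice

def gather_attempt(talent, fuels):
    # fuels: list of (rarity, name); rarity >= 1, lower means more common.
    skill = sqrt(talent / 100) or 0.1        # zero skill still has a floor
    roll = round(random(), 1)
    if roll > skill:                         # failed roll: nothing found
        return []
    pool = []
    for rarity, name in sorted(fuels):
        if 2 * roll >= (rarity - 1) / 10:    # rarer fuels need a better roll
            pool.extend([name] * int(10 / rarity))
    if not pool:
        return []
    count = randint(int(roll * 10), int(skill * 10)) + 1
    return [choice(pool)] * count

print(gather_attempt(64, [(1, "dry branch"), (3, "oak log"), (7, "resinous wood")]))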
<|file_name|>exsample.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*- import subprocess import os from pathlib import Path cwd = os.getcwd()<|fim▁hole|> subprocess.call(['make']) # res = subprocess.check_output('uname -a',shell=True) res = subprocess.check_output( r"./darknet detector test cfg/coco.data cfg/yolo.cfg yolo.weights /home/zaki/NoooDemo/0001.jpg", shell=True) except Exception as ex: print(ex) finally: os.chdir(cwd) print(res) def main() -> None: pass if __name__ == '__main__': main()<|fim▁end|>
try: print(os.getcwd())
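One remark on the snippet above: the save-chdir-restore dance around the darknet call can be avoided by handing the working directory to the subprocess itself. A hedged sketch (Python 3.7+; the darknet paths and arguments are simply the ones the snippet assumes to exist):

import subprocess

def run_detector(darknet_dir, image_path):
    # Run darknet inside its own directory without touching the caller's cwd.
    cmd = ["./darknet", "detector", "test",
           "cfg/coco.data", "cfg/yolo.cfg", "yolo.weights", image_path]
    result = subprocess.run(cmd, cwd=darknet_dir, capture_output=True,
                            text=True, check=True)
    return result.stdout

# Example call (paths are placeholders):
# print(run_detector("/home/zaki/darknet", "/home/zaki/NoooDemo/0001.jpg"))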
<|file_name|>deglitch.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- """Deglitch utilities ===================== """ import numpy as np import logging _logger = logging.getLogger("sloth.math.deglitch") def remove_spikes_medfilt1d(y_spiky, backend="silx", kernel_size=3, threshold=0.1): """Remove spikes in a 1D array using medfilt from silx.math Parameters ---------- y_spiky : array spiky data backend : str, optional library to use as backend - 'silx' -> from silx.math.medianfilter import medfilt1d - 'pymca' -> from PyMca5.PyMcaMath.PyMcaSciPy.signal import medfilt1d - 'pandas' : TODO kernel_size : int, optional kernel size where to calculate median, must be odd [3] threshold : float, optional relative difference between filtered and spiky data [0.1] Returns ------- array filtered array """ ynew = np.zeros_like(y_spiky) if not (kernel_size % 2): kernel_size += 1<|fim▁hole|> if backend == "silx": return remove_spikes_silx(y_spiky, kernel_size=kernel_size, threshold=threshold) elif backend == "pymca": return remove_spikes_silx(y_spiky, kernel_size=kernel_size, threshold=threshold) elif backend == "pandas": return remove_spikes_pandas(y_spiky, window=kernel_size, threshold=threshold) else: _logger.warning("backend for medfilt1d not found! -> returning zeros") return ynew def remove_spikes_silx(y_spiky, kernel_size=3, threshold=0.1): """Remove spikes in a 1D array using medfilt from silx.math Parameters ---------- y_spiky : array spiky data kernel_size : int, optional kernel size where to calculate median, must be odd [3] threshold : float, optional difference between filtered and spiky data relative [0.1] Returns ------- array filtered array """ ynew = np.zeros_like(y_spiky) try: from silx.math.medianfilter import medfilt1d except ImportError: _logger.warning("medfilt1d (from SILX) not found! -> returning zeros") return ynew y_filtered = medfilt1d( y_spiky, kernel_size=kernel_size, conditional=True, mode="nearest", cval=0 ) diff = y_filtered - y_spiky rel_diff = diff / y_filtered ynew = np.where(abs(rel_diff) > threshold, y_filtered, y_spiky) return ynew def remove_spikes_pymca(y_spiky, kernel_size=9, threshold=0.66): """Remove spikes in a 1D array using medfilt from PyMca5.PyMcaMath.PyMcaScipy.signal Parameters ---------- y_spiky : array spiky data kernel_size : int, optional kernel size where to calculate median, should be odd [9] threshold : float, optional difference between filtered and spiky data in sigma units [0.66] Returns ------- array filtered array """ ynew = np.zeros_like(y_spiky) try: from PyMca5.PyMcaMath.PyMcaSciPy.signal import medfilt1d except ImportError: _logger.warning("medfilt1d (from PyMca5) not found! -> returning zeros") return ynew y_filtered = medfilt1d(y_spiky, kernel_size) diff = y_filtered - y_spiky mean = diff.mean() sigma = (y_spiky - mean) ** 2 sigma = np.sqrt(sigma.sum() / float(len(sigma))) ynew = np.where(abs(diff) > threshold * sigma, y_filtered, y_spiky) return ynew def remove_spikes_pandas(y, window=3, threshold=3): """remove spikes using pandas Taken from `https://ocefpaf.github.io/python4oceanographers/blog/2015/03/16/outlier_detection/`_ .. 
note:: this will not work in pandas > 0.17 one could simply do `df.rolling(3, center=True).median()`; also df.as_matrix() is deprecated, use df.values instead Parameters ---------- y : array 1D window : int (optional) window in rolling median [3] threshold : int (optional) number of sigma difference with original data Return ------ ynew : array like x/y """ ynew = np.zeros_like(y) try: import pandas as pd except ImportError: _logger.error("pandas not found! -> returning zeros") return ynew df = pd.DataFrame(y) try: yf = ( pd.rolling_median(df, window=window, center=True) .fillna(method="bfill") .fillna(method="ffill") ) diff = yf.as_matrix() - y mean = diff.mean() sigma = (y - mean) ** 2 sigma = np.sqrt(sigma.sum() / float(len(sigma))) ynew = np.where(abs(diff) > threshold * sigma, yf.as_matrix(), y) except Exception: yf = ( df.rolling(window, center=True) .median() .fillna(method="bfill") .fillna(method="ffill") ) diff = yf.values - y mean = diff.mean() sigma = (y - mean) ** 2 sigma = np.sqrt(sigma.sum() / float(len(sigma))) ynew = np.where(abs(diff) > threshold * sigma, yf.values, y) # ynew = np.array(yf.values).reshape(len(x)) return ynew<|fim▁end|>
_logger.warning("'kernel_size' must be odd -> adjusted to %d", kernel_size)
<|file_name|>bundle.js<|end_file_name|><|fim▁begin|>const c = require('ansi-colors') const glob = require('glob') const path = require('path') const terserVersion = require('terser/package.json').version const TerserWebpackPlugin = require('terser-webpack-plugin') const webpack = require('webpack') const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin const { argv } = require('yargs') const config = require('../config') // Ensures that production settings for Babel are used process.env.NODE_ENV = 'build-es' /* eslint-disable no-await-in-loop */ /* eslint-disable no-console */ /* eslint-disable no-restricted-syntax */ // // Webpack config // const makeWebpackConfig = (entry) => ({ devtool: false, mode: 'production', target: 'web', entry, output: { filename: path.basename(entry), path: config.paths.base('bundle-size', 'dist'), ...(argv.debug && { pathinfo: true, }), }, module: { rules: [ { test: /\.(js|ts)$/, loader: 'babel-loader', exclude: /node_modules/, options: { cacheDirectory: true, }, }, ], }, externals: { react: 'react', 'react-dom': 'reactDOM', }, ...(argv.debug && { optimization: { minimizer: [ new TerserWebpackPlugin({ cache: true, parallel: true, sourceMap: false, terserOptions: { mangle: false, output: { beautify: true, comments: true, preserve_annotations: true, }, }, }),<|fim▁hole|> }, }), performance: { hints: false, }, plugins: [ argv.debug && new BundleAnalyzerPlugin({ analyzerMode: 'static', logLevel: 'warn', openAnalyzer: false, reportFilename: `${path.basename(entry, '.js')}.html`, }), ].filter(Boolean), resolve: { alias: { 'semantic-ui-react': config.paths.dist('es', 'index.js'), }, }, }) function webpackAsync(webpackConfig) { return new Promise((resolve, reject) => { const compiler = webpack(webpackConfig) compiler.run((err, stats) => { if (err) { reject(err) } const info = stats.toJson() if (stats.hasErrors()) { reject(new Error(info.errors.toString())) } if (stats.hasWarnings()) { reject(new Error(info.warnings.toString())) } resolve(info) }) }) } // // // ;(async () => { const fixtures = glob.sync('fixtures/*.size.js', { cwd: __dirname, }) console.log(c.cyan(`ℹ Using Webpack ${webpack.version} & Terser ${terserVersion}`)) console.log(c.cyan('ℹ Running following fixtures:')) console.log(c.cyan(fixtures.map((fixture) => ` - ${fixture}`).join('\n'))) for (const fixture of fixtures) { const fixturePath = config.paths.base('bundle-size', fixture) await webpackAsync(makeWebpackConfig(fixturePath)) console.log(c.green(`✔ Completed: ${fixture}`)) } })()<|fim▁end|>
],
<|file_name|>temp_test_ortho.py<|end_file_name|><|fim▁begin|>import rppy import numpy as np import matplotlib.pyplot as plt vp1 = 3000 vs1 = 1500 p1 = 2000 e1_1 = 0.0 d1_1 = 0.0 y1_1 = 0.0 e2_1 = 0.0 d2_1 = 0.0 y2_1 = 0.0<|fim▁hole|>C1 = rppy.reflectivity.Cij(vp1, vs1, p1, e1_1, d1_1, y1_1, e2_1, d2_1, y2_1, d3_1) vp2 = 4000 vs2 = 2000 p2 = 2200 e1_2 = 0.0 d1_2 = 0.0 y1_2 = 0.0 e2_2 = 0.0 d2_2 = 0.0 y2_2 = 0.0 d3_2 = 0.0 chi2 = 0.0 C2 = rppy.reflectivity.Cij(vp2, vs2, p2, e1_2, d1_2, y1_2, e2_2, d2_2, y2_2, d3_2) phi = np.arange(0, 90, 1) theta = np.arange(0, 90, 1) loopang = phi theta = np.array([30]) rphti = np.zeros(np.shape(loopang)) rpzoe = np.zeros(np.shape(loopang)) rprug = np.zeros(np.shape(loopang)) for aid, val in enumerate(loopang): rphti[aid] = rppy.reflectivity.exact_ortho(C1, p1, C2, p2, chi1, chi2, loopang[aid], theta) rprug[aid] = rppy.reflectivity.ruger_hti(vp1, vs1, p1, e2_1, d2_1, y2_1, vp2, vs2, p2, e2_2, d2_2, y2_2, np.radians(theta), np.radians(loopang[aid])) rpzoe[aid] = rppy.reflectivity.zoeppritz(vp1, vs1, p1, vp2, vs2, p2, np.radians(theta)) plt.figure(1) plt.plot(loopang, rphti, loopang, rprug, loopang, rpzoe) plt.legend(['hti', 'ruger', 'zoe']) plt.show()<|fim▁end|>
d3_1 = 0.0 chi1 = 0.0
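The script above sweeps azimuth at a fixed 30 degree incidence and overlays the exact HTI solution, Rüger's HTI approximation, and isotropic Zoeppritz. A quick sanity check that follows from the same calls: with all Thomsen parameters set to zero, the anisotropic curve should collapse onto the Zoeppritz value. The call signatures below are copied from the script itself; whether rppy returns a scalar or a length-one array here is an assumption.

import numpy as np
import rppy

theta = 30.0
r_iso = rppy.reflectivity.zoeppritz(3000, 1500, 2000, 4000, 2000, 2200,
                                    np.radians(theta))
r_hti = rppy.reflectivity.ruger_hti(3000, 1500, 2000, 0.0, 0.0, 0.0,
                                    4000, 2000, 2200, 0.0, 0.0, 0.0,
                                    np.radians(theta), np.radians(45.0))
print(np.abs(np.asarray(r_iso) - np.asarray(r_hti)))  # expected to be ~0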
<|file_name|>fetcher_test.go<|end_file_name|><|fim▁begin|>// Copyright 2021 The LUCI Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package updater import ( "context" "testing" gerritpb "go.chromium.org/luci/common/proto/gerrit" "go.chromium.org/luci/cv/internal/changelist" gf "go.chromium.org/luci/cv/internal/gerrit/gerritfake" . "github.com/smartystreets/goconvey/convey" . "go.chromium.org/luci/common/testing/assertions" ) func TestRelatedChangeProcessing(t *testing.T) { t.Parallel() Convey("setGitDeps works", t, func() {<|fim▁hole|> toUpdate: changelist.UpdateFields{ Snapshot: &changelist.Snapshot{Kind: &changelist.Snapshot_Gerrit{Gerrit: &changelist.Gerrit{}}}, }, } Convey("No related changes", func() { f.setGitDeps(ctx, nil) So(f.toUpdate.Snapshot.GetGerrit().GetGitDeps(), ShouldBeNil) f.setGitDeps(ctx, []*gerritpb.GetRelatedChangesResponse_ChangeAndCommit{}) So(f.toUpdate.Snapshot.GetGerrit().GetGitDeps(), ShouldBeNil) }) Convey("Just itself", func() { // This isn't happening today, but CV shouldn't choke if Gerrit changes. f.setGitDeps(ctx, []*gerritpb.GetRelatedChangesResponse_ChangeAndCommit{ gf.RelatedChange(111, 3, 3), // No parents. }) So(f.toUpdate.Snapshot.GetGerrit().GetGitDeps(), ShouldBeNil) f.setGitDeps(ctx, []*gerritpb.GetRelatedChangesResponse_ChangeAndCommit{ gf.RelatedChange(111, 3, 3, "107_2"), }) So(f.toUpdate.Snapshot.GetGerrit().GetGitDeps(), ShouldBeNil) }) Convey("Has related, but no deps", func() { f.setGitDeps(ctx, []*gerritpb.GetRelatedChangesResponse_ChangeAndCommit{ gf.RelatedChange(111, 3, 3, "107_2"), gf.RelatedChange(114, 1, 3, "111_3"), gf.RelatedChange(117, 2, 2, "114_1"), }) So(f.toUpdate.Snapshot.GetGerrit().GetGitDeps(), ShouldBeNil) }) Convey("Has related, but lacking this change crbug/1199471", func() { f.setGitDeps(ctx, []*gerritpb.GetRelatedChangesResponse_ChangeAndCommit{ gf.RelatedChange(114, 1, 3, "111_3"), gf.RelatedChange(117, 2, 2, "114_1"), }) So(f.toUpdate.Snapshot.GetErrors(), ShouldHaveLength, 1) So(f.toUpdate.Snapshot.GetErrors()[0].GetCorruptGerritMetadata(), ShouldContainSubstring, "https://crbug.com/1199471") }) Convey("Has related, and several times itself", func() { f.setGitDeps(ctx, []*gerritpb.GetRelatedChangesResponse_ChangeAndCommit{ gf.RelatedChange(111, 2, 2, "107_2"), gf.RelatedChange(111, 3, 3, "107_2"), gf.RelatedChange(114, 1, 3, "111_3"), }) So(f.toUpdate.Snapshot.GetErrors()[0].GetCorruptGerritMetadata(), ShouldContainSubstring, "https://crbug.com/1199471") }) Convey("1 parent", func() { f.setGitDeps(ctx, []*gerritpb.GetRelatedChangesResponse_ChangeAndCommit{ gf.RelatedChange(107, 1, 3, "104_2"), gf.RelatedChange(111, 3, 3, "107_1"), gf.RelatedChange(117, 2, 2, "114_1"), }) So(f.toUpdate.Snapshot.GetGerrit().GetGitDeps(), ShouldResembleProto, []*changelist.GerritGitDep{ {Change: 107, Immediate: true}, }) }) Convey("Diamond", func() { f.setGitDeps(ctx, []*gerritpb.GetRelatedChangesResponse_ChangeAndCommit{ gf.RelatedChange(103, 2, 2), gf.RelatedChange(104, 2, 2, "103_2"), gf.RelatedChange(107, 1, 3, "104_2"), 
gf.RelatedChange(108, 1, 3, "104_2"), gf.RelatedChange(111, 3, 3, "107_1", "108_1"), gf.RelatedChange(114, 1, 3, "111_3"), gf.RelatedChange(117, 2, 2, "114_1"), }) So(f.toUpdate.Snapshot.GetGerrit().GetGitDeps(), ShouldResembleProto, []*changelist.GerritGitDep{ {Change: 107, Immediate: true}, {Change: 108, Immediate: true}, {Change: 104, Immediate: false}, {Change: 103, Immediate: false}, }) }) Convey("Same revision, different changes", func() { c104 := gf.RelatedChange(104, 1, 1, "103_2") c105 := gf.RelatedChange(105, 1, 1, "103_2") c105.GetCommit().Id = c104.GetCommit().GetId() f.setGitDeps(ctx, []*gerritpb.GetRelatedChangesResponse_ChangeAndCommit{ gf.RelatedChange(103, 2, 2), c104, c105, // should be ignored, somewhat arbitrarily. gf.RelatedChange(111, 3, 3, "104_1"), }) So(f.toUpdate.Snapshot.GetGerrit().GetGitDeps(), ShouldResembleProto, []*changelist.GerritGitDep{ {Change: 104, Immediate: true}, {Change: 103, Immediate: false}, }) }) Convey("2 parents which are the same change at different revisions", func() { // Actually happened, see https://crbug.com/988309. f.setGitDeps(ctx, []*gerritpb.GetRelatedChangesResponse_ChangeAndCommit{ gf.RelatedChange(104, 1, 2, "long-ago-merged1"), gf.RelatedChange(107, 1, 1, "long-ago-merged2"), gf.RelatedChange(104, 2, 2, "107_1"), gf.RelatedChange(111, 3, 3, "104_1", "104_2"), }) So(f.toUpdate.Snapshot.GetGerrit().GetGitDeps(), ShouldResembleProto, []*changelist.GerritGitDep{ {Change: 104, Immediate: true}, {Change: 107, Immediate: false}, }) }) }) }<|fim▁end|>
ctx := context.Background() f := fetcher{ change: 111, host: "host",
<|file_name|>EndpointIdIsRandomExpectTest.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.openstack.nova.v2_0; <|fim▁hole|>import static org.jclouds.Constants.PROPERTY_ENDPOINT; import static org.testng.Assert.assertEquals; import java.util.Properties; import org.jclouds.http.HttpRequest; import org.jclouds.http.HttpResponse; import org.jclouds.openstack.nova.v2_0.internal.BaseNovaApiExpectTest; import org.testng.annotations.Test; import com.google.common.collect.ImmutableSet; /** * Tests to ensure that we can pick the only endpoint of a service */ @Test(groups = "unit", testName = "EndpointIdIsRandomExpectTest") public class EndpointIdIsRandomExpectTest extends BaseNovaApiExpectTest { public EndpointIdIsRandomExpectTest() { this.identity = "demo:demo"; this.credential = "password"; } @Override protected Properties setupProperties() { Properties overrides = super.setupProperties(); overrides.setProperty(PROPERTY_ENDPOINT, "http://10.10.10.10:5000/v2.0/"); return overrides; } public void testVersionMatchOnConfiguredRegionsWhenResponseIs2xx() { HttpRequest authenticate = HttpRequest .builder() .method("POST") .endpoint("http://10.10.10.10:5000/v2.0/tokens") .addHeader("Accept", "application/json") .payload( payloadFromStringWithContentType( "{\"auth\":{\"passwordCredentials\":{\"username\":\"demo\",\"password\":\"password\"},\"tenantName\":\"demo\"}}", "application/json")).build(); HttpResponse authenticationResponse = HttpResponse.builder().statusCode(200) .payload(payloadFromResourceWithContentType("/access_version_uids.json", "application/json")).build(); NovaApi whenNovaRegionExists = requestSendsResponse(authenticate, authenticationResponse); assertEquals(whenNovaRegionExists.getConfiguredRegions(), ImmutableSet.of("RegionOne")); } }<|fim▁end|>
<|file_name|>subdomains.controller.js<|end_file_name|><|fim▁begin|>/* global _, angular */ angular.module('bidos') .controller('Subdomains', Subdomains); function Subdomains(Resources, $scope, $state) { Resources.get().then(function(data) { $scope.subdomains = _.filter(data.subdomains, {domain_id: parseInt($state.params.domainId)});<|fim▁hole|> }; }<|fim▁end|>
}); $scope.select = function (subdomain) { $state.go('bidos.domains.subdomains.items', {subdomainId: parseInt(subdomain.id)});
<|file_name|>tldMap.service.js<|end_file_name|><|fim▁begin|>(function() { 'use strict'; angular .module('tiny-leaflet-directive') .factory('tldMapService', tldMapService); tldMapService.$inject = ['tldHelpers']; function tldMapService(tldHelpers) { var maps = {}; return { setMap: setMap, getMap: getMap, unresolveMap: unresolveMap }; function setMap(leafletMap, mapId) { var defer = tldHelpers.getUnresolvedDefer(maps, mapId);<|fim▁hole|> } function getMap (mapId) { var defer = tldHelpers.getDefer(maps, mapId); return defer.promise; } function unresolveMap(mapId) { maps[mapId] = undefined; } } })();<|fim▁end|>
defer.resolve(leafletMap); tldHelpers.setResolvedDefer(maps, mapId);
<|file_name|>test_deprecated.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import absolute_import, division, print_function, unicode_literals import warnings from builtins import object, str from contextlib import contextmanager import mock from packaging.version import Version from pants.base.deprecated import (BadDecoratorNestingError, BadSemanticVersionError, CodeRemovedError, InvalidSemanticVersionOrderingError, MissingSemanticVersionError, NonDevSemanticVersionError, deprecated, deprecated_conditional, deprecated_module, warn_or_error) from pants.util.collections import assert_single_element from pants_test.test_base import TestBase _FAKE_CUR_VERSION = '2.0.0.dev0' class DeprecatedTest(TestBase): FUTURE_VERSION = '9999.9.9.dev0' @contextmanager def _test_deprecation(self, deprecation_expected=True): with warnings.catch_warnings(record=True) as seen_warnings: def assert_deprecation_warning(): if deprecation_expected: warning = assert_single_element(seen_warnings) self.assertEqual(warning.category, DeprecationWarning) return warning.message else: self.assertEqual(0, len(seen_warnings)) warnings.simplefilter('always') self.assertEqual(0, len(seen_warnings)) yield assert_deprecation_warning assert_deprecation_warning() def test_deprecated_function(self): expected_return = 'deprecated_function' @deprecated(self.FUTURE_VERSION) def deprecated_function(): return expected_return with self._test_deprecation(): self.assertEqual(expected_return, deprecated_function()) def test_deprecated_method(self): expected_return = 'deprecated_method' class Test(object): @deprecated(self.FUTURE_VERSION) def deprecated_method(self): return expected_return with self._test_deprecation(): self.assertEqual(expected_return, Test().deprecated_method()) def test_deprecated_conditional_true(self): predicate = lambda: True with self._test_deprecation(): deprecated_conditional(predicate, self.FUTURE_VERSION, "test hint message", stacklevel=0) def test_deprecated_conditional_false(self): predicate = lambda: False with self._test_deprecation(deprecation_expected=False): deprecated_conditional(predicate, self.FUTURE_VERSION, "test hint message", stacklevel=0) def test_deprecated_property(self): expected_return = 'deprecated_property' class Test(object): @property @deprecated(self.FUTURE_VERSION) def deprecated_property(self): return expected_return with self._test_deprecation(): self.assertEqual(expected_return, Test().deprecated_property) def test_deprecated_module(self): with self._test_deprecation() as extract_deprecation_warning: # Note: Attempting to import here a dummy module that just calls deprecated_module() does not # properly trigger the deprecation, due to a bad interaction with pytest that I've not fully # understood. But we trust python to correctly execute modules on import, so just testing a # direct call of deprecated_module() here is fine. deprecated_module(self.FUTURE_VERSION, hint_message='Do not use me.') warning_message = str(extract_deprecation_warning()) self.assertIn('module will be removed', warning_message) self.assertIn('Do not use me', warning_message) def test_deprecation_hint(self): hint_message = 'Find the foos, fast!' 
expected_return = 'deprecated_function' @deprecated(self.FUTURE_VERSION, hint_message=hint_message) def deprecated_function(): return expected_return with self._test_deprecation() as extract_deprecation_warning: self.assertEqual(expected_return, deprecated_function()) self.assertIn(hint_message, str(extract_deprecation_warning())) def test_deprecation_subject(self): subject = '`./pants blah`' expected_return = 'deprecated_function' @deprecated(self.FUTURE_VERSION, subject=subject) def deprecated_function(): return expected_return with self._test_deprecation() as extract_deprecation_warning: self.assertEqual(expected_return, deprecated_function()) self.assertIn(subject, str(extract_deprecation_warning())) def test_removal_version_required(self): with self.assertRaises(MissingSemanticVersionError): @deprecated(None) def test_func(): pass def test_removal_version_bad(self): with self.assertRaises(BadSemanticVersionError): warn_or_error('a.a.a', 'dummy description') with self.assertRaises(BadSemanticVersionError): @deprecated('a.a.a') def test_func0(): pass with self.assertRaises(BadSemanticVersionError): warn_or_error(1.0, 'dummy description') with self.assertRaises(BadSemanticVersionError): @deprecated(1.0) def test_func1(): pass with self.assertRaises(BadSemanticVersionError): warn_or_error('1.a.0', 'dummy description') with self.assertRaises(BadSemanticVersionError): @deprecated('1.a.0') def test_func1a():<|fim▁hole|> @deprecated('1.0.0') def test_func1a(): pass @mock.patch('pants.base.deprecated.PANTS_SEMVER', Version(_FAKE_CUR_VERSION)) def test_removal_version_same(self): with self.assertRaises(CodeRemovedError): warn_or_error(_FAKE_CUR_VERSION, 'dummy description') @deprecated(_FAKE_CUR_VERSION) def test_func(): pass with self.assertRaises(CodeRemovedError): test_func() def test_removal_version_lower(self): with self.assertRaises(CodeRemovedError): warn_or_error('0.0.27.dev0', 'dummy description') @deprecated('0.0.27.dev0') def test_func(): pass with self.assertRaises(CodeRemovedError): test_func() def test_bad_decorator_nesting(self): with self.assertRaises(BadDecoratorNestingError): class Test(object): @deprecated(self.FUTURE_VERSION) @property def test_prop(this): pass def test_deprecation_start_version_validation(self): with self.assertRaises(BadSemanticVersionError): warn_or_error(removal_version='1.0.0.dev0', deprecated_entity_description='dummy', deprecation_start_version='1.a.0') with self.assertRaises(InvalidSemanticVersionOrderingError): warn_or_error(removal_version='0.0.0.dev0', deprecated_entity_description='dummy', deprecation_start_version='1.0.0.dev0') @mock.patch('pants.base.deprecated.PANTS_SEMVER', Version(_FAKE_CUR_VERSION)) def test_deprecation_start_period(self): with self.assertRaises(CodeRemovedError): warn_or_error(removal_version=_FAKE_CUR_VERSION, deprecated_entity_description='dummy', deprecation_start_version='1.0.0.dev0') with self.warnings_catcher() as w: warn_or_error(removal_version='999.999.999.dev999', deprecated_entity_description='dummy', deprecation_start_version=_FAKE_CUR_VERSION) self.assertWarning(w, DeprecationWarning, 'DEPRECATED: dummy will be removed in version 999.999.999.dev999.') self.assertIsNone( warn_or_error(removal_version='999.999.999.dev999', deprecated_entity_description='dummy', deprecation_start_version='500.0.0.dev0'))<|fim▁end|>
pass def test_removal_version_non_dev(self): with self.assertRaises(NonDevSemanticVersionError):
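The tests above pin down the contract expected of @deprecated: warn with a removal version (plus optional hint) while the running version is still below it, and raise once that version is reached; malformed versions are rejected up front. A much-reduced sketch of that contract is below; it is not pants' implementation, it skips the validation branches, and it leans on packaging (which the test file itself imports) for version comparison.

import functools
import warnings
from packaging.version import Version

CURRENT_VERSION = Version("1.5.0")        # stand-in for the running version

def deprecated(removal_version, hint_message=""):
    removal = Version(removal_version)
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if CURRENT_VERSION >= removal:
                raise RuntimeError("%s was removed in version %s"
                                   % (func.__name__, removal_version))
            warnings.warn("DEPRECATED: %s will be removed in version %s. %s"
                          % (func.__name__, removal_version, hint_message),
                          DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)
        return wrapper
    return decorator

@deprecated("9999.9.9.dev0", hint_message="Use new_api() instead.")
def old_api():
    return 42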
<|file_name|>rand.cc<|end_file_name|><|fim▁begin|><|fim▁hole|> * * This file is part of M3 (Microkernel-based SysteM for Heterogeneous Manycores). * * M3 is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 as * published by the Free Software Foundation. * * M3 is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License version 2 for more details. */ #include <base/stream/IStringStream.h> #include <base/util/Random.h> #include <base/util/Time.h> #include <base/CPU.h> #include <m3/stream/Standard.h> #include "loop.h" using namespace m3; alignas(64) static rand_type buffer[EL_COUNT]; int main(int argc, char **argv) { if(argc != 2) exitmsg("Usage: " << argv[0] << " <count>"); size_t count = IStringStream::read_from<size_t>(argv[1]); while(count > 0) { size_t amount = Math::min(count, ARRAY_SIZE(buffer)); Time::start(0x5555); CPU::compute(amount * 8); // generate(buffer, amount); Time::stop(0x5555); cout.write(buffer, amount * sizeof(rand_type)); count -= amount; } return 0; }<|fim▁end|>
/* * Copyright (C) 2016-2018, Nils Asmussen <[email protected]> * Economic rights: Technische Universitaet Dresden (Germany)
<|file_name|>0006_auto_20160615_1321.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.9.7 on 2016-06-15 20:21 from __future__ import unicode_literals from django.db import migrations, models import posts.models<|fim▁hole|> dependencies = [ ('posts', '0005_auto_20160615_1249'), ] operations = [ migrations.AlterField( model_name='post', name='image', field=models.ImageField(blank=True, height_field='height_field', null=True, upload_to=posts.models.upload_location, width_field='width_field'), ), ]<|fim▁end|>
class Migration(migrations.Migration):
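The migration above points upload_to at posts.models.upload_location, which is not shown in this changeset. For orientation, a plausible sketch of such a callable is below; Django calls it with the model instance and the uploaded file's original name, and the layout chosen here is an assumption, not the project's actual function.

import os

def upload_location(instance, filename):
    # Group uploads by the owning post; fall back to a bucket for unsaved rows.
    folder = instance.id if instance.id is not None else "unsaved"
    return os.path.join("posts", str(folder), filename)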
<|file_name|>_cusum.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Methods for estimating structural breaks in time series regressions TODO: extract and move Chow test from "commission test" over to here """ from __future__ import division from collections import namedtuple import logging import numpy as np import pandas as pd from scipy.optimize import brentq from scipy import stats from scipy.stats import norm import xarray as xr from ._core import pandas_like, StructuralBreakResult from ..accel import try_jit from ..regression._recresid import _recresid logger = logging.getLogger(__name__) pnorm = norm.cdf # OLS-CUSUM # dict: CUSUM OLS critical values CUSUM_OLS_CRIT = { 0.01: 1.63, 0.05: 1.36, 0.10: 1.22 } @try_jit(nopython=True, nogil=True) def _cusum(resid, ddof): n = resid.size df = n - ddof sigma = ((resid ** 2).sum() / df * n) ** 0.5 process = resid.cumsum() / sigma return process @try_jit(nopython=True, nogil=True) def _cusum_OLS(X, y): n, p = X.shape beta = np.linalg.lstsq(X, y)[0] resid = np.dot(X, beta) - y process = _cusum(resid, p) _process = np.abs(process) idx = _process.argmax() score = _process[idx] return process, score, idx def cusum_OLS(X, y, alpha=0.05): ur""" OLS-CUSUM test for structural breaks Tested against R's ``strucchange`` package and is faster than the equivalent function in the ``statsmodels`` Python package when Numba is installed. The OLS-CUSUM test statistic, based on a single OLS regression, is defined as: .. math:: W_n^0(t) = \frac{1}{\hat{\sigma}\sqrt{n}} \sum_{i=1}^{n}{\hat{\mu_i}} Args: X (array like): 2D (n_obs x n_features) design matrix y (array like): 1D (n_obs) indepdent variable alpha (float): Test threshold (either 0.01, 0.05, or 0.10) from Ploberger and Krämer (1992) Returns: StructuralBreakResult: A named tuple include the the test name, change point (index of ``y``), the test ``score`` and ``pvalue``, and a boolean testing if the CUSUM score is significant at the given ``alpha`` """ _X = X.values if isinstance(X, pandas_like) else X _y = y.values.ravel() if isinstance(y, pandas_like) else y.ravel() process, score, idx = _cusum_OLS(_X, _y) if isinstance(y, pandas_like): if isinstance(y, (pd.Series, pd.DataFrame)): index = y.index idx = index[idx] elif isinstance(y, xr.DataArray): index = y.to_series().index idx = index[idx] process = pd.Series(data=process, index=index, name='OLS-CUSUM') # crit = stats.kstwobign.isf(alpha) ~70usec crit = CUSUM_OLS_CRIT[alpha] pval = stats.kstwobign.sf(score) return StructuralBreakResult(method='OLS-CUSUM', index=idx, score=score, process=process, boundary=crit, pvalue=pval, signif=score > crit) # REC-CUSUM def _brownian_motion_pvalue(x, k): """ Return pvalue for some given test statistic """ # TODO: Make generic, add "type='Brownian Motion'"? 
if x < 0.3: p = 1 - 0.1464 * x else: p = 2 * (1 - pnorm(3 * x) + np.exp(-4 * x ** 2) * (pnorm(x) + pnorm(5 * x) - 1) - np.exp(-16 * x ** 2) * (1 - pnorm(x))) return 1 - (1 - p) ** k def _cusum_rec_test_crit(alpha): """ Return critical test statistic value for some alpha """ return brentq(lambda _x: _brownian_motion_pvalue(_x, 1) - alpha, 0, 20) @try_jit(nopython=True, nogil=True) def _cusum_rec_boundary(x, alpha=0.05): """ Equivalent to ``strucchange::boundary.efp``` for Rec-CUSUM """ n = x.ravel().size bound = _cusum_rec_test_crit(alpha) boundary = (bound + (2 * bound * np.arange(0, n) / (n - 1))) return boundary @try_jit() def _cusum_rec_efp(X, y): """ Equivalent to ``strucchange::efp`` for Rec-CUSUM """ # Run "efp" n, k = X.shape w = _recresid(X, y, k)[k:] sigma = w.var(ddof=1) ** 0.5 # can't jit because of ddof w = np.concatenate((np.array([0]), w)) return np.cumsum(w) / (sigma * (n - k) ** 0.5) @try_jit(nopython=True, nogil=True) def _cusum_rec_sctest(x): """ Equivalent to ``strucchange::sctest`` for Rec-CUSUM """<|fim▁hole|> x = x * 1 / (1 + 2 * j) stat = np.abs(x).max() return stat def cusum_recursive(X, y, alpha=0.05): ur""" Rec-CUSUM test for structural breaks Tested against R's ``strucchange`` package. The REC-CUSUM test, based on the recursive residuals, is defined as: .. math:: W_n(t) = \frac{1}{\tilde{\sigma}\sqrt{n}} \sum_{i=k+1}^{k+(n-k)}{\tilde{\mu_i}} Critical values for this test statistic are taken from:: A. Zeileis. p values and alternative boundaries for CUSUM tests. Working Paper 78, SFB "Adaptive Information Systems and Modelling in Economics and Management Science", December 2000b. Args: X (array like): 2D (n_obs x n_features) design matrix y (array like): 1D (n_obs) indepdent variable alpha (float): Test threshold Returns: StructuralBreakResult: A named tuple include the the test name, change point (index of ``y``), the test ``score`` and ``pvalue``, and a boolean testing if the CUSUM score is significant at the given ``alpha`` """ _X = X.values if isinstance(X, pandas_like) else X _y = y.values.ravel() if isinstance(y, pandas_like) else y.ravel() process = _cusum_rec_efp(_X, _y) stat = _cusum_rec_sctest(process) stat_pvalue = _brownian_motion_pvalue(stat, 1) pvalue_crit = _cusum_rec_test_crit(alpha) if stat_pvalue < alpha: boundary = _cusum_rec_boundary(process, alpha) idx = np.where(np.abs(process) > boundary)[0].min() else: idx = np.abs(process).max() if isinstance(y, pandas_like): if isinstance(y, (pd.Series, pd.DataFrame)): index = y.index idx = index[idx] elif isinstance(y, xr.DataArray): index = y.to_series().index idx = index[idx] process = pd.Series(data=process, index=index, name='REC-CUSUM') boundary = pd.Series(data=boundary, index=index, name='Boundary') return StructuralBreakResult(method='REC-CUSUM', process=process, boundary=boundary, index=idx, pvalue=stat_pvalue, score=stat, signif=stat_pvalue < pvalue_crit)<|fim▁end|>
x = x[1:] j = np.linspace(0, 1, x.size + 1)[1:]
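A short usage sketch for the OLS-CUSUM test defined above, run on synthetic data with a level shift halfway through the series. It only uses NumPy plus the module's own cusum_OLS, and the expectation in the final comment is just that: an expectation, since the exact break index depends on the random noise.

import numpy as np

n = 200
X = np.column_stack([np.ones(n), np.arange(n, dtype=float)])  # intercept + trend
y = 0.5 * np.arange(n) + np.random.normal(scale=1.0, size=n)
y[n // 2:] += 25.0                        # structural break at observation 100

result = cusum_OLS(X, y, alpha=0.05)      # StructuralBreakResult defined above
print(result.signif, result.index)        # expect True and an index near 100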
<|file_name|>nat_monitor_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 import unittest from unittest.mock import MagicMock import logging import nat_monitor import utils class NatInstanceTest(unittest.TestCase): def setUp(self): self.vpc_conn = MagicMock() self.ec2_conn = MagicMock() self.instance_id = 'i-abc123' self.subnet = MagicMock() self.subnet.id = 'subnetid' self.route_table = MagicMock() self.route_table.id = 'rt-123' self.vpc_conn.get_all_subnets = MagicMock(return_value=[self.subnet]) self.vpc_conn.get_all_route_tables = MagicMock( return_value=[self.route_table]) self.vpc_conn.create_route = MagicMock() self.vpc_id = 'vpc123' self.az = 'us-east-1a' self.instance = MagicMock() self.role = 'nat' self.instance.tags = { 'Role': self.role, 'Name': NatInstanceTest.__name__} self.instance_tags = MagicMock() self.name = 'name' self.instance_tags.get_name = MagicMock(return_value=self.name) self.instances = [self.instance] self.ec2_conn.get_only_instances = MagicMock( return_value=self.instances) self.ec2_conn.modify_instance_attribute = MagicMock() self.instance_metadata = { 'instance-id': self.instance_id, 'network': { 'interfaces': { 'macs': { '0e:bf:0c:a1:f6:59': { 'vpc-id': self.vpc_id } } } }, 'placement': { 'availability-zone': self.az } } self.nat_instance = nat_monitor.NatInstance( self.vpc_conn, self.ec2_conn, self.instance_tags, self.instance_metadata) def test_init(self): self.assertEqual(self.nat_instance.vpc_conn, self.vpc_conn) self.assertEqual(self.nat_instance.ec2_conn, self.ec2_conn) self.assertEqual(self.nat_instance.vpc_id, self.vpc_id) self.assertEqual(self.nat_instance.az, self.az) self.assertEqual(self.nat_instance.instance_id, self.instance_id) self.assertEqual( self.nat_instance.my_route_table_id, self.route_table.id) self.assertEqual(self.nat_instance.name_tag, self.name) <|fim▁hole|> def test_set_route(self): self.nat_instance.set_route() self.vpc_conn.create_route.assert_called_with( self.nat_instance.my_route_table_id, '0.0.0.0/0', instance_id=self.nat_instance.instance_id) if __name__ == '__main__': unittest.main()<|fim▁end|>
def test_disable_source_dest_check(self): self.nat_instance.disable_source_dest_check() self.ec2_conn.modify_instance_attribute.assert_called_with( self.instance_id, 'sourceDestCheck', False)
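The nat_monitor module under test is not included here, but the mocks and assertions above constrain what NatInstance has to do. The skeleton below is only an inference from those assertions (in particular, picking the route table is surely more involved in the real module):

class NatInstance(object):
    def __init__(self, vpc_conn, ec2_conn, instance_tags, metadata):
        self.vpc_conn = vpc_conn
        self.ec2_conn = ec2_conn
        self.instance_id = metadata['instance-id']
        macs = metadata['network']['interfaces']['macs']
        self.vpc_id = next(iter(macs.values()))['vpc-id']
        self.az = metadata['placement']['availability-zone']
        self.my_route_table_id = vpc_conn.get_all_route_tables()[0].id
        self.name_tag = instance_tags.get_name()

    def disable_source_dest_check(self):
        # Matches: modify_instance_attribute(instance_id, 'sourceDestCheck', False)
        self.ec2_conn.modify_instance_attribute(
            self.instance_id, 'sourceDestCheck', False)

    def set_route(self):
        # Matches: create_route(route_table_id, '0.0.0.0/0', instance_id=...)
        self.vpc_conn.create_route(
            self.my_route_table_id, '0.0.0.0/0', instance_id=self.instance_id)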
<|file_name|>network.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ MicroSAN Network Module """ import socket, traceback IPADDR_BROADCAST = '<broadcast>' IPADDR_ANY = '' SOCK_BUF_SIZE = 8192 SOCK_TIMEOUT = 1 MICROSAN_PORT = 20001 APP_PORT = 51000 class UDPSocket(object): def __init__(self, addr = None, reuse_addr = False): """ Create socket for use """ self.buf = [] if addr is None: addr = (IPADDR_ANY, APP_PORT) self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) if reuse_addr: self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) if addr[0] == IPADDR_ANY: self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)<|fim▁hole|> self.sock.settimeout(SOCK_TIMEOUT) self.sock.bind(addr) def send_data(self, data, addr): """ Sends data """ self.sock.sendto(data, addr) def recv_data(self): """ Wait for data """ try: message, address = self.sock.recvfrom(SOCK_BUF_SIZE) self.buf.append((message, address)) except (KeyboardInterrupt, SystemExit): raise # except: # traceback.print_exc() def get_next_buf(self): """ Return data from buffer """ return self.buf.pop(0) def close(self): """ Closes socket """ self.sock.close() def clear_buf(self): """ Clears buffer """ self.buf = [] class BroadcastSocket(UDPSocket): def __init__(self, addr = None, reuse_addr = True): """ Create socket for use """ super(BroadcastSocket, self).__init__(addr = addr, reuse_addr = reuse_addr) def send_data(self, data, addr = None): """ Sends broadcast """ if addr is None: addr = (IPADDR_BROADCAST, MICROSAN_PORT) super(BroadcastSocket, self).send_data(data, addr)<|fim▁end|>
<|file_name|>structarm__dct4__instance__q31.js<|end_file_name|><|fim▁begin|>var structarm__dct4__instance__q31 = [ [ "N", "structarm__dct4__instance__q31.html#a46a9f136457350676e2bfd3768ff9d6d", null ], <|fim▁hole|> [ "Nby2", "structarm__dct4__instance__q31.html#a32d3268ba4629908dba056599f0a904d", null ], [ "normalize", "structarm__dct4__instance__q31.html#ac80ff7b28fca36aeef74dea12e8312dd", null ], [ "pCfft", "structarm__dct4__instance__q31.html#ac96579cfb28d08bb11dd2fe4c6303833", null ], [ "pCosFactor", "structarm__dct4__instance__q31.html#af97204d1838925621fc82021a0c2d6c1", null ], [ "pRfft", "structarm__dct4__instance__q31.html#af1487dab5e7963b85dc0fdc6bf492542", null ], [ "pTwiddle", "structarm__dct4__instance__q31.html#a7db236e22673146bb1d2c962f0713f08", null ] ];<|fim▁end|>
<|file_name|>test_cryotank.py<|end_file_name|><|fim▁begin|>import synapse.common as s_common import synapse.cryotank as s_cryotank import synapse.lib.const as s_const import synapse.tests.utils as s_t_utils from synapse.tests.utils import alist logger = s_cryotank.logger cryodata = (('foo', {'bar': 10}), ('baz', {'faz': 20})) class CryoTest(s_t_utils.SynTest): async def test_cryo_cell_async(self): async with self.getTestCryo() as cryo: async with cryo.getLocalProxy() as prox: self.true(await prox.init('foo')) self.eq([], await alist(prox.rows('foo', 0, 1))) async def test_cryo_cell(self): with self.getTestDir() as dirn: async with self.getTestCryoAndProxy(dirn=dirn) as (cryo, prox): self.eq((), await prox.list()) self.true(await prox.init('foo')) self.eq('foo', (await prox.list())[0][0]) self.none(await prox.last('foo')) self.eq([], await alist(prox.rows('foo', 0, 1))) self.true(await prox.puts('foo', cryodata)) info = await prox.list() self.eq('foo', info[0][0]) self.eq(2, info[0][1].get('stat').get('entries')) self.true(await prox.puts('foo', cryodata)) items = await alist(prox.slice('foo', 1, 3)) self.eq(items[0][1][0], 'baz') metrics = await alist(prox.metrics('foo', 0, 9999)) self.len(2, metrics) self.eq(2, metrics[0][1]['count']) self.eq(3, (await prox.last('foo'))[0]) self.eq('baz', (await prox.last('foo'))[1][0]) iden = s_common.guid() self.eq(0, await prox.offset('foo', iden)) items = await alist(prox.slice('foo', 0, 1000, iden=iden)) self.eq(0, await prox.offset('foo', iden)) items = await alist(prox.slice('foo', 4, 1000, iden=iden)) self.eq(4, await prox.offset('foo', iden)) # test the direct tank share.... async with cryo.getLocalProxy(share='cryotank/foo') as lprox: items = await alist(lprox.slice(1, 3)) self.eq(items[0][1][0], 'baz')<|fim▁hole|> await lprox.puts(cryodata) self.len(6, await alist(lprox.slice(0, 9999))) # test offset storage and updating iden = s_common.guid() self.eq(0, await lprox.offset(iden)) self.eq(2, await lprox.puts(cryodata, seqn=(iden, 0))) self.eq(2, await lprox.offset(iden)) # test the new open share async with cryo.getLocalProxy(share='cryotank/lulz') as lprox: self.len(0, await alist(lprox.slice(0, 9999))) await lprox.puts(cryodata) self.len(2, await alist(lprox.slice(0, 9999))) self.len(1, await alist(lprox.metrics(0))) # Delete apis self.false(await prox.delete('newp')) self.true(await prox.delete('lulz')) # Re-open the tank and ensure that the deleted tank is not present. async with self.getTestCryoAndProxy(dirn=dirn) as (cryo, prox): tanks = await prox.list() self.len(1, tanks) self.eq('foo', tanks[0][0]) async def test_cryo_init(self): with self.getTestDir() as dirn: async with self.getTestCryo(dirn) as cryo: # test passing conf data in through init directly tank = await cryo.init('conftest', conf={'map_size': s_const.mebibyte * 64}) self.eq(tank.slab.mapsize, s_const.mebibyte * 64) _, conf = await cryo.hive.get(('cryo', 'names', 'conftest')) self.eq(conf, {'map_size': s_const.mebibyte * 64}) # And the data was persisted async with self.getTestCryo(dirn) as cryo: tank = cryo.tanks.get('conftest') self.eq(tank.slab.mapsize, s_const.mebibyte * 64) _, conf = await cryo.hive.get(('cryo', 'names', 'conftest')) self.eq(conf, {'map_size': s_const.mebibyte * 64})<|fim▁end|>
self.len(4, await alist(lprox.slice(0, 9999)))
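A sketch of the CryoCell proxy API exercised in the test above, assuming prox is a telepath proxy to a cryo cell (obtained as in getTestCryoAndProxy); the tank name and item are placeholders.

async def demo(prox):
    # Create a tank, append items, then read them back (API calls as used in the test).
    await prox.init('events')
    await prox.puts('events', (('foo', {'bar': 10}),))
    async for offs, item in prox.slice('events', 0, 10):
        print(offs, item)
    print(await prox.last('events'))   # (offset, item) of the newest entry
    await prox.delete('events')        # returns True once the tank is removed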
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Utilities for random number generation //! //! The key functions are `random()` and `Rng::gen()`. These are polymorphic //! and so can be used to generate any type that implements `Rand`. Type inference //! means that often a simple call to `rand::random()` or `rng.gen()` will //! suffice, but sometimes an annotation is required, e.g. `rand::random::<f64>()`. //! //! See the `distributions` submodule for sampling random numbers from //! distributions like normal and exponential. //! //! # Thread-local RNG //! //! There is built-in support for a RNG associated with each thread stored //! in thread-local storage. This RNG can be accessed via `thread_rng`, or //! used implicitly via `random`. This RNG is normally randomly seeded //! from an operating-system source of randomness, e.g. `/dev/urandom` on //! Unix systems, and will automatically reseed itself from this source //! after generating 32 KiB of random data. //! //! # Cryptographic security //! //! An application that requires an entropy source for cryptographic purposes //! must use `OsRng`, which reads randomness from the source that the operating //! system provides (e.g. `/dev/urandom` on Unixes or `CryptGenRandom()` on Windows). //! The other random number generators provided by this module are not suitable //! for such purposes. //! //! *Note*: many Unix systems provide `/dev/random` as well as `/dev/urandom`. //! This module uses `/dev/urandom` for the following reasons: //! //! - On Linux, `/dev/random` may block if entropy pool is empty; `/dev/urandom` will not block. //! This does not mean that `/dev/random` provides better output than //! `/dev/urandom`; the kernel internally runs a cryptographically secure pseudorandom //! number generator (CSPRNG) based on entropy pool for random number generation, //! so the "quality" of `/dev/random` is not better than `/dev/urandom` in most cases. //! However, this means that `/dev/urandom` can yield somewhat predictable randomness //! if the entropy pool is very small, such as immediately after first booting. //! Linux 3.17 added the `getrandom(2)` system call which solves the issue: it blocks if entropy //! pool is not initialized yet, but it does not block once initialized. //! `OsRng` tries to use `getrandom(2)` if available, and use `/dev/urandom` fallback if not. //! If an application does not have `getrandom` and likely to be run soon after first booting, //! or on a system with very few entropy sources, one should consider using `/dev/random` via //! `ReaderRng`. //! - On some systems (e.g. FreeBSD, OpenBSD and Mac OS X) there is no difference //! between the two sources. (Also note that, on some systems e.g. FreeBSD, both `/dev/random` //! and `/dev/urandom` may block once if the CSPRNG has not seeded yet.) 
#![unstable(feature = "rand")] use prelude::v1::*; use cell::RefCell; use io; use mem; use rc::Rc; #[cfg(target_pointer_width = "32")] use core_rand::IsaacRng as IsaacWordRng; #[cfg(target_pointer_width = "64")] use core_rand::Isaac64Rng as IsaacWordRng; pub use core_rand::{Rand, Rng, SeedableRng}; pub use core_rand::{XorShiftRng, IsaacRng, Isaac64Rng}; pub use core_rand::reseeding; pub use rand::os::OsRng; pub mod os; pub mod reader; /// The standard RNG. This is designed to be efficient on the current /// platform. #[derive(Copy, Clone)] pub struct StdRng { rng: IsaacWordRng, } impl StdRng { /// Create a randomly seeded instance of `StdRng`. /// /// This is a very expensive operation as it has to read /// randomness from the operating system and use this in an /// expensive seeding operation. If one is only generating a small /// number of random numbers, or doesn't need the utmost speed for /// generating each number, `thread_rng` and/or `random` may be more /// appropriate. /// /// Reading the randomness from the OS may fail, and any error is /// propagated via the `io::Result` return value. pub fn new() -> io::Result<StdRng> { OsRng::new().map(|mut r| StdRng { rng: r.gen() }) } } impl Rng for StdRng {<|fim▁hole|> fn next_u32(&mut self) -> u32 { self.rng.next_u32() } #[inline] fn next_u64(&mut self) -> u64 { self.rng.next_u64() } } impl<'a> SeedableRng<&'a [usize]> for StdRng { fn reseed(&mut self, seed: &'a [usize]) { // the internal RNG can just be seeded from the above // randomness. self.rng.reseed(unsafe {mem::transmute(seed)}) } fn from_seed(seed: &'a [usize]) -> StdRng { StdRng { rng: SeedableRng::from_seed(unsafe {mem::transmute(seed)}) } } } /// Controls how the thread-local RNG is reseeded. struct ThreadRngReseeder; impl reseeding::Reseeder<StdRng> for ThreadRngReseeder { fn reseed(&mut self, rng: &mut StdRng) { *rng = match StdRng::new() { Ok(r) => r, Err(e) => panic!("could not reseed thread_rng: {}", e) } } } const THREAD_RNG_RESEED_THRESHOLD: usize = 32_768; type ThreadRngInner = reseeding::ReseedingRng<StdRng, ThreadRngReseeder>; /// The thread-local RNG. #[derive(Clone)] pub struct ThreadRng { rng: Rc<RefCell<ThreadRngInner>>, } /// Retrieve the lazily-initialized thread-local random number /// generator, seeded by the system. Intended to be used in method /// chaining style, e.g. `thread_rng().gen::<isize>()`. /// /// The RNG provided will reseed itself from the operating system /// after generating a certain amount of randomness. /// /// The internal RNG used is platform and architecture dependent, even /// if the operating system random number generator is rigged to give /// the same sequence always. If absolute consistency is required, /// explicitly select an RNG, e.g. `IsaacRng` or `Isaac64Rng`. pub fn thread_rng() -> ThreadRng { // used to make space in TLS for a random number generator thread_local!(static THREAD_RNG_KEY: Rc<RefCell<ThreadRngInner>> = { let r = match StdRng::new() { Ok(r) => r, Err(e) => panic!("could not initialize thread_rng: {}", e) }; let rng = reseeding::ReseedingRng::new(r, THREAD_RNG_RESEED_THRESHOLD, ThreadRngReseeder); Rc::new(RefCell::new(rng)) }); ThreadRng { rng: THREAD_RNG_KEY.with(|t| t.clone()) } } impl Rng for ThreadRng { fn next_u32(&mut self) -> u32 { self.rng.borrow_mut().next_u32() } fn next_u64(&mut self) -> u64 { self.rng.borrow_mut().next_u64() } #[inline] fn fill_bytes(&mut self, bytes: &mut [u8]) { self.rng.borrow_mut().fill_bytes(bytes) } }<|fim▁end|>
#[inline]
<|file_name|>mips.rs<|end_file_name|><|fim▁begin|>pub type c_char = i8; pub type c_long = i32; pub type c_ulong = u32; pub type size_t = u32; pub type ptrdiff_t = i32; pub type clock_t = i32; pub type time_t = i32; pub type suseconds_t = i32; pub type wchar_t = i32; pub type intptr_t = i32; pub type uintptr_t = u32; pub type off_t = i32; pub type ino_t = u32; pub type ssize_t = i32; pub type blkcnt_t = i32; pub type blksize_t = i32; pub type nlink_t = u32; s! { pub struct stat { pub st_dev: ::c_ulong, pub st_pad1: [::c_long; 3], pub st_ino: ::ino_t, pub st_mode: ::mode_t, pub st_nlink: ::nlink_t, pub st_uid: ::uid_t, pub st_gid: ::gid_t, pub st_rdev: ::c_ulong, pub st_pad2: [::c_long; 2], pub st_size: ::off_t, pub st_pad3: ::c_long, pub st_atime: ::time_t, pub st_atime_nsec: ::c_long, pub st_mtime: ::time_t, pub st_mtime_nsec: ::c_long, pub st_ctime: ::time_t, pub st_ctime_nsec: ::c_long, pub st_blksize: ::blksize_t, pub st_blocks: ::blkcnt_t, pub st_pad5: [::c_long; 14], } pub struct pthread_attr_t { __size: [u32; 9] } pub struct sigaction { pub sa_flags: ::c_uint, pub sa_sigaction: ::sighandler_t, pub sa_mask: sigset_t, _restorer: *mut ::c_void, _resv: [::c_int; 1], } pub struct stack_t { pub ss_sp: *mut ::c_void, pub ss_size: ::size_t, pub ss_flags: ::c_int, } pub struct sigset_t { __val: [::c_ulong; 32], } pub struct siginfo_t { pub si_signo: ::c_int, pub si_code: ::c_int, pub si_errno: ::c_int, pub _pad: [::c_int; 29], } } pub const RLIMIT_NOFILE: ::c_int = 5; pub const RLIMIT_AS: ::c_int = 6; pub const RLIMIT_RSS: ::c_int = 7; pub const RLIMIT_NPROC: ::c_int = 8; pub const RLIMIT_MEMLOCK: ::c_int = 9; pub const RLIMIT_NLIMITS: ::c_int = 15; pub const RLIM_INFINITY: ::rlim_t = 0x7fffffff; pub const O_APPEND: ::c_int = 8; pub const O_CREAT: ::c_int = 256; pub const O_EXCL: ::c_int = 1024; pub const O_NOCTTY: ::c_int = 2048; pub const O_NONBLOCK: ::c_int = 128; pub const O_SYNC: ::c_int = 0x10; pub const O_RSYNC: ::c_int = 0x10; pub const O_DSYNC: ::c_int = 0x10; pub const EDEADLK: ::c_int = 45; pub const ENAMETOOLONG: ::c_int = 78; pub const ENOLCK: ::c_int = 46; pub const ENOSYS: ::c_int = 89; pub const ENOTEMPTY: ::c_int = 93; pub const ELOOP: ::c_int = 90; pub const ENOMSG: ::c_int = 35; pub const EIDRM: ::c_int = 36; pub const ECHRNG: ::c_int = 37; pub const EL2NSYNC: ::c_int = 38; pub const EL3HLT: ::c_int = 39; pub const EL3RST: ::c_int = 40; pub const ELNRNG: ::c_int = 41; pub const EUNATCH: ::c_int = 42; pub const ENOCSI: ::c_int = 43; pub const EL2HLT: ::c_int = 44; pub const EBADE: ::c_int = 50; pub const EBADR: ::c_int = 51; pub const EXFULL: ::c_int = 52; pub const ENOANO: ::c_int = 53; pub const EBADRQC: ::c_int = 54; pub const EBADSLT: ::c_int = 55; pub const EDEADLOCK: ::c_int = 56; pub const EMULTIHOP: ::c_int = 74; pub const EOVERFLOW: ::c_int = 79; pub const ENOTUNIQ: ::c_int = 80; pub const EBADFD: ::c_int = 81; pub const EBADMSG: ::c_int = 77; pub const EREMCHG: ::c_int = 82; pub const ELIBACC: ::c_int = 83; pub const ELIBBAD: ::c_int = 84; pub const ELIBSCN: ::c_int = 85; pub const ELIBMAX: ::c_int = 86; pub const ELIBEXEC: ::c_int = 87; pub const EILSEQ: ::c_int = 88; pub const ERESTART: ::c_int = 91; pub const ESTRPIPE: ::c_int = 92; pub const EUSERS: ::c_int = 94; pub const ENOTSOCK: ::c_int = 95; pub const EDESTADDRREQ: ::c_int = 96; pub const EMSGSIZE: ::c_int = 97; pub const EPROTOTYPE: ::c_int = 98; pub const ENOPROTOOPT: ::c_int = 99; pub const EPROTONOSUPPORT: ::c_int = 120; pub const ESOCKTNOSUPPORT: ::c_int = 121; pub const EOPNOTSUPP: ::c_int = 122; pub 
const EPFNOSUPPORT: ::c_int = 123; pub const EAFNOSUPPORT: ::c_int = 124; pub const EADDRINUSE: ::c_int = 125; pub const EADDRNOTAVAIL: ::c_int = 126; pub const ENETDOWN: ::c_int = 127; pub const ENETUNREACH: ::c_int = 128; pub const ENETRESET: ::c_int = 129; pub const ECONNABORTED: ::c_int = 130; pub const ECONNRESET: ::c_int = 131; pub const ENOBUFS: ::c_int = 132; pub const EISCONN: ::c_int = 133; pub const ENOTCONN: ::c_int = 134; pub const ESHUTDOWN: ::c_int = 143; pub const ETOOMANYREFS: ::c_int = 144; pub const ETIMEDOUT: ::c_int = 145; pub const ECONNREFUSED: ::c_int = 146; pub const EHOSTDOWN: ::c_int = 147; pub const EHOSTUNREACH: ::c_int = 148; pub const EALREADY: ::c_int = 149; pub const EINPROGRESS: ::c_int = 150; pub const ESTALE: ::c_int = 151; pub const EUCLEAN: ::c_int = 135; pub const ENOTNAM: ::c_int = 137; pub const ENAVAIL: ::c_int = 138; pub const EISNAM: ::c_int = 139; pub const EREMOTEIO: ::c_int = 140; pub const EDQUOT: ::c_int = 1133; pub const ENOMEDIUM: ::c_int = 159; pub const EMEDIUMTYPE: ::c_int = 160; pub const ECANCELED: ::c_int = 158; pub const ENOKEY: ::c_int = 161; pub const EKEYEXPIRED: ::c_int = 162; pub const EKEYREVOKED: ::c_int = 163; pub const EKEYREJECTED: ::c_int = 164; pub const EOWNERDEAD: ::c_int = 165; pub const ENOTRECOVERABLE: ::c_int = 166; pub const ERFKILL: ::c_int = 167; pub const MAP_NORESERVE: ::c_int = 0x400; pub const MAP_ANON: ::c_int = 0x800; pub const MAP_ANONYMOUS: ::c_int = 0x800; pub const MAP_GROWSDOWN: ::c_int = 0x1000; pub const MAP_DENYWRITE: ::c_int = 0x2000; pub const MAP_EXECUTABLE: ::c_int = 0x4000; pub const MAP_LOCKED: ::c_int = 0x8000; pub const MAP_POPULATE: ::c_int = 0x10000; pub const MAP_NONBLOCK: ::c_int = 0x20000; pub const SOCK_STREAM: ::c_int = 2; pub const SOCK_DGRAM: ::c_int = 1; pub const SOL_SOCKET: ::c_int = 0xffff; pub const SO_REUSEADDR: ::c_int = 4; pub const SO_TYPE: ::c_int = 4104; pub const SO_ERROR: ::c_int = 4103; pub const SO_DONTROUTE: ::c_int = 16; pub const SO_BROADCAST: ::c_int = 32; pub const SO_SNDBUF: ::c_int = 4097; pub const SO_RCVBUF: ::c_int = 4098; pub const SO_KEEPALIVE: ::c_int = 8; pub const SO_OOBINLINE: ::c_int = 256; pub const SO_LINGER: ::c_int = 128; pub const SO_RCVLOWAT: ::c_int = 4100; pub const SO_SNDLOWAT: ::c_int = 4099; pub const SO_RCVTIMEO: ::c_int = 4102; pub const SO_SNDTIMEO: ::c_int = 4101; pub const SO_ACCEPTCONN: ::c_int = 4105; pub const __SIZEOF_PTHREAD_MUTEX_T: usize = 24; pub const __SIZEOF_PTHREAD_RWLOCK_T: usize = 32; pub const __SIZEOF_PTHREAD_MUTEXATTR_T: usize = 4; pub const FIOCLEX: ::c_ulong = 0x6601; <|fim▁hole|> pub const SIGBUS: ::c_int = 10; pub const SIG_SETMASK: ::c_int = 3;<|fim▁end|>
pub const SA_ONSTACK: ::c_ulong = 0x08000000; pub const SA_SIGINFO: ::c_ulong = 0x00000008;
<|file_name|>bitcoin_el_GR.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="el_GR" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Wabcoin</source> <translation>Σχετικά με το Wabcoin</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Wabcoin&lt;/b&gt; version</source> <translation>Έκδοση Wabcoin</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation>Πνευματική ιδιοκτησία </translation> </message> <message> <location line="+0"/> <source>The Wabcoin developers</source> <translation>Οι Wabcoin προγραμματιστές </translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Βιβλίο Διευθύνσεων</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Διπλό-κλικ για επεξεργασία της διεύθυνσης ή της ετικέτας</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Δημιούργησε νέα διεύθυνση</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Αντέγραψε την επιλεγμένη διεύθυνση στο πρόχειρο του συστήματος</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Νέα διεύθυνση</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your Wabcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Αυτές είναι οι Wabcoin διευθύνσεις σας για να λαμβάνετε πληρωμές. 
Δίνοντας μία ξεχωριστή διεύθυνση σε κάθε αποστολέα, θα μπορείτε να ελέγχετε ποιος σας πληρώνει.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Αντιγραφή διεύθυνσης</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Δείξε &amp;QR κωδικα</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Wabcoin address</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως σας ανήκει μια συγκεκριμένη διεύθυνση Wabcoin</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>&amp;Υπέγραψε το μήνυμα</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Αντιγραφη της επιλεγμενης διεύθυνσης στο πρόχειρο του συστηματος</translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation>Εξαγωγή δεδομένων καρτέλας σε αρχείο</translation> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation>&amp;Εξαγωγή</translation> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified Wabcoin address</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως ανήκει μια συγκεκριμένη διεύθυνση Wabcoin</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Επιβεβαίωση μηνύματος</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Διαγραφή</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your Wabcoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation>Αυτές είναι οι Wabcoin διευθύνσεις σας για να λαμβάνετε πληρωμές. 
Δίνοντας μία ξεχωριστή διεύθυνση σε κάθε αποστολέα, θα μπορείτε να ελέγχετε ποιος σας πληρώνει.</translation> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Αντιγραφή &amp;επιγραφής</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Επεξεργασία</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation>Αποστολή νομισμάτων</translation> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Εξαγωγή Δεδομενων Βιβλίου Διευθύνσεων</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Αρχείο οριοθετημένο με κόμματα (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Εξαγωγή λαθών</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Αδυναμία εγγραφής στο αρχείο %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Ετικέτα</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Διεύθυνση</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(χωρίς ετικέτα)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Φράση πρόσβασης </translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Βάλτε κωδικό πρόσβασης</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Νέος κωδικός πρόσβασης</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Επανέλαβε τον νέο κωδικό πρόσβασης</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Εισάγετε τον νέο κωδικό πρόσβασης στον πορτοφόλι &lt;br/&gt; Παρακαλώ χρησιμοποιείστε ένα κωδικό με &lt;b&gt; 10 ή περισσότερους τυχαίους χαρακτήρες&lt;/b&gt; ή &lt;b&gt; οχτώ ή παραπάνω λέξεις&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Κρυπτογράφησε το πορτοφόλι</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Αυτη η ενεργεία χρειάζεται τον κωδικό του πορτοφολιού για να ξεκλειδώσει το πορτοφόλι.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Ξεκλειδωσε το πορτοφολι</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Αυτη η ενεργεια χρειάζεται τον κωδικο του πορτοφολιου για να αποκρυπτογραφησειι το πορτοφολι.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Αποκρυπτογράφησε το πορτοφολι</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Άλλαξε 
κωδικο πρόσβασης</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Εισάγετε τον παλιό και τον νεο κωδικο στο πορτοφολι.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Επιβεβαίωσε την κρυπτογραφηση του πορτοφολιού</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR WABCOINS&lt;/b&gt;!</source> <translation>Προσοχη: Εαν κρυπτογραφησεις το πορτοφολι σου και χάσεις τον κωδικο σου θα χάσεις &lt;b&gt; ΟΛΑ ΣΟΥ ΤΑ WABCOINS&lt;/b&gt;! Είσαι σίγουρος ότι θέλεις να κρυπτογραφησεις το πορτοφολι;</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Είστε σίγουροι ότι θέλετε να κρυπτογραφήσετε το πορτοφόλι σας;</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>ΣΗΜΑΝΤΙΚΟ: Τα προηγούμενα αντίγραφα ασφαλείας που έχετε κάνει από το αρχείο του πορτοφόλιου σας θα πρέπει να αντικατασταθουν με το νέο που δημιουργείται, κρυπτογραφημένο αρχείο πορτοφόλιου. Για λόγους ασφαλείας, τα προηγούμενα αντίγραφα ασφαλείας του μη κρυπτογραφημένου αρχείου πορτοφόλιου θα καταστουν άχρηστα μόλις αρχίσετε να χρησιμοποιείτε το νέο κρυπτογραφημένο πορτοφόλι. </translation> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Προσοχη: το πλήκτρο Caps Lock είναι ενεργο.</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Κρυπτογραφημενο πορτοφολι</translation> </message> <message> <location line="-56"/> <source>Wabcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your wabcoins from being stolen by malware infecting your computer.</source> <translation>Το Wabcoin θα κλεισει τώρα για να τελειώσει την διαδικασία κρυπτογραφησης. Θυμησου ότι κρυπτογραφώντας το πορτοφολι σου δεν μπορείς να προστατέψεις πλήρως τα wabcoins σου από κλοπή στην περίπτωση όπου μολυνθεί ο υπολογιστής σου με κακόβουλο λογισμικο.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Η κρυπτογραφηση του πορτοφολιού απέτυχε</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Η κρυπτογράφηση του πορτοφολιού απέτυχε λογω εσωτερικού σφάλματος. 
Το πορτοφολι δεν κρυπτογραφηθηκε.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Οι εισαχθέντες κωδικοί δεν ταιριάζουν.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>το ξεκλείδωμα του πορτοφολιού απέτυχε</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Ο κωδικος που εισήχθη για την αποκρυπτογραφηση του πορτοφολιού ήταν λαθος.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Η αποκρυπτογραφηση του πορτοφολιού απέτυχε</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Ο κωδικος του πορτοφολιού άλλαξε με επιτυχία.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation>Υπογραφή &amp;Μηνύματος...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Συγχρονισμός με το δίκτυο...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Επισκόπηση</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Εμφάνισε γενική εικονα του πορτοφολιού</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Συναλλαγές</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Περιήγηση στο ιστορικο συνναλαγων</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Εξεργασια της λιστας των αποθηκευμενων διευθύνσεων και ετικετων</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Εμφάνισε την λίστα των διευθύνσεων για την παραλαβή πληρωμων</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>Έ&amp;ξοδος</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Εξοδος από την εφαρμογή</translation> </message> <message> <location line="+4"/> <source>Show information about Wabcoin</source> <translation>Εμφάνισε πληροφορίες σχετικά με το Wabcoin</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Σχετικά με &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Εμφάνισε πληροφορίες σχετικά με Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Επιλογές...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Κρυπτογράφησε το πορτοφόλι</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Αντίγραφο ασφαλείας του πορτοφολιού</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Άλλαξε κωδικο πρόσβασης</translation> </message> <message> <location 
line="+285"/> <source>Importing blocks from disk...</source> <translation>Εισαγωγή μπλοκ από τον σκληρο δίσκο ... </translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation>Φόρτωση ευρετηρίου μπλοκ στον σκληρο δισκο...</translation> </message> <message> <location line="-347"/> <source>Send coins to a Wabcoin address</source> <translation>Στείλε νομισματα σε μια διεύθυνση wabcoin</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for Wabcoin</source> <translation>Επεργασία ρυθμισεων επιλογών για το Wabcoin</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation>Δημιουργία αντιγράφου ασφαλείας πορτοφολιού σε άλλη τοποθεσία</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Αλλαγή του κωδικού κρυπτογράφησης του πορτοφολιού</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>&amp;Παράθυρο αποσφαλμάτωσης</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Άνοιγμα κονσόλας αποσφαλμάτωσης και διαγνωστικών</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>&amp;Επιβεβαίωση μηνύματος</translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>Wabcoin</source> <translation>Wabcoin</translation> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>Πορτοφόλι</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation>&amp;Αποστολή</translation> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation>&amp;Παραλαβή </translation> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation>&amp;Διεύθυνσεις</translation> </message> <message> <location line="+22"/> <source>&amp;About Wabcoin</source> <translation>&amp;Σχετικα:Wabcoin</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Εμφάνισε/Κρύψε</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation>Εμφάνιση ή αποκρύψη του κεντρικου παράθυρου </translation> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation>Κρυπτογραφήστε τα ιδιωτικά κλειδιά που ανήκουν στο πορτοφόλι σας </translation> </message> <message> <location line="+7"/> <source>Sign messages with your Wabcoin addresses to prove you own them</source> <translation>Υπογράψτε ένα μήνυμα για να βεβαιώσετε πως είστε ο κάτοχος αυτής της διεύθυνσης</translation> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Wabcoin addresses</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως ανήκει μια συγκεκριμένη διεύθυνση Wabcoin</translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Αρχείο</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Ρυθμίσεις</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Βοήθεια</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> 
<translation>Εργαλειοθήκη καρτελών</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+47"/> <source>Wabcoin client</source> <translation>Πελάτης Wabcoin</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to Wabcoin network</source> <translation><numerusform>%n ενεργή σύνδεση στο δίκτυο Wabcoin</numerusform><numerusform>%n ενεργές συνδέσεις στο δίκτυο Βitcoin</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation>Η πηγή του μπλοκ δεν ειναι διαθέσιμη... </translation> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation>Μεταποιημένα %1 απο % 2 (κατ &apos;εκτίμηση) μπλοκ της ιστορίας της συναλλαγής. </translation> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>Έγινε λήψη %1 μπλοκ ιστορικού συναλλαγών</translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation><numerusform>%n ώρες </numerusform><numerusform>%n ώρες </numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>%n ημέρες </numerusform><numerusform>%n ημέρες </numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation><numerusform>%n εβδομαδες</numerusform><numerusform>%n εβδομαδες</numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation>%1 πίσω</translation> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation>Το τελευταίο μπλοκ που ελήφθη δημιουργήθηκε %1 πριν.</translation> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation>Οι συναλλαγές μετά από αυτό δεν θα είναι ακόμη ορατες.</translation> </message> <message> <location line="+22"/> <source>Error</source> <translation>Σφάλμα</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation>Προειδοποίηση</translation> </message> <message> <location line="+3"/> <source>Information</source> <translation>Πληροφορία</translation> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation>Η συναλλαγή ξεπερνάει το όριο. Μπορεί να ολοκληρωθεί με μια αμοιβή των %1, η οποία αποδίδεται στους κόμβους που επεξεργάζονται τις συναλλαγές και βοηθούν στην υποστήριξη του δικτύου. 
Θέλετε να συνεχίσετε;</translation> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Ενημερωμένο</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Ενημέρωση...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Επιβεβαίωση αμοιβής συναλλαγής</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Η συναλλαγή απεστάλη</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Εισερχόμενη συναλλαγή</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Ημερομηνία: %1 Ποσό: %2 Τύπος: %3 Διεύθυνση: %4 </translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation>Χειρισμός URI</translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid Wabcoin address or malformed URI parameters.</source> <translation>Το URI δεν μπορεί να αναλυθεί! Αυτό μπορεί να προκληθεί από μια μη έγκυρη διεύθυνση Wabcoin ή ακατάλληλη παραμέτρο URI.</translation> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Το πορτοφόλι είναι &lt;b&gt;κρυπτογραφημένο&lt;/b&gt; και &lt;b&gt;ξεκλείδωτο&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Το πορτοφόλι είναι &lt;b&gt;κρυπτογραφημένο&lt;/b&gt; και &lt;b&gt;κλειδωμένο&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. Wabcoin can no longer continue safely and will quit.</source> <translation>Παρουσιάστηκε ανεπανόρθωτο σφάλμα. Το Wabcoin δεν μπορεί πλέον να συνεχίσει με ασφάλεια και θα τερματισθει.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation>Ειδοποίηση Δικτύου</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Επεξεργασία Διεύθυνσης</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Επιγραφή</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Η επιγραφή που σχετίζεται με αυτή την καταχώρηση του βιβλίου διευθύνσεων</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Διεύθυνση</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Η διεύθυνση που σχετίζεται με αυτή την καταχώρηση του βιβλίου διευθύνσεων. 
Μπορεί να τροποποιηθεί μόνο για τις διευθύνσεις αποστολής.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Νέα διεύθυνση λήψης</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Νέα διεύθυνση αποστολής</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Επεξεργασία διεύθυνσης λήψης</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Επεξεργασία διεύθυνσης αποστολής</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Η διεύθυνση &quot;%1&quot; βρίσκεται ήδη στο βιβλίο διευθύνσεων.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Wabcoin address.</source> <translation>Η διεύθυνση &quot;%1&quot; δεν είναι έγκυρη Wabcoin διεύθυνση.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Δεν είναι δυνατό το ξεκλείδωμα του πορτοφολιού.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Η δημιουργία νέου κλειδιού απέτυχε.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>Wabcoin-Qt</source> <translation>wabcoin-qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>έκδοση</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Χρήση:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>επιλογής γραμμής εντολών</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>επιλογές UI</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Όρισε γλώσσα, για παράδειγμα &quot;de_DE&quot;(προεπιλογή:τοπικές ρυθμίσεις)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Έναρξη ελαχιστοποιημένο</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Εμφάνισε την οθόνη εκκίνησης κατά την εκκίνηση(προεπιλογή:1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Ρυθμίσεις</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Κύριο</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation>Η προαιρετική αμοιβή για κάθε kB επισπεύδει την επεξεργασία των συναλλαγών σας. Οι περισσότερες συναλλαγές είναι 1 kB. 
</translation> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Αμοιβή &amp;συναλλαγής</translation> </message> <message> <location line="+31"/> <source>Automatically start Wabcoin after logging in to the system.</source> <translation>Αυτόματη εκκίνηση του Wabcoin μετά την εισαγωγή στο σύστημα</translation> </message> <message> <location line="+3"/> <source>&amp;Start Wabcoin on system login</source> <translation>&amp;Έναρξη του Βιtcoin κατά την εκκίνηση του συστήματος</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation>Επαναφορα όλων των επιλογων του πελάτη σε default.</translation> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation>Επαναφορα ρυθμίσεων</translation> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation>&amp;Δίκτυο</translation> </message> <message> <location line="+6"/> <source>Automatically open the Wabcoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Αυτόματο άνοιγμα των θυρών Wabcoin στον δρομολογητή. Λειτουργεί μόνο αν ο δρομολογητής σας υποστηρίζει τη λειτουργία UPnP.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Απόδοση θυρών με χρήστη &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the Wabcoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Σύνδεση στο Wabcoin δίκτυο μέσω διαμεσολαβητή SOCKS4 (π.χ. για σύνδεση μέσω Tor)</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Σύνδεση μέσω διαμεσολαβητή SOCKS</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP διαμεσολαβητή:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>Διεύθυνση IP του διαμεσολαβητή (π.χ. 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Θύρα:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Θύρα διαμεσολαβητή</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>SOCKS &amp;Έκδοση:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>SOCKS εκδοση του διαμεσολαβητη (e.g. 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Παράθυρο</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Εμφάνιση μόνο εικονιδίου στην περιοχή ειδοποιήσεων κατά την ελαχιστοποίηση</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Ελαχιστοποίηση στην περιοχή ειδοποιήσεων αντί της γραμμής εργασιών</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. 
When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Ελαχιστοποίηση αντί για έξοδο κατά το κλείσιμο του παραθύρου</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>Ε&amp;λαχιστοποίηση κατά το κλείσιμο</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>%Απεικόνιση</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>Γλώσσα περιβάλλοντος εργασίας: </translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting Wabcoin.</source> <translation>Εδώ μπορεί να ρυθμιστεί η γλώσσα διεπαφής χρήστη. Αυτή η ρύθμιση θα ισχύσει μετά την επανεκκίνηση του Wabcoin.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Μονάδα μέτρησης:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Διαλέξτε την προεπιλεγμένη υποδιαίρεση που θα εμφανίζεται όταν στέλνετε νομίσματα.</translation> </message> <message> <location line="+9"/> <source>Whether to show Wabcoin addresses in the transaction list or not.</source> <translation>Επιλέξτε αν θέλετε να εμφανίζονται οι διευθύνσεις Wabcoin στη λίστα συναλλαγών.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>Εμφάνιση διευθύνσεων στη λίστα συναλλαγών</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;ΟΚ</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Ακύρωση</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Εφαρμογή</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>προεπιλογή</translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation>Επιβεβαιώση των επιλογων επαναφοράς </translation> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation>Για ορισμένες ρυθμίσεις πρεπει η επανεκκίνηση να τεθεί σε ισχύ.</translation> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation>Θέλετε να προχωρήσετε;</translation> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Προειδοποίηση</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Wabcoin.</source> <translation>Αυτή η ρύθμιση θα ισχύσει μετά την επανεκκίνηση του Wabcoin.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Φόρμα</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. 
Your wallet automatically synchronizes with the Wabcoin network after a connection is established, but this process has not completed yet.</source> <translation>Οι πληροφορίες που εμφανίζονται μπορεί να είναι ξεπερασμένες. Το πορτοφόλι σας συγχρονίζεται αυτόματα με το δίκτυο Wabcoin μετά από μια σύνδεση, αλλά αυτή η διαδικασία δεν έχει ακόμη ολοκληρωθεί. </translation> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Υπόλοιπο</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Ανεπιβεβαίωτες</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Πορτοφόλι</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation>Ανώριμος</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Εξορυγμενο υπόλοιπο που δεν έχει ακόμα ωριμάσει </translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Πρόσφατες συναλλαγές&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>Το τρέχον υπόλοιπο</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Το άθροισμα των συναλλαγών που δεν έχουν ακόμα επιβεβαιωθεί και δεν προσμετρώνται στο τρέχον υπόλοιπό σας</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>εκτός συγχρονισμού</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start wabcoin: click-to-pay handler</source> <translation>Δεν είναι δυνατή η εκκίνηση του Wabcoin: click-to-pay handler</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>Κώδικας QR</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Αίτηση πληρωμής</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Ποσό:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Επιγραφή:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Μήνυμα:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Αποθήκευση ως...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Σφάλμα κατά την κωδικοποίηση του URI σε κώδικα QR</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>Το αναγραφόμενο ποσό δεν είναι έγκυρο, παρακαλούμε να το ελέγξετε.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Το αποτέλεσμα της διεύθυνσης είναι πολύ μεγάλο. 
Μειώστε το μέγεθος για το κείμενο της ετικέτας/ μηνύματος.</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Αποθήκευση κώδικα QR</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>Εικόνες PNG (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Όνομα Πελάτη</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>Μη διαθέσιμο</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Έκδοση Πελάτη</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Πληροφορία</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Χρησιμοποιηση της OpenSSL εκδοσης</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Χρόνος εκκίνησης</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Δίκτυο</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Αριθμός συνδέσεων</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>Στο testnet</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>αλυσίδα εμποδισμού</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Τρέχον αριθμός μπλοκ</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Κατ&apos; εκτίμηση συνολικά μπλοκς</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Χρόνος τελευταίου μπλοκ</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Άνοιγμα</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>επιλογής γραμμής εντολών</translation> </message> <message> <location line="+7"/> <source>Show the Wabcoin-Qt help message to get a list with possible Wabcoin command-line options.</source> <translation>Εμφανιση του Wabcoin-Qt μήνυματος βοήθειας για να πάρετε μια λίστα με τις πιθανές επιλογές Wabcoin γραμμής εντολών.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Εμφάνιση</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Κονσόλα</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Ημερομηνία κατασκευής</translation> </message> <message> <location line="-104"/> <source>Wabcoin - Debug window</source> <translation>Wabcoin - Παράθυρο αποσφαλμάτωσης</translation> </message> <message> <location line="+25"/> <source>Wabcoin Core</source> <translation>Wabcoin Core</translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Αρχείο καταγραφής εντοπισμού σφαλμάτων </translation> </message> <message> <location 
line="+7"/> <source>Open the Wabcoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Ανοίξτε το αρχείο καταγραφής εντοπισμού σφαλμάτων από τον τρέχοντα κατάλογο δεδομένων. Αυτό μπορεί να πάρει μερικά δευτερόλεπτα για τα μεγάλα αρχεία καταγραφής. </translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Καθαρισμός κονσόλας</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Wabcoin RPC console.</source> <translation>Καλώς ήρθατε στην Wabcoin RPC κονσόλα.</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Χρησιμοποιήστε το πάνω και κάτω βέλος για να περιηγηθείτε στο ιστορικο, και &lt;b&gt;Ctrl-L&lt;/b&gt; για εκκαθαριση οθονης.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Γράψτε &lt;b&gt;βοήθεια&lt;/b&gt; για μια επισκόπηση των διαθέσιμων εντολών</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Αποστολή νομισμάτων</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Αποστολή σε πολλούς αποδέκτες ταυτόχρονα</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>&amp;Προσθήκη αποδέκτη</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Διαγραφή όλων των πεδίων συναλλαγής</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Καθαρισμός &amp;Όλων</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Υπόλοιπο:</translation> </message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation>123,456 BTC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Επιβεβαίωση αποστολής</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>Αποστολη</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; σε %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Επιβεβαίωση αποστολής νομισμάτων</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Είστε βέβαιοι για την αποστολή %1;</translation> </message> <message> <location line="+0"/> <source> and </source> <translation>και</translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>Η διεύθυνση του αποδέκτη δεν είναι σωστή. 
Παρακαλώ ελέγξτε ξανά.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>Το ποσό πληρωμής πρέπει να είναι μεγαλύτερο από 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>Το ποσό ξεπερνάει το διαθέσιμο υπόλοιπο</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Το σύνολο υπερβαίνει το υπόλοιπό σας όταν συμπεριληφθεί και η αμοιβή %1</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Βρέθηκε η ίδια διεύθυνση δύο φορές. Επιτρέπεται μία μόνο εγγραφή για κάθε διεύθυνση, σε κάθε διαδικασία αποστολής.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation>Σφάλμα: Η δημιουργία της συναλλαγής απέτυχε</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Σφάλμα: Η συναλλαγή απερρίφθη. Αυτό ενδέχεται να συμβαίνει αν κάποια από τα νομίσματα έχουν ήδη ξοδευθεί, όπως αν χρησιμοποιήσατε αντίγραφο του wallet.dat και τα νομίσματα ξοδεύθηκαν εκεί.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Φόρμα</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>&amp;Ποσό:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Πληρωμή &amp;σε:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Διεύθυνση αποστολής της πληρωμής (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Εισάγετε μια επιγραφή για αυτή τη διεύθυνση ώστε να καταχωρηθεί στο βιβλίο διευθύνσεων</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Επιγραφή</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Επιλογή διεύθυνσης από το βιβλίο διευθύνσεων</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Επικόλληση διεύθυνσης από το πρόχειρο</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Αφαίρεση αποδέκτη</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Wabcoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Εισάγετε μια διεύθυνση Wabcoin (π.χ. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Υπογραφές - Είσοδος / Επαλήθευση μήνυματος </translation> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Υπογραφή Μηνύματος</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Μπορείτε να υπογράφετε μηνύματα με τις διευθύνσεις σας, ώστε ν&apos; αποδεικνύετε πως αυτές σας ανήκουν. Αποφεύγετε να υπογράφετε κάτι αόριστο καθώς ενδέχεται να εξαπατηθείτε. Υπογράφετε μόνο πλήρης δηλώσεις με τις οποίες συμφωνείτε.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Εισάγετε μια διεύθυνση Wabcoin (π.χ. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Επιλογή διεύθυνσης από το βιβλίο διευθύνσεων</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Επικόλληση διεύθυνσης από το βιβλίο διευθύνσεων</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Εισάγετε εδώ το μήνυμα που θέλετε να υπογράψετε</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation>Υπογραφή</translation> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>Αντέγραφη της επιλεγμενης διεύθυνσης στο πρόχειρο του συστηματος</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Wabcoin address</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως σας ανήκει μια συγκεκριμένη διεύθυνση Wabcoin</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Υπογραφη μήνυματος</translation> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation>Επαναφορά όλων των πεδίων μήνυματος</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Καθαρισμός &amp;Όλων</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation>&amp;Επιβεβαίωση μηνύματος</translation> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. 
Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Πληκτρολογήστε την υπογραφή διεύθυνσης, μήνυμα (βεβαιωθείτε ότι έχετε αντιγράψει τις αλλαγές γραμμής, κενά, tabs, κ.λπ. ακριβώς) και την υπογραφή παρακάτω, για να ελέγξει το μήνυμα. Να είστε προσεκτικοί για να μην διαβάσετε περισσότερα στην υπογραφή ό, τι είναι στην υπογραφή ίδιο το μήνυμα , για να μην εξαπατηθούν από έναν άνθρωπο -in - the-middle επίθεση.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Εισάγετε μια διεύθυνση Wabcoin (π.χ. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Wabcoin address</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως υπογραφθηκε απο μια συγκεκριμένη διεύθυνση Wabcoin</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation>Επιβεβαίωση μηνύματος</translation> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation>Επαναφορά όλων επαλήθευμενων πεδίων μήνυματος </translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Wabcoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Εισάγετε μια διεύθυνση Wabcoin (π.χ. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Κάντε κλικ στο &quot;Υπογραφή Μηνύματος&quot; για να λάβετε την υπογραφή</translation> </message> <message> <location line="+3"/> <source>Enter Wabcoin signature</source> <translation>Εισαγωγή υπογραφής Wabcoin</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>Η διεύθυνση που εισήχθη είναι λάθος.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Παρακαλούμε ελέγξτε την διεύθυνση και δοκιμάστε ξανά.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>Η διεύθυνση που έχει εισαχθεί δεν αναφέρεται σε ένα πλήκτρο.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>το ξεκλείδωμα του πορτοφολιού απέτυχε</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>Το προσωπικό κλειδί εισαγμενης διευθυνσης δεν είναι διαθέσιμο.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>Η υπογραφή του μηνύματος απέτυχε.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Μήνυμα υπεγράφη.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>Η υπογραφή δεν μπόρεσε να αποκρυπτογραφηθεί.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> 
<translation>Παρακαλούμε ελέγξτε την υπογραφή και δοκιμάστε ξανά.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>Η υπογραφή δεν ταιριάζει με το μήνυμα. </translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Η επιβεβαίωση του μηνύματος απέτυχε</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Μήνυμα επιβεβαιώθηκε.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The Wabcoin developers</source> <translation>Οι Wabcoin προγραμματιστές </translation> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Ανοιχτό μέχρι %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1/χωρίς σύνδεση;</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/χωρίς επιβεβαίωση</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 επιβεβαιώσεις</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Κατάσταση</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, έχει μεταδοθεί μέσω %n κόμβων</numerusform><numerusform>, έχει μεταδοθεί μέσω %n κόμβων</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Ημερομηνία</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Πηγή</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Δημιουργία </translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Από</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>Προς</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation> δική σας διεύθυνση </translation> </message> <message> <location line="-2"/> <source>label</source> <translation>eπιγραφή</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Πίστωση </translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>ωρίμανση σε %n επιπλέον μπλοκ</numerusform><numerusform>ωρίμανση σε %n επιπλέον μπλοκ</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>μη αποδεκτό</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Debit</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Τέλος συναλλαγής </translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Καθαρό ποσό</translation> </message> 
<message> <location line="+6"/> <source>Message</source> <translation>Μήνυμα</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Σχόλιο:</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID Συναλλαγής:</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Πρέπει να περιμένετε 120 μπλοκ πριν μπορέσετε να χρησιμοποιήσετε τα νομίσματα που έχετε δημιουργήσει. Το μπλοκ που δημιουργήσατε μεταδόθηκε στο δίκτυο για να συμπεριληφθεί στην αλυσίδα των μπλοκ. Αν δεν μπει σε αυτή θα μετατραπεί σε &quot;μη αποδεκτό&quot; και δε θα μπορεί να καταναλωθεί. Αυτό συμβαίνει σπάνια όταν κάποιος άλλος κόμβος δημιουργήσει ένα μπλοκ λίγα δευτερόλεπτα πριν από εσάς.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Πληροφορίες αποσφαλμάτωσης</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Συναλλαγή</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation>εισροές </translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Ποσό</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>αληθής</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>αναληθής </translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, δεν έχει ακόμα μεταδοθεί μ&apos; επιτυχία</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation><numerusform>Ανοιχτό για %n μπλοκ</numerusform><numerusform>Ανοιχτό για %n μπλοκ</numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>άγνωστο</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Λεπτομέρειες συναλλαγής</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Αυτό το παράθυρο δείχνει μια λεπτομερή περιγραφή της συναλλαγής</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Ημερομηνία</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Τύπος</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Διεύθυνση</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Ποσό</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation><numerusform>Ανοιχτό για %n μπλοκ</numerusform><numerusform>Ανοιχτό για %n μπλοκ</numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> 
<translation>Ανοιχτό μέχρι %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Χωρίς σύνδεση (%1 επικυρώσεις)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Χωρίς επιβεβαίωση (%1 από %2 επικυρώσεις)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Επικυρωμένη (%1 επικυρώσεις)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation><numerusform>Το υπόλοιπο από την εξόρυξη θα είναι διαθέσιμο μετά από %n μπλοκ</numerusform><numerusform>Το υπόλοιπο από την εξόρυξη θα είναι διαθέσιμο μετά από %n μπλοκ</numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Αυτό το μπλοκ δεν έχει παραληφθεί από κανέναν άλλο κόμβο και κατά πάσα πιθανότητα θα απορριφθεί!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Δημιουργήθηκε αλλά απορρίφθηκε</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Παραλαβή με</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Ελήφθη από</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Αποστολή προς</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Πληρωμή προς εσάς</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Εξόρυξη</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(δ/α)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Κατάσταση συναλλαγής. 
Πηγαίνετε το ποντίκι πάνω από αυτό το πεδίο για να δείτε τον αριθμό των επικυρώσεων</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Ημερομηνία κι ώρα λήψης της συναλλαγής.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Είδος συναλλαγής.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Διεύθυνση αποστολής της συναλλαγής.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Ποσό που αφαιρέθηκε ή προστέθηκε στο υπόλοιπο.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Όλα</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Σήμερα</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Αυτή την εβδομάδα</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Αυτόν τον μήνα</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Τον προηγούμενο μήνα</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Αυτό το έτος</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Έκταση...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Ελήφθη με</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Απεστάλη προς</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Προς εσάς</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Εξόρυξη</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Άλλο</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Αναζήτηση με βάση τη διεύθυνση ή την επιγραφή</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Ελάχιστο ποσό</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Αντιγραφή διεύθυνσης</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Αντιγραφή επιγραφής</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Αντιγραφή ποσού</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Αντιγραφη του ID Συναλλαγής</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Επεξεργασία επιγραφής</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Εμφάνιση λεπτομερειών συναλλαγής</translation> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Εξαγωγή Στοιχείων Συναλλαγών</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Αρχείο οριοθετημένο με κόμματα (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> 
<translation>Επικυρωμένες</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Ημερομηνία</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Τύπος</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Επιγραφή</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Διεύθυνση</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Ποσό</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Σφάλμα εξαγωγής</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Αδυναμία εγγραφής στο αρχείο %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Έκταση:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>έως</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Αποστολή νομισμάτων</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation>&amp;Εξαγωγή</translation> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation>Εξαγωγή δεδομένων καρτέλας σε αρχείο</translation> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation>Αντίγραφο ασφαλείας του πορτοφολιού</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>Αρχεία δεδομένων πορτοφολιού (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>Αποτυχία κατά τη δημιουργία αντιγράφου</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Παρουσιάστηκε σφάλμα κατά την αποθήκευση των δεδομένων πορτοφολιού στη νέα τοποθεσία.</translation> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation>Η δημιουργια αντιγραφου ασφαλειας πετυχε</translation> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation>Τα δεδομένα πορτοφόλιου αποθηκεύτηκαν με επιτυχία στη νέα θέση. 
</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>Wabcoin version</source> <translation>Έκδοση Wabcoin</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>Χρήση:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or wabcoind</source> <translation>Αποστολή εντολής στον εξυπηρετητή ή στο wabcoind</translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Λίστα εντολών</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Επεξήγηση εντολής</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Επιλογές:</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: wabcoin.conf)</source> <translation>Ορίστε αρχείο ρυθμίσεων (προεπιλογή: wabcoin.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: wabcoind.pid)</source> <translation>Ορίστε αρχείο pid (προεπιλογή: wabcoind.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Ορισμός φακέλου δεδομένων</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Όρισε το μέγεθος της βάσης προσωρινής αποθήκευσης σε megabytes(προεπιλογή:25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 11333 or testnet: 111333)</source> <translation>Εισερχόμενες συνδέσεις στη θύρα &lt;port&gt; (προεπιλογή: 11333 ή στο testnet: 111333)</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Μέγιστες αριθμός συνδέσεων με τους peers &lt;n&gt; (προεπιλογή: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Σύνδεση σε έναν κόμβο για την ανάκτηση διευθύνσεων από ομοτίμους, και αποσυνδέσh</translation> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation>Διευκρινίστε τη δικιά σας δημόσια διεύθυνση.</translation> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Όριο αποσύνδεσης προβληματικών peers (προεπιλογή: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Δευτερόλεπτα πριν επιτραπεί ξανά η σύνδεση των προβληματικών peers (προεπιλογή: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Ένα σφάλμα συνέβη καθώς προετοιμαζόταν η πόρτα RPC %u για αναμονή IPv4: %s</translation> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 11332 or testnet: 111332)</source> <translation>Εισερχόμενες συνδέσεις JSON-RPC στη θύρα &lt;port&gt; (προεπιλογή: 11332 or testnet: 111332)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Αποδοχή εντολών κονσόλας και JSON-RPC</translation> 
</message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Εκτέλεση στο παρασκήνιο κι αποδοχή εντολών</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Χρήση του δοκιμαστικού δικτύου</translation> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Να δέχεσαι συνδέσεις από έξω(προεπιλογή:1)</translation> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=wabcoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Wabcoin Alert&quot; [email protected] </source> <translation>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=wabcoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Wabcoin Alert&quot; [email protected] </translation> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Ένα σφάλμα συνέβη καθώς προετοιμαζόταν η υποδοχη RPC %u για αναμονη του IPv6, επεσε πισω στο IPv4:%s</translation> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Αποθηκευση σε συγκεκριμένη διεύθυνση. Χρησιμοποιήστε τα πλήκτρα [Host] : συμβολισμός θύρα για IPv6</translation> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Wabcoin is probably already running.</source> <translation>Αδυναμία κλειδώματος του φακέλου δεδομένων %s. Πιθανώς το Wabcoin να είναι ήδη ενεργό.</translation> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Σφάλμα: Η συναλλαγή απορρίφθηκε. Αυτό ίσως οφείλεται στο ότι τα νομίσματά σας έχουν ήδη ξοδευτεί, π.χ. 
με την αντιγραφή του wallet.dat σε άλλο σύστημα και την χρήση τους εκεί, χωρίς η συναλλαγή να έχει καταγραφεί στο παρόν σύστημα.</translation> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation>Σφάλμα: Αυτή η συναλλαγή απαιτεί αμοιβή συναλλαγής τουλάχιστον %s λόγω του μεγέθους, πολυπλοκότητας ή της χρήσης πρόσφατης παραλαβής κεφαλαίου</translation> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation>Εκτέλεση της εντολής όταν το καλύτερο μπλοκ αλλάξει(%s στην εντολή αντικαθίσταται από το hash του μπλοκ)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Εκτέλεσε την εντολή όταν το καλύτερο μπλοκ αλλάξει(%s στην εντολή αντικαθίσταται από το hash του μπλοκ)</translation> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Ορίστε το μέγιστο μέγεθος των high-priority/low-fee συναλλαγων σε bytes (προεπιλογή: 27000)</translation> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation>Αυτό είναι ένα προ-τεστ κυκλοφορίας - χρησιμοποιήστε το με δική σας ευθύνη - δεν χρησιμοποιείτε για εξόρυξη ή για αλλες εφαρμογές</translation> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Προειδοποίηση: Η παράμετρος -paytxfee είναι πολύ υψηλή. Πρόκειται για την αμοιβή που θα πληρώνετε για κάθε συναλλαγή που θα στέλνετε.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Προειδοποίηση: Εμφανίσεις συναλλαγων δεν μπορεί να είναι σωστες! Μπορεί να χρειαστεί να αναβαθμίσετε, ή άλλοι κόμβοι μπορεί να χρειαστεί να αναβαθμίστουν. </translation> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Wabcoin will not work properly.</source> <translation>Προειδοποίηση: Παρακαλώ βεβαιωθείτε πως η ημερομηνία κι ώρα του συστήματός σας είναι σωστές. Αν το ρολόι του υπολογιστή σας πάει λάθος, ενδέχεται να μη λειτουργεί σωστά το Wabcoin.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Προειδοποίηση : Σφάλμα wallet.dat κατα την ανάγνωση ! Όλα τα κλειδιά αναγνωρισθηκαν σωστά, αλλά τα δεδομένα των συναλλαγών ή καταχωρήσεις στο βιβλίο διευθύνσεων μπορεί να είναι ελλιπείς ή λανθασμένα. </translation> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Προειδοποίηση : το αρχειο wallet.dat ειναι διεφθαρμένο, τα δεδομένα σώζονται ! 
Original wallet.dat αποθηκεύονται ως πορτοφόλι { timestamp } bak στο % s ? . . Αν το υπόλοιπο του ή τις συναλλαγές σας, είναι λάθος θα πρέπει να επαναφέρετε από ένα αντίγραφο ασφαλείας</translation> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Προσπάθεια για ανακτησει ιδιωτικων κλειδιων από ενα διεφθαρμένο αρχειο wallet.dat </translation> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation>Αποκλεισμός επιλογων δημιουργίας: </translation> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Σύνδεση μόνο με ορισμένους κόμβους</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation>Εντοπισθηκε διεφθαρμενη βαση δεδομενων των μπλοκ</translation> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Ανακαλύψτε την δικη σας IP διεύθυνση (προεπιλογή: 1 όταν ακούει και δεν - externalip) </translation> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation>Θελετε να δημιουργηθει τωρα η βαση δεδομενων του μπλοκ? </translation> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation>Σφάλμα κατά την ενεργοποίηση της βάσης δεδομένων μπλοκ</translation> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation>Σφάλμα κατά την ενεργοποίηση της βάσης δεδομένων πορτοφόλιου %s!</translation> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation>Σφάλμα φορτωσης της βασης δεδομενων των μπλοκ</translation> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation>Σφάλμα φορτωσης της βασης δεδομενων των μπλοκ</translation> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation>Προειδοποίηση: Χαμηλός χώρος στο δίσκο </translation> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Σφάλμα: το πορτοφόλι είναι κλειδωμένο, δεν μπορεί να δημιουργηθεί συναλλαγή</translation> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation>Λάθος: λάθος συστήματος:</translation> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>ταλαιπωρηθειτε για να ακούσετε σε οποιαδήποτε θύρα. 
Χρήση - ακούστε = 0 , αν θέλετε αυτό.</translation> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation>Αποτυχία αναγνωσης των block πληροφοριων</translation> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation>Η αναγνωση του μπλοκ απετυχε</translation> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation>Ο συγχρονισμος του μπλοκ ευρετηριου απετυχε</translation> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation>Η δημιουργια του μπλοκ ευρετηριου απετυχε</translation> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation>Η δημιουργια των μπλοκ πληροφοριων απετυχε</translation> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation>Η δημιουργια του μπλοκ απετυχε</translation> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation>Αδυναμία εγγραφής πληροφοριων αρχειου</translation> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation>Αποτυχία εγγραφής στη βάση δεδομένων νομίσματος</translation> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation>Αποτυχία εγγραφής δείκτη συναλλαγών </translation> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation>Αποτυχία εγγραφής αναίρεσης δεδομένων </translation> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation>Βρες ομότιμους υπολογιστές χρησιμοποιώντας αναζήτηση DNS(προεπιλογή:1)</translation> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation>Δημιουργία νομισμάτων (προκαθορισμος: 0)</translation> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation>Πόσα μπλοκ να ελέγχθουν κατά την εκκίνηση (προεπιλογή:288,0=όλα)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation>Πόσο εξονυχιστική να είναι η επιβεβαίωση του μπλοκ(0-4, προεπιλογή:3)</translation> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation>Δεν ειναι αρκετες περιγραφες αρχείων διαθέσιμες.</translation> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation>Εισαγωγή μπλοκ από εξωτερικό αρχείο blk000?.dat</translation> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation>Ορίσμος του αριθμόυ θεματων στην υπηρεσία κλήσεων RPC (προεπιλογή: 4) </translation> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation>Επαλήθευση των μπλοκ... </translation> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation>Επαλήθευση πορτοφολιου... 
</translation> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation>Εισαγωγή μπλοκ από εξωτερικό αρχείο blk000?.dat</translation> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation>Ορίσμος του αριθμό των νημάτων ελέγχου σεναρίου (μέχρι 16, 0 = auto, &lt;0 = αφήνουν τους πολλους πυρήνες δωρεάν, default: 0)</translation> </message> <message> <location line="+77"/> <source>Information</source> <translation>Πληροφορία</translation> </message> <message> <location line="+3"/><|fim▁hole|> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Μη έγκυρο ποσό για την παράμετρο -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Μη έγκυρο ποσό για την παράμετρο -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation>Διατηρήση ένος πλήρες ευρετήριου συναλλαγών (προεπιλογή: 0) </translation> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Μέγιστος buffer λήψης ανά σύνδεση, &lt;n&gt;*1000 bytes (προεπιλογή: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Μέγιστος buffer αποστολής ανά σύνδεση, &lt;n&gt;*1000 bytes (προεπιλογή: 1000)</translation> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation>Μονο αποδοχη αλυσίδας μπλοκ που ταιριάζει με τα ενσωματωμένα σημεία ελέγχου (προεπιλογή: 1) </translation> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation> Συνδέση μόνο σε κόμβους του δικτύου &lt;net&gt; (IPv4, IPv6 ή Tor) </translation> </message> <message> <location line="+2"/> <source>Output extra debugging information. 
Implies all other -debug* options</source> <translation>Έξοδος επιπλέον πληροφοριών εντοπισμού σφαλμάτων</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Έξοδος επιπλέον πληροφοριών εντοπισμού σφαλμάτων</translation> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Χρονοσφραγίδα πληροφοριών εντοπισμού σφαλμάτων</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the Wabcoin Wiki for SSL setup instructions)</source> <translation>Ρυθμίσεις SSL: (ανατρέξτε στο Wabcoin Wiki για οδηγίες ρυθμίσεων SSL)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Επιλέξτε την έκδοση του διαμεσολαβητη για να χρησιμοποιήσετε (4-5 , προεπιλογή: 5)</translation> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Αποστολή πληροφοριών εντοπισμού σφαλμάτων στην κονσόλα αντί του αρχείου debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Αποστολή πληροφοριών εντοπισμού σφαλμάτων στον debugger</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Ορίσμος του μέγιστου μέγεθος block σε bytes (προεπιλογή: 250000)</translation> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Ορίστε το μέγιστο μέγεθος block σε bytes (προεπιλογή: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Συρρίκνωση του αρχείο debug.log κατα την εκκίνηση του πελάτη (προεπιλογή: 1 όταν δεν-debug)</translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation>Η υπογραφή συναλλαγής απέτυχε </translation> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Ορισμός λήξης χρονικού ορίου σε χιλιοστά του δευτερολέπτου(προεπιλογή:5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation>Λάθος Συστήματος:</translation> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation>Το ποσό της συναλλαγής είναι πολύ μικρο </translation> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation>Τα ποσά των συναλλαγών πρέπει να είναι θετικα</translation> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation>Η συναλλαγή ειναι πολύ μεγάλη </translation> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Χρησιμοποίηση του UPnP για την χρήση της πόρτας αναμονής (προεπιλογή:0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Χρησιμοποίηση του UPnP για την χρήση της πόρτας αναμονής (προεπιλογή:1)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Χρήση διακομιστή μεσολάβησης για την επίτευξη των Tor κρυμμένων υπηρεσιων 
(προεπιλογή: ίδιο με το-proxy) </translation> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Όνομα χρήστη για τις συνδέσεις JSON-RPC</translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation>Προειδοποίηση</translation> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Προειδοποίηση: Αυτή η έκδοση είναι ξεπερασμένη, απαιτείται αναβάθμιση </translation> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation>Θα πρέπει να ξαναχτίστουν οι βάσεις δεδομένων που χρησιμοποιούντε-Αναδημιουργία αλλάγων-txindex </translation> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation>Το αρχειο wallet.dat ειναι διεφθαρμένο, η διάσωση απέτυχε</translation> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Κωδικός για τις συνδέσεις JSON-RPC</translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Αποδοχή συνδέσεων JSON-RPC από συγκεκριμένη διεύθυνση IP</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Αποστολή εντολών στον κόμβο &lt;ip&gt; (προεπιλογή: 127.0.0.1)</translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Εκτέλεσε την εντολή όταν το καλύτερο μπλοκ αλλάξει(%s στην εντολή αντικαθίσταται από το hash του μπλοκ)</translation> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>Αναβάθμισε το πορτοφόλι στην τελευταία έκδοση</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Όριο πλήθους κλειδιών pool &lt;n&gt; (προεπιλογή: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Επανέλεγχος της αλυσίδας μπλοκ για απούσες συναλλαγές</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Χρήση του OpenSSL (https) για συνδέσεις JSON-RPC</translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>Αρχείο πιστοποιητικού του διακομιστή (προεπιλογή: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Προσωπικό κλειδί του διακομιστή (προεπιλογή: server.pem)</translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Αποδεκτά κρυπτογραφήματα (προεπιλογή: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Αυτό το κείμενο βοήθειας</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Αδύνατη η σύνδεση με τη θύρα %s αυτού του υπολογιστή (bind returned error %d, %s) </translation> 
</message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>Σύνδεση μέσω διαμεσολαβητή socks</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Να επιτρέπονται οι έλεγχοι DNS για προσθήκη και σύνδεση κόμβων</translation> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>Φόρτωση διευθύνσεων...</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Σφάλμα φόρτωσης wallet.dat: Κατεστραμμένο Πορτοφόλι</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Wabcoin</source> <translation>Σφάλμα φόρτωσης wallet.dat: Το Πορτοφόλι απαιτεί μια νεότερη έκδοση του Wabcoin</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart Wabcoin to complete</source> <translation>Απαιτείται η επανεγγραφή του Πορτοφολιού, η οποία θα ολοκληρωθεί στην επανεκκίνηση του Wabcoin</translation> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation>Σφάλμα φόρτωσης αρχείου wallet.dat</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Άγνωστo δίκτυο ορίζεται σε onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>Άγνωστo δίκτυο ορίζεται: %i</translation> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>Δεν μπορώ να γράψω την προεπιλεγμένη διεύθυνση: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>Δεν μπορώ να γράψω την προεπιλεγμένη διεύθυνση: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Μη έγκυρο ποσό για την παράμετρο -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Λάθος ποσότητα</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Ανεπαρκές κεφάλαιο</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Φόρτωση ευρετηρίου μπλοκ...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Προσέθεσε ένα κόμβο για σύνδεση και προσπάθησε να κρατήσεις την σύνδεση ανοιχτή</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. Wabcoin is probably already running.</source> <translation>Αδύνατη η σύνδεση με τη θύρα %s αυτού του υπολογιστή. 
Το Wabcoin είναι πιθανώς ήδη ενεργό.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>Αμοιβή ανά KB που θα προστίθεται στις συναλλαγές που στέλνεις</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Φόρτωση πορτοφολιού...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation>Δεν μπορώ να υποβαθμίσω το πορτοφόλι</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>Δεν μπορώ να γράψω την προεπιλεγμένη διεύθυνση</translation> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Ανίχνευση...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Η φόρτωση ολοκληρώθηκε</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation>Χρήση της %s επιλογής</translation> </message> <message> <location line="-74"/> <source>Error</source> <translation>Σφάλμα</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Πρέπει να βάλεις ένα κωδικό στο αρχείο παραμέτρων: %s Εάν το αρχείο δεν υπάρχει, δημιούργησε το με δικαιώματα μόνο για ανάγνωση από τον δημιουργό</translation> </message> </context> </TS><|fim▁end|>
<source>Invalid -tor address: &apos;%s&apos;</source> <translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή: &apos;%s&apos;</translation>
<|file_name|>CallNodeContextMenu.test.js<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ // @flow import * as React from 'react'; import { Provider } from 'react-redux'; import copy from 'copy-to-clipboard'; import { render } from 'firefox-profiler/test/fixtures/testing-library'; import { CallNodeContextMenu } from '../../components/shared/CallNodeContextMenu'; import { storeWithProfile } from '../fixtures/stores'; import { getProfileFromTextSamples } from '../fixtures/profiles/processed-profile'; import { changeRightClickedCallNode, changeExpandedCallNodes, setContextMenuVisibility, } from '../../actions/profile-view'; import { selectedThreadSelectors } from '../../selectors/per-thread'; import { ensureExists } from '../../utils/flow'; import { fireFullClick } from '../fixtures/utils'; describe('calltree/CallNodeContextMenu', function () { // Provide a store with a useful profile to assert context menu operations off of. function createStore() { // Create a profile that every transform can be applied to. const { profile, funcNamesDictPerThread: [{ A, B }], } = getProfileFromTextSamples(` A A A B[lib:XUL] B[lib:XUL] B[lib:XUL] B[lib:XUL] B[lib:XUL] B[lib:XUL] B[lib:XUL] B[lib:XUL] B[lib:XUL] C C H D F I E E `); const store = storeWithProfile(profile); store.dispatch(changeExpandedCallNodes(0, [[A]])); store.dispatch(changeRightClickedCallNode(0, [A, B])); return store; } function setup(store = createStore(), openMenuState = true) { store.dispatch(setContextMenuVisibility(openMenuState)); const renderResult = render( <Provider store={store}> <CallNodeContextMenu /> </Provider> ); return { ...renderResult, getState: store.getState }; } describe('basic rendering', function () { it('does not render the context menu when it is closed', () => { const isContextMenuOpen = false; const { container } = setup(createStore(), isContextMenuOpen); expect(container.querySelector('.react-contextmenu')).toBeNull(); }); it('renders a full context menu when open, with many nav items', () => { const isContextMenuOpen = true; const { container } = setup(createStore(), isContextMenuOpen); expect( ensureExists( container.querySelector('.react-contextmenu'), `Couldn't find the context menu root component .react-contextmenu` ).children.length > 1 ).toBeTruthy();<|fim▁hole|> describe('clicking on call tree transforms', function () { // Iterate through each transform slug, and click things in it. 
const fixtures = [ { matcher: /Merge function/, type: 'merge-function' }, { matcher: /Merge node only/, type: 'merge-call-node' }, { matcher: /Focus on subtree only/, type: 'focus-subtree' }, { matcher: /Focus on function/, type: 'focus-function' }, { matcher: /Collapse function/, type: 'collapse-function-subtree' }, { matcher: /XUL/, type: 'collapse-resource' }, { matcher: /Collapse direct recursion/, type: 'collapse-direct-recursion', }, { matcher: /Drop samples/, type: 'drop-function' }, ]; fixtures.forEach(({ matcher, type }) => { it(`adds a transform for "${type}"`, function () { const { getState, getByText } = setup(); fireFullClick(getByText(matcher)); expect( selectedThreadSelectors.getTransformStack(getState())[0].type ).toBe(type); }); }); }); describe('clicking on the rest of the menu items', function () { it('can expand all call nodes in the call tree', function () { const { getState, getByText } = setup(); expect( selectedThreadSelectors.getExpandedCallNodeIndexes(getState()) ).toHaveLength(1); fireFullClick(getByText('Expand all')); // This test only asserts that a bunch of call nodes were actually expanded. expect( selectedThreadSelectors.getExpandedCallNodeIndexes(getState()) ).toHaveLength(11); }); it('can look up functions on SearchFox', function () { const { getByText } = setup(); jest.spyOn(window, 'open').mockImplementation(() => {}); fireFullClick(getByText(/Searchfox/)); expect(window.open).toBeCalledWith( 'https://searchfox.org/mozilla-central/search?q=B', '_blank' ); }); it('can copy a function name', function () { const { getByText } = setup(); // Copy is a mocked module, clear it both before and after. fireFullClick(getByText('Copy function name')); expect(copy).toBeCalledWith('B'); }); it('can copy a script URL', function () { // Create a new profile that has JavaScript in it. const { profile, funcNamesPerThread: [funcNames], } = getProfileFromTextSamples(` A.js `); const funcIndex = funcNames.indexOf('A.js'); const [thread] = profile.threads; thread.funcTable.fileName[funcIndex] = thread.stringTable.indexForString( 'https://example.com/script.js' ); const store = storeWithProfile(profile); store.dispatch(changeRightClickedCallNode(0, [funcIndex])); const { getByText } = setup(store); // Copy is a mocked module, clear it both before and after. fireFullClick(getByText('Copy script URL')); expect(copy).toBeCalledWith('https://example.com/script.js'); }); it('can copy a stack', function () { const { getByText } = setup(); // Copy is a mocked module, clear it both before and after. fireFullClick(getByText('Copy stack')); expect(copy).toBeCalledWith(`B\nA\n`); }); }); });<|fim▁end|>
expect(container.firstChild).toMatchSnapshot(); }); });
<|file_name|>mdl-table.component.ts<|end_file_name|><|fim▁begin|>import { Component, EventEmitter, Input, Output, ViewEncapsulation, } from "@angular/core"; export interface IMdlTableColumn { key: string; name: string; numeric?: boolean; } export interface IMdlTableModelItem { selected: boolean; } export interface IMdlTableModel { columns: IMdlTableColumn[]; data: IMdlTableModelItem[]; } export class MdlDefaultTableModel implements IMdlTableModel { public columns: IMdlTableColumn[]; public data: IMdlTableModelItem[] = []; constructor(columns: IMdlTableColumn[]) { this.columns = columns; } addAll(data: IMdlTableModelItem[]): void { this.data.push(...data); } } const template = ` <table class="mdl-data-table"> <thead> <tr> <th *ngIf="selectable"> <mdl-checkbox mdl-ripple [ngModel]="isAllSelected()" (ngModelChange)="toogleAll()"></mdl-checkbox> </th> <th *ngFor="let column of model.columns" [ngClass]="{'mdl-data-table__cell--non-numeric': !column.numeric}"> {{column.name}} </th> </tr> </thead> <tbody> <tr *ngFor="let data of model.data; let i = index" [ngClass]="{'is-selected': selectable && data.selected}"> <td *ngIf="selectable"> <mdl-checkbox mdl-ripple [(ngModel)]="data.selected" (ngModelChange)="selectionChanged()"></mdl-checkbox> </td> <td *ngFor="let column of model.columns" [ngClass]="{'mdl-data-table__cell--non-numeric': !column.numeric}" [innerHTML]="data[column.key]"> </td> </tr> </tbody> </table> `; const styles = ` :host{ display:inline-block; } `; @Component({ selector: "mdl-table", template, styles: [styles], encapsulation: ViewEncapsulation.None, }) export class MdlTableComponent {<|fim▁hole|> // eslint-disable-next-line @Input('table-model') model: IMdlTableModel; selectable = false; isAllSelected(): boolean { return false; } // eslint-disable-next-line @typescript-eslint/no-empty-function toogleAll(): void {} // eslint-disable-next-line @typescript-eslint/no-empty-function selectionChanged(): void {} } @Component({ selector: "mdl-table-selectable", template, styles: [styles], encapsulation: ViewEncapsulation.None, }) export class MdlSelectableTableComponent extends MdlTableComponent { // eslint-disable-next-line @Input('table-model') model: IMdlTableModel; // eslint-disable-next-line @Input('table-model-selected') selected: IMdlTableModelItem[]; // eslint-disable-next-line @Output('table-model-selectionChanged') selectionChange = new EventEmitter(); public selectable = true; public allSelected = false; isAllSelected(): boolean { return this.model.data.every((data) => data.selected); } toogleAll(): void { const selected = !this.isAllSelected(); this.model.data.forEach((data) => (data.selected = selected)); this.updateSelected(); } selectionChanged(): void { this.updateSelected(); } private updateSelected() { this.selected = this.model.data.filter((data) => data.selected); this.selectionChange.emit({ value: this.selected }); } }<|fim▁end|>
<|file_name|>mandelbrot_set.py<|end_file_name|><|fim▁begin|>import numpy as np import moderngl from ported._example import Example class Fractal(Example): title = "Mandelbrot" gl_version = (3, 3) def __init__(self, **kwargs): super().__init__(**kwargs) self.prog = self.ctx.program( vertex_shader=''' #version 330 in vec2 in_vert; out vec2 v_text; void main() { gl_Position = vec4(in_vert, 0.0, 1.0); v_text = in_vert; } ''',<|fim▁hole|> in vec2 v_text; out vec4 f_color; uniform sampler2D Texture; uniform vec2 Center; uniform float Scale; uniform float Ratio; uniform int Iter; void main() { vec2 c; int i; c.x = Ratio * v_text.x * Scale - Center.x; c.y = v_text.y * Scale - Center.y; vec2 z = c; for (i = 0; i < Iter; i++) { float x = (z.x * z.x - z.y * z.y) + c.x; float y = (z.y * z.x + z.x * z.y) + c.y; if ((x * x + y * y) > 4.0) { break; } z.x = x; z.y = y; } f_color = texture(Texture, vec2((i == Iter ? 0.0 : float(i)) / 100.0, 0.0)); } ''' ) self.center = self.prog['Center'] self.scale = self.prog['Scale'] self.ratio = self.prog['Ratio'] self.iter = self.prog['Iter'] self.texture = self.load_texture_2d('pal.png') vertices = np.array([-1.0, -1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 1.0]) self.vbo = self.ctx.buffer(vertices.astype('f4')) self.vao = self.ctx.simple_vertex_array(self.prog, self.vbo, 'in_vert') def render(self, time, frame_time): self.ctx.clear(1.0, 1.0, 1.0) self.center.value = (0.5, 0.0) self.iter.value = 100 self.scale.value = 1.5 self.ratio.value = self.aspect_ratio self.texture.use() self.vao.render(moderngl.TRIANGLE_STRIP) if __name__ == '__main__': Fractal.run()<|fim▁end|>
fragment_shader=''' #version 330
<|file_name|>neproblem.rs<|end_file_name|><|fim▁begin|>use rand; use rand::{Rng, StdRng, SeedableRng}; use ea::*; use neuro::{ActivationFunctionType, MultilayeredNetwork, NeuralArchitecture, NeuralNetwork}; use problem::*; //-------------------------------------------- /// Trait for problem where NN is a solution. /// /// # Example: Custom NE problem /// ``` /// extern crate revonet; /// extern crate rand; /// /// use rand::{Rng, SeedableRng, StdRng}; /// /// use revonet::ea::*; /// use revonet::ne::*; /// use revonet::neuro::*; /// use revonet::neproblem::*; /// /// // Dummy problem returning random fitness. /// struct RandomNEProblem {} /// /// impl RandomNEProblem { /// fn new() -> RandomNEProblem { /// RandomNEProblem{} /// } /// } /// /// impl NeuroProblem for RandomNEProblem { /// // return number of NN inputs. /// fn get_inputs_num(&self) -> usize {1} /// // return number of NN outputs. /// fn get_outputs_num(&self) -> usize {1} /// // return NN with random weights and a fixed structure. For now the structure should be the same all the time to make sure that crossover is possible. Likely to change in the future. /// fn get_default_net(&self) -> MultilayeredNetwork { /// let mut rng = rand::thread_rng(); /// let mut net: MultilayeredNetwork = MultilayeredNetwork::new(self.get_inputs_num(), self.get_outputs_num()); /// net.add_hidden_layer(5 as usize, ActivationFunctionType::Sigmoid) /// .build(&mut rng, NeuralArchitecture::Multilayered); /// net /// } /// /// // Function to evaluate performance of a given NN. /// fn compute_with_net<T: NeuralNetwork>(&self, nn: &mut T) -> f32 { /// let mut rng: StdRng = StdRng::from_seed(&[0]); /// /// let mut input = (0..self.get_inputs_num()) /// .map(|_| rng.gen::<f32>()) /// .collect::<Vec<f32>>(); /// // compute NN output using random input. /// let mut output = nn.compute(&input); /// output[0] /// } /// } /// /// fn main() {} /// ``` pub trait NeuroProblem: Problem { /// Number of input variables. fn get_inputs_num(&self) -> usize; /// Number of output (target) variables. fn get_outputs_num(&self) -> usize; /// Returns random network with default number of inputs and outputs and some predefined structure. /// /// For now all networks returned by implementation of this functions have the same structure and /// random weights. This was done to ensure possibility to cross NN's and might change in the future. fn get_default_net(&self) -> MultilayeredNetwork; /// Compute fitness value for the given neural network. /// /// # Arguments: /// * `net` - neural network to compute fitness for. fn compute_with_net<T: NeuralNetwork>(&self, net: &mut T) -> f32; } /// Default implementation of the `Problem` trait for `NeuroProblem` #[allow(unused_variables, unused_mut)] impl<T: NeuroProblem> Problem for T { fn compute<I: Individual>(&self, ind: &mut I) -> f32 { let fitness; fitness = self.compute_with_net(ind.to_net_mut().expect("Can not extract mutable ANN")); // match ind.to_net_mut() { // Some(ref mut net) => {fitness = self.compute_with_net(net);}, // None => panic!("NN is not defined"), // }; ind.set_fitness(fitness); ind.get_fitness() } fn get_random_individual<U: Individual, R: Rng>(&self, size: usize, mut rng: &mut R) -> U { let mut res_ind = U::new(); res_ind.set_net(self.get_default_net()); res_ind } } /// /// Classical noiseless XOR problem with 2 binary inputs and 1 output. 
/// #[allow(dead_code)] pub struct XorProblem {} #[allow(dead_code)] impl XorProblem { pub fn new() -> XorProblem { XorProblem{} } } #[allow(dead_code)] impl NeuroProblem for XorProblem { fn get_inputs_num(&self) -> usize {2} fn get_outputs_num(&self) -> usize {1} fn get_default_net(&self) -> MultilayeredNetwork { let mut rng = rand::thread_rng(); let mut net: MultilayeredNetwork = MultilayeredNetwork::new(self.get_inputs_num(), self.get_outputs_num()); net.add_hidden_layer(4 as usize, ActivationFunctionType::Sigmoid) .build(&mut rng, NeuralArchitecture::BypassInputs); // .build(&mut rng, NeuralArchitecture::BypassInputs); net } fn compute_with_net<T: NeuralNetwork>(&self, nn: &mut T) -> f32 { let mut er = 0f32; let output = nn.compute(&[0f32, 0f32]); er += output[0] * output[0]; let output = nn.compute(&[1f32, 1f32]); er += output[0] * output[0]; let output = nn.compute(&[0f32, 1f32]); er += (1f32-output[0]) * (1f32-output[0]); let output = nn.compute(&[1f32, 0f32]); er += (1f32-output[0]) * (1f32-output[0]); er } } /// /// Problem which is typically used to test GP algorithms. Represents symbolic regression with /// 1 input and 1 output. There are three variants: /// * `f` - 4-th order polynomial. /// * `g` - 5-th order polynomial. /// * `h` - 6-th order polynomial. /// /// See for details: Luke S. Essentials of metaheuristics. /// #[allow(dead_code)] pub struct SymbolicRegressionProblem { func: fn(&SymbolicRegressionProblem, f32) -> f32, } #[allow(dead_code)] impl SymbolicRegressionProblem { /// Create a new problem depending on the problem type: /// * `f` - 4-th order polynomial. /// * `g` - 5-th order polynomial. /// * `h` - 6-th order polynomial. /// /// # Arguments: /// * `problem_type` - symbol from set `('f', 'g', 'h')` to set the problem type. 
pub fn new(problem_type: char) -> SymbolicRegressionProblem { match problem_type { 'f' => SymbolicRegressionProblem::new_f(), 'g' => SymbolicRegressionProblem::new_g(), 'h' => SymbolicRegressionProblem::new_h(), _ => { panic!(format!("Unknown problem type for symbolic regression problem: {}", problem_type)) } } } /// Create `f`-type problem (4-th order polynomial) pub fn new_f() -> SymbolicRegressionProblem { SymbolicRegressionProblem { func: SymbolicRegressionProblem::f } } /// Create `g`-type problem (5-th order polynomial) pub fn new_g() -> SymbolicRegressionProblem { SymbolicRegressionProblem { func: SymbolicRegressionProblem::g } } /// Create `h`-type problem (6-th order polynomial) pub fn new_h() -> SymbolicRegressionProblem { SymbolicRegressionProblem { func: SymbolicRegressionProblem::h } } fn f(&self, x: f32) -> f32 { let x2 = x * x; x2 * x2 + x2 * x + x2 + x } fn g(&self, x: f32) -> f32 { let x2 = x * x; x2 * x2 * x - 2f32 * x2 * x + x } fn h(&self, x: f32) -> f32 { let x2 = x * x; x2 * x2 * x2 - 2f32 * x2 * x2 + x2 } } impl NeuroProblem for SymbolicRegressionProblem { fn get_inputs_num(&self) -> usize { 1 } fn get_outputs_num(&self) -> usize { 1 } fn get_default_net(&self) -> MultilayeredNetwork { let mut rng = rand::thread_rng(); let mut net: MultilayeredNetwork = MultilayeredNetwork::new(self.get_inputs_num(), self.get_outputs_num()); net.add_hidden_layer(5 as usize, ActivationFunctionType::Sigmoid) .build(&mut rng, NeuralArchitecture::Multilayered); net } fn compute_with_net<T: NeuralNetwork>(&self, nn: &mut T) -> f32 { const PTS_COUNT: u32 = 20; let mut er = 0f32; let mut input = vec![0f32]; let mut output; let mut rng: StdRng = StdRng::from_seed(&[0]); for _ in 0..PTS_COUNT { let x = rng.gen::<f32>(); // sample from [-1, 1] let y = (self.func)(&self, x); input[0] = x; output = nn.compute(&input); er += (output[0] - y).abs(); } er } } //========================================================= #[cfg(test)] #[allow(unused_imports)] mod test { use rand; use math::*; use ne::*; use neproblem::*; use problem::*; use settings::*; #[test] fn test_xor_problem() { let (pop_size, gen_count, param_count) = (20, 20, 100); // gene_count does not matter here as NN structure is defined by a problem. let settings = EASettings::new(pop_size, gen_count, param_count); let problem = XorProblem::new(); let mut ne: NE<XorProblem> = NE::new(&problem); let res = ne.run(settings).expect("Error: NE result is empty"); println!("result: {:?}", res); println!("\nbest individual: {:?}", res.best); } #[test] fn test_symb_regression_problem() { for prob_type in vec!['f', 'g', 'h'] { let mut rng = rand::thread_rng(); let prob = SymbolicRegressionProblem::new(prob_type); println!("Created problem of type: {}", prob_type); let mut net = prob.get_default_net(); println!("Created default net with {} inputs, {} outputs, and {} hidden layers ", net.get_inputs_num(), net.get_outputs_num(), net.len()-1); println!(" Network weights: {:?}", net.get_weights()); let mut ind: NEIndividual = prob.get_random_individual(0, &mut rng); println!(" Random individual: {:?}", ind.to_vec().unwrap()); println!(" Random individual ANN: {:?}", ind.to_net().unwrap()); let input_size = net.get_inputs_num(); let mut ys = Vec::with_capacity(100); for _ in 0..100 {<|fim▁hole|>
let x = rand_vector_std_gauss(input_size, &mut rng); let y = net.compute(&x); ys.push(y); }
<|file_name|>look_feel_ui.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Form implementation generated from reading ui file '/home/yc/code/calibre/calibre/src/calibre/gui2/preferences/look_feel.ui' # # Created: Thu Oct 25 16:54:55 2012 # by: PyQt4 UI code generator 4.8.5 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: _fromUtf8 = lambda s: s class Ui_Form(object): def setupUi(self, Form): Form.setObjectName(_fromUtf8("Form")) Form.resize(820, 519) Form.setWindowTitle(_("Form")) self.gridLayout_2 = QtGui.QGridLayout(Form) self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2")) self.tabWidget = QtGui.QTabWidget(Form) self.tabWidget.setObjectName(_fromUtf8("tabWidget")) self.tab = QtGui.QWidget() self.tab.setObjectName(_fromUtf8("tab")) self.gridLayout_9 = QtGui.QGridLayout(self.tab) self.gridLayout_9.setObjectName(_fromUtf8("gridLayout_9")) self.label_7 = QtGui.QLabel(self.tab) self.label_7.setText(_("Choose &language (requires restart):")) self.label_7.setObjectName(_fromUtf8("label_7")) self.gridLayout_9.addWidget(self.label_7, 2, 0, 1, 1) self.opt_language = QtGui.QComboBox(self.tab) self.opt_language.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToMinimumContentsLengthWithIcon) self.opt_language.setMinimumContentsLength(20) self.opt_language.setObjectName(_fromUtf8("opt_language")) self.gridLayout_9.addWidget(self.opt_language, 2, 1, 1, 1) self.opt_systray_icon = QtGui.QCheckBox(self.tab) self.opt_systray_icon.setText(_("Enable system &tray icon (needs restart)")) self.opt_systray_icon.setObjectName(_fromUtf8("opt_systray_icon")) self.gridLayout_9.addWidget(self.opt_systray_icon, 3, 0, 1, 1) self.label_17 = QtGui.QLabel(self.tab) self.label_17.setText(_("User Interface &layout (needs restart):")) self.label_17.setObjectName(_fromUtf8("label_17")) self.gridLayout_9.addWidget(self.label_17, 1, 0, 1, 1) self.opt_gui_layout = QtGui.QComboBox(self.tab) self.opt_gui_layout.setMaximumSize(QtCore.QSize(250, 16777215)) self.opt_gui_layout.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToMinimumContentsLengthWithIcon) self.opt_gui_layout.setMinimumContentsLength(20) self.opt_gui_layout.setObjectName(_fromUtf8("opt_gui_layout")) self.gridLayout_9.addWidget(self.opt_gui_layout, 1, 1, 1, 1) self.opt_disable_animations = QtGui.QCheckBox(self.tab) self.opt_disable_animations.setToolTip(_("Disable all animations. 
Useful if you have a slow/old computer.")) self.opt_disable_animations.setText(_("Disable &animations")) self.opt_disable_animations.setObjectName(_fromUtf8("opt_disable_animations")) self.gridLayout_9.addWidget(self.opt_disable_animations, 3, 1, 1, 1) self.opt_disable_tray_notification = QtGui.QCheckBox(self.tab) self.opt_disable_tray_notification.setText(_("Disable &notifications in system tray")) self.opt_disable_tray_notification.setObjectName(_fromUtf8("opt_disable_tray_notification")) self.gridLayout_9.addWidget(self.opt_disable_tray_notification, 4, 0, 1, 1) self.opt_show_splash_screen = QtGui.QCheckBox(self.tab) self.opt_show_splash_screen.setText(_("Show &splash screen at startup")) self.opt_show_splash_screen.setObjectName(_fromUtf8("opt_show_splash_screen")) self.gridLayout_9.addWidget(self.opt_show_splash_screen, 4, 1, 1, 1) self.groupBox_2 = QtGui.QGroupBox(self.tab) self.groupBox_2.setTitle(_("&Toolbar")) self.groupBox_2.setObjectName(_fromUtf8("groupBox_2")) self.gridLayout_8 = QtGui.QGridLayout(self.groupBox_2) self.gridLayout_8.setObjectName(_fromUtf8("gridLayout_8")) self.opt_toolbar_icon_size = QtGui.QComboBox(self.groupBox_2) self.opt_toolbar_icon_size.setObjectName(_fromUtf8("opt_toolbar_icon_size")) self.gridLayout_8.addWidget(self.opt_toolbar_icon_size, 0, 1, 1, 1) self.label_5 = QtGui.QLabel(self.groupBox_2) self.label_5.setText(_("&Icon size:")) self.label_5.setObjectName(_fromUtf8("label_5")) self.gridLayout_8.addWidget(self.label_5, 0, 0, 1, 1) self.opt_toolbar_text = QtGui.QComboBox(self.groupBox_2) self.opt_toolbar_text.setObjectName(_fromUtf8("opt_toolbar_text")) self.gridLayout_8.addWidget(self.opt_toolbar_text, 1, 1, 1, 1) self.label_8 = QtGui.QLabel(self.groupBox_2) self.label_8.setText(_("Show &text under icons:")) self.label_8.setObjectName(_fromUtf8("label_8")) self.gridLayout_8.addWidget(self.label_8, 1, 0, 1, 1) self.gridLayout_9.addWidget(self.groupBox_2, 7, 0, 1, 2) spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.gridLayout_9.addItem(spacerItem, 8, 0, 1, 1) self.horizontalLayout = QtGui.QHBoxLayout() self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout")) self.label_2 = QtGui.QLabel(self.tab) self.label_2.setText(_("Interface font:")) self.label_2.setObjectName(_fromUtf8("label_2")) self.horizontalLayout.addWidget(self.label_2) self.font_display = QtGui.QLineEdit(self.tab) self.font_display.setReadOnly(True) self.font_display.setObjectName(_fromUtf8("font_display")) self.horizontalLayout.addWidget(self.font_display) self.gridLayout_9.addLayout(self.horizontalLayout, 6, 0, 1, 1) self.change_font_button = QtGui.QPushButton(self.tab) self.change_font_button.setText(_("Change &font (needs restart)")) self.change_font_button.setObjectName(_fromUtf8("change_font_button")) self.gridLayout_9.addWidget(self.change_font_button, 6, 1, 1, 1) self.label_widget_style = QtGui.QLabel(self.tab) self.label_widget_style.setText(_("User interface &style (needs restart):")) self.label_widget_style.setObjectName(_fromUtf8("label_widget_style")) self.gridLayout_9.addWidget(self.label_widget_style, 0, 0, 1, 1) self.opt_ui_style = QtGui.QComboBox(self.tab) self.opt_ui_style.setObjectName(_fromUtf8("opt_ui_style")) self.gridLayout_9.addWidget(self.opt_ui_style, 0, 1, 1, 1) self.opt_book_list_tooltips = QtGui.QCheckBox(self.tab) self.opt_book_list_tooltips.setText(_("Show &tooltips in the book list")) self.opt_book_list_tooltips.setObjectName(_fromUtf8("opt_book_list_tooltips")) 
self.gridLayout_9.addWidget(self.opt_book_list_tooltips, 5, 0, 1, 1) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(I("lt.png"))), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.tabWidget.addTab(self.tab, icon, _fromUtf8("")) self.tab_4 = QtGui.QWidget() self.tab_4.setObjectName(_fromUtf8("tab_4")) self.gridLayout_12 = QtGui.QGridLayout(self.tab_4) self.gridLayout_12.setObjectName(_fromUtf8("gridLayout_12")) self.label_3 = QtGui.QLabel(self.tab_4) self.label_3.setText(_("Note that <b>comments</b> will always be displayed at the end, regardless of the position you assign here.")) self.label_3.setWordWrap(True) self.label_3.setObjectName(_fromUtf8("label_3")) self.gridLayout_12.addWidget(self.label_3, 2, 1, 1, 1) self.opt_use_roman_numerals_for_series_number = QtGui.QCheckBox(self.tab_4)<|fim▁hole|> self.opt_use_roman_numerals_for_series_number.setObjectName(_fromUtf8("opt_use_roman_numerals_for_series_number")) self.gridLayout_12.addWidget(self.opt_use_roman_numerals_for_series_number, 0, 1, 1, 1) self.groupBox = QtGui.QGroupBox(self.tab_4) self.groupBox.setTitle(_("Select displayed metadata")) self.groupBox.setObjectName(_fromUtf8("groupBox")) self.gridLayout_3 = QtGui.QGridLayout(self.groupBox) self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3")) self.df_up_button = QtGui.QToolButton(self.groupBox) self.df_up_button.setToolTip(_("Move up")) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(I("arrow-up.png"))), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.df_up_button.setIcon(icon1) self.df_up_button.setObjectName(_fromUtf8("df_up_button")) self.gridLayout_3.addWidget(self.df_up_button, 0, 1, 1, 1) self.df_down_button = QtGui.QToolButton(self.groupBox) self.df_down_button.setToolTip(_("Move down")) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(I("arrow-down.png"))), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.df_down_button.setIcon(icon2) self.df_down_button.setObjectName(_fromUtf8("df_down_button")) self.gridLayout_3.addWidget(self.df_down_button, 2, 1, 1, 1) self.field_display_order = QtGui.QListView(self.groupBox) self.field_display_order.setAlternatingRowColors(True) self.field_display_order.setObjectName(_fromUtf8("field_display_order")) self.gridLayout_3.addWidget(self.field_display_order, 0, 0, 3, 1) spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.gridLayout_3.addItem(spacerItem1, 1, 1, 1, 1) self.gridLayout_12.addWidget(self.groupBox, 2, 0, 2, 1) self.hboxlayout = QtGui.QHBoxLayout() self.hboxlayout.setObjectName(_fromUtf8("hboxlayout")) self.label = QtGui.QLabel(self.tab_4) self.label.setText(_("Default author link template:")) self.label.setObjectName(_fromUtf8("label")) self.hboxlayout.addWidget(self.label) self.opt_default_author_link = QtGui.QLineEdit(self.tab_4) self.opt_default_author_link.setToolTip(_("<p>Enter a template to be used to create a link for\n" "an author in the books information dialog. This template will\n" "be used when no link has been provided for the author using\n" "Manage Authors. 
You can use the values {author} and\n" "{author_sort}, and any template function.")) self.opt_default_author_link.setObjectName(_fromUtf8("opt_default_author_link")) self.hboxlayout.addWidget(self.opt_default_author_link) self.gridLayout_12.addLayout(self.hboxlayout, 0, 0, 1, 1) self.opt_bd_show_cover = QtGui.QCheckBox(self.tab_4) self.opt_bd_show_cover.setText(_("Show &cover in the book details panel")) self.opt_bd_show_cover.setObjectName(_fromUtf8("opt_bd_show_cover")) self.gridLayout_12.addWidget(self.opt_bd_show_cover, 1, 0, 1, 2) icon3 = QtGui.QIcon() icon3.addPixmap(QtGui.QPixmap(_fromUtf8(I("book.png"))), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.tabWidget.addTab(self.tab_4, icon3, _fromUtf8("")) self.tab_2 = QtGui.QWidget() self.tab_2.setObjectName(_fromUtf8("tab_2")) self.gridLayout_10 = QtGui.QGridLayout(self.tab_2) self.gridLayout_10.setObjectName(_fromUtf8("gridLayout_10")) self.opt_categories_using_hierarchy = EditWithComplete(self.tab_2) self.opt_categories_using_hierarchy.setToolTip(_("A comma-separated list of categories in which items containing\n" "periods are displayed in the tag browser trees. For example, if\n" "this box contains \'tags\' then tags of the form \'Mystery.English\'\n" "and \'Mystery.Thriller\' will be displayed with English and Thriller\n" "both under \'Mystery\'. If \'tags\' is not in this box,\n" "then the tags will be displayed each on their own line.")) self.opt_categories_using_hierarchy.setObjectName(_fromUtf8("opt_categories_using_hierarchy")) self.gridLayout_10.addWidget(self.opt_categories_using_hierarchy, 3, 2, 1, 3) self.label_9 = QtGui.QLabel(self.tab_2) self.label_9.setText(_("Tags browser category &partitioning method:")) self.label_9.setObjectName(_fromUtf8("label_9")) self.gridLayout_10.addWidget(self.label_9, 0, 0, 1, 2) self.opt_tags_browser_partition_method = QtGui.QComboBox(self.tab_2) self.opt_tags_browser_partition_method.setToolTip(_("Choose how tag browser subcategories are displayed when\n" "there are more items than the limit. Select by first\n" "letter to see an A, B, C list. Choose partitioned to\n" "have a list of fixed-sized groups. Set to disabled\n" "if you never want subcategories")) self.opt_tags_browser_partition_method.setObjectName(_fromUtf8("opt_tags_browser_partition_method")) self.gridLayout_10.addWidget(self.opt_tags_browser_partition_method, 0, 2, 1, 1) self.label_10 = QtGui.QLabel(self.tab_2) self.label_10.setText(_("&Collapse when more items than:")) self.label_10.setObjectName(_fromUtf8("label_10")) self.gridLayout_10.addWidget(self.label_10, 0, 3, 1, 1) self.opt_tags_browser_collapse_at = QtGui.QSpinBox(self.tab_2) self.opt_tags_browser_collapse_at.setToolTip(_("If a Tag Browser category has more than this number of items, it is divided\n" "up into subcategories. 
If the partition method is set to disable, this value is ignored.")) self.opt_tags_browser_collapse_at.setMaximum(10000) self.opt_tags_browser_collapse_at.setObjectName(_fromUtf8("opt_tags_browser_collapse_at")) self.gridLayout_10.addWidget(self.opt_tags_browser_collapse_at, 0, 4, 1, 1) spacerItem2 = QtGui.QSpacerItem(690, 252, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.gridLayout_10.addItem(spacerItem2, 5, 0, 1, 5) self.label_8111 = QtGui.QLabel(self.tab_2) self.label_8111.setText(_("Categories not to partition:")) self.label_8111.setObjectName(_fromUtf8("label_8111")) self.gridLayout_10.addWidget(self.label_8111, 1, 2, 1, 1) self.opt_tag_browser_dont_collapse = EditWithComplete(self.tab_2) self.opt_tag_browser_dont_collapse.setToolTip(_("A comma-separated list of categories that are not to\n" "be partitioned even if the number of items is larger than\n" "the value shown above. This option can be used to\n" "avoid collapsing hierarchical categories that have only\n" "a few top-level elements.")) self.opt_tag_browser_dont_collapse.setObjectName(_fromUtf8("opt_tag_browser_dont_collapse")) self.gridLayout_10.addWidget(self.opt_tag_browser_dont_collapse, 1, 3, 1, 2) self.opt_show_avg_rating = QtGui.QCheckBox(self.tab_2) self.opt_show_avg_rating.setText(_("Show &average ratings in the tags browser")) self.opt_show_avg_rating.setChecked(True) self.opt_show_avg_rating.setObjectName(_fromUtf8("opt_show_avg_rating")) self.gridLayout_10.addWidget(self.opt_show_avg_rating, 2, 0, 1, 5) self.label_81 = QtGui.QLabel(self.tab_2) self.label_81.setText(_("Categories with &hierarchical items:")) self.label_81.setObjectName(_fromUtf8("label_81")) self.gridLayout_10.addWidget(self.label_81, 3, 0, 1, 1) self.opt_tag_browser_old_look = QtGui.QCheckBox(self.tab_2) self.opt_tag_browser_old_look.setText(_("Use &alternating row colors in the Tag Browser")) self.opt_tag_browser_old_look.setObjectName(_fromUtf8("opt_tag_browser_old_look")) self.gridLayout_10.addWidget(self.opt_tag_browser_old_look, 4, 0, 1, 5) icon4 = QtGui.QIcon() icon4.addPixmap(QtGui.QPixmap(_fromUtf8(I("tags.png"))), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.tabWidget.addTab(self.tab_2, icon4, _fromUtf8("")) self.tab_3 = QtGui.QWidget() self.tab_3.setObjectName(_fromUtf8("tab_3")) self.gridLayout_11 = QtGui.QGridLayout(self.tab_3) self.gridLayout_11.setObjectName(_fromUtf8("gridLayout_11")) self.opt_separate_cover_flow = QtGui.QCheckBox(self.tab_3) self.opt_separate_cover_flow.setText(_("Show cover &browser in a separate window (needs restart)")) self.opt_separate_cover_flow.setObjectName(_fromUtf8("opt_separate_cover_flow")) self.gridLayout_11.addWidget(self.opt_separate_cover_flow, 0, 0, 1, 2) self.label_6 = QtGui.QLabel(self.tab_3) self.label_6.setText(_("&Number of covers to show in browse mode (needs restart):")) self.label_6.setObjectName(_fromUtf8("label_6")) self.gridLayout_11.addWidget(self.label_6, 1, 0, 1, 1) self.opt_cover_flow_queue_length = QtGui.QSpinBox(self.tab_3) self.opt_cover_flow_queue_length.setObjectName(_fromUtf8("opt_cover_flow_queue_length")) self.gridLayout_11.addWidget(self.opt_cover_flow_queue_length, 1, 1, 1, 1) spacerItem3 = QtGui.QSpacerItem(690, 283, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.gridLayout_11.addItem(spacerItem3, 4, 0, 1, 2) self.opt_cb_fullscreen = QtGui.QCheckBox(self.tab_3) self.opt_cb_fullscreen.setText(_("When showing cover browser in separate window, show it &fullscreen")) self.opt_cb_fullscreen.setObjectName(_fromUtf8("opt_cb_fullscreen")) 
self.gridLayout_11.addWidget(self.opt_cb_fullscreen, 2, 0, 1, 2) self.fs_help_msg = QtGui.QLabel(self.tab_3) self.fs_help_msg.setStyleSheet(_fromUtf8("margin-left: 1.5em")) self.fs_help_msg.setText(_("You can press the %s keys to toggle full screen mode.")) self.fs_help_msg.setWordWrap(True) self.fs_help_msg.setObjectName(_fromUtf8("fs_help_msg")) self.gridLayout_11.addWidget(self.fs_help_msg, 3, 0, 1, 2) icon5 = QtGui.QIcon() icon5.addPixmap(QtGui.QPixmap(_fromUtf8(I("cover_flow.png"))), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.tabWidget.addTab(self.tab_3, icon5, _fromUtf8("")) self.gridLayout_2.addWidget(self.tabWidget, 0, 0, 1, 1) self.label_7.setBuddy(self.opt_language) self.label_17.setBuddy(self.opt_gui_layout) self.label_5.setBuddy(self.opt_toolbar_icon_size) self.label_8.setBuddy(self.opt_toolbar_text) self.label_2.setBuddy(self.font_display) self.label_widget_style.setBuddy(self.opt_ui_style) self.label.setBuddy(self.opt_default_author_link) self.label_9.setBuddy(self.opt_tags_browser_partition_method) self.label_10.setBuddy(self.opt_tags_browser_collapse_at) self.label_8111.setBuddy(self.opt_tag_browser_dont_collapse) self.label_81.setBuddy(self.opt_categories_using_hierarchy) self.label_6.setBuddy(self.opt_cover_flow_queue_length) self.retranslateUi(Form) self.tabWidget.setCurrentIndex(0) QtCore.QMetaObject.connectSlotsByName(Form) def retranslateUi(self, Form): self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _("Main Interface")) self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_4), _("Book Details")) self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _("Tag Browser")) self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_3), _("Cover Browser")) from calibre.gui2.complete2 import EditWithComplete<|fim▁end|>
self.opt_use_roman_numerals_for_series_number.setText(_("Use &Roman numerals for series")) self.opt_use_roman_numerals_for_series_number.setChecked(True)
<|file_name|>memory.go<|end_file_name|><|fim▁begin|>package storage import ( "crypto/sha256" "encoding/hex" "fmt" "sort" "strconv" "strings" "sync" "time" "github.com/docker/notary/tuf/data" ) type key struct { algorithm string public []byte } type ver struct { version int data []byte createupdate time.Time } // we want to keep these sorted by version so that it's in increasing version // order type verList []ver func (k verList) Len() int { return len(k) } func (k verList) Swap(i, j int) { k[i], k[j] = k[j], k[i] } func (k verList) Less(i, j int) bool { return k[i].version < k[j].version } // MemStorage is really just designed for dev and testing. It is very // inefficient in many scenarios type MemStorage struct { lock sync.Mutex tufMeta map[string]verList keys map[string]map[string]*key checksums map[string]map[string]ver changes []Change } // NewMemStorage instantiates a memStorage instance func NewMemStorage() *MemStorage { return &MemStorage{ tufMeta: make(map[string]verList), keys: make(map[string]map[string]*key), checksums: make(map[string]map[string]ver), } } // UpdateCurrent updates the meta data for a specific role func (st *MemStorage) UpdateCurrent(gun data.GUN, update MetaUpdate) error { id := entryKey(gun, update.Role) st.lock.Lock() defer st.lock.Unlock() if space, ok := st.tufMeta[id]; ok { for _, v := range space { if v.version >= update.Version { return ErrOldVersion{} } } } version := ver{version: update.Version, data: update.Data, createupdate: time.Now()} st.tufMeta[id] = append(st.tufMeta[id], version) checksumBytes := sha256.Sum256(update.Data) checksum := hex.EncodeToString(checksumBytes[:]) _, ok := st.checksums[gun.String()] if !ok { st.checksums[gun.String()] = make(map[string]ver) } st.checksums[gun.String()][checksum] = version if update.Role == data.CanonicalTimestampRole { st.writeChange(gun, update.Version, checksum) } return nil } // writeChange must only be called by a function already holding a lock on // the MemStorage. 
Behaviour is undefined otherwise func (st *MemStorage) writeChange(gun data.GUN, version int, checksum string) { c := Change{ ID: uint(len(st.changes) + 1), GUN: gun.String(), Version: version, SHA256: checksum, CreatedAt: time.Now(), Category: changeCategoryUpdate, } st.changes = append(st.changes, c) } // UpdateMany updates multiple TUF records func (st *MemStorage) UpdateMany(gun data.GUN, updates []MetaUpdate) error { st.lock.Lock() defer st.lock.Unlock() versioner := make(map[string]map[int]struct{}) constant := struct{}{} // ensure that we only update in one transaction for _, u := range updates { id := entryKey(gun, u.Role) // prevent duplicate versions of the same role if _, ok := versioner[u.Role.String()][u.Version]; ok { return ErrOldVersion{} } if _, ok := versioner[u.Role.String()]; !ok { versioner[u.Role.String()] = make(map[int]struct{}) } versioner[u.Role.String()][u.Version] = constant if space, ok := st.tufMeta[id]; ok { for _, v := range space { if v.version >= u.Version { return ErrOldVersion{} } } } } for _, u := range updates { id := entryKey(gun, u.Role) version := ver{version: u.Version, data: u.Data, createupdate: time.Now()} st.tufMeta[id] = append(st.tufMeta[id], version) sort.Sort(st.tufMeta[id]) // ensure that it's sorted checksumBytes := sha256.Sum256(u.Data) checksum := hex.EncodeToString(checksumBytes[:]) _, ok := st.checksums[gun.String()] if !ok { st.checksums[gun.String()] = make(map[string]ver) } st.checksums[gun.String()][checksum] = version if u.Role == data.CanonicalTimestampRole { st.writeChange(gun, u.Version, checksum) } } return nil } // GetCurrent returns the createupdate date metadata for a given role, under a GUN. func (st *MemStorage) GetCurrent(gun data.GUN, role data.RoleName) (*time.Time, []byte, error) { id := entryKey(gun, role) st.lock.Lock() defer st.lock.Unlock() space, ok := st.tufMeta[id] if !ok || len(space) == 0 { return nil, nil, ErrNotFound{} } return &(space[len(space)-1].createupdate), space[len(space)-1].data, nil } // GetChecksum returns the createupdate date and metadata for a given role, under a GUN. func (st *MemStorage) GetChecksum(gun data.GUN, role data.RoleName, checksum string) (*time.Time, []byte, error) { st.lock.Lock() defer st.lock.Unlock() space, ok := st.checksums[gun.String()][checksum] if !ok || len(space.data) == 0 { return nil, nil, ErrNotFound{} } return &(space.createupdate), space.data, nil } // GetVersion gets a specific TUF record by its version func (st *MemStorage) GetVersion(gun data.GUN, role data.RoleName, version int) (*time.Time, []byte, error) { st.lock.Lock() defer st.lock.Unlock() id := entryKey(gun, role) for _, ver := range st.tufMeta[id] { if ver.version == version { return &(ver.createupdate), ver.data, nil } } return nil, nil, ErrNotFound{} } // Delete deletes all the metadata for a given GUN func (st *MemStorage) Delete(gun data.GUN) error { st.lock.Lock() defer st.lock.Unlock() l := len(st.tufMeta) for k := range st.tufMeta { if strings.HasPrefix(k, gun.String()) { delete(st.tufMeta, k) } } if l == len(st.tufMeta) { // we didn't delete anything, don't write change. return nil } delete(st.checksums, gun.String()) c := Change{ ID: uint(len(st.changes) + 1), GUN: gun.String(), Category: changeCategoryDeletion, CreatedAt: time.Now(), } st.changes = append(st.changes, c) return nil } // GetChanges returns a []Change starting from but excluding the record // identified by changeID. In the context of the memory store, changeID // is simply an index into st.changes. 
The ID of a change is its // index+1, both to match the SQL implementations, and so that the first // change can be retrieved by providing ID 0. func (st *MemStorage) GetChanges(changeID string, records int, filterName string) ([]Change, error) { var ( id int64 err error ) if changeID == "" { id = 0 } else { id, err = strconv.ParseInt(changeID, 10, 32) if err != nil { return nil, err } } var ( start = int(id) toInspect []Change ) if err != nil { return nil, err } <|fim▁hole|> reversed := id < 0 if records < 0 { reversed = true records = -records } if len(st.changes) <= int(id) && !reversed { // no records to return as we're essentially trying to retrieve // changes that haven't happened yet. return nil, nil } // technically only -1 is a valid negative input, but we're going to be // broad in what we accept here to reduce the need to error and instead // act in a "do what I mean not what I say" fashion. Same logic for // requesting changeID < 0 but not asking for reversed, we're just going // to force it to be reversed. if start < 0 { // need to add one so we don't later slice off the last element // when calculating toInspect. start = len(st.changes) + 1 } // reduce to only look at changes we're interested in if reversed { if start > len(st.changes) { toInspect = st.changes } else { toInspect = st.changes[:start-1] } } else { toInspect = st.changes[start:] } // if we're not doing any filtering if filterName == "" { // if the pageSize is larger than the total records // that could be returned, return them all if records >= len(toInspect) { return toInspect, nil } // if we're going backwards, return the last pageSize records if reversed { return toInspect[len(toInspect)-records:], nil } // otherwise return pageSize records from front return toInspect[:records], nil } return getFilteredChanges(toInspect, filterName, records, reversed), nil } func getFilteredChanges(toInspect []Change, filterName string, records int, reversed bool) []Change { res := make([]Change, 0, records) if reversed { for i := len(toInspect) - 1; i >= 0; i-- { if toInspect[i].GUN == filterName { res = append(res, toInspect[i]) } if len(res) == records { break } } // results are currently newest to oldest, should be oldest to newest for i, j := 0, len(res)-1; i < j; i, j = i+1, j-1 { res[i], res[j] = res[j], res[i] } } else { for _, c := range toInspect { if c.GUN == filterName { res = append(res, c) } if len(res) == records { break } } } return res } func entryKey(gun data.GUN, role data.RoleName) string { return fmt.Sprintf("%s.%s", gun, role) }<|fim▁end|>
<|file_name|>wwan_status.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Display current network and ip address for newer Huwei modems. It is tested for Huawei E3276 (usb-id 12d1:1506) aka Telekom Speed Stick LTE III but may work on other devices, too. DEPENDENCIES: - netifaces - pyserial Configuration parameters: - baudrate : There should be no need to configure this, but feel free to experiment. Default is 115200. - cache_timeout : How often we refresh this module in seconds. Default is 5. - consider_3G_degraded : If set to True, only 4G-networks will be considered 'good'; 3G connections are shown as 'degraded', which is yellow by default. Mostly useful if you want to keep track of where there is a 4G connection. Default is False. - format_down : What to display when the modem is not plugged in Default is: 'WWAN: down' - format_error : What to display when modem can't be accessed. Default is 'WWAN: {error}' - format_no_service : What to display when the modem does not have a network connection. This allows to omit the then meaningless network generation. Therefore the default is 'WWAN: ({status}) {ip}' - format_up : What to display upon regular connection Default is 'WWAN: ({status}/{netgen}) {ip}' - interface : The default interface to obtain the IP address from. For wvdial this is most likely ppp0. For netctl it can be different. Default is: ppp0 - modem : The device to send commands to. Default is - modem_timeout : The timespan betwenn querying the modem and collecting the response. Default is 0.4 (which should be sufficient) @author Timo Kohorst [email protected] PGP: B383 6AE6 6B46 5C45 E594 96AB 89D2 209D DBF3 2BB5 """ import subprocess import netifaces as ni import os import stat import serial from time import time, sleep class Py3status: baudrate = 115200 cache_timeout = 5 consider_3G_degraded = False format_down = 'WWAN: down' format_error = 'WWAN: {error}' format_no_service = 'WWAN: {status} {ip}' format_up = 'WWAN: {status} ({netgen}) {ip}' interface = "ppp0" modem = "/dev/ttyUSB1" modem_timeout = 0.4 def wwan_status(self, i3s_output_list, i3s_config): query = "AT^SYSINFOEX" target_line = "^SYSINFOEX" # Set up the highest network generation to display as degraded if self.consider_3G_degraded: degraded_netgen = 3 else: degraded_netgen = 2 response = {} response['cached_until'] = time() + self.cache_timeout # Check if path exists and is a character device if os.path.exists(self.modem) and stat.S_ISCHR(os.stat( self.modem).st_mode): print("Found modem " + self.modem) try: ser = serial.Serial( port=self.modem, baudrate=self.baudrate, # Values below work for my modem. Not sure if # they neccessarily work for all modems parity=serial.PARITY_ODD, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS) if ser.isOpen(): ser.close() ser.open() ser.write((query + "\r").encode()) print("Issued query to " + self.modem) sleep(self.modem_timeout) n = ser.inWaiting() modem_response = ser.read(n) ser.close() except: # This will happen... # 1) in the short timespan between the creation of the device node # and udev changing the permissions. 
If this message persists, # double check if you are using the proper device file # 2) if/when you unplug the device PermissionError print("Permission error") response['full_text'] = self.format_error.format( error="no access to " + self.modem) response['color'] = i3s_config['color_bad'] return response # Dissect response for line in modem_response.decode("utf-8").split('\n'): print(line) if line.startswith(target_line): # Determine IP once the modem responds ip = self._get_ip(self.interface) if not ip: ip = "no ip" modem_answer = line.split(',') netgen = len(modem_answer[-2]) + 1 netmode = modem_answer[-1].rstrip()[1:-1] if netmode == "NO SERVICE": response['full_text'] = self.format_no_service.format( status=netmode, ip=ip) response['color'] = i3s_config['color_bad'] else: response['full_text'] = self.format_up.format( status=netmode, netgen=str(netgen) + "G", ip=ip) if netgen <= degraded_netgen: response['color'] = i3s_config['color_degraded'] else: response['color'] = i3s_config['color_good'] elif line.startswith("COMMAND NOT SUPPORT") or line.startswith(<|fim▁hole|> "ERROR"): response['color'] = i3s_config['color_bad'] response['full_text'] = self.format_error.format( error="unsupported modem") else: # Outputs can be multiline, so just try the next one pass else: print(self.modem + " not found") response['color'] = i3s_config['color_bad'] response['full_text'] = self.format_down return response def _get_ip(self, interface): """ Returns the interface's IPv4 address if device exists and has a valid ip address. Otherwise, returns an empty string """ if interface in ni.interfaces(): addresses = ni.ifaddresses(interface) if ni.AF_INET in addresses: return addresses[ni.AF_INET][0]['addr'] return "" if __name__ == "__main__": from time import sleep x = Py3status() config = { 'color_good': '#00FF00', 'color_bad': '#FF0000', 'color_degraded': '#FFFF00', } while True: print(x.wwan_status([], config)) sleep(1)<|fim▁end|>
<|file_name|>log.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- """ Log Config """ __author__ = 'Zagfai' __date__ = '2018-06' <|fim▁hole|> SANIC_LOGGING_CONFIG = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'default': { 'format': '%(levelname)s [%(asctime)s %(name)s:%(lineno)d] %(message)s', 'datefmt': '%y%m%d %H:%M:%S', }, "access": { "format": "VISIT [%(asctime)s %(host)s]: " + "%(request)s %(message)s %(status)d %(byte)d", 'datefmt': '%y%m%d %H:%M:%S', }, }, 'handlers': { 'console': { 'class': 'logging.StreamHandler', 'formatter': 'default', }, "access_console": { "class": "logging.StreamHandler", "formatter": "access", }, }, 'loggers': { '': { 'level': 'INFO', 'handlers': ['console'], 'propagate': True }, 'sanic.access': { 'level': 'INFO', 'handlers': ['access_console'], 'propagate': False }, } }<|fim▁end|>
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|># (C) British Crown Copyright 2020, Met Office # # This file is part of cartopy. # # cartopy is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the # Free Software Foundation, either version 3 of the License, or # (at your option) any later version.<|fim▁hole|># # cartopy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with cartopy. If not, see <https://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) def pytest_configure(config): # Register additional markers. config.addinivalue_line('markers', 'natural_earth: mark tests that use Natural Earth ' 'data, and the network, if not cached.') config.addinivalue_line('markers', 'network: mark tests that use the network.')<|fim▁end|>
<|file_name|>blame.py<|end_file_name|><|fim▁begin|>############################################################################## # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, [email protected], All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os import re import llnl.util.tty as tty from llnl.util.lang import pretty_date from llnl.util.filesystem import working_dir from llnl.util.tty.colify import colify_table import spack from spack.util.executable import which from spack.cmd import spack_is_git_repo description = "show contributors to packages" section = "developer" level = "long" def setup_parser(subparser): view_group = subparser.add_mutually_exclusive_group() view_group.add_argument( '-t', '--time', dest='view', action='store_const', const='time', default='time', help='sort by last modification date (default)') view_group.add_argument( '-p', '--percent', dest='view', action='store_const', const='percent', help='sort by percent of code') view_group.add_argument( '-g', '--git', dest='view', action='store_const', const='git', help='show git blame output instead of summary') subparser.add_argument( 'package_name', help='name of package to show contributions for, ' 'or path to a file in the spack repo') def blame(parser, args): # make sure this is a git repo if not spack_is_git_repo(): tty.die("This spack is not a git clone. 
Can't use 'spack blame'") git = which('git', required=True) # Get name of file to blame blame_file = None if os.path.isfile(args.package_name): path = os.path.realpath(args.package_name) if path.startswith(spack.prefix): blame_file = path if not blame_file: pkg = spack.repo.get(args.package_name) blame_file = pkg.module.__file__.rstrip('c') # .pyc -> .py # get git blame for the package with working_dir(spack.prefix): if args.view == 'git': git('blame', blame_file) return else: output = git('blame', '--line-porcelain', blame_file, output=str) lines = output.split('\n') # Histogram authors counts = {} emails = {} last_mod = {} total_lines = 0 for line in lines: match = re.match(r'^author (.*)', line) if match: author = match.group(1) match = re.match(r'^author-mail (.*)', line) if match: email = match.group(1) match = re.match(r'^author-time (.*)', line) if match: mod = int(match.group(1)) last_mod[author] = max(last_mod.setdefault(author, 0), mod) # ignore comments if re.match(r'^\t[^#]', line): counts[author] = counts.setdefault(author, 0) + 1 emails.setdefault(author, email) total_lines += 1 if args.view == 'time': rows = sorted( counts.items(), key=lambda t: last_mod[t[0]], reverse=True) else: # args.view == 'percent' rows = sorted(counts.items(), key=lambda t: t[1], reverse=True) # Print a nice table with authors and emails table = [['LAST_COMMIT', 'LINES', '%', 'AUTHOR', 'EMAIL']] for author, nlines in rows: table += [[ pretty_date(last_mod[author]), nlines, round(nlines / float(total_lines) * 100, 1), author, emails[author]]]<|fim▁hole|> table += [[''] * 5] table += [[pretty_date(max(last_mod.values())), total_lines, '100.0'] + [''] * 3] colify_table(table)<|fim▁end|>
<|file_name|>climate.py<|end_file_name|><|fim▁begin|>"""MySensors platform that offers a Climate (MySensors-HVAC) component.""" from homeassistant.components import mysensors from homeassistant.components.climate import ClimateDevice from homeassistant.components.climate.const import ( ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, DOMAIN, HVAC_MODE_AUTO, HVAC_MODE_COOL, HVAC_MODE_HEAT, SUPPORT_FAN_MODE, SUPPORT_TARGET_TEMPERATURE, SUPPORT_TARGET_TEMPERATURE_RANGE, HVAC_MODE_OFF) from homeassistant.const import ( ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT) DICT_HA_TO_MYS = { HVAC_MODE_AUTO: 'AutoChangeOver', HVAC_MODE_COOL: 'CoolOn', HVAC_MODE_HEAT: 'HeatOn', HVAC_MODE_OFF: 'Off', } DICT_MYS_TO_HA = { 'AutoChangeOver': HVAC_MODE_AUTO, 'CoolOn': HVAC_MODE_COOL, 'HeatOn': HVAC_MODE_HEAT, 'Off': HVAC_MODE_OFF, } FAN_LIST = ['Auto', 'Min', 'Normal', 'Max'] OPERATION_LIST = [HVAC_MODE_OFF, HVAC_MODE_AUTO, HVAC_MODE_COOL, HVAC_MODE_HEAT] async def async_setup_platform( hass, config, async_add_entities, discovery_info=None): """Set up the mysensors climate.""" mysensors.setup_mysensors_platform( hass, DOMAIN, discovery_info, MySensorsHVAC, async_add_entities=async_add_entities) class MySensorsHVAC(mysensors.device.MySensorsEntity, ClimateDevice): """Representation of a MySensors HVAC.""" @property def supported_features(self): """Return the list of supported features.""" features = 0 set_req = self.gateway.const.SetReq if set_req.V_HVAC_SPEED in self._values: features = features | SUPPORT_FAN_MODE if (set_req.V_HVAC_SETPOINT_COOL in self._values and set_req.V_HVAC_SETPOINT_HEAT in self._values): features = ( features | SUPPORT_TARGET_TEMPERATURE_RANGE) else: features = features | SUPPORT_TARGET_TEMPERATURE return features @property def assumed_state(self): """Return True if unable to access real state of entity.""" return self.gateway.optimistic @property def temperature_unit(self): """Return the unit of measurement.""" return TEMP_CELSIUS if self.gateway.metric else TEMP_FAHRENHEIT @property def current_temperature(self): """Return the current temperature.""" value = self._values.get(self.gateway.const.SetReq.V_TEMP) if value is not None: value = float(value) return value @property def target_temperature(self): """Return the temperature we try to reach.""" set_req = self.gateway.const.SetReq if set_req.V_HVAC_SETPOINT_COOL in self._values and \ set_req.V_HVAC_SETPOINT_HEAT in self._values: return None temp = self._values.get(set_req.V_HVAC_SETPOINT_COOL) if temp is None: temp = self._values.get(set_req.V_HVAC_SETPOINT_HEAT) return float(temp) if temp is not None else None @property def target_temperature_high(self): """Return the highbound target temperature we try to reach.""" set_req = self.gateway.const.SetReq if set_req.V_HVAC_SETPOINT_HEAT in self._values: temp = self._values.get(set_req.V_HVAC_SETPOINT_COOL) return float(temp) if temp is not None else None @property def target_temperature_low(self): """Return the lowbound target temperature we try to reach.""" set_req = self.gateway.const.SetReq if set_req.V_HVAC_SETPOINT_COOL in self._values: temp = self._values.get(set_req.V_HVAC_SETPOINT_HEAT) return float(temp) if temp is not None else None @property def hvac_mode(self): """Return current operation ie. 
heat, cool, idle.""" return self._values.get(self.value_type) @property def hvac_modes(self): """List of available operation modes.""" return OPERATION_LIST @property def fan_mode(self): """Return the fan setting.""" return self._values.get(self.gateway.const.SetReq.V_HVAC_SPEED) @property def fan_modes(self): """List of available fan modes.""" return FAN_LIST async def async_set_temperature(self, **kwargs): """Set new target temperature.""" set_req = self.gateway.const.SetReq temp = kwargs.get(ATTR_TEMPERATURE) low = kwargs.get(ATTR_TARGET_TEMP_LOW) high = kwargs.get(ATTR_TARGET_TEMP_HIGH) heat = self._values.get(set_req.V_HVAC_SETPOINT_HEAT) cool = self._values.get(set_req.V_HVAC_SETPOINT_COOL) updates = [] if temp is not None: if heat is not None: # Set HEAT Target temperature value_type = set_req.V_HVAC_SETPOINT_HEAT elif cool is not None: # Set COOL Target temperature value_type = set_req.V_HVAC_SETPOINT_COOL if heat is not None or cool is not None: updates = [(value_type, temp)] elif all(val is not None for val in (low, high, heat, cool)): updates = [ (set_req.V_HVAC_SETPOINT_HEAT, low), (set_req.V_HVAC_SETPOINT_COOL, high)] for value_type, value in updates: self.gateway.set_child_value( self.node_id, self.child_id, value_type, value) if self.gateway.optimistic: # Optimistically assume that device has changed state self._values[value_type] = value self.async_schedule_update_ha_state() async def async_set_fan_mode(self, fan_mode): """Set new target fan mode.""" set_req = self.gateway.const.SetReq self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_HVAC_SPEED, fan_mode) if self.gateway.optimistic: # Optimistically assume that device has changed state self._values[set_req.V_HVAC_SPEED] = fan_mode self.async_schedule_update_ha_state() async def async_set_hvac_mode(self, hvac_mode): """Set new target hvac mode.""" self.gateway.set_child_value( self.node_id, self.child_id, self.value_type, DICT_HA_TO_MYS[hvac_mode]) if self.gateway.optimistic: # Optimistically assume that device has changed state self._values[self.value_type] = hvac_mode self.async_schedule_update_ha_state() async def async_update(self):<|fim▁hole|>
<|file_name|>test_0050_general.js<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ /* General nsIUpdateCheckListener onload and onerror error code and statusText Tests */ // Errors tested: // 200, 403, 404, 500, 2152398849, 2152398862, 2152398864, 2152398867, // 2152398868, 2152398878, 2152398890, 2152398919, 2152398920, 2153390069, // 2152398918, 2152398861 var gNextRunFunc; var gExpectedStatusCode; var gExpectedStatusText; function run_test() { do_test_pending(); do_register_cleanup(end_test); removeUpdateDirsAndFiles(); setUpdateURLOverride(); standardInit(); // The mock XMLHttpRequest is MUCH faster overrideXHR(callHandleEvent); do_execute_soon(run_test_pt1); } function end_test() { cleanUp(); } // Callback function used by the custom XMLHttpRequest implementation to // call the nsIDOMEventListener's handleEvent method for onload. function callHandleEvent() { gXHR.status = gExpectedStatusCode; var e = { target: gXHR }; gXHR.onload(e); } // Helper functions for testing nsIUpdateCheckListener statusText function run_test_helper(aNextRunFunc, aExpectedStatusCode, aMsg) { gStatusCode = null; gStatusText = null; gCheckFunc = check_test_helper; gNextRunFunc = aNextRunFunc; gExpectedStatusCode = aExpectedStatusCode; logTestInfo(aMsg, Components.stack.caller); gUpdateChecker.checkForUpdates(updateCheckListener, true); } function check_test_helper() { do_check_eq(gStatusCode, gExpectedStatusCode); var expectedStatusText = getStatusText(gExpectedStatusCode); do_check_eq(gStatusText, expectedStatusText); gNextRunFunc(); } /** * The following tests use a custom XMLHttpRequest to return the status codes */ // default onerror error message (error code 399 is not defined) function run_test_pt1() { gStatusCode = null; gStatusText = null; gCheckFunc = check_test_pt1; gExpectedStatusCode = 399; logTestInfo("testing default onerror error message"); gUpdateChecker.checkForUpdates(updateCheckListener, true); } function check_test_pt1() { do_check_eq(gStatusCode, gExpectedStatusCode); var expectedStatusText = getStatusText(404); do_check_eq(gStatusText, expectedStatusText); run_test_pt2(); } // file malformed - 200 function run_test_pt2() { run_test_helper(run_test_pt3, 200, "testing file malformed"); } // access denied - 403 function run_test_pt3() { run_test_helper(run_test_pt4, 403, "testing access denied"); } // file not found - 404 function run_test_pt4() { run_test_helper(run_test_pt5, 404, "testing file not found"); } // internal server error - 500 function run_test_pt5() { run_test_helper(run_test_pt6, 500, "testing internal server error"); } // failed (unknown reason) - NS_BINDING_FAILED (2152398849) function run_test_pt6() { run_test_helper(run_test_pt7, AUS_Cr.NS_BINDING_FAILED, "testing failed (unknown reason)"); } // connection timed out - NS_ERROR_NET_TIMEOUT (2152398862) function run_test_pt7() { run_test_helper(run_test_pt8, AUS_Cr.NS_ERROR_NET_TIMEOUT, "testing connection timed out"); } // network offline - NS_ERROR_OFFLINE (2152398864) function run_test_pt8() { run_test_helper(run_test_pt9, AUS_Cr.NS_ERROR_OFFLINE, "testing network offline"); } // port not allowed - NS_ERROR_PORT_ACCESS_NOT_ALLOWED (2152398867) function run_test_pt9() { run_test_helper(run_test_pt10, AUS_Cr.NS_ERROR_PORT_ACCESS_NOT_ALLOWED, "testing port not allowed"); } // no data was received - NS_ERROR_NET_RESET (2152398868) function 
run_test_pt10() { run_test_helper(run_test_pt11, AUS_Cr.NS_ERROR_NET_RESET, "testing no data was received"); } // update server not found - NS_ERROR_UNKNOWN_HOST (2152398878) function run_test_pt11() { run_test_helper(run_test_pt12, AUS_Cr.NS_ERROR_UNKNOWN_HOST, "testing update server not found"); } // proxy server not found - NS_ERROR_UNKNOWN_PROXY_HOST (2152398890) function run_test_pt12() { run_test_helper(run_test_pt13, AUS_Cr.NS_ERROR_UNKNOWN_PROXY_HOST, "testing proxy server not found"); } // data transfer interrupted - NS_ERROR_NET_INTERRUPT (2152398919) function run_test_pt13() { run_test_helper(run_test_pt14, AUS_Cr.NS_ERROR_NET_INTERRUPT, "testing data transfer interrupted"); } // proxy server connection refused - NS_ERROR_PROXY_CONNECTION_REFUSED (2152398920) function run_test_pt14() { run_test_helper(run_test_pt15, AUS_Cr.NS_ERROR_PROXY_CONNECTION_REFUSED, "testing proxy server connection refused"); } // server certificate expired - 2153390069 function run_test_pt15() { run_test_helper(run_test_pt16, 2153390069, "testing server certificate expired"); } // network is offline - NS_ERROR_DOCUMENT_NOT_CACHED (2152398918) function run_test_pt16() { run_test_helper(run_test_pt17, AUS_Cr.NS_ERROR_DOCUMENT_NOT_CACHED, "testing network is offline"); } <|fim▁hole|>function run_test_pt17() { run_test_helper(do_test_finished, AUS_Cr.NS_ERROR_CONNECTION_REFUSED, "testing connection refused"); }<|fim▁end|>
// connection refused - NS_ERROR_CONNECTION_REFUSED (2152398861)
<|file_name|>datasets.py<|end_file_name|><|fim▁begin|>""" Simple datasets to be used for unit tests. """ __authors__ = "Ian Goodfellow" __copyright__ = "Copyright 2010-2012, Universite de Montreal" __credits__ = ["Ian Goodfellow"] __license__ = "3-clause BSD" __maintainer__ = "LISA Lab" __email__ = "pylearn-dev@googlegroups" import numpy as np from theano.compat.six.moves import xrange from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix class ArangeDataset(DenseDesignMatrix): """ A dataset where example i is just the number i. Makes it easy to track which sets of examples are visited.<|fim▁hole|> ---------- num_examples : WRITEME To see the other parameters, look at the DenseDesignMatrix class documentation """ def __init__(self, num_examples, *args, **kwargs): X = np.zeros((num_examples, 1)) X[:, 0] = np.arange(num_examples) super(ArangeDataset, self).__init__(X, *args, **kwargs) def random_dense_design_matrix(rng, num_examples, dim, num_classes): """ Creates a random dense design matrix that has class labels. Parameters ---------- rng : numpy.random.RandomState The random number generator used to generate the dataset. num_examples : int The number of examples to create. dim : int The number of features in each example. num_classes : int The number of classes to assign the examples to. 0 indicates that no class labels will be generated. """ X = rng.randn(num_examples, dim) if num_classes: Y = rng.randint(0, num_classes, (num_examples, 1)) y_labels = num_classes else: Y = None y_labels = None return DenseDesignMatrix(X=X, y=Y, y_labels=y_labels) def random_one_hot_dense_design_matrix(rng, num_examples, dim, num_classes): X = rng.randn(num_examples, dim) idx = rng.randint(0, num_classes, (num_examples, )) Y = np.zeros((num_examples, num_classes)) for i in xrange(num_examples): Y[i, idx[i]] = 1 return DenseDesignMatrix(X=X, y=Y) def random_one_hot_topological_dense_design_matrix(rng, num_examples, shape, channels, axes, num_classes): dims = {'b': num_examples, 'c': channels} for i, dim in enumerate(shape): dims[i] = dim shape = [dims[axis] for axis in axes] X = rng.randn(*shape) idx = rng.randint(0, num_classes, (num_examples,)) Y = np.zeros((num_examples, num_classes)) for i in xrange(num_examples): Y[i, idx[i]] = 1 return DenseDesignMatrix(topo_view=X, axes=axes, y=Y)<|fim▁end|>
Parameters
<|file_name|>all.js<|end_file_name|><|fim▁begin|>'use strict'; module.exports = { app: { title: 'Surf Around The Corner', description: 'Full-Stack JavaScript with MongoDB, Express, AngularJS, and Node.js', keywords: 'MongoDB, Express, AngularJS, Node.js' }, port: process.env.PORT || 3000, templateEngine: 'swig', sessionSecret: 'MEAN', sessionCollection: 'sessions', assets: { lib: { css: [ 'public/lib/components-font-awesome/css/font-awesome.css', 'public/lib/angular-ui-select/dist/select.css', 'http://fonts.googleapis.com/css?family=Bree+Serif', 'http://fonts.googleapis.com/css?family=Open+Sans',<|fim▁hole|> 'http://fonts.googleapis.com/css?family=Nunito' //'http://netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap-glyphicons.css' ], js: [ 'public/lib/angular/angular.js', 'public/lib/angular-resource/angular-resource.js', 'public/lib/angular-cookies/angular-cookies.js', 'public/lib/angular-animate/angular-animate.js', 'public/lib/angular-touch/angular-touch.js', 'public/lib/angular-sanitize/angular-sanitize.js', 'public/lib/angular-ui-router/release/angular-ui-router.js', 'public/lib/angular-ui-utils/ui-utils.js', 'public/lib/jquery/dist/jquery.js', 'public/lib/angular-bootstrap/ui-bootstrap-tpls.js', 'public/lib/angular-ui-select/dist/select.js', 'public/lib/ng-lodash/build/ng-lodash.js', 'public/lib/ng-backstretch/dist/ng-backstretch.js', 'public/lib/ngFitText/src/ng-FitText.js' ] }, css: [ 'public/less/*.css', 'public/modules/**/css/*.css' ], js: [ 'public/config.js', 'public/application.js', 'public/modules/*/*.js', 'public/modules/*/*[!tests]*/*.js' ], tests: [ 'public/lib/angular-mocks/angular-mocks.js', 'public/modules/*/tests/*.js' ] } };<|fim▁end|>
'http://fonts.googleapis.com/css?family=Playfair+Display', 'http://fonts.googleapis.com/css?family=Dancing+Script',
<|file_name|>datadirsetup.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The rkt Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package testutils import ( "crypto/sha1" "fmt" "io" "os" "path/filepath" "github.com/coreos/rkt/pkg/group" "github.com/hashicorp/errwrap" ) const casDbPerm = os.FileMode(0660) var ( // dirs relative to data directory dirs = map[string]os.FileMode{ ".": os.FileMode(0750 | os.ModeSetgid), "tmp": os.FileMode(0750 | os.ModeSetgid), // Cas directories. // Please keep in sync with dist/init/systemd/tmpfiles.d/rkt.conf // Make sure 'rkt' group can read/write some of the 'cas' // directories so that users in the group can fetch images "cas": os.FileMode(0770 | os.ModeSetgid), "cas/db": os.FileMode(0770 | os.ModeSetgid), "cas/imagelocks": os.FileMode(0770 | os.ModeSetgid), "cas/imageManifest": os.FileMode(0770 | os.ModeSetgid), "cas/blob": os.FileMode(0770 | os.ModeSetgid), "cas/tmp": os.FileMode(0770 | os.ModeSetgid), "cas/tree": os.FileMode(0700 | os.ModeSetgid), "cas/treestorelocks": os.FileMode(0700 | os.ModeSetgid), "locks": os.FileMode(0750 | os.ModeSetgid), // Pods directories. "pods": os.FileMode(0750 | os.ModeSetgid), "pods/embryo": os.FileMode(0750 | os.ModeSetgid), "pods/prepare": os.FileMode(0750 | os.ModeSetgid), "pods/prepared": os.FileMode(0750 | os.ModeSetgid), "pods/run": os.FileMode(0750 | os.ModeSetgid), "pods/exited-garbage": os.FileMode(0750 | os.ModeSetgid), "pods/garbage": os.FileMode(0750 | os.ModeSetgid), } ) func createFileWithPermissions(path string, uid int, gid int, perm os.FileMode) error { _, err := os.OpenFile(path, os.O_CREATE|os.O_EXCL|os.O_RDWR, 0666) if err != nil { if !os.IsExist(err) { return err } // file exists } return setPermissions(path, uid, gid, perm) } func setPermissions(path string, uid int, gid int, perm os.FileMode) error { if err := os.Chown(path, uid, gid); err != nil { return errwrap.Wrap(fmt.Errorf("error setting %q directory group", path), err) } if err := os.Chmod(path, perm); err != nil { return errwrap.Wrap(fmt.Errorf("error setting %q directory permissions", path), err) } return nil } func createDirStructure(dataDir string, gid int) error { for dir, perm := range dirs { path := filepath.Join(dataDir, dir) if err := os.MkdirAll(path, perm); err != nil { return errwrap.Wrap(fmt.Errorf("error creating %q directory", path), err) } if err := setPermissions(path, 0, gid, perm); err != nil { return err } } return nil } <|fim▁hole|>func setCasDbFilesPermissions(casDbPath string, gid int, perm os.FileMode) error { casDbWalker := func(path string, info os.FileInfo, err error) error { if err != nil { return err } if info.Mode().IsRegular() { if err := setPermissions(path, 0, gid, perm); err != nil { return err } } return nil } if err := filepath.Walk(casDbPath, casDbWalker); err != nil { return err } return nil } func createDbFiles(casDbPath string, gid int, perm os.FileMode) error { // HACK: to avoid some import cycles we don't use store.DbFilename DbFilename := "ql.db" dbPath := filepath.Join(casDbPath, DbFilename) 
if err := createFileWithPermissions(dbPath, 0, gid, perm); err != nil { return errwrap.Wrap(fmt.Errorf("error creating %s", dbPath), err) } // ql database uses a Write-Ahead Logging (WAL) file whose name is // generated from the sha1 hash of the database name h := sha1.New() io.WriteString(h, DbFilename) walFilename := fmt.Sprintf(".%x", h.Sum(nil)) walFilePath := filepath.Join(casDbPath, walFilename) if err := createFileWithPermissions(walFilePath, 0, gid, perm); err != nil { return errwrap.Wrap(fmt.Errorf("error creating %s", walFilename), err) } return nil } func setupDataDir(dataDir string) error { gid, err := group.LookupGid("rkt") if err != nil { return err } if err := createDirStructure(dataDir, gid); err != nil { return err } casDbPath := filepath.Join(dataDir, "cas", "db") if err := setCasDbFilesPermissions(casDbPath, gid, casDbPerm); err != nil { return err } if err := createDbFiles(casDbPath, gid, casDbPerm); err != nil { return err } return nil }<|fim▁end|>
<|file_name|>conversion_generated.go<|end_file_name|><|fim▁begin|>/* Copyright 2015 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // DO NOT EDIT. THIS FILE IS AUTO-GENERATED BY $KUBEROOT/hack/update-generated-conversions.sh package v1 import ( reflect "reflect" api "k8s.io/kubernetes/pkg/api" resource "k8s.io/kubernetes/pkg/api/resource" conversion "k8s.io/kubernetes/pkg/conversion" ) func convert_api_AWSElasticBlockStoreVolumeSource_To_v1_AWSElasticBlockStoreVolumeSource(in *api.AWSElasticBlockStoreVolumeSource, out *AWSElasticBlockStoreVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.AWSElasticBlockStoreVolumeSource))(in) } out.VolumeID = in.VolumeID out.FSType = in.FSType out.Partition = in.Partition out.ReadOnly = in.ReadOnly return nil } func convert_api_Binding_To_v1_Binding(in *api.Binding, out *Binding, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Binding))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_api_ObjectReference_To_v1_ObjectReference(&in.Target, &out.Target, s); err != nil { return err } return nil } func convert_api_Capabilities_To_v1_Capabilities(in *api.Capabilities, out *Capabilities, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Capabilities))(in) } if in.Add != nil { out.Add = make([]Capability, len(in.Add)) for i := range in.Add { out.Add[i] = Capability(in.Add[i]) } } else { out.Add = nil } if in.Drop != nil { out.Drop = make([]Capability, len(in.Drop)) for i := range in.Drop { out.Drop[i] = Capability(in.Drop[i]) } } else { out.Drop = nil } return nil } func convert_api_ComponentCondition_To_v1_ComponentCondition(in *api.ComponentCondition, out *ComponentCondition, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ComponentCondition))(in) } out.Type = ComponentConditionType(in.Type) out.Status = ConditionStatus(in.Status) out.Message = in.Message out.Error = in.Error return nil } func convert_api_ComponentStatus_To_v1_ComponentStatus(in *api.ComponentStatus, out *ComponentStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ComponentStatus))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if in.Conditions != nil { out.Conditions = make([]ComponentCondition, len(in.Conditions)) for i := range in.Conditions { if err := 
convert_api_ComponentCondition_To_v1_ComponentCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil { return err } } } else { out.Conditions = nil } return nil } func convert_api_ComponentStatusList_To_v1_ComponentStatusList(in *api.ComponentStatusList, out *ComponentStatusList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ComponentStatusList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]ComponentStatus, len(in.Items)) for i := range in.Items { if err := convert_api_ComponentStatus_To_v1_ComponentStatus(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_Container_To_v1_Container(in *api.Container, out *Container, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Container))(in) } out.Name = in.Name out.Image = in.Image if in.Command != nil { out.Command = make([]string, len(in.Command)) for i := range in.Command { out.Command[i] = in.Command[i] } } else { out.Command = nil } if in.Args != nil { out.Args = make([]string, len(in.Args)) for i := range in.Args { out.Args[i] = in.Args[i] } } else { out.Args = nil } out.WorkingDir = in.WorkingDir if in.Ports != nil { out.Ports = make([]ContainerPort, len(in.Ports)) for i := range in.Ports { if err := convert_api_ContainerPort_To_v1_ContainerPort(&in.Ports[i], &out.Ports[i], s); err != nil { return err } } } else { out.Ports = nil } if in.Env != nil { out.Env = make([]EnvVar, len(in.Env)) for i := range in.Env { if err := convert_api_EnvVar_To_v1_EnvVar(&in.Env[i], &out.Env[i], s); err != nil { return err } } } else { out.Env = nil } if err := convert_api_ResourceRequirements_To_v1_ResourceRequirements(&in.Resources, &out.Resources, s); err != nil { return err } if in.VolumeMounts != nil { out.VolumeMounts = make([]VolumeMount, len(in.VolumeMounts)) for i := range in.VolumeMounts { if err := convert_api_VolumeMount_To_v1_VolumeMount(&in.VolumeMounts[i], &out.VolumeMounts[i], s); err != nil { return err } } } else { out.VolumeMounts = nil } if in.LivenessProbe != nil { out.LivenessProbe = new(Probe) if err := convert_api_Probe_To_v1_Probe(in.LivenessProbe, out.LivenessProbe, s); err != nil { return err } } else { out.LivenessProbe = nil } if in.ReadinessProbe != nil { out.ReadinessProbe = new(Probe) if err := convert_api_Probe_To_v1_Probe(in.ReadinessProbe, out.ReadinessProbe, s); err != nil { return err } } else { out.ReadinessProbe = nil } if in.Lifecycle != nil { out.Lifecycle = new(Lifecycle) if err := convert_api_Lifecycle_To_v1_Lifecycle(in.Lifecycle, out.Lifecycle, s); err != nil { return err } } else { out.Lifecycle = nil } out.TerminationMessagePath = in.TerminationMessagePath out.ImagePullPolicy = PullPolicy(in.ImagePullPolicy) if in.SecurityContext != nil { out.SecurityContext = new(SecurityContext) if err := convert_api_SecurityContext_To_v1_SecurityContext(in.SecurityContext, out.SecurityContext, s); err != nil { return err } } else { out.SecurityContext = nil } out.Stdin = in.Stdin out.TTY = in.TTY return nil } func convert_api_ContainerPort_To_v1_ContainerPort(in *api.ContainerPort, out *ContainerPort, s conversion.Scope) error { if defaulting, found := 
s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ContainerPort))(in) } out.Name = in.Name out.HostPort = in.HostPort out.ContainerPort = in.ContainerPort out.Protocol = Protocol(in.Protocol) out.HostIP = in.HostIP return nil } func convert_api_ContainerState_To_v1_ContainerState(in *api.ContainerState, out *ContainerState, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ContainerState))(in) } if in.Waiting != nil { out.Waiting = new(ContainerStateWaiting) if err := convert_api_ContainerStateWaiting_To_v1_ContainerStateWaiting(in.Waiting, out.Waiting, s); err != nil { return err } } else { out.Waiting = nil } if in.Running != nil { out.Running = new(ContainerStateRunning) if err := convert_api_ContainerStateRunning_To_v1_ContainerStateRunning(in.Running, out.Running, s); err != nil { return err } } else { out.Running = nil } if in.Terminated != nil { out.Terminated = new(ContainerStateTerminated) if err := convert_api_ContainerStateTerminated_To_v1_ContainerStateTerminated(in.Terminated, out.Terminated, s); err != nil { return err } } else { out.Terminated = nil } return nil } func convert_api_ContainerStateRunning_To_v1_ContainerStateRunning(in *api.ContainerStateRunning, out *ContainerStateRunning, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ContainerStateRunning))(in) } if err := s.Convert(&in.StartedAt, &out.StartedAt, 0); err != nil { return err } return nil } func convert_api_ContainerStateTerminated_To_v1_ContainerStateTerminated(in *api.ContainerStateTerminated, out *ContainerStateTerminated, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ContainerStateTerminated))(in) } out.ExitCode = in.ExitCode out.Signal = in.Signal out.Reason = in.Reason out.Message = in.Message if err := s.Convert(&in.StartedAt, &out.StartedAt, 0); err != nil { return err } if err := s.Convert(&in.FinishedAt, &out.FinishedAt, 0); err != nil { return err } out.ContainerID = in.ContainerID return nil } func convert_api_ContainerStateWaiting_To_v1_ContainerStateWaiting(in *api.ContainerStateWaiting, out *ContainerStateWaiting, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ContainerStateWaiting))(in) } out.Reason = in.Reason return nil } func convert_api_ContainerStatus_To_v1_ContainerStatus(in *api.ContainerStatus, out *ContainerStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ContainerStatus))(in) } out.Name = in.Name if err := convert_api_ContainerState_To_v1_ContainerState(&in.State, &out.State, s); err != nil { return err } if err := convert_api_ContainerState_To_v1_ContainerState(&in.LastTerminationState, &out.LastTerminationState, s); err != nil { return err } out.Ready = in.Ready out.RestartCount = in.RestartCount out.Image = in.Image out.ImageID = in.ImageID out.ContainerID = in.ContainerID return nil } func convert_api_Daemon_To_v1_Daemon(in *api.Daemon, out *Daemon, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Daemon))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, 
&out.ObjectMeta, s); err != nil { return err } if err := convert_api_DaemonSpec_To_v1_DaemonSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_api_DaemonStatus_To_v1_DaemonStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_api_DaemonList_To_v1_DaemonList(in *api.DaemonList, out *DaemonList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.DaemonList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]Daemon, len(in.Items)) for i := range in.Items { if err := convert_api_Daemon_To_v1_Daemon(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_DaemonSpec_To_v1_DaemonSpec(in *api.DaemonSpec, out *DaemonSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.DaemonSpec))(in) } if in.Selector != nil { out.Selector = make(map[string]string) for key, val := range in.Selector { out.Selector[key] = val } } else { out.Selector = nil } if in.Template != nil { out.Template = new(PodTemplateSpec) if err := convert_api_PodTemplateSpec_To_v1_PodTemplateSpec(in.Template, out.Template, s); err != nil { return err } } else { out.Template = nil } return nil } func convert_api_DaemonStatus_To_v1_DaemonStatus(in *api.DaemonStatus, out *DaemonStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.DaemonStatus))(in) } out.CurrentNumberScheduled = in.CurrentNumberScheduled out.NumberMisscheduled = in.NumberMisscheduled out.DesiredNumberScheduled = in.DesiredNumberScheduled return nil } func convert_api_DeleteOptions_To_v1_DeleteOptions(in *api.DeleteOptions, out *DeleteOptions, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.DeleteOptions))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if in.GracePeriodSeconds != nil { out.GracePeriodSeconds = new(int64) *out.GracePeriodSeconds = *in.GracePeriodSeconds } else { out.GracePeriodSeconds = nil } return nil } func convert_api_EmptyDirVolumeSource_To_v1_EmptyDirVolumeSource(in *api.EmptyDirVolumeSource, out *EmptyDirVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.EmptyDirVolumeSource))(in) } out.Medium = StorageMedium(in.Medium) return nil } func convert_api_EndpointAddress_To_v1_EndpointAddress(in *api.EndpointAddress, out *EndpointAddress, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.EndpointAddress))(in) } out.IP = in.IP if in.TargetRef != nil { out.TargetRef = new(ObjectReference) if err := convert_api_ObjectReference_To_v1_ObjectReference(in.TargetRef, out.TargetRef, s); err != nil { return err } } else { out.TargetRef = nil } return nil } func convert_api_EndpointPort_To_v1_EndpointPort(in *api.EndpointPort, out *EndpointPort, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.EndpointPort))(in) } out.Name = in.Name out.Port = 
in.Port out.Protocol = Protocol(in.Protocol) return nil } func convert_api_EndpointSubset_To_v1_EndpointSubset(in *api.EndpointSubset, out *EndpointSubset, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.EndpointSubset))(in) } if in.Addresses != nil { out.Addresses = make([]EndpointAddress, len(in.Addresses)) for i := range in.Addresses { if err := convert_api_EndpointAddress_To_v1_EndpointAddress(&in.Addresses[i], &out.Addresses[i], s); err != nil { return err } } } else { out.Addresses = nil } if in.Ports != nil { out.Ports = make([]EndpointPort, len(in.Ports)) for i := range in.Ports { if err := convert_api_EndpointPort_To_v1_EndpointPort(&in.Ports[i], &out.Ports[i], s); err != nil { return err } } } else { out.Ports = nil } return nil } func convert_api_Endpoints_To_v1_Endpoints(in *api.Endpoints, out *Endpoints, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Endpoints))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if in.Subsets != nil { out.Subsets = make([]EndpointSubset, len(in.Subsets)) for i := range in.Subsets { if err := convert_api_EndpointSubset_To_v1_EndpointSubset(&in.Subsets[i], &out.Subsets[i], s); err != nil { return err } } } else { out.Subsets = nil } return nil } func convert_api_EndpointsList_To_v1_EndpointsList(in *api.EndpointsList, out *EndpointsList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.EndpointsList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]Endpoints, len(in.Items)) for i := range in.Items { if err := convert_api_Endpoints_To_v1_Endpoints(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_EnvVar_To_v1_EnvVar(in *api.EnvVar, out *EnvVar, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.EnvVar))(in) } out.Name = in.Name out.Value = in.Value if in.ValueFrom != nil { out.ValueFrom = new(EnvVarSource) if err := convert_api_EnvVarSource_To_v1_EnvVarSource(in.ValueFrom, out.ValueFrom, s); err != nil { return err } } else { out.ValueFrom = nil } return nil } func convert_api_EnvVarSource_To_v1_EnvVarSource(in *api.EnvVarSource, out *EnvVarSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.EnvVarSource))(in) } if in.FieldRef != nil { out.FieldRef = new(ObjectFieldSelector) if err := convert_api_ObjectFieldSelector_To_v1_ObjectFieldSelector(in.FieldRef, out.FieldRef, s); err != nil { return err } } else { out.FieldRef = nil } return nil } func convert_api_Event_To_v1_Event(in *api.Event, out *Event, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Event))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, 
s); err != nil { return err } if err := convert_api_ObjectReference_To_v1_ObjectReference(&in.InvolvedObject, &out.InvolvedObject, s); err != nil { return err } out.Reason = in.Reason out.Message = in.Message if err := convert_api_EventSource_To_v1_EventSource(&in.Source, &out.Source, s); err != nil { return err } if err := s.Convert(&in.FirstTimestamp, &out.FirstTimestamp, 0); err != nil { return err } if err := s.Convert(&in.LastTimestamp, &out.LastTimestamp, 0); err != nil { return err } out.Count = in.Count return nil } func convert_api_EventList_To_v1_EventList(in *api.EventList, out *EventList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.EventList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]Event, len(in.Items)) for i := range in.Items { if err := convert_api_Event_To_v1_Event(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_EventSource_To_v1_EventSource(in *api.EventSource, out *EventSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.EventSource))(in) } out.Component = in.Component out.Host = in.Host return nil } func convert_api_ExecAction_To_v1_ExecAction(in *api.ExecAction, out *ExecAction, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ExecAction))(in) } if in.Command != nil { out.Command = make([]string, len(in.Command)) for i := range in.Command { out.Command[i] = in.Command[i] } } else { out.Command = nil } return nil } func convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource(in *api.GCEPersistentDiskVolumeSource, out *GCEPersistentDiskVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.GCEPersistentDiskVolumeSource))(in) } out.PDName = in.PDName out.FSType = in.FSType out.Partition = in.Partition out.ReadOnly = in.ReadOnly return nil } func convert_api_GitRepoVolumeSource_To_v1_GitRepoVolumeSource(in *api.GitRepoVolumeSource, out *GitRepoVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.GitRepoVolumeSource))(in) } out.Repository = in.Repository out.Revision = in.Revision return nil } func convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource(in *api.GlusterfsVolumeSource, out *GlusterfsVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.GlusterfsVolumeSource))(in) } out.EndpointsName = in.EndpointsName out.Path = in.Path out.ReadOnly = in.ReadOnly return nil } func convert_api_HTTPGetAction_To_v1_HTTPGetAction(in *api.HTTPGetAction, out *HTTPGetAction, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.HTTPGetAction))(in) } out.Path = in.Path if err := s.Convert(&in.Port, &out.Port, 0); err != nil { return err } out.Host = in.Host out.Scheme = URIScheme(in.Scheme) return nil } func convert_api_Handler_To_v1_Handler(in *api.Handler, out *Handler, s conversion.Scope) error { if defaulting, 
found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Handler))(in) } if in.Exec != nil { out.Exec = new(ExecAction) if err := convert_api_ExecAction_To_v1_ExecAction(in.Exec, out.Exec, s); err != nil { return err } } else { out.Exec = nil } if in.HTTPGet != nil { out.HTTPGet = new(HTTPGetAction) if err := convert_api_HTTPGetAction_To_v1_HTTPGetAction(in.HTTPGet, out.HTTPGet, s); err != nil { return err } } else { out.HTTPGet = nil } if in.TCPSocket != nil { out.TCPSocket = new(TCPSocketAction) if err := convert_api_TCPSocketAction_To_v1_TCPSocketAction(in.TCPSocket, out.TCPSocket, s); err != nil { return err } } else { out.TCPSocket = nil } return nil } func convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource(in *api.HostPathVolumeSource, out *HostPathVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.HostPathVolumeSource))(in) } out.Path = in.Path return nil } func convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource(in *api.ISCSIVolumeSource, out *ISCSIVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ISCSIVolumeSource))(in) } out.TargetPortal = in.TargetPortal out.IQN = in.IQN out.Lun = in.Lun out.FSType = in.FSType out.ReadOnly = in.ReadOnly return nil } func convert_api_Lifecycle_To_v1_Lifecycle(in *api.Lifecycle, out *Lifecycle, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Lifecycle))(in) } if in.PostStart != nil { out.PostStart = new(Handler) if err := convert_api_Handler_To_v1_Handler(in.PostStart, out.PostStart, s); err != nil { return err } } else { out.PostStart = nil } if in.PreStop != nil { out.PreStop = new(Handler) if err := convert_api_Handler_To_v1_Handler(in.PreStop, out.PreStop, s); err != nil { return err } } else { out.PreStop = nil } return nil } func convert_api_LimitRange_To_v1_LimitRange(in *api.LimitRange, out *LimitRange, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.LimitRange))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_api_LimitRangeSpec_To_v1_LimitRangeSpec(&in.Spec, &out.Spec, s); err != nil { return err } return nil } func convert_api_LimitRangeItem_To_v1_LimitRangeItem(in *api.LimitRangeItem, out *LimitRangeItem, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.LimitRangeItem))(in) } out.Type = LimitType(in.Type) if in.Max != nil { out.Max = make(ResourceList) for key, val := range in.Max { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Max[ResourceName(key)] = newVal } } else { out.Max = nil } if in.Min != nil { out.Min = make(ResourceList) for key, val := range in.Min { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Min[ResourceName(key)] = newVal } } else { out.Min = nil } if in.Default != nil { out.Default = make(ResourceList) for key, val := range in.Default { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Default[ResourceName(key)] = newVal } } else { 
out.Default = nil } return nil } func convert_api_LimitRangeList_To_v1_LimitRangeList(in *api.LimitRangeList, out *LimitRangeList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.LimitRangeList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]LimitRange, len(in.Items)) for i := range in.Items { if err := convert_api_LimitRange_To_v1_LimitRange(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_LimitRangeSpec_To_v1_LimitRangeSpec(in *api.LimitRangeSpec, out *LimitRangeSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.LimitRangeSpec))(in) } if in.Limits != nil { out.Limits = make([]LimitRangeItem, len(in.Limits)) for i := range in.Limits { if err := convert_api_LimitRangeItem_To_v1_LimitRangeItem(&in.Limits[i], &out.Limits[i], s); err != nil { return err } } } else { out.Limits = nil } return nil } func convert_api_List_To_v1_List(in *api.List, out *List, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.List))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if err := s.Convert(&in.Items, &out.Items, 0); err != nil { return err } return nil } func convert_api_ListMeta_To_v1_ListMeta(in *api.ListMeta, out *ListMeta, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ListMeta))(in) } out.SelfLink = in.SelfLink out.ResourceVersion = in.ResourceVersion return nil } func convert_api_ListOptions_To_v1_ListOptions(in *api.ListOptions, out *ListOptions, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ListOptions))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := s.Convert(&in.LabelSelector, &out.LabelSelector, 0); err != nil { return err } if err := s.Convert(&in.FieldSelector, &out.FieldSelector, 0); err != nil { return err } out.Watch = in.Watch out.ResourceVersion = in.ResourceVersion return nil } func convert_api_LoadBalancerIngress_To_v1_LoadBalancerIngress(in *api.LoadBalancerIngress, out *LoadBalancerIngress, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.LoadBalancerIngress))(in) } out.IP = in.IP out.Hostname = in.Hostname return nil } func convert_api_LoadBalancerStatus_To_v1_LoadBalancerStatus(in *api.LoadBalancerStatus, out *LoadBalancerStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.LoadBalancerStatus))(in) } if in.Ingress != nil { out.Ingress = make([]LoadBalancerIngress, len(in.Ingress)) for i := range in.Ingress { if err := convert_api_LoadBalancerIngress_To_v1_LoadBalancerIngress(&in.Ingress[i], &out.Ingress[i], s); err != nil { return err } } } else { out.Ingress = nil } return nil } func 
convert_api_LocalObjectReference_To_v1_LocalObjectReference(in *api.LocalObjectReference, out *LocalObjectReference, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.LocalObjectReference))(in) } out.Name = in.Name return nil } func convert_api_NFSVolumeSource_To_v1_NFSVolumeSource(in *api.NFSVolumeSource, out *NFSVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.NFSVolumeSource))(in) } out.Server = in.Server out.Path = in.Path out.ReadOnly = in.ReadOnly return nil } func convert_api_Namespace_To_v1_Namespace(in *api.Namespace, out *Namespace, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Namespace))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_api_NamespaceSpec_To_v1_NamespaceSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_api_NamespaceStatus_To_v1_NamespaceStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_api_NamespaceList_To_v1_NamespaceList(in *api.NamespaceList, out *NamespaceList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.NamespaceList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]Namespace, len(in.Items)) for i := range in.Items { if err := convert_api_Namespace_To_v1_Namespace(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_NamespaceSpec_To_v1_NamespaceSpec(in *api.NamespaceSpec, out *NamespaceSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.NamespaceSpec))(in) } if in.Finalizers != nil { out.Finalizers = make([]FinalizerName, len(in.Finalizers)) for i := range in.Finalizers { out.Finalizers[i] = FinalizerName(in.Finalizers[i]) } } else { out.Finalizers = nil } return nil } func convert_api_NamespaceStatus_To_v1_NamespaceStatus(in *api.NamespaceStatus, out *NamespaceStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.NamespaceStatus))(in) } out.Phase = NamespacePhase(in.Phase) return nil } func convert_api_Node_To_v1_Node(in *api.Node, out *Node, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Node))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_api_NodeSpec_To_v1_NodeSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_api_NodeStatus_To_v1_NodeStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_api_NodeAddress_To_v1_NodeAddress(in *api.NodeAddress, out *NodeAddress, s conversion.Scope) error { if defaulting, found := 
s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.NodeAddress))(in) } out.Type = NodeAddressType(in.Type) out.Address = in.Address return nil } func convert_api_NodeCondition_To_v1_NodeCondition(in *api.NodeCondition, out *NodeCondition, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.NodeCondition))(in) } out.Type = NodeConditionType(in.Type) out.Status = ConditionStatus(in.Status) if err := s.Convert(&in.LastHeartbeatTime, &out.LastHeartbeatTime, 0); err != nil { return err } if err := s.Convert(&in.LastTransitionTime, &out.LastTransitionTime, 0); err != nil { return err } out.Reason = in.Reason out.Message = in.Message return nil } func convert_api_NodeList_To_v1_NodeList(in *api.NodeList, out *NodeList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.NodeList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]Node, len(in.Items)) for i := range in.Items { if err := convert_api_Node_To_v1_Node(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_NodeSpec_To_v1_NodeSpec(in *api.NodeSpec, out *NodeSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.NodeSpec))(in) } out.PodCIDR = in.PodCIDR out.ExternalID = in.ExternalID out.ProviderID = in.ProviderID out.Unschedulable = in.Unschedulable return nil } func convert_api_NodeStatus_To_v1_NodeStatus(in *api.NodeStatus, out *NodeStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.NodeStatus))(in) } if in.Capacity != nil { out.Capacity = make(ResourceList) for key, val := range in.Capacity { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Capacity[ResourceName(key)] = newVal } } else { out.Capacity = nil } out.Phase = NodePhase(in.Phase) if in.Conditions != nil { out.Conditions = make([]NodeCondition, len(in.Conditions)) for i := range in.Conditions { if err := convert_api_NodeCondition_To_v1_NodeCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil { return err } } } else { out.Conditions = nil } if in.Addresses != nil { out.Addresses = make([]NodeAddress, len(in.Addresses)) for i := range in.Addresses { if err := convert_api_NodeAddress_To_v1_NodeAddress(&in.Addresses[i], &out.Addresses[i], s); err != nil { return err } } } else { out.Addresses = nil } if err := convert_api_NodeSystemInfo_To_v1_NodeSystemInfo(&in.NodeInfo, &out.NodeInfo, s); err != nil { return err } return nil } func convert_api_NodeSystemInfo_To_v1_NodeSystemInfo(in *api.NodeSystemInfo, out *NodeSystemInfo, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.NodeSystemInfo))(in) } out.MachineID = in.MachineID out.SystemUUID = in.SystemUUID out.BootID = in.BootID out.KernelVersion = in.KernelVersion out.OsImage = in.OsImage out.ContainerRuntimeVersion = in.ContainerRuntimeVersion out.KubeletVersion = in.KubeletVersion out.KubeProxyVersion = in.KubeProxyVersion return nil } func convert_api_ObjectFieldSelector_To_v1_ObjectFieldSelector(in 
*api.ObjectFieldSelector, out *ObjectFieldSelector, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ObjectFieldSelector))(in) } out.APIVersion = in.APIVersion out.FieldPath = in.FieldPath return nil } func convert_api_ObjectMeta_To_v1_ObjectMeta(in *api.ObjectMeta, out *ObjectMeta, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ObjectMeta))(in) } out.Name = in.Name out.GenerateName = in.GenerateName out.Namespace = in.Namespace out.SelfLink = in.SelfLink out.UID = in.UID out.ResourceVersion = in.ResourceVersion out.Generation = in.Generation if err := s.Convert(&in.CreationTimestamp, &out.CreationTimestamp, 0); err != nil { return err } if in.DeletionTimestamp != nil { if err := s.Convert(&in.DeletionTimestamp, &out.DeletionTimestamp, 0); err != nil { return err } } else { out.DeletionTimestamp = nil } if in.DeletionGracePeriodSeconds != nil { out.DeletionGracePeriodSeconds = new(int64) *out.DeletionGracePeriodSeconds = *in.DeletionGracePeriodSeconds } else { out.DeletionGracePeriodSeconds = nil } if in.Labels != nil { out.Labels = make(map[string]string) for key, val := range in.Labels { out.Labels[key] = val } } else { out.Labels = nil } if in.Annotations != nil { out.Annotations = make(map[string]string) for key, val := range in.Annotations { out.Annotations[key] = val } } else { out.Annotations = nil } return nil } func convert_api_ObjectReference_To_v1_ObjectReference(in *api.ObjectReference, out *ObjectReference, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ObjectReference))(in) } out.Kind = in.Kind out.Namespace = in.Namespace out.Name = in.Name out.UID = in.UID out.APIVersion = in.APIVersion out.ResourceVersion = in.ResourceVersion out.FieldPath = in.FieldPath return nil } func convert_api_PersistentVolume_To_v1_PersistentVolume(in *api.PersistentVolume, out *PersistentVolume, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PersistentVolume))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_api_PersistentVolumeSpec_To_v1_PersistentVolumeSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_api_PersistentVolumeStatus_To_v1_PersistentVolumeStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_api_PersistentVolumeClaim_To_v1_PersistentVolumeClaim(in *api.PersistentVolumeClaim, out *PersistentVolumeClaim, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PersistentVolumeClaim))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_api_PersistentVolumeClaimSpec_To_v1_PersistentVolumeClaimSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_api_PersistentVolumeClaimStatus_To_v1_PersistentVolumeClaimStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_api_PersistentVolumeClaimList_To_v1_PersistentVolumeClaimList(in 
*api.PersistentVolumeClaimList, out *PersistentVolumeClaimList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PersistentVolumeClaimList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]PersistentVolumeClaim, len(in.Items)) for i := range in.Items { if err := convert_api_PersistentVolumeClaim_To_v1_PersistentVolumeClaim(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_PersistentVolumeClaimSpec_To_v1_PersistentVolumeClaimSpec(in *api.PersistentVolumeClaimSpec, out *PersistentVolumeClaimSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PersistentVolumeClaimSpec))(in) } if in.AccessModes != nil { out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes)) for i := range in.AccessModes { out.AccessModes[i] = PersistentVolumeAccessMode(in.AccessModes[i]) } } else { out.AccessModes = nil } if err := convert_api_ResourceRequirements_To_v1_ResourceRequirements(&in.Resources, &out.Resources, s); err != nil { return err } out.VolumeName = in.VolumeName return nil } func convert_api_PersistentVolumeClaimStatus_To_v1_PersistentVolumeClaimStatus(in *api.PersistentVolumeClaimStatus, out *PersistentVolumeClaimStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PersistentVolumeClaimStatus))(in) } out.Phase = PersistentVolumeClaimPhase(in.Phase) if in.AccessModes != nil { out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes)) for i := range in.AccessModes { out.AccessModes[i] = PersistentVolumeAccessMode(in.AccessModes[i]) } } else { out.AccessModes = nil } if in.Capacity != nil { out.Capacity = make(ResourceList) for key, val := range in.Capacity { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Capacity[ResourceName(key)] = newVal } } else { out.Capacity = nil } return nil } func convert_api_PersistentVolumeClaimVolumeSource_To_v1_PersistentVolumeClaimVolumeSource(in *api.PersistentVolumeClaimVolumeSource, out *PersistentVolumeClaimVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PersistentVolumeClaimVolumeSource))(in)<|fim▁hole|>} func convert_api_PersistentVolumeList_To_v1_PersistentVolumeList(in *api.PersistentVolumeList, out *PersistentVolumeList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PersistentVolumeList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]PersistentVolume, len(in.Items)) for i := range in.Items { if err := convert_api_PersistentVolume_To_v1_PersistentVolume(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_PersistentVolumeSource_To_v1_PersistentVolumeSource(in *api.PersistentVolumeSource, out *PersistentVolumeSource, s conversion.Scope) error { 
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PersistentVolumeSource))(in) } if in.GCEPersistentDisk != nil { out.GCEPersistentDisk = new(GCEPersistentDiskVolumeSource) if err := convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource(in.GCEPersistentDisk, out.GCEPersistentDisk, s); err != nil { return err } } else { out.GCEPersistentDisk = nil } if in.AWSElasticBlockStore != nil { out.AWSElasticBlockStore = new(AWSElasticBlockStoreVolumeSource) if err := convert_api_AWSElasticBlockStoreVolumeSource_To_v1_AWSElasticBlockStoreVolumeSource(in.AWSElasticBlockStore, out.AWSElasticBlockStore, s); err != nil { return err } } else { out.AWSElasticBlockStore = nil } if in.HostPath != nil { out.HostPath = new(HostPathVolumeSource) if err := convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource(in.HostPath, out.HostPath, s); err != nil { return err } } else { out.HostPath = nil } if in.Glusterfs != nil { out.Glusterfs = new(GlusterfsVolumeSource) if err := convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource(in.Glusterfs, out.Glusterfs, s); err != nil { return err } } else { out.Glusterfs = nil } if in.NFS != nil { out.NFS = new(NFSVolumeSource) if err := convert_api_NFSVolumeSource_To_v1_NFSVolumeSource(in.NFS, out.NFS, s); err != nil { return err } } else { out.NFS = nil } if in.RBD != nil { out.RBD = new(RBDVolumeSource) if err := convert_api_RBDVolumeSource_To_v1_RBDVolumeSource(in.RBD, out.RBD, s); err != nil { return err } } else { out.RBD = nil } if in.ISCSI != nil { out.ISCSI = new(ISCSIVolumeSource) if err := convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource(in.ISCSI, out.ISCSI, s); err != nil { return err } } else { out.ISCSI = nil } return nil } func convert_api_PersistentVolumeSpec_To_v1_PersistentVolumeSpec(in *api.PersistentVolumeSpec, out *PersistentVolumeSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PersistentVolumeSpec))(in) } if in.Capacity != nil { out.Capacity = make(ResourceList) for key, val := range in.Capacity { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Capacity[ResourceName(key)] = newVal } } else { out.Capacity = nil } if err := convert_api_PersistentVolumeSource_To_v1_PersistentVolumeSource(&in.PersistentVolumeSource, &out.PersistentVolumeSource, s); err != nil { return err } if in.AccessModes != nil { out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes)) for i := range in.AccessModes { out.AccessModes[i] = PersistentVolumeAccessMode(in.AccessModes[i]) } } else { out.AccessModes = nil } if in.ClaimRef != nil { out.ClaimRef = new(ObjectReference) if err := convert_api_ObjectReference_To_v1_ObjectReference(in.ClaimRef, out.ClaimRef, s); err != nil { return err } } else { out.ClaimRef = nil } out.PersistentVolumeReclaimPolicy = PersistentVolumeReclaimPolicy(in.PersistentVolumeReclaimPolicy) return nil } func convert_api_PersistentVolumeStatus_To_v1_PersistentVolumeStatus(in *api.PersistentVolumeStatus, out *PersistentVolumeStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PersistentVolumeStatus))(in) } out.Phase = PersistentVolumePhase(in.Phase) out.Message = in.Message out.Reason = in.Reason return nil } func convert_api_Pod_To_v1_Pod(in *api.Pod, out *Pod, s conversion.Scope) error { if defaulting, found := 
s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Pod))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_api_PodSpec_To_v1_PodSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_api_PodStatus_To_v1_PodStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_api_PodAttachOptions_To_v1_PodAttachOptions(in *api.PodAttachOptions, out *PodAttachOptions, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PodAttachOptions))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } out.Stdin = in.Stdin out.Stdout = in.Stdout out.Stderr = in.Stderr out.TTY = in.TTY out.Container = in.Container return nil } func convert_api_PodCondition_To_v1_PodCondition(in *api.PodCondition, out *PodCondition, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PodCondition))(in) } out.Type = PodConditionType(in.Type) out.Status = ConditionStatus(in.Status) return nil } func convert_api_PodExecOptions_To_v1_PodExecOptions(in *api.PodExecOptions, out *PodExecOptions, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PodExecOptions))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } out.Stdin = in.Stdin out.Stdout = in.Stdout out.Stderr = in.Stderr out.TTY = in.TTY out.Container = in.Container if in.Command != nil { out.Command = make([]string, len(in.Command)) for i := range in.Command { out.Command[i] = in.Command[i] } } else { out.Command = nil } return nil } func convert_api_PodList_To_v1_PodList(in *api.PodList, out *PodList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PodList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]Pod, len(in.Items)) for i := range in.Items { if err := convert_api_Pod_To_v1_Pod(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_PodLogOptions_To_v1_PodLogOptions(in *api.PodLogOptions, out *PodLogOptions, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PodLogOptions))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } out.Container = in.Container out.Follow = in.Follow out.Previous = in.Previous return nil } func convert_api_PodProxyOptions_To_v1_PodProxyOptions(in *api.PodProxyOptions, out *PodProxyOptions, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PodProxyOptions))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } out.Path = in.Path return nil } func convert_api_PodStatus_To_v1_PodStatus(in *api.PodStatus, out *PodStatus, s conversion.Scope) error { if 
defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PodStatus))(in) } out.Phase = PodPhase(in.Phase) if in.Conditions != nil { out.Conditions = make([]PodCondition, len(in.Conditions)) for i := range in.Conditions { if err := convert_api_PodCondition_To_v1_PodCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil { return err } } } else { out.Conditions = nil } out.Message = in.Message out.Reason = in.Reason out.HostIP = in.HostIP out.PodIP = in.PodIP if in.StartTime != nil { if err := s.Convert(&in.StartTime, &out.StartTime, 0); err != nil { return err } } else { out.StartTime = nil } if in.ContainerStatuses != nil { out.ContainerStatuses = make([]ContainerStatus, len(in.ContainerStatuses)) for i := range in.ContainerStatuses { if err := convert_api_ContainerStatus_To_v1_ContainerStatus(&in.ContainerStatuses[i], &out.ContainerStatuses[i], s); err != nil { return err } } } else { out.ContainerStatuses = nil } return nil } func convert_api_PodStatusResult_To_v1_PodStatusResult(in *api.PodStatusResult, out *PodStatusResult, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PodStatusResult))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_api_PodStatus_To_v1_PodStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_api_PodTemplate_To_v1_PodTemplate(in *api.PodTemplate, out *PodTemplate, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PodTemplate))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_api_PodTemplateSpec_To_v1_PodTemplateSpec(&in.Template, &out.Template, s); err != nil { return err } return nil } func convert_api_PodTemplateList_To_v1_PodTemplateList(in *api.PodTemplateList, out *PodTemplateList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PodTemplateList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]PodTemplate, len(in.Items)) for i := range in.Items { if err := convert_api_PodTemplate_To_v1_PodTemplate(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_PodTemplateSpec_To_v1_PodTemplateSpec(in *api.PodTemplateSpec, out *PodTemplateSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.PodTemplateSpec))(in) } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_api_PodSpec_To_v1_PodSpec(&in.Spec, &out.Spec, s); err != nil { return err } return nil } func convert_api_Probe_To_v1_Probe(in *api.Probe, out *Probe, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Probe))(in) } if err := 
convert_api_Handler_To_v1_Handler(&in.Handler, &out.Handler, s); err != nil { return err } out.InitialDelaySeconds = in.InitialDelaySeconds out.TimeoutSeconds = in.TimeoutSeconds return nil } func convert_api_RBDVolumeSource_To_v1_RBDVolumeSource(in *api.RBDVolumeSource, out *RBDVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.RBDVolumeSource))(in) } if in.CephMonitors != nil { out.CephMonitors = make([]string, len(in.CephMonitors)) for i := range in.CephMonitors { out.CephMonitors[i] = in.CephMonitors[i] } } else { out.CephMonitors = nil } out.RBDImage = in.RBDImage out.FSType = in.FSType out.RBDPool = in.RBDPool out.RadosUser = in.RadosUser out.Keyring = in.Keyring if in.SecretRef != nil { out.SecretRef = new(LocalObjectReference) if err := convert_api_LocalObjectReference_To_v1_LocalObjectReference(in.SecretRef, out.SecretRef, s); err != nil { return err } } else { out.SecretRef = nil } out.ReadOnly = in.ReadOnly return nil } func convert_api_RangeAllocation_To_v1_RangeAllocation(in *api.RangeAllocation, out *RangeAllocation, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.RangeAllocation))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } out.Range = in.Range if err := s.Convert(&in.Data, &out.Data, 0); err != nil { return err } return nil } func convert_api_ReplicationController_To_v1_ReplicationController(in *api.ReplicationController, out *ReplicationController, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ReplicationController))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_api_ReplicationControllerSpec_To_v1_ReplicationControllerSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_api_ReplicationControllerStatus_To_v1_ReplicationControllerStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_api_ReplicationControllerList_To_v1_ReplicationControllerList(in *api.ReplicationControllerList, out *ReplicationControllerList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ReplicationControllerList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]ReplicationController, len(in.Items)) for i := range in.Items { if err := convert_api_ReplicationController_To_v1_ReplicationController(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_ReplicationControllerStatus_To_v1_ReplicationControllerStatus(in *api.ReplicationControllerStatus, out *ReplicationControllerStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ReplicationControllerStatus))(in) } out.Replicas = in.Replicas out.ObservedGeneration = 
in.ObservedGeneration return nil } func convert_api_ResourceQuota_To_v1_ResourceQuota(in *api.ResourceQuota, out *ResourceQuota, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ResourceQuota))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_api_ResourceQuotaSpec_To_v1_ResourceQuotaSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_api_ResourceQuotaStatus_To_v1_ResourceQuotaStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_api_ResourceQuotaList_To_v1_ResourceQuotaList(in *api.ResourceQuotaList, out *ResourceQuotaList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ResourceQuotaList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]ResourceQuota, len(in.Items)) for i := range in.Items { if err := convert_api_ResourceQuota_To_v1_ResourceQuota(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_ResourceQuotaSpec_To_v1_ResourceQuotaSpec(in *api.ResourceQuotaSpec, out *ResourceQuotaSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ResourceQuotaSpec))(in) } if in.Hard != nil { out.Hard = make(ResourceList) for key, val := range in.Hard { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Hard[ResourceName(key)] = newVal } } else { out.Hard = nil } return nil } func convert_api_ResourceQuotaStatus_To_v1_ResourceQuotaStatus(in *api.ResourceQuotaStatus, out *ResourceQuotaStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ResourceQuotaStatus))(in) } if in.Hard != nil { out.Hard = make(ResourceList) for key, val := range in.Hard { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Hard[ResourceName(key)] = newVal } } else { out.Hard = nil } if in.Used != nil { out.Used = make(ResourceList) for key, val := range in.Used { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Used[ResourceName(key)] = newVal } } else { out.Used = nil } return nil } func convert_api_ResourceRequirements_To_v1_ResourceRequirements(in *api.ResourceRequirements, out *ResourceRequirements, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ResourceRequirements))(in) } if in.Limits != nil { out.Limits = make(ResourceList) for key, val := range in.Limits { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Limits[ResourceName(key)] = newVal } } else { out.Limits = nil } if in.Requests != nil { out.Requests = make(ResourceList) for key, val := range in.Requests { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Requests[ResourceName(key)] = newVal } } else { out.Requests = nil } return 
nil } func convert_api_SELinuxOptions_To_v1_SELinuxOptions(in *api.SELinuxOptions, out *SELinuxOptions, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.SELinuxOptions))(in) } out.User = in.User out.Role = in.Role out.Type = in.Type out.Level = in.Level return nil } func convert_api_Secret_To_v1_Secret(in *api.Secret, out *Secret, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Secret))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if in.Data != nil { out.Data = make(map[string][]uint8) for key, val := range in.Data { newVal := []uint8{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Data[key] = newVal } } else { out.Data = nil } out.Type = SecretType(in.Type) return nil } func convert_api_SecretList_To_v1_SecretList(in *api.SecretList, out *SecretList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.SecretList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]Secret, len(in.Items)) for i := range in.Items { if err := convert_api_Secret_To_v1_Secret(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_SecretVolumeSource_To_v1_SecretVolumeSource(in *api.SecretVolumeSource, out *SecretVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.SecretVolumeSource))(in) } out.SecretName = in.SecretName return nil } func convert_api_SecurityContext_To_v1_SecurityContext(in *api.SecurityContext, out *SecurityContext, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.SecurityContext))(in) } if in.Capabilities != nil { out.Capabilities = new(Capabilities) if err := convert_api_Capabilities_To_v1_Capabilities(in.Capabilities, out.Capabilities, s); err != nil { return err } } else { out.Capabilities = nil } if in.Privileged != nil { out.Privileged = new(bool) *out.Privileged = *in.Privileged } else { out.Privileged = nil } if in.SELinuxOptions != nil { out.SELinuxOptions = new(SELinuxOptions) if err := convert_api_SELinuxOptions_To_v1_SELinuxOptions(in.SELinuxOptions, out.SELinuxOptions, s); err != nil { return err } } else { out.SELinuxOptions = nil } if in.RunAsUser != nil { out.RunAsUser = new(int64) *out.RunAsUser = *in.RunAsUser } else { out.RunAsUser = nil } out.RunAsNonRoot = in.RunAsNonRoot return nil } func convert_api_SerializedReference_To_v1_SerializedReference(in *api.SerializedReference, out *SerializedReference, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.SerializedReference))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectReference_To_v1_ObjectReference(&in.Reference, &out.Reference, s); err != nil { return err } return nil } func convert_api_Service_To_v1_Service(in 
*api.Service, out *Service, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Service))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_api_ServiceSpec_To_v1_ServiceSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_api_ServiceStatus_To_v1_ServiceStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_api_ServiceAccount_To_v1_ServiceAccount(in *api.ServiceAccount, out *ServiceAccount, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ServiceAccount))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if in.Secrets != nil { out.Secrets = make([]ObjectReference, len(in.Secrets)) for i := range in.Secrets { if err := convert_api_ObjectReference_To_v1_ObjectReference(&in.Secrets[i], &out.Secrets[i], s); err != nil { return err } } } else { out.Secrets = nil } if in.ImagePullSecrets != nil { out.ImagePullSecrets = make([]LocalObjectReference, len(in.ImagePullSecrets)) for i := range in.ImagePullSecrets { if err := convert_api_LocalObjectReference_To_v1_LocalObjectReference(&in.ImagePullSecrets[i], &out.ImagePullSecrets[i], s); err != nil { return err } } } else { out.ImagePullSecrets = nil } return nil } func convert_api_ServiceAccountList_To_v1_ServiceAccountList(in *api.ServiceAccountList, out *ServiceAccountList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ServiceAccountList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]ServiceAccount, len(in.Items)) for i := range in.Items { if err := convert_api_ServiceAccount_To_v1_ServiceAccount(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_ServiceList_To_v1_ServiceList(in *api.ServiceList, out *ServiceList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ServiceList))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]Service, len(in.Items)) for i := range in.Items { if err := convert_api_Service_To_v1_Service(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_api_ServicePort_To_v1_ServicePort(in *api.ServicePort, out *ServicePort, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ServicePort))(in) } out.Name = in.Name out.Protocol = Protocol(in.Protocol) out.Port = in.Port if err := s.Convert(&in.TargetPort, &out.TargetPort, 0); err != nil { return err } out.NodePort = in.NodePort return nil } func 
convert_api_ServiceSpec_To_v1_ServiceSpec(in *api.ServiceSpec, out *ServiceSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ServiceSpec))(in) } if in.Ports != nil { out.Ports = make([]ServicePort, len(in.Ports)) for i := range in.Ports { if err := convert_api_ServicePort_To_v1_ServicePort(&in.Ports[i], &out.Ports[i], s); err != nil { return err } } } else { out.Ports = nil } if in.Selector != nil { out.Selector = make(map[string]string) for key, val := range in.Selector { out.Selector[key] = val } } else { out.Selector = nil } out.ClusterIP = in.ClusterIP out.Type = ServiceType(in.Type) if in.ExternalIPs != nil { out.ExternalIPs = make([]string, len(in.ExternalIPs)) for i := range in.ExternalIPs { out.ExternalIPs[i] = in.ExternalIPs[i] } } else { out.ExternalIPs = nil } out.SessionAffinity = ServiceAffinity(in.SessionAffinity) return nil } func convert_api_ServiceStatus_To_v1_ServiceStatus(in *api.ServiceStatus, out *ServiceStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.ServiceStatus))(in) } if err := convert_api_LoadBalancerStatus_To_v1_LoadBalancerStatus(&in.LoadBalancer, &out.LoadBalancer, s); err != nil { return err } return nil } func convert_api_Status_To_v1_Status(in *api.Status, out *Status, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Status))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } out.Status = in.Status out.Message = in.Message out.Reason = StatusReason(in.Reason) if in.Details != nil { out.Details = new(StatusDetails) if err := convert_api_StatusDetails_To_v1_StatusDetails(in.Details, out.Details, s); err != nil { return err } } else { out.Details = nil } out.Code = in.Code return nil } func convert_api_StatusCause_To_v1_StatusCause(in *api.StatusCause, out *StatusCause, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.StatusCause))(in) } out.Type = CauseType(in.Type) out.Message = in.Message out.Field = in.Field return nil } func convert_api_StatusDetails_To_v1_StatusDetails(in *api.StatusDetails, out *StatusDetails, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.StatusDetails))(in) } out.Name = in.Name out.Kind = in.Kind if in.Causes != nil { out.Causes = make([]StatusCause, len(in.Causes)) for i := range in.Causes { if err := convert_api_StatusCause_To_v1_StatusCause(&in.Causes[i], &out.Causes[i], s); err != nil { return err } } } else { out.Causes = nil } out.RetryAfterSeconds = in.RetryAfterSeconds return nil } func convert_api_TCPSocketAction_To_v1_TCPSocketAction(in *api.TCPSocketAction, out *TCPSocketAction, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.TCPSocketAction))(in) } if err := s.Convert(&in.Port, &out.Port, 0); err != nil { return err } return nil } func convert_api_ThirdPartyResourceData_To_v1_ThirdPartyResourceData(in *api.ThirdPartyResourceData, out *ThirdPartyResourceData, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { 
defaulting.(func(*api.ThirdPartyResourceData))(in) } if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := s.Convert(&in.Data, &out.Data, 0); err != nil { return err } return nil } func convert_api_TypeMeta_To_v1_TypeMeta(in *api.TypeMeta, out *TypeMeta, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.TypeMeta))(in) } out.Kind = in.Kind out.APIVersion = in.APIVersion return nil } func convert_api_Volume_To_v1_Volume(in *api.Volume, out *Volume, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.Volume))(in) } out.Name = in.Name if err := convert_api_VolumeSource_To_v1_VolumeSource(&in.VolumeSource, &out.VolumeSource, s); err != nil { return err } return nil } func convert_api_VolumeMount_To_v1_VolumeMount(in *api.VolumeMount, out *VolumeMount, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.VolumeMount))(in) } out.Name = in.Name out.ReadOnly = in.ReadOnly out.MountPath = in.MountPath return nil } func convert_api_VolumeSource_To_v1_VolumeSource(in *api.VolumeSource, out *VolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*api.VolumeSource))(in) } if in.HostPath != nil { out.HostPath = new(HostPathVolumeSource) if err := convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource(in.HostPath, out.HostPath, s); err != nil { return err } } else { out.HostPath = nil } if in.EmptyDir != nil { out.EmptyDir = new(EmptyDirVolumeSource) if err := convert_api_EmptyDirVolumeSource_To_v1_EmptyDirVolumeSource(in.EmptyDir, out.EmptyDir, s); err != nil { return err } } else { out.EmptyDir = nil } if in.GCEPersistentDisk != nil { out.GCEPersistentDisk = new(GCEPersistentDiskVolumeSource) if err := convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource(in.GCEPersistentDisk, out.GCEPersistentDisk, s); err != nil { return err } } else { out.GCEPersistentDisk = nil } if in.AWSElasticBlockStore != nil { out.AWSElasticBlockStore = new(AWSElasticBlockStoreVolumeSource) if err := convert_api_AWSElasticBlockStoreVolumeSource_To_v1_AWSElasticBlockStoreVolumeSource(in.AWSElasticBlockStore, out.AWSElasticBlockStore, s); err != nil { return err } } else { out.AWSElasticBlockStore = nil } if in.GitRepo != nil { out.GitRepo = new(GitRepoVolumeSource) if err := convert_api_GitRepoVolumeSource_To_v1_GitRepoVolumeSource(in.GitRepo, out.GitRepo, s); err != nil { return err } } else { out.GitRepo = nil } if in.Secret != nil { out.Secret = new(SecretVolumeSource) if err := convert_api_SecretVolumeSource_To_v1_SecretVolumeSource(in.Secret, out.Secret, s); err != nil { return err } } else { out.Secret = nil } if in.NFS != nil { out.NFS = new(NFSVolumeSource) if err := convert_api_NFSVolumeSource_To_v1_NFSVolumeSource(in.NFS, out.NFS, s); err != nil { return err } } else { out.NFS = nil } if in.ISCSI != nil { out.ISCSI = new(ISCSIVolumeSource) if err := convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource(in.ISCSI, out.ISCSI, s); err != nil { return err } } else { out.ISCSI = nil } if in.Glusterfs != nil { out.Glusterfs = new(GlusterfsVolumeSource) if err := 
convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource(in.Glusterfs, out.Glusterfs, s); err != nil { return err } } else { out.Glusterfs = nil } if in.PersistentVolumeClaim != nil { out.PersistentVolumeClaim = new(PersistentVolumeClaimVolumeSource) if err := convert_api_PersistentVolumeClaimVolumeSource_To_v1_PersistentVolumeClaimVolumeSource(in.PersistentVolumeClaim, out.PersistentVolumeClaim, s); err != nil { return err } } else { out.PersistentVolumeClaim = nil } if in.RBD != nil { out.RBD = new(RBDVolumeSource) if err := convert_api_RBDVolumeSource_To_v1_RBDVolumeSource(in.RBD, out.RBD, s); err != nil { return err } } else { out.RBD = nil } return nil } func convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource(in *AWSElasticBlockStoreVolumeSource, out *api.AWSElasticBlockStoreVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*AWSElasticBlockStoreVolumeSource))(in) } out.VolumeID = in.VolumeID out.FSType = in.FSType out.Partition = in.Partition out.ReadOnly = in.ReadOnly return nil } func convert_v1_Binding_To_api_Binding(in *Binding, out *api.Binding, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Binding))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_ObjectReference_To_api_ObjectReference(&in.Target, &out.Target, s); err != nil { return err } return nil } func convert_v1_Capabilities_To_api_Capabilities(in *Capabilities, out *api.Capabilities, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Capabilities))(in) } if in.Add != nil { out.Add = make([]api.Capability, len(in.Add)) for i := range in.Add { out.Add[i] = api.Capability(in.Add[i]) } } else { out.Add = nil } if in.Drop != nil { out.Drop = make([]api.Capability, len(in.Drop)) for i := range in.Drop { out.Drop[i] = api.Capability(in.Drop[i]) } } else { out.Drop = nil } return nil } func convert_v1_ComponentCondition_To_api_ComponentCondition(in *ComponentCondition, out *api.ComponentCondition, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ComponentCondition))(in) } out.Type = api.ComponentConditionType(in.Type) out.Status = api.ConditionStatus(in.Status) out.Message = in.Message out.Error = in.Error return nil } func convert_v1_ComponentStatus_To_api_ComponentStatus(in *ComponentStatus, out *api.ComponentStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ComponentStatus))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if in.Conditions != nil { out.Conditions = make([]api.ComponentCondition, len(in.Conditions)) for i := range in.Conditions { if err := convert_v1_ComponentCondition_To_api_ComponentCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil { return err } } } else { out.Conditions = nil } return nil } func convert_v1_ComponentStatusList_To_api_ComponentStatusList(in *ComponentStatusList, out *api.ComponentStatusList, s 
conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ComponentStatusList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.ComponentStatus, len(in.Items)) for i := range in.Items { if err := convert_v1_ComponentStatus_To_api_ComponentStatus(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_Container_To_api_Container(in *Container, out *api.Container, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Container))(in) } out.Name = in.Name out.Image = in.Image if in.Command != nil { out.Command = make([]string, len(in.Command)) for i := range in.Command { out.Command[i] = in.Command[i] } } else { out.Command = nil } if in.Args != nil { out.Args = make([]string, len(in.Args)) for i := range in.Args { out.Args[i] = in.Args[i] } } else { out.Args = nil } out.WorkingDir = in.WorkingDir if in.Ports != nil { out.Ports = make([]api.ContainerPort, len(in.Ports)) for i := range in.Ports { if err := convert_v1_ContainerPort_To_api_ContainerPort(&in.Ports[i], &out.Ports[i], s); err != nil { return err } } } else { out.Ports = nil } if in.Env != nil { out.Env = make([]api.EnvVar, len(in.Env)) for i := range in.Env { if err := convert_v1_EnvVar_To_api_EnvVar(&in.Env[i], &out.Env[i], s); err != nil { return err } } } else { out.Env = nil } if err := convert_v1_ResourceRequirements_To_api_ResourceRequirements(&in.Resources, &out.Resources, s); err != nil { return err } if in.VolumeMounts != nil { out.VolumeMounts = make([]api.VolumeMount, len(in.VolumeMounts)) for i := range in.VolumeMounts { if err := convert_v1_VolumeMount_To_api_VolumeMount(&in.VolumeMounts[i], &out.VolumeMounts[i], s); err != nil { return err } } } else { out.VolumeMounts = nil } if in.LivenessProbe != nil { out.LivenessProbe = new(api.Probe) if err := convert_v1_Probe_To_api_Probe(in.LivenessProbe, out.LivenessProbe, s); err != nil { return err } } else { out.LivenessProbe = nil } if in.ReadinessProbe != nil { out.ReadinessProbe = new(api.Probe) if err := convert_v1_Probe_To_api_Probe(in.ReadinessProbe, out.ReadinessProbe, s); err != nil { return err } } else { out.ReadinessProbe = nil } if in.Lifecycle != nil { out.Lifecycle = new(api.Lifecycle) if err := convert_v1_Lifecycle_To_api_Lifecycle(in.Lifecycle, out.Lifecycle, s); err != nil { return err } } else { out.Lifecycle = nil } out.TerminationMessagePath = in.TerminationMessagePath out.ImagePullPolicy = api.PullPolicy(in.ImagePullPolicy) if in.SecurityContext != nil { out.SecurityContext = new(api.SecurityContext) if err := convert_v1_SecurityContext_To_api_SecurityContext(in.SecurityContext, out.SecurityContext, s); err != nil { return err } } else { out.SecurityContext = nil } out.Stdin = in.Stdin out.TTY = in.TTY return nil } func convert_v1_ContainerPort_To_api_ContainerPort(in *ContainerPort, out *api.ContainerPort, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ContainerPort))(in) } out.Name = in.Name out.HostPort = in.HostPort out.ContainerPort = in.ContainerPort out.Protocol = api.Protocol(in.Protocol) out.HostIP = in.HostIP return nil } func 
convert_v1_ContainerState_To_api_ContainerState(in *ContainerState, out *api.ContainerState, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ContainerState))(in) } if in.Waiting != nil { out.Waiting = new(api.ContainerStateWaiting) if err := convert_v1_ContainerStateWaiting_To_api_ContainerStateWaiting(in.Waiting, out.Waiting, s); err != nil { return err } } else { out.Waiting = nil } if in.Running != nil { out.Running = new(api.ContainerStateRunning) if err := convert_v1_ContainerStateRunning_To_api_ContainerStateRunning(in.Running, out.Running, s); err != nil { return err } } else { out.Running = nil } if in.Terminated != nil { out.Terminated = new(api.ContainerStateTerminated) if err := convert_v1_ContainerStateTerminated_To_api_ContainerStateTerminated(in.Terminated, out.Terminated, s); err != nil { return err } } else { out.Terminated = nil } return nil } func convert_v1_ContainerStateRunning_To_api_ContainerStateRunning(in *ContainerStateRunning, out *api.ContainerStateRunning, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ContainerStateRunning))(in) } if err := s.Convert(&in.StartedAt, &out.StartedAt, 0); err != nil { return err } return nil } func convert_v1_ContainerStateTerminated_To_api_ContainerStateTerminated(in *ContainerStateTerminated, out *api.ContainerStateTerminated, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ContainerStateTerminated))(in) } out.ExitCode = in.ExitCode out.Signal = in.Signal out.Reason = in.Reason out.Message = in.Message if err := s.Convert(&in.StartedAt, &out.StartedAt, 0); err != nil { return err } if err := s.Convert(&in.FinishedAt, &out.FinishedAt, 0); err != nil { return err } out.ContainerID = in.ContainerID return nil } func convert_v1_ContainerStateWaiting_To_api_ContainerStateWaiting(in *ContainerStateWaiting, out *api.ContainerStateWaiting, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ContainerStateWaiting))(in) } out.Reason = in.Reason return nil } func convert_v1_ContainerStatus_To_api_ContainerStatus(in *ContainerStatus, out *api.ContainerStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ContainerStatus))(in) } out.Name = in.Name if err := convert_v1_ContainerState_To_api_ContainerState(&in.State, &out.State, s); err != nil { return err } if err := convert_v1_ContainerState_To_api_ContainerState(&in.LastTerminationState, &out.LastTerminationState, s); err != nil { return err } out.Ready = in.Ready out.RestartCount = in.RestartCount out.Image = in.Image out.ImageID = in.ImageID out.ContainerID = in.ContainerID return nil } func convert_v1_Daemon_To_api_Daemon(in *Daemon, out *api.Daemon, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Daemon))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_DaemonSpec_To_api_DaemonSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_v1_DaemonStatus_To_api_DaemonStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func 
convert_v1_DaemonList_To_api_DaemonList(in *DaemonList, out *api.DaemonList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*DaemonList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.Daemon, len(in.Items)) for i := range in.Items { if err := convert_v1_Daemon_To_api_Daemon(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_DaemonSpec_To_api_DaemonSpec(in *DaemonSpec, out *api.DaemonSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*DaemonSpec))(in) } if in.Selector != nil { out.Selector = make(map[string]string) for key, val := range in.Selector { out.Selector[key] = val } } else { out.Selector = nil } if in.Template != nil { out.Template = new(api.PodTemplateSpec) if err := convert_v1_PodTemplateSpec_To_api_PodTemplateSpec(in.Template, out.Template, s); err != nil { return err } } else { out.Template = nil } return nil } func convert_v1_DaemonStatus_To_api_DaemonStatus(in *DaemonStatus, out *api.DaemonStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*DaemonStatus))(in) } out.CurrentNumberScheduled = in.CurrentNumberScheduled out.NumberMisscheduled = in.NumberMisscheduled out.DesiredNumberScheduled = in.DesiredNumberScheduled return nil } func convert_v1_DeleteOptions_To_api_DeleteOptions(in *DeleteOptions, out *api.DeleteOptions, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*DeleteOptions))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if in.GracePeriodSeconds != nil { out.GracePeriodSeconds = new(int64) *out.GracePeriodSeconds = *in.GracePeriodSeconds } else { out.GracePeriodSeconds = nil } return nil } func convert_v1_EmptyDirVolumeSource_To_api_EmptyDirVolumeSource(in *EmptyDirVolumeSource, out *api.EmptyDirVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*EmptyDirVolumeSource))(in) } out.Medium = api.StorageMedium(in.Medium) return nil } func convert_v1_EndpointAddress_To_api_EndpointAddress(in *EndpointAddress, out *api.EndpointAddress, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*EndpointAddress))(in) } out.IP = in.IP if in.TargetRef != nil { out.TargetRef = new(api.ObjectReference) if err := convert_v1_ObjectReference_To_api_ObjectReference(in.TargetRef, out.TargetRef, s); err != nil { return err } } else { out.TargetRef = nil } return nil } func convert_v1_EndpointPort_To_api_EndpointPort(in *EndpointPort, out *api.EndpointPort, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*EndpointPort))(in) } out.Name = in.Name out.Port = in.Port out.Protocol = api.Protocol(in.Protocol) return nil } func convert_v1_EndpointSubset_To_api_EndpointSubset(in *EndpointSubset, out *api.EndpointSubset, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { 
defaulting.(func(*EndpointSubset))(in) } if in.Addresses != nil { out.Addresses = make([]api.EndpointAddress, len(in.Addresses)) for i := range in.Addresses { if err := convert_v1_EndpointAddress_To_api_EndpointAddress(&in.Addresses[i], &out.Addresses[i], s); err != nil { return err } } } else { out.Addresses = nil } if in.Ports != nil { out.Ports = make([]api.EndpointPort, len(in.Ports)) for i := range in.Ports { if err := convert_v1_EndpointPort_To_api_EndpointPort(&in.Ports[i], &out.Ports[i], s); err != nil { return err } } } else { out.Ports = nil } return nil } func convert_v1_Endpoints_To_api_Endpoints(in *Endpoints, out *api.Endpoints, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Endpoints))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if in.Subsets != nil { out.Subsets = make([]api.EndpointSubset, len(in.Subsets)) for i := range in.Subsets { if err := convert_v1_EndpointSubset_To_api_EndpointSubset(&in.Subsets[i], &out.Subsets[i], s); err != nil { return err } } } else { out.Subsets = nil } return nil } func convert_v1_EndpointsList_To_api_EndpointsList(in *EndpointsList, out *api.EndpointsList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*EndpointsList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.Endpoints, len(in.Items)) for i := range in.Items { if err := convert_v1_Endpoints_To_api_Endpoints(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_EnvVar_To_api_EnvVar(in *EnvVar, out *api.EnvVar, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*EnvVar))(in) } out.Name = in.Name out.Value = in.Value if in.ValueFrom != nil { out.ValueFrom = new(api.EnvVarSource) if err := convert_v1_EnvVarSource_To_api_EnvVarSource(in.ValueFrom, out.ValueFrom, s); err != nil { return err } } else { out.ValueFrom = nil } return nil } func convert_v1_EnvVarSource_To_api_EnvVarSource(in *EnvVarSource, out *api.EnvVarSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*EnvVarSource))(in) } if in.FieldRef != nil { out.FieldRef = new(api.ObjectFieldSelector) if err := convert_v1_ObjectFieldSelector_To_api_ObjectFieldSelector(in.FieldRef, out.FieldRef, s); err != nil { return err } } else { out.FieldRef = nil } return nil } func convert_v1_Event_To_api_Event(in *Event, out *api.Event, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Event))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_ObjectReference_To_api_ObjectReference(&in.InvolvedObject, &out.InvolvedObject, s); err != nil { return err } out.Reason = in.Reason out.Message = in.Message if err := 
convert_v1_EventSource_To_api_EventSource(&in.Source, &out.Source, s); err != nil { return err } if err := s.Convert(&in.FirstTimestamp, &out.FirstTimestamp, 0); err != nil { return err } if err := s.Convert(&in.LastTimestamp, &out.LastTimestamp, 0); err != nil { return err } out.Count = in.Count return nil } func convert_v1_EventList_To_api_EventList(in *EventList, out *api.EventList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*EventList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.Event, len(in.Items)) for i := range in.Items { if err := convert_v1_Event_To_api_Event(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_EventSource_To_api_EventSource(in *EventSource, out *api.EventSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*EventSource))(in) } out.Component = in.Component out.Host = in.Host return nil } func convert_v1_ExecAction_To_api_ExecAction(in *ExecAction, out *api.ExecAction, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ExecAction))(in) } if in.Command != nil { out.Command = make([]string, len(in.Command)) for i := range in.Command { out.Command[i] = in.Command[i] } } else { out.Command = nil } return nil } func convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource(in *GCEPersistentDiskVolumeSource, out *api.GCEPersistentDiskVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*GCEPersistentDiskVolumeSource))(in) } out.PDName = in.PDName out.FSType = in.FSType out.Partition = in.Partition out.ReadOnly = in.ReadOnly return nil } func convert_v1_GitRepoVolumeSource_To_api_GitRepoVolumeSource(in *GitRepoVolumeSource, out *api.GitRepoVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*GitRepoVolumeSource))(in) } out.Repository = in.Repository out.Revision = in.Revision return nil } func convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource(in *GlusterfsVolumeSource, out *api.GlusterfsVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*GlusterfsVolumeSource))(in) } out.EndpointsName = in.EndpointsName out.Path = in.Path out.ReadOnly = in.ReadOnly return nil } func convert_v1_HTTPGetAction_To_api_HTTPGetAction(in *HTTPGetAction, out *api.HTTPGetAction, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*HTTPGetAction))(in) } out.Path = in.Path if err := s.Convert(&in.Port, &out.Port, 0); err != nil { return err } out.Host = in.Host out.Scheme = api.URIScheme(in.Scheme) return nil } func convert_v1_Handler_To_api_Handler(in *Handler, out *api.Handler, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Handler))(in) } if in.Exec != nil { out.Exec = new(api.ExecAction) if err := convert_v1_ExecAction_To_api_ExecAction(in.Exec, out.Exec, s); err != nil { return 
err } } else { out.Exec = nil } if in.HTTPGet != nil { out.HTTPGet = new(api.HTTPGetAction) if err := convert_v1_HTTPGetAction_To_api_HTTPGetAction(in.HTTPGet, out.HTTPGet, s); err != nil { return err } } else { out.HTTPGet = nil } if in.TCPSocket != nil { out.TCPSocket = new(api.TCPSocketAction) if err := convert_v1_TCPSocketAction_To_api_TCPSocketAction(in.TCPSocket, out.TCPSocket, s); err != nil { return err } } else { out.TCPSocket = nil } return nil } func convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource(in *HostPathVolumeSource, out *api.HostPathVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*HostPathVolumeSource))(in) } out.Path = in.Path return nil } func convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource(in *ISCSIVolumeSource, out *api.ISCSIVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ISCSIVolumeSource))(in) } out.TargetPortal = in.TargetPortal out.IQN = in.IQN out.Lun = in.Lun out.FSType = in.FSType out.ReadOnly = in.ReadOnly return nil } func convert_v1_Lifecycle_To_api_Lifecycle(in *Lifecycle, out *api.Lifecycle, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Lifecycle))(in) } if in.PostStart != nil { out.PostStart = new(api.Handler) if err := convert_v1_Handler_To_api_Handler(in.PostStart, out.PostStart, s); err != nil { return err } } else { out.PostStart = nil } if in.PreStop != nil { out.PreStop = new(api.Handler) if err := convert_v1_Handler_To_api_Handler(in.PreStop, out.PreStop, s); err != nil { return err } } else { out.PreStop = nil } return nil } func convert_v1_LimitRange_To_api_LimitRange(in *LimitRange, out *api.LimitRange, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*LimitRange))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_LimitRangeSpec_To_api_LimitRangeSpec(&in.Spec, &out.Spec, s); err != nil { return err } return nil } func convert_v1_LimitRangeItem_To_api_LimitRangeItem(in *LimitRangeItem, out *api.LimitRangeItem, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*LimitRangeItem))(in) } out.Type = api.LimitType(in.Type) if in.Max != nil { out.Max = make(api.ResourceList) for key, val := range in.Max { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Max[api.ResourceName(key)] = newVal } } else { out.Max = nil } if in.Min != nil { out.Min = make(api.ResourceList) for key, val := range in.Min { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Min[api.ResourceName(key)] = newVal } } else { out.Min = nil } if in.Default != nil { out.Default = make(api.ResourceList) for key, val := range in.Default { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Default[api.ResourceName(key)] = newVal } } else { out.Default = nil } return nil } func convert_v1_LimitRangeList_To_api_LimitRangeList(in *LimitRangeList, out *api.LimitRangeList, s conversion.Scope) error { if defaulting, found := 
s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*LimitRangeList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.LimitRange, len(in.Items)) for i := range in.Items { if err := convert_v1_LimitRange_To_api_LimitRange(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_LimitRangeSpec_To_api_LimitRangeSpec(in *LimitRangeSpec, out *api.LimitRangeSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*LimitRangeSpec))(in) } if in.Limits != nil { out.Limits = make([]api.LimitRangeItem, len(in.Limits)) for i := range in.Limits { if err := convert_v1_LimitRangeItem_To_api_LimitRangeItem(&in.Limits[i], &out.Limits[i], s); err != nil { return err } } } else { out.Limits = nil } return nil } func convert_v1_List_To_api_List(in *List, out *api.List, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*List))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if err := s.Convert(&in.Items, &out.Items, 0); err != nil { return err } return nil } func convert_v1_ListMeta_To_api_ListMeta(in *ListMeta, out *api.ListMeta, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ListMeta))(in) } out.SelfLink = in.SelfLink out.ResourceVersion = in.ResourceVersion return nil } func convert_v1_ListOptions_To_api_ListOptions(in *ListOptions, out *api.ListOptions, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ListOptions))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := s.Convert(&in.LabelSelector, &out.LabelSelector, 0); err != nil { return err } if err := s.Convert(&in.FieldSelector, &out.FieldSelector, 0); err != nil { return err } out.Watch = in.Watch out.ResourceVersion = in.ResourceVersion return nil } func convert_v1_LoadBalancerIngress_To_api_LoadBalancerIngress(in *LoadBalancerIngress, out *api.LoadBalancerIngress, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*LoadBalancerIngress))(in) } out.IP = in.IP out.Hostname = in.Hostname return nil } func convert_v1_LoadBalancerStatus_To_api_LoadBalancerStatus(in *LoadBalancerStatus, out *api.LoadBalancerStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*LoadBalancerStatus))(in) } if in.Ingress != nil { out.Ingress = make([]api.LoadBalancerIngress, len(in.Ingress)) for i := range in.Ingress { if err := convert_v1_LoadBalancerIngress_To_api_LoadBalancerIngress(&in.Ingress[i], &out.Ingress[i], s); err != nil { return err } } } else { out.Ingress = nil } return nil } func convert_v1_LocalObjectReference_To_api_LocalObjectReference(in *LocalObjectReference, out *api.LocalObjectReference, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { 
defaulting.(func(*LocalObjectReference))(in) } out.Name = in.Name return nil } func convert_v1_NFSVolumeSource_To_api_NFSVolumeSource(in *NFSVolumeSource, out *api.NFSVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*NFSVolumeSource))(in) } out.Server = in.Server out.Path = in.Path out.ReadOnly = in.ReadOnly return nil } func convert_v1_Namespace_To_api_Namespace(in *Namespace, out *api.Namespace, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Namespace))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_NamespaceSpec_To_api_NamespaceSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_v1_NamespaceStatus_To_api_NamespaceStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_v1_NamespaceList_To_api_NamespaceList(in *NamespaceList, out *api.NamespaceList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*NamespaceList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.Namespace, len(in.Items)) for i := range in.Items { if err := convert_v1_Namespace_To_api_Namespace(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_NamespaceSpec_To_api_NamespaceSpec(in *NamespaceSpec, out *api.NamespaceSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*NamespaceSpec))(in) } if in.Finalizers != nil { out.Finalizers = make([]api.FinalizerName, len(in.Finalizers)) for i := range in.Finalizers { out.Finalizers[i] = api.FinalizerName(in.Finalizers[i]) } } else { out.Finalizers = nil } return nil } func convert_v1_NamespaceStatus_To_api_NamespaceStatus(in *NamespaceStatus, out *api.NamespaceStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*NamespaceStatus))(in) } out.Phase = api.NamespacePhase(in.Phase) return nil } func convert_v1_Node_To_api_Node(in *Node, out *api.Node, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Node))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_NodeSpec_To_api_NodeSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_v1_NodeStatus_To_api_NodeStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_v1_NodeAddress_To_api_NodeAddress(in *NodeAddress, out *api.NodeAddress, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*NodeAddress))(in) } out.Type = api.NodeAddressType(in.Type) out.Address = in.Address return nil } func convert_v1_NodeCondition_To_api_NodeCondition(in *NodeCondition, out 
*api.NodeCondition, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*NodeCondition))(in) } out.Type = api.NodeConditionType(in.Type) out.Status = api.ConditionStatus(in.Status) if err := s.Convert(&in.LastHeartbeatTime, &out.LastHeartbeatTime, 0); err != nil { return err } if err := s.Convert(&in.LastTransitionTime, &out.LastTransitionTime, 0); err != nil { return err } out.Reason = in.Reason out.Message = in.Message return nil } func convert_v1_NodeList_To_api_NodeList(in *NodeList, out *api.NodeList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*NodeList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.Node, len(in.Items)) for i := range in.Items { if err := convert_v1_Node_To_api_Node(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_NodeSpec_To_api_NodeSpec(in *NodeSpec, out *api.NodeSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*NodeSpec))(in) } out.PodCIDR = in.PodCIDR out.ExternalID = in.ExternalID out.ProviderID = in.ProviderID out.Unschedulable = in.Unschedulable return nil } func convert_v1_NodeStatus_To_api_NodeStatus(in *NodeStatus, out *api.NodeStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*NodeStatus))(in) } if in.Capacity != nil { out.Capacity = make(api.ResourceList) for key, val := range in.Capacity { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Capacity[api.ResourceName(key)] = newVal } } else { out.Capacity = nil } out.Phase = api.NodePhase(in.Phase) if in.Conditions != nil { out.Conditions = make([]api.NodeCondition, len(in.Conditions)) for i := range in.Conditions { if err := convert_v1_NodeCondition_To_api_NodeCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil { return err } } } else { out.Conditions = nil } if in.Addresses != nil { out.Addresses = make([]api.NodeAddress, len(in.Addresses)) for i := range in.Addresses { if err := convert_v1_NodeAddress_To_api_NodeAddress(&in.Addresses[i], &out.Addresses[i], s); err != nil { return err } } } else { out.Addresses = nil } if err := convert_v1_NodeSystemInfo_To_api_NodeSystemInfo(&in.NodeInfo, &out.NodeInfo, s); err != nil { return err } return nil } func convert_v1_NodeSystemInfo_To_api_NodeSystemInfo(in *NodeSystemInfo, out *api.NodeSystemInfo, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*NodeSystemInfo))(in) } out.MachineID = in.MachineID out.SystemUUID = in.SystemUUID out.BootID = in.BootID out.KernelVersion = in.KernelVersion out.OsImage = in.OsImage out.ContainerRuntimeVersion = in.ContainerRuntimeVersion out.KubeletVersion = in.KubeletVersion out.KubeProxyVersion = in.KubeProxyVersion return nil } func convert_v1_ObjectFieldSelector_To_api_ObjectFieldSelector(in *ObjectFieldSelector, out *api.ObjectFieldSelector, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ObjectFieldSelector))(in) } out.APIVersion = in.APIVersion 
out.FieldPath = in.FieldPath return nil } func convert_v1_ObjectMeta_To_api_ObjectMeta(in *ObjectMeta, out *api.ObjectMeta, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ObjectMeta))(in) } out.Name = in.Name out.GenerateName = in.GenerateName out.Namespace = in.Namespace out.SelfLink = in.SelfLink out.UID = in.UID out.ResourceVersion = in.ResourceVersion out.Generation = in.Generation if err := s.Convert(&in.CreationTimestamp, &out.CreationTimestamp, 0); err != nil { return err } if in.DeletionTimestamp != nil { if err := s.Convert(&in.DeletionTimestamp, &out.DeletionTimestamp, 0); err != nil { return err } } else { out.DeletionTimestamp = nil } if in.DeletionGracePeriodSeconds != nil { out.DeletionGracePeriodSeconds = new(int64) *out.DeletionGracePeriodSeconds = *in.DeletionGracePeriodSeconds } else { out.DeletionGracePeriodSeconds = nil } if in.Labels != nil { out.Labels = make(map[string]string) for key, val := range in.Labels { out.Labels[key] = val } } else { out.Labels = nil } if in.Annotations != nil { out.Annotations = make(map[string]string) for key, val := range in.Annotations { out.Annotations[key] = val } } else { out.Annotations = nil } return nil } func convert_v1_ObjectReference_To_api_ObjectReference(in *ObjectReference, out *api.ObjectReference, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ObjectReference))(in) } out.Kind = in.Kind out.Namespace = in.Namespace out.Name = in.Name out.UID = in.UID out.APIVersion = in.APIVersion out.ResourceVersion = in.ResourceVersion out.FieldPath = in.FieldPath return nil } func convert_v1_PersistentVolume_To_api_PersistentVolume(in *PersistentVolume, out *api.PersistentVolume, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PersistentVolume))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_PersistentVolumeSpec_To_api_PersistentVolumeSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_v1_PersistentVolumeStatus_To_api_PersistentVolumeStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_v1_PersistentVolumeClaim_To_api_PersistentVolumeClaim(in *PersistentVolumeClaim, out *api.PersistentVolumeClaim, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PersistentVolumeClaim))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_PersistentVolumeClaimSpec_To_api_PersistentVolumeClaimSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_v1_PersistentVolumeClaimStatus_To_api_PersistentVolumeClaimStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_v1_PersistentVolumeClaimList_To_api_PersistentVolumeClaimList(in *PersistentVolumeClaimList, out *api.PersistentVolumeClaimList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PersistentVolumeClaimList))(in) } if err := 
convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.PersistentVolumeClaim, len(in.Items)) for i := range in.Items { if err := convert_v1_PersistentVolumeClaim_To_api_PersistentVolumeClaim(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_PersistentVolumeClaimSpec_To_api_PersistentVolumeClaimSpec(in *PersistentVolumeClaimSpec, out *api.PersistentVolumeClaimSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PersistentVolumeClaimSpec))(in) } if in.AccessModes != nil { out.AccessModes = make([]api.PersistentVolumeAccessMode, len(in.AccessModes)) for i := range in.AccessModes { out.AccessModes[i] = api.PersistentVolumeAccessMode(in.AccessModes[i]) } } else { out.AccessModes = nil } if err := convert_v1_ResourceRequirements_To_api_ResourceRequirements(&in.Resources, &out.Resources, s); err != nil { return err } out.VolumeName = in.VolumeName return nil } func convert_v1_PersistentVolumeClaimStatus_To_api_PersistentVolumeClaimStatus(in *PersistentVolumeClaimStatus, out *api.PersistentVolumeClaimStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PersistentVolumeClaimStatus))(in) } out.Phase = api.PersistentVolumeClaimPhase(in.Phase) if in.AccessModes != nil { out.AccessModes = make([]api.PersistentVolumeAccessMode, len(in.AccessModes)) for i := range in.AccessModes { out.AccessModes[i] = api.PersistentVolumeAccessMode(in.AccessModes[i]) } } else { out.AccessModes = nil } if in.Capacity != nil { out.Capacity = make(api.ResourceList) for key, val := range in.Capacity { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Capacity[api.ResourceName(key)] = newVal } } else { out.Capacity = nil } return nil } func convert_v1_PersistentVolumeClaimVolumeSource_To_api_PersistentVolumeClaimVolumeSource(in *PersistentVolumeClaimVolumeSource, out *api.PersistentVolumeClaimVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PersistentVolumeClaimVolumeSource))(in) } out.ClaimName = in.ClaimName out.ReadOnly = in.ReadOnly return nil } func convert_v1_PersistentVolumeList_To_api_PersistentVolumeList(in *PersistentVolumeList, out *api.PersistentVolumeList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PersistentVolumeList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.PersistentVolume, len(in.Items)) for i := range in.Items { if err := convert_v1_PersistentVolume_To_api_PersistentVolume(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_PersistentVolumeSource_To_api_PersistentVolumeSource(in *PersistentVolumeSource, out *api.PersistentVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PersistentVolumeSource))(in) } if in.GCEPersistentDisk != nil 
{ out.GCEPersistentDisk = new(api.GCEPersistentDiskVolumeSource) if err := convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource(in.GCEPersistentDisk, out.GCEPersistentDisk, s); err != nil { return err } } else { out.GCEPersistentDisk = nil } if in.AWSElasticBlockStore != nil { out.AWSElasticBlockStore = new(api.AWSElasticBlockStoreVolumeSource) if err := convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource(in.AWSElasticBlockStore, out.AWSElasticBlockStore, s); err != nil { return err } } else { out.AWSElasticBlockStore = nil } if in.HostPath != nil { out.HostPath = new(api.HostPathVolumeSource) if err := convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource(in.HostPath, out.HostPath, s); err != nil { return err } } else { out.HostPath = nil } if in.Glusterfs != nil { out.Glusterfs = new(api.GlusterfsVolumeSource) if err := convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource(in.Glusterfs, out.Glusterfs, s); err != nil { return err } } else { out.Glusterfs = nil } if in.NFS != nil { out.NFS = new(api.NFSVolumeSource) if err := convert_v1_NFSVolumeSource_To_api_NFSVolumeSource(in.NFS, out.NFS, s); err != nil { return err } } else { out.NFS = nil } if in.RBD != nil { out.RBD = new(api.RBDVolumeSource) if err := convert_v1_RBDVolumeSource_To_api_RBDVolumeSource(in.RBD, out.RBD, s); err != nil { return err } } else { out.RBD = nil } if in.ISCSI != nil { out.ISCSI = new(api.ISCSIVolumeSource) if err := convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource(in.ISCSI, out.ISCSI, s); err != nil { return err } } else { out.ISCSI = nil } return nil } func convert_v1_PersistentVolumeSpec_To_api_PersistentVolumeSpec(in *PersistentVolumeSpec, out *api.PersistentVolumeSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PersistentVolumeSpec))(in) } if in.Capacity != nil { out.Capacity = make(api.ResourceList) for key, val := range in.Capacity { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Capacity[api.ResourceName(key)] = newVal } } else { out.Capacity = nil } if err := convert_v1_PersistentVolumeSource_To_api_PersistentVolumeSource(&in.PersistentVolumeSource, &out.PersistentVolumeSource, s); err != nil { return err } if in.AccessModes != nil { out.AccessModes = make([]api.PersistentVolumeAccessMode, len(in.AccessModes)) for i := range in.AccessModes { out.AccessModes[i] = api.PersistentVolumeAccessMode(in.AccessModes[i]) } } else { out.AccessModes = nil } if in.ClaimRef != nil { out.ClaimRef = new(api.ObjectReference) if err := convert_v1_ObjectReference_To_api_ObjectReference(in.ClaimRef, out.ClaimRef, s); err != nil { return err } } else { out.ClaimRef = nil } out.PersistentVolumeReclaimPolicy = api.PersistentVolumeReclaimPolicy(in.PersistentVolumeReclaimPolicy) return nil } func convert_v1_PersistentVolumeStatus_To_api_PersistentVolumeStatus(in *PersistentVolumeStatus, out *api.PersistentVolumeStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PersistentVolumeStatus))(in) } out.Phase = api.PersistentVolumePhase(in.Phase) out.Message = in.Message out.Reason = in.Reason return nil } func convert_v1_Pod_To_api_Pod(in *Pod, out *api.Pod, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Pod))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, 
&out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_PodSpec_To_api_PodSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_v1_PodStatus_To_api_PodStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_v1_PodAttachOptions_To_api_PodAttachOptions(in *PodAttachOptions, out *api.PodAttachOptions, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PodAttachOptions))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } out.Stdin = in.Stdin out.Stdout = in.Stdout out.Stderr = in.Stderr out.TTY = in.TTY out.Container = in.Container return nil } func convert_v1_PodCondition_To_api_PodCondition(in *PodCondition, out *api.PodCondition, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PodCondition))(in) } out.Type = api.PodConditionType(in.Type) out.Status = api.ConditionStatus(in.Status) return nil } func convert_v1_PodExecOptions_To_api_PodExecOptions(in *PodExecOptions, out *api.PodExecOptions, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PodExecOptions))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } out.Stdin = in.Stdin out.Stdout = in.Stdout out.Stderr = in.Stderr out.TTY = in.TTY out.Container = in.Container if in.Command != nil { out.Command = make([]string, len(in.Command)) for i := range in.Command { out.Command[i] = in.Command[i] } } else { out.Command = nil } return nil } func convert_v1_PodList_To_api_PodList(in *PodList, out *api.PodList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PodList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.Pod, len(in.Items)) for i := range in.Items { if err := convert_v1_Pod_To_api_Pod(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_PodLogOptions_To_api_PodLogOptions(in *PodLogOptions, out *api.PodLogOptions, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PodLogOptions))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } out.Container = in.Container out.Follow = in.Follow out.Previous = in.Previous return nil } func convert_v1_PodProxyOptions_To_api_PodProxyOptions(in *PodProxyOptions, out *api.PodProxyOptions, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PodProxyOptions))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } out.Path = in.Path return nil } func convert_v1_PodStatus_To_api_PodStatus(in *PodStatus, out *api.PodStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PodStatus))(in) } out.Phase = api.PodPhase(in.Phase) if in.Conditions != 
nil { out.Conditions = make([]api.PodCondition, len(in.Conditions)) for i := range in.Conditions { if err := convert_v1_PodCondition_To_api_PodCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil { return err } } } else { out.Conditions = nil } out.Message = in.Message out.Reason = in.Reason out.HostIP = in.HostIP out.PodIP = in.PodIP if in.StartTime != nil { if err := s.Convert(&in.StartTime, &out.StartTime, 0); err != nil { return err } } else { out.StartTime = nil } if in.ContainerStatuses != nil { out.ContainerStatuses = make([]api.ContainerStatus, len(in.ContainerStatuses)) for i := range in.ContainerStatuses { if err := convert_v1_ContainerStatus_To_api_ContainerStatus(&in.ContainerStatuses[i], &out.ContainerStatuses[i], s); err != nil { return err } } } else { out.ContainerStatuses = nil } return nil } func convert_v1_PodStatusResult_To_api_PodStatusResult(in *PodStatusResult, out *api.PodStatusResult, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PodStatusResult))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_PodStatus_To_api_PodStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_v1_PodTemplate_To_api_PodTemplate(in *PodTemplate, out *api.PodTemplate, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PodTemplate))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_PodTemplateSpec_To_api_PodTemplateSpec(&in.Template, &out.Template, s); err != nil { return err } return nil } func convert_v1_PodTemplateList_To_api_PodTemplateList(in *PodTemplateList, out *api.PodTemplateList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PodTemplateList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.PodTemplate, len(in.Items)) for i := range in.Items { if err := convert_v1_PodTemplate_To_api_PodTemplate(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_PodTemplateSpec_To_api_PodTemplateSpec(in *PodTemplateSpec, out *api.PodTemplateSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*PodTemplateSpec))(in) } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_PodSpec_To_api_PodSpec(&in.Spec, &out.Spec, s); err != nil { return err } return nil } func convert_v1_Probe_To_api_Probe(in *Probe, out *api.Probe, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Probe))(in) } if err := convert_v1_Handler_To_api_Handler(&in.Handler, &out.Handler, s); err != nil { return err } out.InitialDelaySeconds = in.InitialDelaySeconds out.TimeoutSeconds = 
in.TimeoutSeconds return nil } func convert_v1_RBDVolumeSource_To_api_RBDVolumeSource(in *RBDVolumeSource, out *api.RBDVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*RBDVolumeSource))(in) } if in.CephMonitors != nil { out.CephMonitors = make([]string, len(in.CephMonitors)) for i := range in.CephMonitors { out.CephMonitors[i] = in.CephMonitors[i] } } else { out.CephMonitors = nil } out.RBDImage = in.RBDImage out.FSType = in.FSType out.RBDPool = in.RBDPool out.RadosUser = in.RadosUser out.Keyring = in.Keyring if in.SecretRef != nil { out.SecretRef = new(api.LocalObjectReference) if err := convert_v1_LocalObjectReference_To_api_LocalObjectReference(in.SecretRef, out.SecretRef, s); err != nil { return err } } else { out.SecretRef = nil } out.ReadOnly = in.ReadOnly return nil } func convert_v1_RangeAllocation_To_api_RangeAllocation(in *RangeAllocation, out *api.RangeAllocation, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*RangeAllocation))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } out.Range = in.Range if err := s.Convert(&in.Data, &out.Data, 0); err != nil { return err } return nil } func convert_v1_ReplicationController_To_api_ReplicationController(in *ReplicationController, out *api.ReplicationController, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ReplicationController))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_ReplicationControllerSpec_To_api_ReplicationControllerSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_v1_ReplicationControllerStatus_To_api_ReplicationControllerStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_v1_ReplicationControllerList_To_api_ReplicationControllerList(in *ReplicationControllerList, out *api.ReplicationControllerList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ReplicationControllerList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.ReplicationController, len(in.Items)) for i := range in.Items { if err := convert_v1_ReplicationController_To_api_ReplicationController(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_ReplicationControllerStatus_To_api_ReplicationControllerStatus(in *ReplicationControllerStatus, out *api.ReplicationControllerStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ReplicationControllerStatus))(in) } out.Replicas = in.Replicas out.ObservedGeneration = in.ObservedGeneration return nil } func convert_v1_ResourceQuota_To_api_ResourceQuota(in *ResourceQuota, out *api.ResourceQuota, s conversion.Scope) error { if defaulting, found := 
s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ResourceQuota))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_ResourceQuotaSpec_To_api_ResourceQuotaSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_v1_ResourceQuotaStatus_To_api_ResourceQuotaStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_v1_ResourceQuotaList_To_api_ResourceQuotaList(in *ResourceQuotaList, out *api.ResourceQuotaList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ResourceQuotaList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.ResourceQuota, len(in.Items)) for i := range in.Items { if err := convert_v1_ResourceQuota_To_api_ResourceQuota(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_ResourceQuotaSpec_To_api_ResourceQuotaSpec(in *ResourceQuotaSpec, out *api.ResourceQuotaSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ResourceQuotaSpec))(in) } if in.Hard != nil { out.Hard = make(api.ResourceList) for key, val := range in.Hard { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Hard[api.ResourceName(key)] = newVal } } else { out.Hard = nil } return nil } func convert_v1_ResourceQuotaStatus_To_api_ResourceQuotaStatus(in *ResourceQuotaStatus, out *api.ResourceQuotaStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ResourceQuotaStatus))(in) } if in.Hard != nil { out.Hard = make(api.ResourceList) for key, val := range in.Hard { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Hard[api.ResourceName(key)] = newVal } } else { out.Hard = nil } if in.Used != nil { out.Used = make(api.ResourceList) for key, val := range in.Used { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Used[api.ResourceName(key)] = newVal } } else { out.Used = nil } return nil } func convert_v1_ResourceRequirements_To_api_ResourceRequirements(in *ResourceRequirements, out *api.ResourceRequirements, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ResourceRequirements))(in) } if in.Limits != nil { out.Limits = make(api.ResourceList) for key, val := range in.Limits { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Limits[api.ResourceName(key)] = newVal } } else { out.Limits = nil } if in.Requests != nil { out.Requests = make(api.ResourceList) for key, val := range in.Requests { newVal := resource.Quantity{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Requests[api.ResourceName(key)] = newVal } } else { out.Requests = nil } return nil } func convert_v1_SELinuxOptions_To_api_SELinuxOptions(in *SELinuxOptions, out *api.SELinuxOptions, s conversion.Scope) error { if defaulting, found := 
s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*SELinuxOptions))(in) } out.User = in.User out.Role = in.Role out.Type = in.Type out.Level = in.Level return nil } func convert_v1_Secret_To_api_Secret(in *Secret, out *api.Secret, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Secret))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if in.Data != nil { out.Data = make(map[string][]uint8) for key, val := range in.Data { newVal := []uint8{} if err := s.Convert(&val, &newVal, 0); err != nil { return err } out.Data[key] = newVal } } else { out.Data = nil } out.Type = api.SecretType(in.Type) return nil } func convert_v1_SecretList_To_api_SecretList(in *SecretList, out *api.SecretList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*SecretList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.Secret, len(in.Items)) for i := range in.Items { if err := convert_v1_Secret_To_api_Secret(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_SecretVolumeSource_To_api_SecretVolumeSource(in *SecretVolumeSource, out *api.SecretVolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*SecretVolumeSource))(in) } out.SecretName = in.SecretName return nil } func convert_v1_SecurityContext_To_api_SecurityContext(in *SecurityContext, out *api.SecurityContext, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*SecurityContext))(in) } if in.Capabilities != nil { out.Capabilities = new(api.Capabilities) if err := convert_v1_Capabilities_To_api_Capabilities(in.Capabilities, out.Capabilities, s); err != nil { return err } } else { out.Capabilities = nil } if in.Privileged != nil { out.Privileged = new(bool) *out.Privileged = *in.Privileged } else { out.Privileged = nil } if in.SELinuxOptions != nil { out.SELinuxOptions = new(api.SELinuxOptions) if err := convert_v1_SELinuxOptions_To_api_SELinuxOptions(in.SELinuxOptions, out.SELinuxOptions, s); err != nil { return err } } else { out.SELinuxOptions = nil } if in.RunAsUser != nil { out.RunAsUser = new(int64) *out.RunAsUser = *in.RunAsUser } else { out.RunAsUser = nil } out.RunAsNonRoot = in.RunAsNonRoot return nil } func convert_v1_SerializedReference_To_api_SerializedReference(in *SerializedReference, out *api.SerializedReference, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*SerializedReference))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectReference_To_api_ObjectReference(&in.Reference, &out.Reference, s); err != nil { return err } return nil } func convert_v1_Service_To_api_Service(in *Service, out *api.Service, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Service))(in) 
} if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := convert_v1_ServiceSpec_To_api_ServiceSpec(&in.Spec, &out.Spec, s); err != nil { return err } if err := convert_v1_ServiceStatus_To_api_ServiceStatus(&in.Status, &out.Status, s); err != nil { return err } return nil } func convert_v1_ServiceAccount_To_api_ServiceAccount(in *ServiceAccount, out *api.ServiceAccount, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ServiceAccount))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if in.Secrets != nil { out.Secrets = make([]api.ObjectReference, len(in.Secrets)) for i := range in.Secrets { if err := convert_v1_ObjectReference_To_api_ObjectReference(&in.Secrets[i], &out.Secrets[i], s); err != nil { return err } } } else { out.Secrets = nil } if in.ImagePullSecrets != nil { out.ImagePullSecrets = make([]api.LocalObjectReference, len(in.ImagePullSecrets)) for i := range in.ImagePullSecrets { if err := convert_v1_LocalObjectReference_To_api_LocalObjectReference(&in.ImagePullSecrets[i], &out.ImagePullSecrets[i], s); err != nil { return err } } } else { out.ImagePullSecrets = nil } return nil } func convert_v1_ServiceAccountList_To_api_ServiceAccountList(in *ServiceAccountList, out *api.ServiceAccountList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ServiceAccountList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.ServiceAccount, len(in.Items)) for i := range in.Items { if err := convert_v1_ServiceAccount_To_api_ServiceAccount(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_ServiceList_To_api_ServiceList(in *ServiceList, out *api.ServiceList, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ServiceList))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } if in.Items != nil { out.Items = make([]api.Service, len(in.Items)) for i := range in.Items { if err := convert_v1_Service_To_api_Service(&in.Items[i], &out.Items[i], s); err != nil { return err } } } else { out.Items = nil } return nil } func convert_v1_ServicePort_To_api_ServicePort(in *ServicePort, out *api.ServicePort, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ServicePort))(in) } out.Name = in.Name out.Protocol = api.Protocol(in.Protocol) out.Port = in.Port if err := s.Convert(&in.TargetPort, &out.TargetPort, 0); err != nil { return err } out.NodePort = in.NodePort return nil } func convert_v1_ServiceSpec_To_api_ServiceSpec(in *ServiceSpec, out *api.ServiceSpec, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found 
{ defaulting.(func(*ServiceSpec))(in) } if in.Ports != nil { out.Ports = make([]api.ServicePort, len(in.Ports)) for i := range in.Ports { if err := convert_v1_ServicePort_To_api_ServicePort(&in.Ports[i], &out.Ports[i], s); err != nil { return err } } } else { out.Ports = nil } if in.Selector != nil { out.Selector = make(map[string]string) for key, val := range in.Selector { out.Selector[key] = val } } else { out.Selector = nil } out.ClusterIP = in.ClusterIP out.Type = api.ServiceType(in.Type) if in.ExternalIPs != nil { out.ExternalIPs = make([]string, len(in.ExternalIPs)) for i := range in.ExternalIPs { out.ExternalIPs[i] = in.ExternalIPs[i] } } else { out.ExternalIPs = nil } out.SessionAffinity = api.ServiceAffinity(in.SessionAffinity) return nil } func convert_v1_ServiceStatus_To_api_ServiceStatus(in *ServiceStatus, out *api.ServiceStatus, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ServiceStatus))(in) } if err := convert_v1_LoadBalancerStatus_To_api_LoadBalancerStatus(&in.LoadBalancer, &out.LoadBalancer, s); err != nil { return err } return nil } func convert_v1_Status_To_api_Status(in *Status, out *api.Status, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Status))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil { return err } out.Status = in.Status out.Message = in.Message out.Reason = api.StatusReason(in.Reason) if in.Details != nil { out.Details = new(api.StatusDetails) if err := convert_v1_StatusDetails_To_api_StatusDetails(in.Details, out.Details, s); err != nil { return err } } else { out.Details = nil } out.Code = in.Code return nil } func convert_v1_StatusCause_To_api_StatusCause(in *StatusCause, out *api.StatusCause, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*StatusCause))(in) } out.Type = api.CauseType(in.Type) out.Message = in.Message out.Field = in.Field return nil } func convert_v1_StatusDetails_To_api_StatusDetails(in *StatusDetails, out *api.StatusDetails, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*StatusDetails))(in) } out.Name = in.Name out.Kind = in.Kind if in.Causes != nil { out.Causes = make([]api.StatusCause, len(in.Causes)) for i := range in.Causes { if err := convert_v1_StatusCause_To_api_StatusCause(&in.Causes[i], &out.Causes[i], s); err != nil { return err } } } else { out.Causes = nil } out.RetryAfterSeconds = in.RetryAfterSeconds return nil } func convert_v1_TCPSocketAction_To_api_TCPSocketAction(in *TCPSocketAction, out *api.TCPSocketAction, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*TCPSocketAction))(in) } if err := s.Convert(&in.Port, &out.Port, 0); err != nil { return err } return nil } func convert_v1_ThirdPartyResourceData_To_api_ThirdPartyResourceData(in *ThirdPartyResourceData, out *api.ThirdPartyResourceData, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*ThirdPartyResourceData))(in) } if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil { return err } if err := 
convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil { return err } if err := s.Convert(&in.Data, &out.Data, 0); err != nil { return err } return nil } func convert_v1_TypeMeta_To_api_TypeMeta(in *TypeMeta, out *api.TypeMeta, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*TypeMeta))(in) } out.Kind = in.Kind out.APIVersion = in.APIVersion return nil } func convert_v1_Volume_To_api_Volume(in *Volume, out *api.Volume, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*Volume))(in) } out.Name = in.Name if err := convert_v1_VolumeSource_To_api_VolumeSource(&in.VolumeSource, &out.VolumeSource, s); err != nil { return err } return nil } func convert_v1_VolumeMount_To_api_VolumeMount(in *VolumeMount, out *api.VolumeMount, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*VolumeMount))(in) } out.Name = in.Name out.ReadOnly = in.ReadOnly out.MountPath = in.MountPath return nil } func convert_v1_VolumeSource_To_api_VolumeSource(in *VolumeSource, out *api.VolumeSource, s conversion.Scope) error { if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found { defaulting.(func(*VolumeSource))(in) } if in.HostPath != nil { out.HostPath = new(api.HostPathVolumeSource) if err := convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource(in.HostPath, out.HostPath, s); err != nil { return err } } else { out.HostPath = nil } if in.EmptyDir != nil { out.EmptyDir = new(api.EmptyDirVolumeSource) if err := convert_v1_EmptyDirVolumeSource_To_api_EmptyDirVolumeSource(in.EmptyDir, out.EmptyDir, s); err != nil { return err } } else { out.EmptyDir = nil } if in.GCEPersistentDisk != nil { out.GCEPersistentDisk = new(api.GCEPersistentDiskVolumeSource) if err := convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource(in.GCEPersistentDisk, out.GCEPersistentDisk, s); err != nil { return err } } else { out.GCEPersistentDisk = nil } if in.AWSElasticBlockStore != nil { out.AWSElasticBlockStore = new(api.AWSElasticBlockStoreVolumeSource) if err := convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource(in.AWSElasticBlockStore, out.AWSElasticBlockStore, s); err != nil { return err } } else { out.AWSElasticBlockStore = nil } if in.GitRepo != nil { out.GitRepo = new(api.GitRepoVolumeSource) if err := convert_v1_GitRepoVolumeSource_To_api_GitRepoVolumeSource(in.GitRepo, out.GitRepo, s); err != nil { return err } } else { out.GitRepo = nil } if in.Secret != nil { out.Secret = new(api.SecretVolumeSource) if err := convert_v1_SecretVolumeSource_To_api_SecretVolumeSource(in.Secret, out.Secret, s); err != nil { return err } } else { out.Secret = nil } if in.NFS != nil { out.NFS = new(api.NFSVolumeSource) if err := convert_v1_NFSVolumeSource_To_api_NFSVolumeSource(in.NFS, out.NFS, s); err != nil { return err } } else { out.NFS = nil } if in.ISCSI != nil { out.ISCSI = new(api.ISCSIVolumeSource) if err := convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource(in.ISCSI, out.ISCSI, s); err != nil { return err } } else { out.ISCSI = nil } if in.Glusterfs != nil { out.Glusterfs = new(api.GlusterfsVolumeSource) if err := convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource(in.Glusterfs, out.Glusterfs, s); err != nil { return err } } else { out.Glusterfs = nil } if in.PersistentVolumeClaim != nil { out.PersistentVolumeClaim = 
new(api.PersistentVolumeClaimVolumeSource) if err := convert_v1_PersistentVolumeClaimVolumeSource_To_api_PersistentVolumeClaimVolumeSource(in.PersistentVolumeClaim, out.PersistentVolumeClaim, s); err != nil { return err } } else { out.PersistentVolumeClaim = nil } if in.RBD != nil { out.RBD = new(api.RBDVolumeSource) if err := convert_v1_RBDVolumeSource_To_api_RBDVolumeSource(in.RBD, out.RBD, s); err != nil { return err } } else { out.RBD = nil } return nil } func init() { err := api.Scheme.AddGeneratedConversionFuncs( convert_api_AWSElasticBlockStoreVolumeSource_To_v1_AWSElasticBlockStoreVolumeSource, convert_api_Binding_To_v1_Binding, convert_api_Capabilities_To_v1_Capabilities, convert_api_ComponentCondition_To_v1_ComponentCondition, convert_api_ComponentStatusList_To_v1_ComponentStatusList, convert_api_ComponentStatus_To_v1_ComponentStatus, convert_api_ContainerPort_To_v1_ContainerPort, convert_api_ContainerStateRunning_To_v1_ContainerStateRunning, convert_api_ContainerStateTerminated_To_v1_ContainerStateTerminated, convert_api_ContainerStateWaiting_To_v1_ContainerStateWaiting, convert_api_ContainerState_To_v1_ContainerState, convert_api_ContainerStatus_To_v1_ContainerStatus, convert_api_Container_To_v1_Container, convert_api_DaemonList_To_v1_DaemonList, convert_api_DaemonSpec_To_v1_DaemonSpec, convert_api_DaemonStatus_To_v1_DaemonStatus, convert_api_Daemon_To_v1_Daemon, convert_api_DeleteOptions_To_v1_DeleteOptions, convert_api_EmptyDirVolumeSource_To_v1_EmptyDirVolumeSource, convert_api_EndpointAddress_To_v1_EndpointAddress, convert_api_EndpointPort_To_v1_EndpointPort, convert_api_EndpointSubset_To_v1_EndpointSubset, convert_api_EndpointsList_To_v1_EndpointsList, convert_api_Endpoints_To_v1_Endpoints, convert_api_EnvVarSource_To_v1_EnvVarSource, convert_api_EnvVar_To_v1_EnvVar, convert_api_EventList_To_v1_EventList, convert_api_EventSource_To_v1_EventSource, convert_api_Event_To_v1_Event, convert_api_ExecAction_To_v1_ExecAction, convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource, convert_api_GitRepoVolumeSource_To_v1_GitRepoVolumeSource, convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource, convert_api_HTTPGetAction_To_v1_HTTPGetAction, convert_api_Handler_To_v1_Handler, convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource, convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource, convert_api_Lifecycle_To_v1_Lifecycle, convert_api_LimitRangeItem_To_v1_LimitRangeItem, convert_api_LimitRangeList_To_v1_LimitRangeList, convert_api_LimitRangeSpec_To_v1_LimitRangeSpec, convert_api_LimitRange_To_v1_LimitRange, convert_api_ListMeta_To_v1_ListMeta, convert_api_ListOptions_To_v1_ListOptions, convert_api_List_To_v1_List, convert_api_LoadBalancerIngress_To_v1_LoadBalancerIngress, convert_api_LoadBalancerStatus_To_v1_LoadBalancerStatus, convert_api_LocalObjectReference_To_v1_LocalObjectReference, convert_api_NFSVolumeSource_To_v1_NFSVolumeSource, convert_api_NamespaceList_To_v1_NamespaceList, convert_api_NamespaceSpec_To_v1_NamespaceSpec, convert_api_NamespaceStatus_To_v1_NamespaceStatus, convert_api_Namespace_To_v1_Namespace, convert_api_NodeAddress_To_v1_NodeAddress, convert_api_NodeCondition_To_v1_NodeCondition, convert_api_NodeList_To_v1_NodeList, convert_api_NodeSpec_To_v1_NodeSpec, convert_api_NodeStatus_To_v1_NodeStatus, convert_api_NodeSystemInfo_To_v1_NodeSystemInfo, convert_api_Node_To_v1_Node, convert_api_ObjectFieldSelector_To_v1_ObjectFieldSelector, convert_api_ObjectMeta_To_v1_ObjectMeta, convert_api_ObjectReference_To_v1_ObjectReference, 
convert_api_PersistentVolumeClaimList_To_v1_PersistentVolumeClaimList, convert_api_PersistentVolumeClaimSpec_To_v1_PersistentVolumeClaimSpec, convert_api_PersistentVolumeClaimStatus_To_v1_PersistentVolumeClaimStatus, convert_api_PersistentVolumeClaimVolumeSource_To_v1_PersistentVolumeClaimVolumeSource, convert_api_PersistentVolumeClaim_To_v1_PersistentVolumeClaim, convert_api_PersistentVolumeList_To_v1_PersistentVolumeList, convert_api_PersistentVolumeSource_To_v1_PersistentVolumeSource, convert_api_PersistentVolumeSpec_To_v1_PersistentVolumeSpec, convert_api_PersistentVolumeStatus_To_v1_PersistentVolumeStatus, convert_api_PersistentVolume_To_v1_PersistentVolume, convert_api_PodAttachOptions_To_v1_PodAttachOptions, convert_api_PodCondition_To_v1_PodCondition, convert_api_PodExecOptions_To_v1_PodExecOptions, convert_api_PodList_To_v1_PodList, convert_api_PodLogOptions_To_v1_PodLogOptions, convert_api_PodProxyOptions_To_v1_PodProxyOptions, convert_api_PodStatusResult_To_v1_PodStatusResult, convert_api_PodStatus_To_v1_PodStatus, convert_api_PodTemplateList_To_v1_PodTemplateList, convert_api_PodTemplateSpec_To_v1_PodTemplateSpec, convert_api_PodTemplate_To_v1_PodTemplate, convert_api_Pod_To_v1_Pod, convert_api_Probe_To_v1_Probe, convert_api_RBDVolumeSource_To_v1_RBDVolumeSource, convert_api_RangeAllocation_To_v1_RangeAllocation, convert_api_ReplicationControllerList_To_v1_ReplicationControllerList, convert_api_ReplicationControllerStatus_To_v1_ReplicationControllerStatus, convert_api_ReplicationController_To_v1_ReplicationController, convert_api_ResourceQuotaList_To_v1_ResourceQuotaList, convert_api_ResourceQuotaSpec_To_v1_ResourceQuotaSpec, convert_api_ResourceQuotaStatus_To_v1_ResourceQuotaStatus, convert_api_ResourceQuota_To_v1_ResourceQuota, convert_api_ResourceRequirements_To_v1_ResourceRequirements, convert_api_SELinuxOptions_To_v1_SELinuxOptions, convert_api_SecretList_To_v1_SecretList, convert_api_SecretVolumeSource_To_v1_SecretVolumeSource, convert_api_Secret_To_v1_Secret, convert_api_SecurityContext_To_v1_SecurityContext, convert_api_SerializedReference_To_v1_SerializedReference, convert_api_ServiceAccountList_To_v1_ServiceAccountList, convert_api_ServiceAccount_To_v1_ServiceAccount, convert_api_ServiceList_To_v1_ServiceList, convert_api_ServicePort_To_v1_ServicePort, convert_api_ServiceSpec_To_v1_ServiceSpec, convert_api_ServiceStatus_To_v1_ServiceStatus, convert_api_Service_To_v1_Service, convert_api_StatusCause_To_v1_StatusCause, convert_api_StatusDetails_To_v1_StatusDetails, convert_api_Status_To_v1_Status, convert_api_TCPSocketAction_To_v1_TCPSocketAction, convert_api_ThirdPartyResourceData_To_v1_ThirdPartyResourceData, convert_api_TypeMeta_To_v1_TypeMeta, convert_api_VolumeMount_To_v1_VolumeMount, convert_api_VolumeSource_To_v1_VolumeSource, convert_api_Volume_To_v1_Volume, convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource, convert_v1_Binding_To_api_Binding, convert_v1_Capabilities_To_api_Capabilities, convert_v1_ComponentCondition_To_api_ComponentCondition, convert_v1_ComponentStatusList_To_api_ComponentStatusList, convert_v1_ComponentStatus_To_api_ComponentStatus, convert_v1_ContainerPort_To_api_ContainerPort, convert_v1_ContainerStateRunning_To_api_ContainerStateRunning, convert_v1_ContainerStateTerminated_To_api_ContainerStateTerminated, convert_v1_ContainerStateWaiting_To_api_ContainerStateWaiting, convert_v1_ContainerState_To_api_ContainerState, convert_v1_ContainerStatus_To_api_ContainerStatus, convert_v1_Container_To_api_Container, 
convert_v1_DaemonList_To_api_DaemonList, convert_v1_DaemonSpec_To_api_DaemonSpec, convert_v1_DaemonStatus_To_api_DaemonStatus, convert_v1_Daemon_To_api_Daemon, convert_v1_DeleteOptions_To_api_DeleteOptions, convert_v1_EmptyDirVolumeSource_To_api_EmptyDirVolumeSource, convert_v1_EndpointAddress_To_api_EndpointAddress, convert_v1_EndpointPort_To_api_EndpointPort, convert_v1_EndpointSubset_To_api_EndpointSubset, convert_v1_EndpointsList_To_api_EndpointsList, convert_v1_Endpoints_To_api_Endpoints, convert_v1_EnvVarSource_To_api_EnvVarSource, convert_v1_EnvVar_To_api_EnvVar, convert_v1_EventList_To_api_EventList, convert_v1_EventSource_To_api_EventSource, convert_v1_Event_To_api_Event, convert_v1_ExecAction_To_api_ExecAction, convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource, convert_v1_GitRepoVolumeSource_To_api_GitRepoVolumeSource, convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource, convert_v1_HTTPGetAction_To_api_HTTPGetAction, convert_v1_Handler_To_api_Handler, convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource, convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource, convert_v1_Lifecycle_To_api_Lifecycle, convert_v1_LimitRangeItem_To_api_LimitRangeItem, convert_v1_LimitRangeList_To_api_LimitRangeList, convert_v1_LimitRangeSpec_To_api_LimitRangeSpec, convert_v1_LimitRange_To_api_LimitRange, convert_v1_ListMeta_To_api_ListMeta, convert_v1_ListOptions_To_api_ListOptions, convert_v1_List_To_api_List, convert_v1_LoadBalancerIngress_To_api_LoadBalancerIngress, convert_v1_LoadBalancerStatus_To_api_LoadBalancerStatus, convert_v1_LocalObjectReference_To_api_LocalObjectReference, convert_v1_NFSVolumeSource_To_api_NFSVolumeSource, convert_v1_NamespaceList_To_api_NamespaceList, convert_v1_NamespaceSpec_To_api_NamespaceSpec, convert_v1_NamespaceStatus_To_api_NamespaceStatus, convert_v1_Namespace_To_api_Namespace, convert_v1_NodeAddress_To_api_NodeAddress, convert_v1_NodeCondition_To_api_NodeCondition, convert_v1_NodeList_To_api_NodeList, convert_v1_NodeSpec_To_api_NodeSpec, convert_v1_NodeStatus_To_api_NodeStatus, convert_v1_NodeSystemInfo_To_api_NodeSystemInfo, convert_v1_Node_To_api_Node, convert_v1_ObjectFieldSelector_To_api_ObjectFieldSelector, convert_v1_ObjectMeta_To_api_ObjectMeta, convert_v1_ObjectReference_To_api_ObjectReference, convert_v1_PersistentVolumeClaimList_To_api_PersistentVolumeClaimList, convert_v1_PersistentVolumeClaimSpec_To_api_PersistentVolumeClaimSpec, convert_v1_PersistentVolumeClaimStatus_To_api_PersistentVolumeClaimStatus, convert_v1_PersistentVolumeClaimVolumeSource_To_api_PersistentVolumeClaimVolumeSource, convert_v1_PersistentVolumeClaim_To_api_PersistentVolumeClaim, convert_v1_PersistentVolumeList_To_api_PersistentVolumeList, convert_v1_PersistentVolumeSource_To_api_PersistentVolumeSource, convert_v1_PersistentVolumeSpec_To_api_PersistentVolumeSpec, convert_v1_PersistentVolumeStatus_To_api_PersistentVolumeStatus, convert_v1_PersistentVolume_To_api_PersistentVolume, convert_v1_PodAttachOptions_To_api_PodAttachOptions, convert_v1_PodCondition_To_api_PodCondition, convert_v1_PodExecOptions_To_api_PodExecOptions, convert_v1_PodList_To_api_PodList, convert_v1_PodLogOptions_To_api_PodLogOptions, convert_v1_PodProxyOptions_To_api_PodProxyOptions, convert_v1_PodStatusResult_To_api_PodStatusResult, convert_v1_PodStatus_To_api_PodStatus, convert_v1_PodTemplateList_To_api_PodTemplateList, convert_v1_PodTemplateSpec_To_api_PodTemplateSpec, convert_v1_PodTemplate_To_api_PodTemplate, convert_v1_Pod_To_api_Pod, 
convert_v1_Probe_To_api_Probe, convert_v1_RBDVolumeSource_To_api_RBDVolumeSource, convert_v1_RangeAllocation_To_api_RangeAllocation, convert_v1_ReplicationControllerList_To_api_ReplicationControllerList, convert_v1_ReplicationControllerStatus_To_api_ReplicationControllerStatus, convert_v1_ReplicationController_To_api_ReplicationController, convert_v1_ResourceQuotaList_To_api_ResourceQuotaList, convert_v1_ResourceQuotaSpec_To_api_ResourceQuotaSpec, convert_v1_ResourceQuotaStatus_To_api_ResourceQuotaStatus, convert_v1_ResourceQuota_To_api_ResourceQuota, convert_v1_ResourceRequirements_To_api_ResourceRequirements, convert_v1_SELinuxOptions_To_api_SELinuxOptions, convert_v1_SecretList_To_api_SecretList, convert_v1_SecretVolumeSource_To_api_SecretVolumeSource, convert_v1_Secret_To_api_Secret, convert_v1_SecurityContext_To_api_SecurityContext, convert_v1_SerializedReference_To_api_SerializedReference, convert_v1_ServiceAccountList_To_api_ServiceAccountList, convert_v1_ServiceAccount_To_api_ServiceAccount, convert_v1_ServiceList_To_api_ServiceList, convert_v1_ServicePort_To_api_ServicePort, convert_v1_ServiceSpec_To_api_ServiceSpec, convert_v1_ServiceStatus_To_api_ServiceStatus, convert_v1_Service_To_api_Service, convert_v1_StatusCause_To_api_StatusCause, convert_v1_StatusDetails_To_api_StatusDetails, convert_v1_Status_To_api_Status, convert_v1_TCPSocketAction_To_api_TCPSocketAction, convert_v1_ThirdPartyResourceData_To_api_ThirdPartyResourceData, convert_v1_TypeMeta_To_api_TypeMeta, convert_v1_VolumeMount_To_api_VolumeMount, convert_v1_VolumeSource_To_api_VolumeSource, convert_v1_Volume_To_api_Volume, ) if err != nil { // If one of the conversion functions is malformed, detect it immediately. panic(err) } }<|fim▁end|>
} out.ClaimName = in.ClaimName out.ReadOnly = in.ReadOnly return nil
<|file_name|>flags.rs<|end_file_name|><|fim▁begin|>// This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. use base::prelude::*; use core::ops::{BitOr, Not, BitAnd}; use fmt::{Debug, Write}; use cty::{ c_int, c_uint, O_CLOEXEC, O_DIRECT, O_NONBLOCK, SPLICE_F_NONBLOCK, SPLICE_F_MORE, }; /// Pipe flags. #[derive(Pod, Eq)] pub struct PipeFlags(pub c_int); impl BitOr for PipeFlags { type Output = PipeFlags; fn bitor(self, other: PipeFlags) -> PipeFlags { PipeFlags(self.0 | other.0) } } impl BitAnd for PipeFlags { type Output = PipeFlags; fn bitand(self, other: PipeFlags) -> PipeFlags { PipeFlags(self.0 & other.0) } } impl Not for PipeFlags { type Output = PipeFlags; fn not(self) -> PipeFlags { PipeFlags(!self.0) } } /// Dummy flag with all flags unset. pub const PIPE_NONE: PipeFlags = PipeFlags(0); macro_rules! create_flags { ($($(#[$meta:meta])* flag $name:ident = $val:expr;)*) => { $($(#[$meta])* pub const $name: PipeFlags = PipeFlags($val);)* impl Debug for PipeFlags { fn fmt<W: Write>(&self, w: &mut W) -> Result { let mut first = true; $( if self.0 & $val != 0 { if !first { try!(w.write(b"|")); } first = false; try!(w.write_all(stringify!($name).as_bytes())); } )* if first { try!(w.write_all("PIPE_NONE".as_bytes())); } Ok(()) } } } } create_flags! { #[doc = "Close the pipe when `exec` is called.\n"] #[doc = "= See also"] #[doc = "* link:man:pipe(2) and O_CLOEXEC therein"] flag PIPE_CLOSE_ON_EXEC = O_CLOEXEC; #[doc = "Return an error instead of blocking.\n"] #[doc = "= See also"] #[doc = "* link:man:pipe(2) and O_NONBLOCK therein"] flag PIPE_DONT_BLOCK = O_NONBLOCK; #[doc = "Create a \"datagram\" pipe.\n"] #[doc = "= Remarks"] #[doc = "== Kernel versions"] #[doc = "The minimum required kernel version is 3.4."] #[doc = "= See also"] #[doc = "* link:man:open(2) and O_DIRECT therein"] flag PIPE_PACKETS = O_DIRECT; } impl PipeFlags { /// Sets a flag. /// /// [argument, flag] /// The flag to be set. pub fn set(&mut self, flag: PipeFlags) { self.0 |= flag.0 } /// Clears a flag. /// /// [argument, flag] /// The flag to be cleared. pub fn unset(&mut self, flag: PipeFlags) { self.0 &= !flag.0 } /// Returns whether a flag is set. /// /// [argument, flag] /// The flag to be checked. pub fn is_set(&self, flag: PipeFlags) -> bool { self.0 & flag.0 != 0 } } /// Tee flags. #[derive(Pod, Eq)] pub struct TeeFlags(pub c_uint); impl BitOr for TeeFlags { type Output = TeeFlags; fn bitor(self, other: TeeFlags) -> TeeFlags { TeeFlags(self.0 | other.0) } } impl BitAnd for TeeFlags { type Output = TeeFlags; fn bitand(self, other: TeeFlags) -> TeeFlags { TeeFlags(self.0 & other.0) } } impl Not for TeeFlags { type Output = TeeFlags; fn not(self) -> TeeFlags { TeeFlags(!self.0) } } /// Dummy flag with all flags unset. pub const TEE_NONE: TeeFlags = TeeFlags(0); macro_rules! create_flags { ($($(#[$meta:meta])* flag $name:ident = $val:expr;)*) => { $($(#[$meta])* pub const $name: TeeFlags = TeeFlags($val);)* impl Debug for TeeFlags { fn fmt<W: Write>(&self, w: &mut W) -> Result { let mut first = true; $( if self.0 & $val != 0 { if !first { try!(w.write(b"|")); } first = false; try!(w.write_all(stringify!($name).as_bytes())); } )* if first { try!(w.write_all("TEE_NONE".as_bytes())); } Ok(()) } } } } create_flags! 
{ #[doc = "Return an error instead of blocking.\n"] #[doc = "= See also"] #[doc = "* link:man:tee(2) and SPLICE_F_NONBLOCK therein"] flag TEE_DONT_BLOCK = SPLICE_F_NONBLOCK; } impl TeeFlags { /// Sets a flag. /// /// [argument, flag] /// The flag to be set. pub fn set(&mut self, flag: TeeFlags) { self.0 |= flag.0 } /// Clears a flag. /// /// [argument, flag] /// The flag to be cleared. pub fn unset(&mut self, flag: TeeFlags) { self.0 &= !flag.0 } /// Returns whether a flag is set. /// /// [argument, flag] /// The flag to be checked. pub fn is_set(&self, flag: TeeFlags) -> bool { self.0 & flag.0 != 0 } } /// Splice flags. #[derive(Pod, Eq)] pub struct SpliceFlags(pub c_uint);<|fim▁hole|>impl BitOr for SpliceFlags { type Output = SpliceFlags; fn bitor(self, other: SpliceFlags) -> SpliceFlags { SpliceFlags(self.0 | other.0) } } impl BitAnd for SpliceFlags { type Output = SpliceFlags; fn bitand(self, other: SpliceFlags) -> SpliceFlags { SpliceFlags(self.0 & other.0) } } impl Not for SpliceFlags { type Output = SpliceFlags; fn not(self) -> SpliceFlags { SpliceFlags(!self.0) } } /// Dummy flag with all flags unset. pub const SPLICE_NONE: SpliceFlags = SpliceFlags(0); macro_rules! create_flags { ($($(#[$meta:meta])* flag $name:ident = $val:expr;)*) => { $($(#[$meta])* pub const $name: SpliceFlags = SpliceFlags($val);)* impl Debug for SpliceFlags { fn fmt<W: Write>(&self, w: &mut W) -> Result { let mut first = true; $( if self.0 & $val != 0 { if !first { try!(w.write(b"|")); } first = false; try!(w.write_all(stringify!($name).as_bytes())); } )* if first { try!(w.write_all("SPLICE_NONE".as_bytes())); } Ok(()) } } } } create_flags! { #[doc = "Return an error instead of blocking.\n"] #[doc = "= See also"] #[doc = "* link:man:splice(2) and SPLICE_F_NONBLOCK therein"] flag SPLICE_DONT_BLOCK = SPLICE_F_NONBLOCK; #[doc = "When splicing to a socket, apply MSG_MORE semantics.\n"] #[doc = "= See also"] #[doc = "* link:man:splice(2) and SPLICE_F_MORE therein"] #[doc = "* link:man:send(2) and MSG_MORE therein"] flag SPLICE_MORE = SPLICE_F_MORE; } impl SpliceFlags { /// Sets a flag. /// /// [argument, flag] /// The flag to be set. pub fn set(&mut self, flag: SpliceFlags) { self.0 |= flag.0 } /// Clears a flag. /// /// [argument, flag] /// The flag to be cleared. pub fn unset(&mut self, flag: SpliceFlags) { self.0 &= !flag.0 } /// Returns whether a flag is set. /// /// [argument, flag] /// The flag to be checked. pub fn is_set(&self, flag: SpliceFlags) -> bool { self.0 & flag.0 != 0 } }<|fim▁end|>
<|file_name|>spec_version.py<|end_file_name|><|fim▁begin|># ~autogen spec_version spec_version = "spec: 0.9.3-pre-r2, kernel: v3.16.7-ckt16-7-ev3dev-ev3"<|fim▁hole|># ~autogen<|fim▁end|>
<|file_name|>trait-bounds-on-structs-and-enums-static.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. trait Trait { fn dummy(&self) { } }<|fim▁hole|> x: T, } static X: Foo<usize> = Foo { //~^ ERROR not implemented x: 1, }; fn main() { }<|fim▁end|>
struct Foo<T:Trait> {
<|file_name|>holder.js<|end_file_name|><|fim▁begin|><|fim▁hole|>version https://git-lfs.github.com/spec/v1 oid sha256:641860132ccb9772e708b19feb3d59bb6291f6c40eebbfcfa0982a4e8eeda219 size 69639<|fim▁end|>
<|file_name|>definitions.cpp<|end_file_name|><|fim▁begin|>#include "definitions.h" CustomWeakForm::CustomWeakForm(std::vector<std::string> newton_boundaries, double heatcap, double rho, double tau, double lambda, double alpha, double temp_ext, MeshFunctionSharedPtr<double> sln_prev_time, bool JFNK) : WeakForm<double>(1, JFNK)<|fim▁hole|> add_matrix_form(new JacobianFormVol(0, 0, heatcap, rho, lambda, tau)); // Jacobian forms - surface. add_matrix_form_surf(new JacobianFormSurf(0, 0, newton_boundaries, alpha, lambda)); // Residual forms - volumetric. ResidualFormVol* res_form = new ResidualFormVol(0, heatcap, rho, lambda, tau); add_vector_form(res_form); // Residual forms - surface. add_vector_form_surf(new ResidualFormSurf(0, newton_boundaries, alpha, lambda, temp_ext)); } double CustomWeakForm::JacobianFormVol::value(int n, double *wt, Func<double> *u_ext[], Func<double> *u, Func<double> *v, GeomVol<double> *e, Func<double> **ext) const { double result = 0; for (int i = 0; i < n; i++) result += wt[i] * (heatcap * rho * u->val[i] * v->val[i] / tau + lambda * (u->dx[i] * v->dx[i] + u->dy[i] * v->dy[i])); return result; } Ord CustomWeakForm::JacobianFormVol::ord(int n, double *wt, Func<Ord> *u_ext[], Func<Ord> *u, Func<Ord> *v, GeomVol<Ord> *e, Func<Ord> **ext) const { // Returning the sum of the degrees of the basis and test function plus two. return Ord(10); } MatrixFormVol<double>* CustomWeakForm::JacobianFormVol::clone() const { return new CustomWeakForm::JacobianFormVol(*this); } double CustomWeakForm::JacobianFormSurf::value(int n, double *wt, Func<double> *u_ext[], Func<double> *u, Func<double> *v, GeomSurf<double> *e, Func<double> **ext) const { double result = 0; for (int i = 0; i < n; i++) result += wt[i] * alpha * lambda * u->val[i] * v->val[i]; return result; } Ord CustomWeakForm::JacobianFormSurf::ord(int n, double *wt, Func<Ord> *u_ext[], Func<Ord> *u, Func<Ord> *v, GeomSurf<Ord> *e, Func<Ord> **ext) const { // Returning the sum of the degrees of the basis and test function plus two. return Ord(10); } MatrixFormSurf<double>* CustomWeakForm::JacobianFormSurf::clone() const { return new CustomWeakForm::JacobianFormSurf(*this); } double CustomWeakForm::ResidualFormVol::value(int n, double *wt, Func<double> *u_ext[], Func<double> *v, GeomVol<double> *e, Func<double> **ext) const { double result = 0; for (int i = 0; i < n; i++) result += wt[i] * (heatcap * rho * (u_ext[0]->val[i] - ext[0]->val[i]) * v->val[i] / tau + lambda * (u_ext[0]->dx[i] * v->dx[i] + u_ext[0]->dy[i] * v->dy[i])); return result; } Ord CustomWeakForm::ResidualFormVol::ord(int n, double *wt, Func<Ord> *u_ext[], Func<Ord> *v, GeomVol<Ord> *e, Func<Ord> **ext) const { // Returning the sum of the degrees of the test function and solution plus two. return Ord(10); } VectorFormVol<double>* CustomWeakForm::ResidualFormVol::clone() const { return new CustomWeakForm::ResidualFormVol(*this); } double CustomWeakForm::ResidualFormSurf::value(int n, double *wt, Func<double> *u_ext[], Func<double> *v, GeomSurf<double> *e, Func<double> **ext) const { double result = 0; for (int i = 0; i < n; i++) result += wt[i] * alpha * lambda * (u_ext[0]->val[i] - temp_ext) * v->val[i]; return result; } Ord CustomWeakForm::ResidualFormSurf::ord(int n, double *wt, Func<Ord> *u_ext[], Func<Ord> *v, GeomSurf<Ord> *e, Func<Ord> **ext) const { // Returning the sum of the degrees of the test function and solution plus two. 
return Ord(10); } VectorFormSurf<double>* CustomWeakForm::ResidualFormSurf::clone() const { return new CustomWeakForm::ResidualFormSurf(*this); }<|fim▁end|>
{ this->set_ext(sln_prev_time); // Jacobian forms - volumetric.
<|file_name|>envsuite.go<|end_file_name|><|fim▁begin|>package envsuite // Provides an EnvSuite type which makes sure this test suite gets an isolated // environment settings. Settings will be saved on start and then cleared, and // reset on tear down. import ( . "launchpad.net/gocheck" "os" "strings" ) type EnvSuite struct { environ []string } func (s *EnvSuite) SetUpSuite(c *C) { s.environ = os.Environ() }<|fim▁hole|> func (s *EnvSuite) TearDownTest(c *C) { for _, envstring := range s.environ { kv := strings.SplitN(envstring, "=", 2) os.Setenv(kv[0], kv[1]) } } func (s *EnvSuite) TearDownSuite(c *C) { }<|fim▁end|>
func (s *EnvSuite) SetUpTest(c *C) { os.Clearenv() }
<|file_name|>Modal.js<|end_file_name|><|fim▁begin|>import React, { Fragment, Children, cloneElement, useRef, useEffect } from 'react'; import { createPortal } from 'react-dom'; import PropTypes from 'prop-types'; import cx from 'classnames'; import idgen from './idgen'; import Button from './Button'; import { safeJSONStringify } from './utils'; const Modal = ({ actions, bottomSheet, children, fixedFooter, header, className, trigger, options, open, root, ...props }) => { const _modalRoot = useRef(null); const _modalInstance = useRef(null); const _modalRef = useRef(null); if (root === null) { console.warn( 'React Materialize: root should be a valid node element to render a Modal' ); } useEffect(() => { const modalRoot = _modalRoot.current; if (!_modalInstance.current) { _modalInstance.current = M.Modal.init(_modalRef.current, options); } return () => { if (root.contains(modalRoot)) { root.removeChild(modalRoot); } _modalInstance.current.destroy(); }; // deep comparing options object // eslint-disable-next-line react-hooks/exhaustive-deps }, [safeJSONStringify(options), root]); useEffect(() => { if (open) { showModal(); } else { hideModal(); } }, [open]); const showModal = e => { e && e.preventDefault(); _modalInstance.current && _modalInstance.current.open(); }; const hideModal = e => {<|fim▁hole|> _modalInstance.current && _modalInstance.current.close(); }; const classes = cx( 'modal', { 'modal-fixed-footer': fixedFooter, 'bottom-sheet': bottomSheet }, className ); const renderModalPortal = () => { if (!_modalRoot.current) { _modalRoot.current = document.createElement('div'); root.appendChild(_modalRoot.current); } return createPortal( <div className={classes} ref={_modalRef} {...props}> <div className="modal-content"> <h4>{header}</h4> {children} </div> <div className="modal-footer">{Children.toArray(actions)}</div> </div>, _modalRoot.current ); }; return ( <Fragment> {trigger && cloneElement(trigger, { onClick: showModal })} {renderModalPortal()} </Fragment> ); }; Modal.propTypes = { /** * Options * Object with options for modal */ options: PropTypes.shape({ /** * Opacity of the modal overlay. */ opacity: PropTypes.number, /** * Transition in duration in milliseconds. */ inDuration: PropTypes.number, /** * Transition out duration in milliseconds. */ outDuration: PropTypes.number, /** * Callback function called before modal is opened. */ onOpenStart: PropTypes.func, /** * Callback function called after modal is opened. */ onOpenEnd: PropTypes.func, /** * Callback function called before modal is closed. */ onCloseStart: PropTypes.func, /** * Callback function called after modal is closed. */ onCloseEnd: PropTypes.func, /** * Prevent page from scrolling while modal is open. */ preventScrolling: PropTypes.bool, /** * Allow modal to be dismissed by keyboard or overlay click. 
*/ dismissible: PropTypes.bool, /** * Starting top offset */ startingTop: PropTypes.string, /** * Ending top offset */ endingTop: PropTypes.string }), /** * Extra class to added to the Modal */ className: PropTypes.string, /** * Modal is opened on mount * @default false */ open: PropTypes.bool, /** * BottomSheet styled modal * @default false */ bottomSheet: PropTypes.bool, /** * Component children */ children: PropTypes.node, /** * FixedFooter styled modal * @default false */ fixedFooter: PropTypes.bool, /** * Text to shown in the header of the modal */ header: PropTypes.string, /** * The button to trigger the display of the modal */ trigger: PropTypes.node, /** * The buttons to show in the footer of the modal * @default <Button>Close</Button> */ actions: PropTypes.node, /** * The ID to trigger the modal opening/closing */ id: PropTypes.string, /** * Root node where modal should be injected * @default document.body */ root: PropTypes.any }; Modal.defaultProps = { get id() { return `Modal-${idgen()}`; }, root: typeof window !== 'undefined' ? document.body : null, open: false, options: { opacity: 0.5, inDuration: 250, outDuration: 250, onOpenStart: null, onOpenEnd: null, onCloseStart: null, onCloseEnd: null, preventScrolling: true, dismissible: true, startingTop: '4%', endingTop: '10%' }, fixedFooter: false, bottomSheet: false, actions: [ <Button waves="green" modal="close" flat> Close </Button> ] }; export default Modal;<|fim▁end|>
e && e.preventDefault();
<|file_name|>widgets.py<|end_file_name|><|fim▁begin|>from django import forms from django.conf import settings from django.core.urlresolvers import reverse from django.template.loader import render_to_string from django.utils.safestring import mark_safe from django.utils.html import conditional_escape from django.utils.encoding import force_unicode from django.utils import simplejson from django.core.exceptions import ImproperlyConfigured from django.forms.util import flatatt json_encode = simplejson.JSONEncoder().encode DEFAULT_CONFIG = { 'skin': 'django', 'toolbar': 'Full', 'height': 291, 'width': 835, 'filebrowserWindowWidth': 940, 'filebrowserWindowHeight': 725, } class CKEditorWidget(forms.Textarea): """ Widget providing CKEditor for Rich Text Editing. Supports direct image uploads and embed. """ class Media: try: js = ( settings.STATIC_URL + 'ckeditor/ckeditor/ckeditor.js', )<|fim▁hole|> except AttributeError: raise ImproperlyConfigured("django-ckeditor requires \ CKEDITOR_MEDIA_PREFIX setting. This setting specifies a \ URL prefix to the ckeditor JS and CSS media (not \ uploaded media). Make sure to use a trailing slash: \ CKEDITOR_MEDIA_PREFIX = '/media/ckeditor/'") def __init__(self, config_name='default', *args, **kwargs): super(CKEditorWidget, self).__init__(*args, **kwargs) # Setup config from defaults. self.config = DEFAULT_CONFIG.copy() # Try to get valid config from settings. configs = getattr(settings, 'CKEDITOR_CONFIGS', None) if configs != None: if isinstance(configs, dict): # Make sure the config_name exists. if config_name in configs: config = configs[config_name] # Make sure the configuration is a dictionary. if not isinstance(config, dict): raise ImproperlyConfigured('CKEDITOR_CONFIGS["%s"] \ setting must be a dictionary type.' % \ config_name) # Override defaults with settings config. self.config.update(config) else: raise ImproperlyConfigured("No configuration named '%s' \ found in your CKEDITOR_CONFIGS setting." % \ config_name) else: raise ImproperlyConfigured('CKEDITOR_CONFIGS setting must be a\ dictionary type.') def render(self, name, value, attrs={}): if value is None: value = '' final_attrs = self.build_attrs(attrs, name=name) self.config['filebrowserUploadUrl'] = reverse('ckeditor_upload') self.config['filebrowserBrowseUrl'] = reverse('ckeditor_browse') return mark_safe(render_to_string('ckeditor/widget.html', { 'final_attrs': flatatt(final_attrs), 'value': conditional_escape(force_unicode(value)), 'id': final_attrs['id'], 'config': json_encode(self.config) }) )<|fim▁end|>
<|file_name|>converter.py<|end_file_name|><|fim▁begin|>from string import digits, ascii_letters valid_values = list(digits + ascii_letters) # приводим строку к списку radix = len(valid_values) #основание def convert(number):<|fim▁hole|> number //= radix return ''.join(result) def inverse(number): result = 0 for p, i in enumerate(reversed(number)): n = valid_values.index(i) # получаем индекс нужного нам элемента списка result += n * radix ** p return result<|fim▁end|>
result =[] #будем сюда складывать остатки от деления while number: result.insert(0,valid_values[number % radix])
<|file_name|>categorie.provider.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core'; import { ServiceProvider } from './service.provider'; import { Observable } from 'rxjs/Observable'; /** * Model imports */ import { Categorie } from '../model/Categorie'; //service fournit les Categories @Injectable() export class CategorieProvider { categories : Categorie[]; <|fim▁hole|> constructor(private service : ServiceProvider) { this.categories = new Array(); } getFirstCategories() : Observable<Categorie[]>{ return this.service.getFirstCategories() } getCategorieEnfants(unix : string) : Observable<Categorie[]> { return this.service.getCategorieEnfants(unix); } getCategorie(unix : string) : Observable<Categorie>{ return this.service.getCategorie(unix); } }<|fim▁end|>
<|file_name|>makeseeds.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # Copyright (c) 2013-2020 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # # Generate seeds.txt from Pieter's DNS seeder # import re import sys import dns.resolver import collections NSEEDS=512 MAX_SEEDS_PER_ASN=2 MIN_BLOCKS = 337600 # These are hosts that have been observed to be behaving strangely (e.g. # aggressively connecting to every node). with open("suspicious_hosts.txt", mode="r", encoding="utf-8") as f: SUSPICIOUS_HOSTS = {s.strip() for s in f if s.strip()} PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$") PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$") PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$") PATTERN_AGENT = re.compile( r"^/Satoshi:(" r"0.14.(0|1|2|3|99)|" r"0.15.(0|1|2|99)|" r"0.16.(0|1|2|3|99)|" r"0.17.(0|0.1|1|2|99)|" r"0.18.(0|1|99)|" r"0.19.(0|1|99)|" r"0.20.(0|1|99)|" r"0.21.99" r")") def parseline(line): sline = line.split() if len(sline) < 11: return None m = PATTERN_IPV4.match(sline[0]) sortkey = None ip = None if m is None: m = PATTERN_IPV6.match(sline[0]) if m is None: m = PATTERN_ONION.match(sline[0]) if m is None: return None else: net = 'onion' ipstr = sortkey = m.group(1) port = int(m.group(2)) else: net = 'ipv6' if m.group(1) in ['::']: # Not interested in localhost return None ipstr = m.group(1) sortkey = ipstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds port = int(m.group(2)) else: # Do IPv4 sanity check ip = 0 for i in range(0,4): if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255: return None ip = ip + (int(m.group(i+2)) << (8*(3-i))) if ip == 0: return None net = 'ipv4' sortkey = ip ipstr = m.group(1) port = int(m.group(6)) # Skip bad results. if sline[1] == 0: return None # Extract uptime %. uptime30 = float(sline[7][:-1]) # Extract Unix timestamp of last success. lastsuccess = int(sline[2]) # Extract protocol version. version = int(sline[10]) # Extract user agent. agent = sline[11][1:-1] # Extract service flags. service = int(sline[9], 16) # Extract blocks. blocks = int(sline[8]) # Construct result. return { 'net': net, 'ip': ipstr, 'port': port, 'ipnum': ip, 'uptime': uptime30, 'lastsuccess': lastsuccess, 'version': version, 'agent': agent, 'service': service, 'blocks': blocks, 'sortkey': sortkey, } def dedup(ips): '''deduplicate by address,port''' d = {} for ip in ips: d[ip['ip'],ip['port']] = ip return list(d.values()) def filtermultiport(ips): '''Filter out hosts with more nodes per IP''' hist = collections.defaultdict(list) for ip in ips: hist[ip['sortkey']].append(ip) return [value[0] for (key,value) in list(hist.items()) if len(value)==1] def lookup_asn(net, ip): ''' Look up the asn for an IP (4 or 6) address by querying cymru.com, or None if it could not be found. ''' try: if net == 'ipv4': ipaddr = ip prefix = '.origin' else: # http://www.team-cymru.com/IP-ASN-mapping.html res = str() # 2001:4860:b002:23::68 for nb in ip.split(':')[:4]: # pick the first 4 nibbles for c in nb.zfill(4): # right padded with '0' res += c + '.' 
# 2001 4860 b002 0023 ipaddr = res.rstrip('.') # 2.0.0.1.4.8.6.0.b.0.0.2.0.0.2.3 prefix = '.origin6' asn = int([x.to_text() for x in dns.resolver.resolve('.'.join( reversed(ipaddr.split('.'))) + prefix + '.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0]) return asn except Exception: sys.stderr.write('ERR: Could not resolve ASN for "' + ip + '"\n') return None # Based on Greg Maxwell's seed_filter.py def filterbyasn(ips, max_per_asn, max_per_net): # Sift out ips by type ips_ipv46 = [ip for ip in ips if ip['net'] in ['ipv4', 'ipv6']] ips_onion = [ip for ip in ips if ip['net'] == 'onion'] # Filter IPv46 by ASN, and limit to max_per_net per network result = [] net_count = collections.defaultdict(int) asn_count = collections.defaultdict(int) for ip in ips_ipv46: if net_count[ip['net']] == max_per_net: continue asn = lookup_asn(ip['net'], ip['ip']) if asn is None or asn_count[asn] == max_per_asn: continue asn_count[asn] += 1 net_count[ip['net']] += 1 result.append(ip) # Add back Onions (up to max_per_net) result.extend(ips_onion[0:max_per_net]) return result def ip_stats(ips): hist = collections.defaultdict(int) for ip in ips: if ip is not None: hist[ip['net']] += 1 return '%6d %6d %6d' % (hist['ipv4'], hist['ipv6'], hist['onion']) def main(): lines = sys.stdin.readlines() ips = [parseline(line) for line in lines] print('\x1b[7m IPv4 IPv6 Onion Pass \x1b[0m', file=sys.stderr) print('%s Initial' % (ip_stats(ips)), file=sys.stderr) # Skip entries with invalid address. ips = [ip for ip in ips if ip is not None] print('%s Skip entries with invalid address' % (ip_stats(ips)), file=sys.stderr) # Skip duplicates (in case multiple seeds files were concatenated) ips = dedup(ips) print('%s After removing duplicates' % (ip_stats(ips)), file=sys.stderr) # Skip entries from suspicious hosts. ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS] print('%s Skip entries from suspicious hosts' % (ip_stats(ips)), file=sys.stderr) # Enforce minimal number of blocks. ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS] print('%s Enforce minimal number of blocks' % (ip_stats(ips)), file=sys.stderr) # Require service bit 1. ips = [ip for ip in ips if (ip['service'] & 1) == 1] print('%s Require service bit 1' % (ip_stats(ips)), file=sys.stderr) # Require at least 50% 30-day uptime for clearnet, 10% for onion. req_uptime = { 'ipv4': 50, 'ipv6': 50, 'onion': 10, } ips = [ip for ip in ips if ip['uptime'] > req_uptime[ip['net']]] print('%s Require minimum uptime' % (ip_stats(ips)), file=sys.stderr) # Require a known and recent user agent. ips = [ip for ip in ips if PATTERN_AGENT.match(ip['agent'])] print('%s Require a known and recent user agent' % (ip_stats(ips)), file=sys.stderr) # Sort by availability (and use last success as tie breaker)<|fim▁hole|> print('%s Filter out hosts with multiple bitcoin ports' % (ip_stats(ips)), file=sys.stderr) # Look up ASNs and limit results, both per ASN and globally. ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS) print('%s Look up ASNs and limit results per ASN and per net' % (ip_stats(ips)), file=sys.stderr) # Sort the results by IP address (for deterministic output). ips.sort(key=lambda x: (x['net'], x['sortkey'])) for ip in ips: if ip['net'] == 'ipv6': print('[%s]:%i' % (ip['ip'], ip['port'])) else: print('%s:%i' % (ip['ip'], ip['port'])) if __name__ == '__main__': main()<|fim▁end|>
ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True) # Filter out hosts with multiple bitcoin ports, these are likely abusive ips = filtermultiport(ips)
<|file_name|>std-sync-right-kind-impls.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // pretty-expanded FIXME #23616 #![feature(static_mutex, static_rwlock, static_condvar)] #![feature(arc_weak, semaphore)] use std::sync; fn assert_both<T: Sync + Send>() {}<|fim▁hole|> fn main() { assert_both::<sync::StaticMutex>(); assert_both::<sync::StaticCondvar>(); assert_both::<sync::StaticRwLock>(); assert_both::<sync::Mutex<()>>(); assert_both::<sync::Condvar>(); assert_both::<sync::RwLock<()>>(); assert_both::<sync::Semaphore>(); assert_both::<sync::Barrier>(); assert_both::<sync::Arc<()>>(); assert_both::<sync::Weak<()>>(); assert_both::<sync::Once>(); }<|fim▁end|>
<|file_name|>manifest.rs<|end_file_name|><|fim▁begin|>use std::fmt; use std::path::{PathBuf, Path}; use semver::Version; use rustc_serialize::{Encoder, Encodable}; use core::{Dependency, PackageId, Summary, SourceId, PackageIdSpec}; use core::WorkspaceConfig; use core::package_id::Metadata; pub enum EitherManifest { Real(Manifest), Virtual(VirtualManifest), } /// Contains all the information about a package, as loaded from a Cargo.toml. #[derive(Clone, Debug)] pub struct Manifest { summary: Summary, targets: Vec<Target>, links: Option<String>, warnings: Vec<String>, exclude: Vec<String>, include: Vec<String>, metadata: ManifestMetadata, profiles: Profiles, publish: bool, replace: Vec<(PackageIdSpec, Dependency)>, workspace: WorkspaceConfig, } #[derive(Clone, Debug)] pub struct VirtualManifest { replace: Vec<(PackageIdSpec, Dependency)>, workspace: WorkspaceConfig, profiles: Profiles, } /// General metadata about a package which is just blindly uploaded to the /// registry. /// /// Note that many of these fields can contain invalid values such as the /// homepage, repository, documentation, or license. These fields are not /// validated by cargo itself, but rather it is up to the registry when uploaded /// to validate these fields. Cargo will itself accept any valid TOML /// specification for these values. #[derive(PartialEq, Clone, Debug)] pub struct ManifestMetadata { pub authors: Vec<String>, pub keywords: Vec<String>, pub license: Option<String>, pub license_file: Option<String>, pub description: Option<String>, // not markdown pub readme: Option<String>, // file, not contents pub homepage: Option<String>, // url pub repository: Option<String>, // url pub documentation: Option<String>, // url } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum LibKind { Lib, Rlib, Dylib, ProcMacro, Other(String), } impl LibKind { pub fn from_str(string: &str) -> LibKind { match string { "lib" => LibKind::Lib, "rlib" => LibKind::Rlib, "dylib" => LibKind::Dylib, "procc-macro" => LibKind::ProcMacro, s => LibKind::Other(s.to_string()), } } /// Returns the argument suitable for `--crate-type` to pass to rustc. pub fn crate_type(&self) -> &str { match *self { LibKind::Lib => "lib", LibKind::Rlib => "rlib", LibKind::Dylib => "dylib", LibKind::ProcMacro => "proc-macro", LibKind::Other(ref s) => s, } } pub fn linkable(&self) -> bool { match *self { LibKind::Lib | LibKind::Rlib | LibKind::Dylib | LibKind::ProcMacro => true, LibKind::Other(..) 
=> false, } } } #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub enum TargetKind { Lib(Vec<LibKind>), Bin, Test, Bench, Example, CustomBuild, } impl Encodable for TargetKind { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { match *self { TargetKind::Lib(ref kinds) => { kinds.iter().map(|k| k.crate_type()).collect() } TargetKind::Bin => vec!["bin"], TargetKind::Example => vec!["example"], TargetKind::Test => vec!["test"], TargetKind::CustomBuild => vec!["custom-build"], TargetKind::Bench => vec!["bench"], }.encode(s) } } #[derive(RustcEncodable, RustcDecodable, Clone, PartialEq, Eq, Debug, Hash)] pub struct Profile { pub opt_level: String, pub lto: bool, pub codegen_units: Option<u32>, // None = use rustc default pub rustc_args: Option<Vec<String>>, pub rustdoc_args: Option<Vec<String>>, pub debuginfo: bool, pub debug_assertions: bool, pub rpath: bool, pub test: bool, pub doc: bool, pub run_custom_build: bool, pub panic: Option<String>, } #[derive(Default, Clone, Debug, PartialEq, Eq)] pub struct Profiles { pub release: Profile, pub dev: Profile, pub test: Profile, pub test_deps: Profile, pub bench: Profile, pub bench_deps: Profile, pub doc: Profile, pub custom_build: Profile, } /// Information about a binary, a library, an example, etc. that is part of the /// package.<|fim▁hole|> name: String, src_path: PathBuf, metadata: Option<Metadata>, tested: bool, benched: bool, doc: bool, doctest: bool, harness: bool, // whether to use the test harness (--test) for_host: bool, } #[derive(RustcEncodable)] struct SerializedTarget<'a> { kind: &'a TargetKind, name: &'a str, src_path: &'a str, } impl Encodable for Target { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { SerializedTarget { kind: &self.kind, name: &self.name, src_path: &self.src_path.display().to_string(), }.encode(s) } } impl Manifest { pub fn new(summary: Summary, targets: Vec<Target>, exclude: Vec<String>, include: Vec<String>, links: Option<String>, metadata: ManifestMetadata, profiles: Profiles, publish: bool, replace: Vec<(PackageIdSpec, Dependency)>, workspace: WorkspaceConfig) -> Manifest { Manifest { summary: summary, targets: targets, warnings: Vec::new(), exclude: exclude, include: include, links: links, metadata: metadata, profiles: profiles, publish: publish, replace: replace, workspace: workspace, } } pub fn dependencies(&self) -> &[Dependency] { self.summary.dependencies() } pub fn exclude(&self) -> &[String] { &self.exclude } pub fn include(&self) -> &[String] { &self.include } pub fn metadata(&self) -> &ManifestMetadata { &self.metadata } pub fn name(&self) -> &str { self.package_id().name() } pub fn package_id(&self) -> &PackageId { self.summary.package_id() } pub fn summary(&self) -> &Summary { &self.summary } pub fn targets(&self) -> &[Target] { &self.targets } pub fn version(&self) -> &Version { self.package_id().version() } pub fn warnings(&self) -> &[String] { &self.warnings } pub fn profiles(&self) -> &Profiles { &self.profiles } pub fn publish(&self) -> bool { self.publish } pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { &self.replace } pub fn links(&self) -> Option<&str> { self.links.as_ref().map(|s| &s[..]) } pub fn workspace_config(&self) -> &WorkspaceConfig { &self.workspace } pub fn add_warning(&mut self, s: String) { self.warnings.push(s) } pub fn set_summary(&mut self, summary: Summary) { self.summary = summary; } pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Manifest { Manifest { summary: self.summary.map_source(to_replace, 
replace_with), ..self } } } impl VirtualManifest { pub fn new(replace: Vec<(PackageIdSpec, Dependency)>, workspace: WorkspaceConfig, profiles: Profiles) -> VirtualManifest { VirtualManifest { replace: replace, workspace: workspace, profiles: profiles, } } pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { &self.replace } pub fn workspace_config(&self) -> &WorkspaceConfig { &self.workspace } pub fn profiles(&self) -> &Profiles { &self.profiles } } impl Target { fn blank() -> Target { Target { kind: TargetKind::Bin, name: String::new(), src_path: PathBuf::new(), metadata: None, doc: false, doctest: false, harness: true, for_host: false, tested: true, benched: true, } } pub fn lib_target(name: &str, crate_targets: Vec<LibKind>, src_path: &Path, metadata: Metadata) -> Target { Target { kind: TargetKind::Lib(crate_targets), name: name.to_string(), src_path: src_path.to_path_buf(), metadata: Some(metadata), doctest: true, doc: true, ..Target::blank() } } pub fn bin_target(name: &str, src_path: &Path, metadata: Option<Metadata>) -> Target { Target { kind: TargetKind::Bin, name: name.to_string(), src_path: src_path.to_path_buf(), metadata: metadata, doc: true, ..Target::blank() } } /// Builds a `Target` corresponding to the `build = "build.rs"` entry. pub fn custom_build_target(name: &str, src_path: &Path, metadata: Option<Metadata>) -> Target { Target { kind: TargetKind::CustomBuild, name: name.to_string(), src_path: src_path.to_path_buf(), metadata: metadata, for_host: true, benched: false, tested: false, ..Target::blank() } } pub fn example_target(name: &str, src_path: &Path) -> Target { Target { kind: TargetKind::Example, name: name.to_string(), src_path: src_path.to_path_buf(), benched: false, ..Target::blank() } } pub fn test_target(name: &str, src_path: &Path, metadata: Metadata) -> Target { Target { kind: TargetKind::Test, name: name.to_string(), src_path: src_path.to_path_buf(), metadata: Some(metadata), benched: false, ..Target::blank() } } pub fn bench_target(name: &str, src_path: &Path, metadata: Metadata) -> Target { Target { kind: TargetKind::Bench, name: name.to_string(), src_path: src_path.to_path_buf(), metadata: Some(metadata), tested: false, ..Target::blank() } } pub fn name(&self) -> &str { &self.name } pub fn crate_name(&self) -> String { self.name.replace("-", "_") } pub fn src_path(&self) -> &Path { &self.src_path } pub fn metadata(&self) -> Option<&Metadata> { self.metadata.as_ref() } pub fn kind(&self) -> &TargetKind { &self.kind } pub fn tested(&self) -> bool { self.tested } pub fn harness(&self) -> bool { self.harness } pub fn documented(&self) -> bool { self.doc } pub fn for_host(&self) -> bool { self.for_host } pub fn benched(&self) -> bool { self.benched } pub fn doctested(&self) -> bool { self.doctest && match self.kind { TargetKind::Lib(ref kinds) => { kinds.contains(&LibKind::Rlib) || kinds.contains(&LibKind::Lib) } _ => false, } } pub fn allows_underscores(&self) -> bool { self.is_bin() || self.is_example() || self.is_custom_build() } pub fn is_lib(&self) -> bool { match self.kind { TargetKind::Lib(_) => true, _ => false } } pub fn linkable(&self) -> bool { match self.kind { TargetKind::Lib(ref kinds) => { kinds.iter().any(|k| k.linkable()) } _ => false } } pub fn is_bin(&self) -> bool { self.kind == TargetKind::Bin } pub fn is_example(&self) -> bool { self.kind == TargetKind::Example } pub fn is_test(&self) -> bool { self.kind == TargetKind::Test } pub fn is_bench(&self) -> bool { self.kind == TargetKind::Bench } pub fn is_custom_build(&self) -> bool { 
self.kind == TargetKind::CustomBuild } /// Returns the arguments suitable for `--crate-type` to pass to rustc. pub fn rustc_crate_types(&self) -> Vec<&str> { match self.kind { TargetKind::Lib(ref kinds) => { kinds.iter().map(|kind| kind.crate_type()).collect() }, TargetKind::CustomBuild | TargetKind::Bench | TargetKind::Test | TargetKind::Example | TargetKind::Bin => vec!["bin"], } } pub fn can_lto(&self) -> bool { match self.kind { TargetKind::Lib(ref v) => { !v.contains(&LibKind::Rlib) && !v.contains(&LibKind::Dylib) && !v.contains(&LibKind::Lib) } _ => true, } } pub fn set_tested(&mut self, tested: bool) -> &mut Target { self.tested = tested; self } pub fn set_benched(&mut self, benched: bool) -> &mut Target { self.benched = benched; self } pub fn set_doctest(&mut self, doctest: bool) -> &mut Target { self.doctest = doctest; self } pub fn set_for_host(&mut self, for_host: bool) -> &mut Target { self.for_host = for_host; self } pub fn set_harness(&mut self, harness: bool) -> &mut Target { self.harness = harness; self } pub fn set_doc(&mut self, doc: bool) -> &mut Target { self.doc = doc; self } } impl fmt::Display for Target { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.kind { TargetKind::Lib(..) => write!(f, "Target(lib)"), TargetKind::Bin => write!(f, "Target(bin: {})", self.name), TargetKind::Test => write!(f, "Target(test: {})", self.name), TargetKind::Bench => write!(f, "Target(bench: {})", self.name), TargetKind::Example => write!(f, "Target(example: {})", self.name), TargetKind::CustomBuild => write!(f, "Target(script)"), } } } impl Profile { pub fn default_dev() -> Profile { Profile { debuginfo: true, debug_assertions: true, ..Profile::default() } } pub fn default_release() -> Profile { Profile { opt_level: "3".to_string(), debuginfo: false, ..Profile::default() } } pub fn default_test() -> Profile { Profile { test: true, ..Profile::default_dev() } } pub fn default_bench() -> Profile { Profile { test: true, ..Profile::default_release() } } pub fn default_doc() -> Profile { Profile { doc: true, ..Profile::default_dev() } } pub fn default_custom_build() -> Profile { Profile { run_custom_build: true, ..Profile::default_dev() } } } impl Default for Profile { fn default() -> Profile { Profile { opt_level: "0".to_string(), lto: false, codegen_units: None, rustc_args: None, rustdoc_args: None, debuginfo: false, debug_assertions: false, rpath: false, test: false, doc: false, run_custom_build: false, panic: None, } } } impl fmt::Display for Profile { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.test { write!(f, "Profile(test)") } else if self.doc { write!(f, "Profile(doc)") } else if self.run_custom_build { write!(f, "Profile(run)") } else { write!(f, "Profile(build)") } } }<|fim▁end|>
#[derive(Clone, Hash, PartialEq, Eq, Debug)] pub struct Target { kind: TargetKind,
<|file_name|>test_processes.py<|end_file_name|><|fim▁begin|>from .__meta__ import * from tracer.resources.processes import Processes, Process from tracer.resources.collections import ProcessesCollection import os import subprocess @unittest.skipIf(True, "@TODO Create Mock for Processes class") class TestProcesses(unittest.TestCase): def test_children(self): process = Processes.all()[0] children = process.children() self.assertIsInstance(children, ProcessesCollection) for child in children: self.assertIsInstance(child, Process) def test_unique_process(self): process = Process(os.getpid()) parent = Process(os.getppid()) self.assertIs(process, Process(os.getpid())) self.assertIs(parent, process.parent()) self.assertIn(process, parent.children()) Process.reset_cache() process2 = Process(os.getpid()) self.assertEqual(process, process2) self.assertIsNot(process, process2) def test_process_caching(self): process = Process(os.getpid()) # Populate the cache entry for children process.children() child = subprocess.Popen(os.sys.executable, stdin=subprocess.PIPE) self.assertEqual(0, len(process.children())) process.rebuild_cache() self.assertEqual(1, len(process.children()))<|fim▁hole|> child.terminate()<|fim▁end|>
<|file_name|>0068_iaticheck.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import django.db.models.deletion from django.db import models, migrations import akvo.rsr.fields class Migration(migrations.Migration): dependencies = [ ('rsr', '0067_auto_20160412_1858'), ] operations = [ migrations.CreateModel( name='IatiCheck', fields=[<|fim▁hole|> ('description', akvo.rsr.fields.ValidXMLTextField(verbose_name='description')), ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='iati_checks', verbose_name='project', to='rsr.Project')), ], options={ 'verbose_name': 'IATI check', 'verbose_name_plural': 'IATI checks', }, bases=(models.Model,), ), ]<|fim▁end|>
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('status', models.PositiveSmallIntegerField(verbose_name='status')),
<|file_name|>appconfig.py<|end_file_name|><|fim▁begin|>import os import yaml MONGO_USERNAME = os.getenv('MONGO_USERNAME', None) MONGO_PASSWORD = os.getenv('MONGO_PASSWORD', None) MONGODB_HOST = os.getenv('MONGODB_HOST', '127.0.0.1') MONGODB_PORT = int(os.getenv('MONGODB_PORT', '27017')) MONGODB_SERVERS = os.getenv('MONGODB_SERVERS') \ or '{}:{}'.format(MONGODB_HOST, MONGODB_PORT) MONGODB_DEFAULT_URL = 'mongodb://{}'.format(MONGODB_SERVERS) MONGO_URL = os.getenv('MONGO_URL') or MONGODB_DEFAULT_URL MONGO_INCLUDES = os.getenv('MONGO_INCLUDES', '') ES_URL = os.getenv('ES_URL', 'http://localhost:9200') ES_INDEXES = yaml.load(os.getenv('ES_INDEXES') or '{}') ES_TIMEOUT_SECONDS = int(os.getenv('ES_TIMEOUT_SECONDS', '100')) LOG_VERBOSITY = int(os.getenv('LOG_VERBOSITY', 2)) <|fim▁hole|> 'es': { 'url': ES_URL, 'indexes': ES_INDEXES }, 'mongo-connector': { 'mainAddress': MONGO_URL, 'authentication': { 'adminUsername': MONGO_USERNAME, 'password': MONGO_PASSWORD }, 'namespaces': { 'include': MONGO_INCLUDES.split(','), }, 'timezoneAware': True, 'docManagers': [ { 'docManager': 'elastic_doc_manager', 'targetURL': ES_URL, "args": { "clientOptions": { "timeout": ES_TIMEOUT_SECONDS } } } ], 'logging': { 'type': 'stream' }, 'verbosity': LOG_VERBOSITY, 'continueOnError': True }, } CONFIG_LOCATION = os.getenv('CONFIG_LOCATION')<|fim▁end|>
MONGO_CONNECTOR_CONFIG = 'mongo-connector.json' DEFAULTS = {
<|file_name|>Iterator.js<|end_file_name|><|fim▁begin|>/* * ! JSRT JavaScript Library 0.1.1 [email protected] * * Copyright 2008, 2014 Atom Union, Inc. Released under the MIT license * * Date: Feb 11, 2014 */ Class.forName({ name: "class js.util.Iterator extends Object", "private _element": null, "private _cursor": 0, "private _lastRet": -1, Iterator: function(element) { this._element = element || []; }, hasNext: function() { return this._cursor < this._element.size(); }, next: function() { try { var next = this._element.get(this._cursor); this._lastRet = this._cursor++; return next; } catch (e) { throw new js.lang.IndexOutOfBoundsException("Index: " + this._cursor + ", Size: " + this._element.size() + ",Message:" + e.getMessage());<|fim▁hole|> }, remove: function() { if (this._lastRet === -1) throw new js.lang.IllegalStateException(); try { this._element.removeAt(this._lastRet); if (this._lastRet < this._cursor) this._cursor--; this._lastRet = -1; } catch (e) { throw new js.lang.IndexOutOfBoundsException(); } } });<|fim▁end|>
}
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals<|fim▁hole|> from django.db import models # Create your models here. class Urls(models.Model): longurl = models.CharField(max_length=256) shorturl = models.CharField(max_length=128)<|fim▁end|>
<|file_name|>exclusao.js<|end_file_name|><|fim▁begin|>$('#confirmacaoExclusaoModal').on('show.bs.modal', function(event) { var button = $(event.relatedTarget); var codigoVinho = button.data('codigo'); var nomeVinho = button.data('nome'); var modal = $(this); var form = modal.find('form'); var action = form.data('url-base'); if (!action.endsWith('/')) { action += '/';<|fim▁hole|> } form.attr('action', action + codigoVinho); modal.find('.modal-body span').html('Tem certeza que deseja excluir o vinho <strong>' + nomeVinho + '</strong>?'); });<|fim▁end|>
<|file_name|>Filter.java<|end_file_name|><|fim▁begin|>package logbook.server.proxy; /** * 動作に必要なデータのみ取得するためのフィルターです。 * */ public class Filter { /** フィルターするContent-Type */ public static final String CONTENT_TYPE_FILTER = "text/plain"; /** キャプチャーするリクエストのバイトサイズ上限 */ public static final int MAX_POST_FIELD_SIZE = 1024 * 1024; /** setAttribute用のキー(Response) */ public static final String RESPONSE_BODY = "res-body"; /** setAttribute用のキー(Request) */ public static final String REQUEST_BODY = "req-body"; private static String serverName; /** * 鎮守府サーバー名を設定する * @param name 鎮守府サーバー名 */ public static void setServerName(String name) { serverName = name; } /** * 鎮守府サーバー名を取得する * @param name 鎮守府サーバー名 */ public static String getServerName() { return serverName; } /** * 鎮守府サーバー名を検出した場合true * * @return 鎮守府サーバー名を検出した場合true */ public static boolean isServerDetected() { return serverName != null; } /** * <p> * 取得が必要なデータかを調べます<br> * 鎮守府サーバーが検出された場合はサーバー名から必要かどうかを判別します<br> * 鎮守府サーバーが検出できていない場合は常にtrue<br> * * @param name サーバー名<|fim▁hole|> return true; } return false; } /** * <p> * 取得が必要なデータかを調べます<br> * 鎮守府サーバーが検出された場合はサーバー名とContent-Typeから必要かどうかを判別します<br> * 鎮守府サーバーが検出できていない場合はContent-Typeから必要かどうかを判別します<br> * * @param name サーバー名 * @param contentType Content-Type * @return 取得が必要なデータか */ public static boolean isNeed(String name, String contentType) { if ((!isServerDetected() || serverName.equals(name)) && CONTENT_TYPE_FILTER.equals(contentType)) { return true; } return false; } }<|fim▁end|>
* @return 取得が必要なデータか */ public static boolean isNeed(String name) { if ((!isServerDetected() || (isServerDetected() && serverName.equals(name)))) {
<|file_name|>reset-password-form.js<|end_file_name|><|fim▁begin|>import React, {Component} from 'react'; import {connect} from 'react-redux'; import Button from "../../../../../components/buttons/button"; import {Link} from "react-router-dom"; import {firebaseResetPasswordRequest} from "../../../../../redux/firebase/actions/reset-password.action"; class ResetPasswordForm extends Component{ componentDidMount(){ document.title = "Restore Password"; } handleResetPassword = (e) =>{ e.preventDefault(); const {resetPassword, resetPass_email} = this.props; resetPassword(resetPass_email); }; render() { const {handleInputChange, resetPass_email} = this.props; return ( <form action=""> <div className="input-wrapper"> <label htmlFor="resetPass_email">E-mail:</label> <input id="resetPass_email" type="text" onChange={handleInputChange} value={resetPass_email}/> </div> <div className="button-wrapper"> <Button text="Reset password" handleClick={(e) => this.handleResetPassword(e, resetPass_email)} requiresAuth={false} type="submit--light"/> <Link to="/sign-in">Cancel</Link> </div> </form> ) } } const mapStateToProps = state => { const {resetPass_email} = state.entry; return {resetPass_email}; }; const mapDispatchToProps = dispatch => { return { resetPassword: payload => dispatch(firebaseResetPasswordRequest(payload)) }<|fim▁hole|> export default connect(mapStateToProps, mapDispatchToProps)(ResetPasswordForm);<|fim▁end|>
};
<|file_name|>test_input.py<|end_file_name|><|fim▁begin|>import numpy import pytest<|fim▁hole|> class TestInputLayer: @pytest.fixture def layer(self): from lasagne.layers.input import InputLayer return InputLayer((3, 2)) def test_input_var(self, layer): assert layer.input_var.ndim == 2 def test_get_output_shape(self, layer): assert layer.get_output_shape() == (3, 2) def test_get_output_without_arguments(self, layer): assert layer.get_output() is layer.input_var def test_get_output_input_is_variable(self, layer): variable = theano.Variable("myvariable") assert layer.get_output(variable) is variable def test_get_output_input_is_array(self, layer): input = [[1,2,3]] output = layer.get_output(input) assert numpy.all(output.eval() == input) def test_get_output_input_is_a_mapping(self, layer): input = {layer: theano.tensor.matrix()} assert layer.get_output(input) is input[layer] def test_input_var_name(self, layer): assert layer.input_var.name == "input" def test_named_layer_input_var_name(self): from lasagne.layers.input import InputLayer layer = InputLayer((3, 2), name="foo") assert layer.input_var.name == "foo.input"<|fim▁end|>
import theano
<|file_name|>ResponseTest.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2016 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package okhttp3; import java.io.IOException; import okio.Buffer; import okio.BufferedSource; import okio.Okio; import okio.Source; import okio.Timeout; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; public final class ResponseTest { @Test public void peekShorterThanResponse() throws Exception { Response response = newResponse(responseBody("abcdef")); ResponseBody peekedBody = response.peekBody(3); assertEquals("abc", peekedBody.string()); assertEquals("abcdef", response.body().string()); } @Test public void peekLongerThanResponse() throws Exception { Response response = newResponse(responseBody("abc")); ResponseBody peekedBody = response.peekBody(6); assertEquals("abc", peekedBody.string()); assertEquals("abc", response.body().string()); } @Test public void peekAfterReadingResponse() throws Exception { Response response = newResponse(responseBody("abc")); assertEquals("abc", response.body().string()); try { response.peekBody(3); fail(); } catch (IllegalStateException expected) { } } @Test public void eachPeakIsIndependent() throws Exception { Response response = newResponse(responseBody("abcdef")); ResponseBody p1 = response.peekBody(4); ResponseBody p2 = response.peekBody(2); assertEquals("abcdef", response.body().string()); assertEquals("abcd", p1.string()); assertEquals("ab", p2.string()); } /** * Returns a new response body that refuses to be read once it has been closed. This is true of * most {@link BufferedSource} instances, but not of {@link Buffer}. */ private ResponseBody responseBody(String content) { final Buffer data = new Buffer().writeUtf8(content); Source source = new Source() { boolean closed; @Override public void close() throws IOException { closed = true; } @Override public long read(Buffer sink, long byteCount) throws IOException { if (closed) throw new IllegalStateException(); return data.read(sink, byteCount); } @Override public Timeout timeout() { return Timeout.NONE; } }; return ResponseBody.create(null, -1, Okio.buffer(source)); } private Response newResponse(ResponseBody responseBody) { return new Response.Builder() .request(new Request.Builder() .url("https://example.com/") .build()) .protocol(Protocol.HTTP_1_1) .code(200)<|fim▁hole|>}<|fim▁end|>
.body(responseBody) .build(); }
<|file_name|>tests.py<|end_file_name|><|fim▁begin|># coding: utf-8 from __future__ import unicode_literals import asyncore import email import os import shutil import smtpd import sys import tempfile import threading from django.core import mail from django.core.mail import (EmailMessage, mail_admins, mail_managers, EmailMultiAlternatives, send_mail, send_mass_mail) from django.core.mail.backends import console, dummy, locmem, filebased, smtp from django.core.mail.message import BadHeaderError from django.test import TestCase from django.test.utils import override_settings from django.utils.encoding import force_str, force_text from django.utils.six import PY3, StringIO from django.utils.translation import ugettext_lazy class MailTests(TestCase): """ Non-backend specific tests. """ def test_ascii(self): email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]']) message = email.message() self.assertEqual(message['Subject'], 'Subject') self.assertEqual(message.get_payload(), 'Content') self.assertEqual(message['From'], '[email protected]') self.assertEqual(message['To'], '[email protected]') def test_multiple_recipients(self): email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]', '[email protected]']) message = email.message() self.assertEqual(message['Subject'], 'Subject') self.assertEqual(message.get_payload(), 'Content') self.assertEqual(message['From'], '[email protected]') self.assertEqual(message['To'], '[email protected], [email protected]') def test_cc(self): """Regression test for #7722""" email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], cc=['[email protected]']) message = email.message() self.assertEqual(message['Cc'], '[email protected]') self.assertEqual(email.recipients(), ['[email protected]', '[email protected]']) # Test multiple CC with multiple To email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]', '[email protected]'], cc=['[email protected]', '[email protected]']) message = email.message() self.assertEqual(message['Cc'], '[email protected], [email protected]') self.assertEqual(email.recipients(), ['[email protected]', '[email protected]', '[email protected]', '[email protected]']) # Testing with Bcc email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]', '[email protected]'], cc=['[email protected]', '[email protected]'], bcc=['[email protected]']) message = email.message() self.assertEqual(message['Cc'], '[email protected], [email protected]') self.assertEqual(email.recipients(), ['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]']) def test_recipients_as_tuple(self): email = EmailMessage('Subject', 'Content', '[email protected]', ('[email protected]', '[email protected]'), cc=('[email protected]', '[email protected]'), bcc=('[email protected]',)) message = email.message() self.assertEqual(message['Cc'], '[email protected], [email protected]') self.assertEqual(email.recipients(), ['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]']) def test_header_injection(self): email = EmailMessage('Subject\nInjection Test', 'Content', '[email protected]', ['[email protected]']) self.assertRaises(BadHeaderError, email.message) email = EmailMessage(ugettext_lazy('Subject\nInjection Test'), 'Content', '[email protected]', ['[email protected]']) self.assertRaises(BadHeaderError, email.message) def 
test_space_continuation(self): """ Test for space continuation character in long (ascii) subject headers (#7747) """ email = EmailMessage('Long subject lines that get wrapped should contain a space continuation character to get expected behavior in Outlook and Thunderbird', 'Content', '[email protected]', ['[email protected]']) message = email.message() # Note that in Python 3, maximum line length has increased from 76 to 78 self.assertEqual(message['Subject'].encode(), b'Long subject lines that get wrapped should contain a space continuation\n character to get expected behavior in Outlook and Thunderbird') def test_message_header_overrides(self): """ Specifying dates or message-ids in the extra headers overrides the default values (#9233) """ headers = {"date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"} email = EmailMessage('subject', 'content', '[email protected]', ['[email protected]'], headers=headers) self.assertEqual(sorted(email.message().items()), [ ('Content-Transfer-Encoding', '7bit'), ('Content-Type', 'text/plain; charset="utf-8"'), ('From', '[email protected]'), ('MIME-Version', '1.0'), ('Message-ID', 'foo'), ('Subject', 'subject'), ('To', '[email protected]'), ('date', 'Fri, 09 Nov 2001 01:08:47 -0000'), ]) def test_from_header(self): """ Make sure we can manually set the From header (#9214) """ email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'}) message = email.message() self.assertEqual(message['From'], '[email protected]') def test_to_header(self): """ Make sure we can manually set the To header (#17444) """ email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]', '[email protected]'], headers={'To': '[email protected]'}) message = email.message() self.assertEqual(message['To'], '[email protected]') self.assertEqual(email.to, ['[email protected]', '[email protected]']) # If we don't set the To header manually, it should default to the `to` argument to the constructor email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]', '[email protected]']) message = email.message() self.assertEqual(message['To'], '[email protected], [email protected]') self.assertEqual(email.to, ['[email protected]', '[email protected]']) def test_multiple_message_call(self): """ Regression for #13259 - Make sure that headers are not changed when calling EmailMessage.message() """ email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'}) message = email.message() self.assertEqual(message['From'], '[email protected]') message = email.message() self.assertEqual(message['From'], '[email protected]') def test_unicode_address_header(self): """ Regression for #11144 - When a to/from/cc header contains unicode, make sure the email addresses are parsed correctly (especially with regards to commas) """ email = EmailMessage('Subject', 'Content', '[email protected]', ['"Firstname Sürname" <[email protected]>', '[email protected]']) self.assertEqual(email.message()['To'], '=?utf-8?q?Firstname_S=C3=BCrname?= <[email protected]>, [email protected]') email = EmailMessage('Subject', 'Content', '[email protected]', ['"Sürname, Firstname" <[email protected]>', '[email protected]']) self.assertEqual(email.message()['To'], '=?utf-8?q?S=C3=BCrname=2C_Firstname?= <[email protected]>, [email protected]') def test_unicode_headers(self): email = EmailMessage("Gżegżółka", "Content", "[email protected]", ["[email 
protected]"], headers={"Sender": '"Firstname Sürname" <[email protected]>', "Comments": 'My Sürname is non-ASCII'}) message = email.message() self.assertEqual(message['Subject'], '=?utf-8?b?R8W8ZWfFvMOzxYJrYQ==?=') self.assertEqual(message['Sender'], '=?utf-8?q?Firstname_S=C3=BCrname?= <[email protected]>') self.assertEqual(message['Comments'], '=?utf-8?q?My_S=C3=BCrname_is_non-ASCII?=') def test_safe_mime_multipart(self): """ Make sure headers can be set with a different encoding than utf-8 in SafeMIMEMultipart as well """ headers = {"Date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"} subject, from_email, to = 'hello', '[email protected]', '"Sürname, Firstname" <[email protected]>' text_content = 'This is an important message.' html_content = '<p>This is an <strong>important</strong> message.</p>' msg = EmailMultiAlternatives('Message from Firstname Sürname', text_content, from_email, [to], headers=headers) msg.attach_alternative(html_content, "text/html") msg.encoding = 'iso-8859-1' self.assertEqual(msg.message()['To'], '=?iso-8859-1?q?S=FCrname=2C_Firstname?= <[email protected]>') self.assertEqual(msg.message()['Subject'], '=?iso-8859-1?q?Message_from_Firstname_S=FCrname?=') def test_encoding(self): """ Regression for #12791 - Encode body correctly with other encodings than utf-8 """ email = EmailMessage('Subject', 'Firstname Sürname is a great guy.', '[email protected]', ['[email protected]']) email.encoding = 'iso-8859-1' message = email.message() self.assertTrue(message.as_string().startswith('Content-Type: text/plain; charset="iso-8859-1"\nMIME-Version: 1.0\nContent-Transfer-Encoding: quoted-printable\nSubject: Subject\nFrom: [email protected]\nTo: [email protected]')) self.assertEqual(message.get_payload(), 'Firstname S=FCrname is a great guy.') # Make sure MIME attachments also works correctly with other encodings than utf-8 text_content = 'Firstname Sürname is a great guy.' html_content = '<p>Firstname Sürname is a <strong>great</strong> guy.</p>' msg = EmailMultiAlternatives('Subject', text_content, '[email protected]', ['[email protected]']) msg.encoding = 'iso-8859-1' msg.attach_alternative(html_content, "text/html") self.assertEqual(msg.message().get_payload(0).as_string(), 'Content-Type: text/plain; charset="iso-8859-1"\nMIME-Version: 1.0\nContent-Transfer-Encoding: quoted-printable\n\nFirstname S=FCrname is a great guy.') self.assertEqual(msg.message().get_payload(1).as_string(), 'Content-Type: text/html; charset="iso-8859-1"\nMIME-Version: 1.0\nContent-Transfer-Encoding: quoted-printable\n\n<p>Firstname S=FCrname is a <strong>great</strong> guy.</p>') def test_attachments(self): """Regression test for #9367""" headers = {"Date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"} subject, from_email, to = 'hello', '[email protected]', '[email protected]' text_content = 'This is an important message.' 
html_content = '<p>This is an <strong>important</strong> message.</p>' msg = EmailMultiAlternatives(subject, text_content, from_email, [to], headers=headers) msg.attach_alternative(html_content, "text/html") msg.attach("an attachment.pdf", b"%PDF-1.4.%...", mimetype="application/pdf") msg_str = msg.message().as_string() message = email.message_from_string(msg_str) self.assertTrue(message.is_multipart()) self.assertEqual(message.get_content_type(), 'multipart/mixed') self.assertEqual(message.get_default_type(), 'text/plain') payload = message.get_payload() self.assertEqual(payload[0].get_content_type(), 'multipart/alternative') self.assertEqual(payload[1].get_content_type(), 'application/pdf') def test_non_ascii_attachment_filename(self): """Regression test for #14964""" headers = {"Date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"} subject, from_email, to = 'hello', '[email protected]', '[email protected]' content = 'This is the message.' msg = EmailMessage(subject, content, from_email, [to], headers=headers) # Unicode in file name msg.attach("une pièce jointe.pdf", b"%PDF-1.4.%...", mimetype="application/pdf") msg_str = msg.message().as_string() message = email.message_from_string(msg_str) payload = message.get_payload() self.assertEqual(payload[1].get_filename(), 'une pièce jointe.pdf') def test_dummy_backend(self): """ Make sure that dummy backends returns correct number of sent messages """ connection = dummy.EmailBackend() email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'}) self.assertEqual(connection.send_messages([email, email, email]), 3) def test_arbitrary_keyword(self): """ Make sure that get_connection() accepts arbitrary keyword that might be used with custom backends. """ c = mail.get_connection(fail_silently=True, foo='bar') self.assertTrue(c.fail_silently) def test_custom_backend(self): """Test custom backend defined in this suite.""" conn = mail.get_connection('mail.custombackend.EmailBackend') self.assertTrue(hasattr(conn, 'test_outbox')) email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'}) conn.send_messages([email]) self.assertEqual(len(conn.test_outbox), 1) def test_backend_arg(self): """Test backend argument of mail.get_connection()""" self.assertIsInstance(mail.get_connection('django.core.mail.backends.smtp.EmailBackend'), smtp.EmailBackend) self.assertIsInstance(mail.get_connection('django.core.mail.backends.locmem.EmailBackend'), locmem.EmailBackend) self.assertIsInstance(mail.get_connection('django.core.mail.backends.dummy.EmailBackend'), dummy.EmailBackend) self.assertIsInstance(mail.get_connection('django.core.mail.backends.console.EmailBackend'), console.EmailBackend) tmp_dir = tempfile.mkdtemp() try: self.assertIsInstance(mail.get_connection('django.core.mail.backends.filebased.EmailBackend', file_path=tmp_dir), filebased.EmailBackend) finally: shutil.rmtree(tmp_dir) self.assertIsInstance(mail.get_connection(), locmem.EmailBackend) @override_settings( EMAIL_BACKEND='django.core.mail.backends.locmem.EmailBackend', ADMINS=[('nobody', '[email protected]')], MANAGERS=[('nobody', '[email protected]')]) def test_connection_arg(self): """Test connection argument to send_mail(), et. 
al.""" mail.outbox = [] # Send using non-default connection connection = mail.get_connection('mail.custombackend.EmailBackend') send_mail('Subject', 'Content', '[email protected]', ['[email protected]'], connection=connection) self.assertEqual(mail.outbox, []) self.assertEqual(len(connection.test_outbox), 1) self.assertEqual(connection.test_outbox[0].subject, 'Subject') connection = mail.get_connection('mail.custombackend.EmailBackend') send_mass_mail([ ('Subject1', 'Content1', '[email protected]', ['[email protected]']), ('Subject2', 'Content2', '[email protected]', ['[email protected]']), ], connection=connection) self.assertEqual(mail.outbox, []) self.assertEqual(len(connection.test_outbox), 2) self.assertEqual(connection.test_outbox[0].subject, 'Subject1') self.assertEqual(connection.test_outbox[1].subject, 'Subject2') connection = mail.get_connection('mail.custombackend.EmailBackend') mail_admins('Admin message', 'Content', connection=connection) self.assertEqual(mail.outbox, []) self.assertEqual(len(connection.test_outbox), 1) self.assertEqual(connection.test_outbox[0].subject, '[Django] Admin message') connection = mail.get_connection('mail.custombackend.EmailBackend') mail_managers('Manager message', 'Content', connection=connection) self.assertEqual(mail.outbox, []) self.assertEqual(len(connection.test_outbox), 1) self.assertEqual(connection.test_outbox[0].subject, '[Django] Manager message') def test_dont_mangle_from_in_body(self): # Regression for #13433 - Make sure that EmailMessage doesn't mangle # 'From ' in message body. email = EmailMessage('Subject', 'From the future', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'}) self.assertFalse('>From the future' in email.message().as_string()) def test_dont_base64_encode(self): # Ticket #3472 # Shouldn't use Base64 encoding at all msg = EmailMessage('Subject', 'UTF-8 encoded body', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'}) self.assertFalse('Content-Transfer-Encoding: base64' in msg.message().as_string()) # Ticket #11212 # Shouldn't use quoted printable, should detect it can represent content with 7 bit data msg = EmailMessage('Subject', 'Body with only ASCII characters.', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'}) s = msg.message().as_string() self.assertFalse('Content-Transfer-Encoding: quoted-printable' in s) self.assertTrue('Content-Transfer-Encoding: 7bit' in s) # Shouldn't use quoted printable, should detect it can represent content with 8 bit data msg = EmailMessage('Subject', 'Body with latin characters: àáä.', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'}) s = msg.message().as_string() self.assertFalse(str('Content-Transfer-Encoding: quoted-printable') in s) self.assertTrue(str('Content-Transfer-Encoding: 8bit') in s) msg = EmailMessage('Subject', 'Body with non latin characters: А Б В Г Д Е Ж Ѕ З И І К Л М Н О П.', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'}) s = msg.message().as_string() self.assertFalse(str('Content-Transfer-Encoding: quoted-printable') in s) self.assertTrue(str('Content-Transfer-Encoding: 8bit') in s) class BaseEmailBackendTests(object): email_backend = None def setUp(self): self.settings_override = override_settings(EMAIL_BACKEND=self.email_backend) self.settings_override.enable() def tearDown(self): self.settings_override.disable() def assertStartsWith(self, first, second): if not first.startswith(second): 
self.longMessage = True self.assertEqual(first[:len(second)], second, "First string doesn't start with the second.") def get_mailbox_content(self): raise NotImplementedError def flush_mailbox(self): raise NotImplementedError def get_the_message(self): mailbox = self.get_mailbox_content() self.assertEqual(len(mailbox), 1, "Expected exactly one message, got %d.\n%r" % (len(mailbox), [ m.as_string() for m in mailbox])) return mailbox[0] def test_send(self): email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]']) num_sent = mail.get_connection().send_messages([email]) self.assertEqual(num_sent, 1) message = self.get_the_message() self.assertEqual(message["subject"], "Subject") self.assertEqual(message.get_payload(), "Content") self.assertEqual(message["from"], "[email protected]") self.assertEqual(message.get_all("to"), ["[email protected]"]) def test_send_unicode(self): email = EmailMessage('Chère maman', 'Je t\'aime très fort', '[email protected]', ['[email protected]']) num_sent = mail.get_connection().send_messages([email]) self.assertEqual(num_sent, 1) message = self.get_the_message() self.assertEqual(message["subject"], '=?utf-8?q?Ch=C3=A8re_maman?=') self.assertEqual(force_text(message.get_payload()), 'Je t\'aime très fort') def test_send_many(self): email1 = EmailMessage('Subject', 'Content1', '[email protected]', ['[email protected]']) email2 = EmailMessage('Subject', 'Content2', '[email protected]', ['[email protected]']) num_sent = mail.get_connection().send_messages([email1, email2]) self.assertEqual(num_sent, 2) messages = self.get_mailbox_content() self.assertEqual(len(messages), 2) self.assertEqual(messages[0].get_payload(), "Content1") self.assertEqual(messages[1].get_payload(), "Content2") def test_send_verbose_name(self): email = EmailMessage("Subject", "Content", '"Firstname Sürname" <[email protected]>', ["[email protected]"]) email.send() message = self.get_the_message() self.assertEqual(message["subject"], "Subject") self.assertEqual(message.get_payload(), "Content") self.assertEqual(message["from"], "=?utf-8?q?Firstname_S=C3=BCrname?= <[email protected]>") @override_settings(MANAGERS=[('nobody', '[email protected]')]) def test_html_mail_managers(self): """Test html_message argument to mail_managers""" mail_managers('Subject', 'Content', html_message='HTML Content') message = self.get_the_message() self.assertEqual(message.get('subject'), '[Django] Subject') self.assertEqual(message.get_all('to'), ['[email protected]']) self.assertTrue(message.is_multipart()) self.assertEqual(len(message.get_payload()), 2) self.assertEqual(message.get_payload(0).get_payload(), 'Content') self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain') self.assertEqual(message.get_payload(1).get_payload(), 'HTML Content') self.assertEqual(message.get_payload(1).get_content_type(), 'text/html') @override_settings(ADMINS=[('nobody', '[email protected]')]) def test_html_mail_admins(self): """Test html_message argument to mail_admins """ mail_admins('Subject', 'Content', html_message='HTML Content') message = self.get_the_message() self.assertEqual(message.get('subject'), '[Django] Subject') self.assertEqual(message.get_all('to'), ['[email protected]']) self.assertTrue(message.is_multipart()) self.assertEqual(len(message.get_payload()), 2) self.assertEqual(message.get_payload(0).get_payload(), 'Content') self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain') self.assertEqual(message.get_payload(1).get_payload(), 'HTML Content') 
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html') @override_settings( ADMINS=[('nobody', '[email protected]')], MANAGERS=[('nobody', '[email protected]')]) def test_manager_and_admin_mail_prefix(self): """ String prefix + lazy translated subject = bad output Regression for #13494 """ mail_managers(ugettext_lazy('Subject'), 'Content') message = self.get_the_message() self.assertEqual(message.get('subject'), '[Django] Subject') self.flush_mailbox() mail_admins(ugettext_lazy('Subject'), 'Content') message = self.get_the_message() self.assertEqual(message.get('subject'), '[Django] Subject') @override_settings(ADMINS=(), MANAGERS=()) def test_empty_admins(self): """ Test that mail_admins/mail_managers doesn't connect to the mail server if there are no recipients (#9383) """ mail_admins('hi', 'there') self.assertEqual(self.get_mailbox_content(), []) mail_managers('hi', 'there') self.assertEqual(self.get_mailbox_content(), []) def test_message_cc_header(self): """ Regression test for #7722 """ email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], cc=['[email protected]']) mail.get_connection().send_messages([email]) message = self.get_the_message() self.assertStartsWith(message.as_string(), 'Content-Type: text/plain; charset="utf-8"\nMIME-Version: 1.0\nContent-Transfer-Encoding: 7bit\nSubject: Subject\nFrom: [email protected]\nTo: [email protected]\nCc: [email protected]\nDate: ') def test_idn_send(self): """ Regression test for #14301 """ self.assertTrue(send_mail('Subject', 'Content', 'from@öäü.com', ['to@öäü.com'])) message = self.get_the_message() self.assertEqual(message.get('subject'), 'Subject') self.assertEqual(message.get('from'), '[email protected]') self.assertEqual(message.get('to'), '[email protected]') self.flush_mailbox() m = EmailMessage('Subject', 'Content', 'from@öäü.com', ['to@öäü.com'], cc=['cc@öäü.com']) m.send() message = self.get_the_message() self.assertEqual(message.get('subject'), 'Subject') self.assertEqual(message.get('from'), '[email protected]') self.assertEqual(message.get('to'), '[email protected]') self.assertEqual(message.get('cc'), '[email protected]') def test_recipient_without_domain(self): """ Regression test for #15042 """ self.assertTrue(send_mail("Subject", "Content", "tester", ["django"])) message = self.get_the_message() self.assertEqual(message.get('subject'), 'Subject') self.assertEqual(message.get('from'), "tester") self.assertEqual(message.get('to'), "django") def test_close_connection(self): """ Test that connection can be closed (even when not explicitely opened) """ conn = mail.get_connection(username='', password='') try: conn.close() except Exception as e: self.fail("close() unexpectedly raised an exception: %s" % e) class LocmemBackendTests(BaseEmailBackendTests, TestCase): email_backend = 'django.core.mail.backends.locmem.EmailBackend' def get_mailbox_content(self): return [m.message() for m in mail.outbox] def flush_mailbox(self): mail.outbox = [] def tearDown(self): super(LocmemBackendTests, self).tearDown() mail.outbox = [] def test_locmem_shared_messages(self): """ Make sure that the locmen backend populates the outbox. 
""" connection = locmem.EmailBackend() connection2 = locmem.EmailBackend() email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'}) connection.send_messages([email]) connection2.send_messages([email]) self.assertEqual(len(mail.outbox), 2) def test_validate_multiline_headers(self): # Ticket #18861 - Validate emails when using the locmem backend with self.assertRaises(BadHeaderError): send_mail('Subject\nMultiline', 'Content', '[email protected]', ['[email protected]']) class FileBackendTests(BaseEmailBackendTests, TestCase): email_backend = 'django.core.mail.backends.filebased.EmailBackend' def setUp(self): super(FileBackendTests, self).setUp() self.tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.tmp_dir) self._settings_override = override_settings(EMAIL_FILE_PATH=self.tmp_dir) self._settings_override.enable() def tearDown(self): self._settings_override.disable() super(FileBackendTests, self).tearDown() def flush_mailbox(self): for filename in os.listdir(self.tmp_dir): os.unlink(os.path.join(self.tmp_dir, filename)) def get_mailbox_content(self): messages = [] for filename in os.listdir(self.tmp_dir): with open(os.path.join(self.tmp_dir, filename), 'r') as fp: session = force_text(fp.read()).split('\n' + ('-' * 79) + '\n') messages.extend(email.message_from_string(force_str(m)) for m in session if m) return messages def test_file_sessions(self): """Make sure opening a connection creates a new file""" msg = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'}) connection = mail.get_connection() connection.send_messages([msg]) self.assertEqual(len(os.listdir(self.tmp_dir)), 1) with open(os.path.join(self.tmp_dir, os.listdir(self.tmp_dir)[0])) as fp: message = email.message_from_file(fp)<|fim▁hole|> connection2 = mail.get_connection() connection2.send_messages([msg]) self.assertEqual(len(os.listdir(self.tmp_dir)), 2) connection.send_messages([msg]) self.assertEqual(len(os.listdir(self.tmp_dir)), 2) msg.connection = mail.get_connection() self.assertTrue(connection.open()) msg.send() self.assertEqual(len(os.listdir(self.tmp_dir)), 3) msg.send() self.assertEqual(len(os.listdir(self.tmp_dir)), 3) connection.close() class ConsoleBackendTests(BaseEmailBackendTests, TestCase): email_backend = 'django.core.mail.backends.console.EmailBackend' def setUp(self): super(ConsoleBackendTests, self).setUp() self.__stdout = sys.stdout self.stream = sys.stdout = StringIO() def tearDown(self): del self.stream sys.stdout = self.__stdout del self.__stdout super(ConsoleBackendTests, self).tearDown() def flush_mailbox(self): self.stream = sys.stdout = StringIO() def get_mailbox_content(self): messages = force_text(self.stream.getvalue()).split('\n' + ('-' * 79) + '\n') return [email.message_from_string(force_str(m)) for m in messages if m] def test_console_stream_kwarg(self): """ Test that the console backend can be pointed at an arbitrary stream. """ s = StringIO() connection = mail.get_connection('django.core.mail.backends.console.EmailBackend', stream=s) send_mail('Subject', 'Content', '[email protected]', ['[email protected]'], connection=connection) self.assertTrue(s.getvalue().startswith('Content-Type: text/plain; charset="utf-8"\nMIME-Version: 1.0\nContent-Transfer-Encoding: 7bit\nSubject: Subject\nFrom: [email protected]\nTo: [email protected]\nDate: ')) class FakeSMTPServer(smtpd.SMTPServer, threading.Thread): """ Asyncore SMTP server wrapped into a thread. 
Based on DummyFTPServer from: http://svn.python.org/view/python/branches/py3k/Lib/test/test_ftplib.py?revision=86061&view=markup """ def __init__(self, *args, **kwargs): threading.Thread.__init__(self) smtpd.SMTPServer.__init__(self, *args, **kwargs) self._sink = [] self.active = False self.active_lock = threading.Lock() self.sink_lock = threading.Lock() def process_message(self, peer, mailfrom, rcpttos, data): m = email.message_from_string(data) if PY3: maddr = email.utils.parseaddr(m.get('from'))[1] else: maddr = email.Utils.parseaddr(m.get('from'))[1] if mailfrom != maddr: return "553 '%s' != '%s'" % (mailfrom, maddr) with self.sink_lock: self._sink.append(m) def get_sink(self): with self.sink_lock: return self._sink[:] def flush_sink(self): with self.sink_lock: self._sink[:] = [] def start(self): assert not self.active self.__flag = threading.Event() threading.Thread.start(self) self.__flag.wait() def run(self): self.active = True self.__flag.set() while self.active and asyncore.socket_map: with self.active_lock: asyncore.loop(timeout=0.1, count=1) asyncore.close_all() def stop(self): if self.active: self.active = False self.join() class SMTPBackendTests(BaseEmailBackendTests, TestCase): email_backend = 'django.core.mail.backends.smtp.EmailBackend' @classmethod def setUpClass(cls): cls.server = FakeSMTPServer(('127.0.0.1', 0), None) cls._settings_override = override_settings( EMAIL_HOST="127.0.0.1", EMAIL_PORT=cls.server.socket.getsockname()[1]) cls._settings_override.enable() cls.server.start() @classmethod def tearDownClass(cls): cls._settings_override.disable() cls.server.stop() def setUp(self): super(SMTPBackendTests, self).setUp() self.server.flush_sink() def tearDown(self): self.server.flush_sink() super(SMTPBackendTests, self).tearDown() def flush_mailbox(self): self.server.flush_sink() def get_mailbox_content(self): return self.server.get_sink() @override_settings(EMAIL_HOST_USER="not empty username", EMAIL_HOST_PASSWORD="not empty password") def test_email_authentication_use_settings(self): backend = smtp.EmailBackend() self.assertEqual(backend.username, 'not empty username') self.assertEqual(backend.password, 'not empty password') @override_settings(EMAIL_HOST_USER="not empty username", EMAIL_HOST_PASSWORD="not empty password") def test_email_authentication_override_settings(self): backend = smtp.EmailBackend(username='username', password='password') self.assertEqual(backend.username, 'username') self.assertEqual(backend.password, 'password') @override_settings(EMAIL_HOST_USER="not empty username", EMAIL_HOST_PASSWORD="not empty password") def test_email_disabled_authentication(self): backend = smtp.EmailBackend(username='', password='') self.assertEqual(backend.username, '') self.assertEqual(backend.password, '') def test_server_stopped(self): """ Test that closing the backend while the SMTP server is stopped doesn't raise an exception. """ backend = smtp.EmailBackend(username='', password='') backend.open() self.server.stop() try: backend.close() except Exception as e: self.fail("close() unexpectedly raised an exception: %s" % e)<|fim▁end|>
self.assertEqual(message.get_content_type(), 'text/plain') self.assertEqual(message.get('subject'), 'Subject') self.assertEqual(message.get('from'), '[email protected]') self.assertEqual(message.get('to'), '[email protected]')
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Configure jemalloc as the `global_allocator` when configured. This is // so that we use the sized deallocation apis jemalloc provides // (namely `sdallocx`). // // The symbol overrides documented below are also performed so that we can // ensure that we use a consistent allocator across the rustc <-> llvm boundary #[cfg(feature = "jemalloc")] #[global_allocator] static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc; #[cfg(feature = "tikv-jemalloc-sys")] use tikv_jemalloc_sys as jemalloc_sys; fn main() { // Pull in jemalloc when enabled. // // Note that we're pulling in a static copy of jemalloc which means that to // pull it in we need to actually reference its symbols for it to get // linked. The two crates we link to here, std and rustc_driver, are both // dynamic libraries. That means to pull in jemalloc we actually need to // reference allocation symbols one way or another (as this file is the only // object code in the rustc executable). #[cfg(feature = "tikv-jemalloc-sys")] { use std::os::raw::{c_int, c_void}; #[used] static _F1: unsafe extern "C" fn(usize, usize) -> *mut c_void = jemalloc_sys::calloc; #[used] static _F2: unsafe extern "C" fn(*mut *mut c_void, usize, usize) -> c_int = jemalloc_sys::posix_memalign; #[used] static _F3: unsafe extern "C" fn(usize, usize) -> *mut c_void = jemalloc_sys::aligned_alloc; #[used] static _F4: unsafe extern "C" fn(usize) -> *mut c_void = jemalloc_sys::malloc; #[used] static _F5: unsafe extern "C" fn(*mut c_void, usize) -> *mut c_void = jemalloc_sys::realloc; #[used] static _F6: unsafe extern "C" fn(*mut c_void) = jemalloc_sys::free; // On OSX, jemalloc doesn't directly override malloc/free, but instead // registers itself with the allocator's zone APIs in a ctor. However, // the linker doesn't seem to consider ctors as "used" when statically // linking, so we need to explicitly depend on the function. #[cfg(target_os = "macos")] { extern "C" { fn _rjem_je_zone_register(); }<|fim▁hole|> } } rustc_driver::set_sigpipe_handler(); rustc_driver::main() }<|fim▁end|>
#[used] static _F7: unsafe extern "C" fn() = _rjem_je_zone_register;
<|file_name|>ChangeGGIBoundary.py<|end_file_name|><|fim▁begin|>""" Application-class that implements pyFoamChangeGGIBoundary.py Modification of GGI and cyclicGGI interface parameters in constant/polymesh/boundary file. Author: Martin Beaudoin, Hydro-Quebec, 2009. All rights reserved """ from PyFoam.Applications.PyFoamApplication import PyFoamApplication from PyFoam.RunDictionary.ParsedParameterFile import ParsedParameterFile from PyFoam.ThirdParty.six import print_ from os import path import sys import re class ChangeGGIBoundary(PyFoamApplication): def __init__(self,args=None): description="""\ Change GGI boundary condition parameters """ PyFoamApplication.__init__(self, args=args, description=description, usage="%prog <caseDirectory> ggiPatchName", interspersed=True, changeVersion=False, nr=2) def addOptions(self): self.parser.add_option("--shadowPatch", action="store", dest="shadowPatch", default=None, help='Name of the shadowPatch') self.parser.add_option("--shadowName", action="store", dest="shadowName", default=None, help='Name of the shadowPatch. Deprecated. Use --shadowPatch instead') self.parser.add_option("--zone", action="store", dest="zone", default=None, help='Name of the zone for the GGI patch') self.parser.add_option("--patchZoneName", action="store", dest="patchZoneName", default=None, help='Name of the zone for the GGI patch. Deprecated. Use --zone instead') self.parser.add_option("--bridgeOverlap", action="store", dest="bridgeOverlap", default=None, help='bridgeOverlap flag (on/off)') self.parser.add_option("--bridgeOverlapFlag", action="store", dest="bridgeOverlapFlag", default=None, help='bridgeOverlap flag (on/off). Deprecated. Use --bridgeOverlap instead') self.parser.add_option("--rotationAxis", action="store", dest="rotationAxis", default=None, help='rotation axis for cyclicGgi') self.parser.add_option("--rotationAngle", action="store", dest="rotationAngle", default=None, help='rotation axis angle for cyclicGgi') self.parser.add_option("--separationOffset", action="store", dest="separationOffset", default=None, help='separation offset for cyclicGgi') self.parser.add_option("--test", action="store_true", default=False, dest="test", help="Only print the new boundary file") def run(self): fName=self.parser.getArgs()[0] bName=self.parser.getArgs()[1] boundary=ParsedParameterFile(path.join(".",fName,"constant","polyMesh","boundary"),debug=False,boundaryDict=True) bnd=boundary.content if type(bnd)!=list: self.error("Problem with boundary file (not a list)") found=False for val in bnd: if val==bName: found=True elif found: bcType=val["type"] if re.match("cyclicGgi", bcType)!= None or re.match("ggi", bcType)!= None: if self.parser.getOptions().shadowPatch!=None: shadowPatch=self.parser.getOptions().shadowPatch val["shadowPatch"]=shadowPatch if shadowPatch not in bnd: self.error("\n Option --shadowPatch for patch:",bName,": there is no patch called",shadowPatch,"\n") if self.parser.getOptions().zone!=None: val["zone"]=self.parser.getOptions().zone if self.parser.getOptions().bridgeOverlap!=None: val["bridgeOverlap"]=self.parser.getOptions().bridgeOverlap if val["type"]=="cyclicGgi": if self.parser.getOptions().rotationAxis!=None: val["rotationAxis"]=self.parser.getOptions().rotationAxis <|fim▁hole|> val["separationOffset"]=self.parser.getOptions().separationOffset # Deprecated if self.parser.getOptions().shadowName!=None: self.warning("\n PatchName:",bName,": Option --shadowName is deprecated. 
Use --shadowPatch instead\n") shadowName=self.parser.getOptions().shadowName val["shadowPatch"]=shadowName if shadowName not in bnd: self.error("\n Option --shadowName for patch:",bName,": there is no patch called",shadowName,"\n") # Deprecated if self.parser.getOptions().patchZoneName!=None: self.warning("\n PatchName:",bName,": Option --patchZoneName is deprecated. Use --zone instead\n") val["zone"]=self.parser.getOptions().patchZoneName # Deprecated if self.parser.getOptions().bridgeOverlapFlag!=None: self.warning("\n PatchName:",bName,": Option --bridgeOverlapFlag is deprecated. Use --bridgeOverlap instead\n") val["bridgeOverlap"]=self.parser.getOptions().bridgeOverlapFlag else: print_("Unsupported GGI type '",bcType,"' for patch",bName) break if not found: self.error("Boundary",bName,"not found in",bnd[::2]) if self.parser.getOptions().test: print_(boundary) else: boundary.writeFile()<|fim▁end|>
if self.parser.getOptions().rotationAngle!=None: val["rotationAngle"]=self.parser.getOptions().rotationAngle if self.parser.getOptions().separationOffset!=None:
<|file_name|>plot_ticpe_accises.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Created on Mon Aug 17 15:48:31 2015 @author: thomas.douenne """ # L'objectif est de décrire l'évolution des montants des accises de la TICPE depuis 1993 # Import de fonctions spécifiques à Openfisca Indirect Taxation<|fim▁hole|># Recherche des paramètres de la législation liste = ['ticpe_gazole', 'ticpe_super9598', 'super_plombe_ticpe'] df_accises = get_accise_ticpe_majoree() # Réalisation des graphiques graph_builder_bar_list(df_accises['accise majoree sans plomb'], 1, 1) graph_builder_bar_list(df_accises['accise majoree diesel'], 1, 1) graph_builder_bar_list(df_accises['accise majoree super plombe'], 1, 1)<|fim▁end|>
from openfisca_france_indirect_taxation.examples.utils_example import graph_builder_bar_list from openfisca_france_indirect_taxation.examples.dataframes_from_legislation.get_accises import \ get_accise_ticpe_majoree
<|file_name|>SampleWindow.js<|end_file_name|><|fim▁begin|>/* * Sample Window */ function SampleWindow(navController){ /* * essentials */ var win = Titanium.UI.createWindow({ top: ULA_WIN_TOP, title: 'Sample Window',<|fim▁hole|> var rootView = Titanium.UI.createView({ top: 10, bottom: 10, left: 10, right: 10, backgroundColor: 'blue', layout: 'vertical' }); /* * components */ var lblTitle = Titanium.UI.createLabel({ text: 'This is Sample window', color: 'black', font: ULA_FONT_A, top: 100 }); var btnClose =Titanium.UI.createButton({ title: 'Close', color: 'black', height: 60, width: 100, top: 20, }); btnClose.addEventListener('click',function(e){ _c('btnClosed clicked'); navController.back(1); }); /* * heirarchy */ rootView.add(lblTitle); win.add(rootView); win.rightNavButton = btnClose; return win; } module.exports = SampleWindow;<|fim▁end|>
backgroundColor: 'red' });
<|file_name|>ptr_as_ptr.rs<|end_file_name|><|fim▁begin|>// run-rustfix #![warn(clippy::ptr_as_ptr)] #![feature(custom_inner_attributes)] fn main() {<|fim▁hole|> let _ = ptr as *const i32; let _ = mut_ptr as *mut i32; // Make sure the lint can handle the difference in their operator precedences. unsafe { let ptr_ptr: *const *const u32 = &ptr; let _ = *ptr_ptr as *const i32; } // Changes in mutability. Do not lint this. let _ = ptr as *mut i32; let _ = mut_ptr as *const i32; // `pointer::cast` cannot perform unsized coercions unlike `as`. Do not lint this. let ptr_of_array: *const [u32; 4] = &[1, 2, 3, 4]; let _ = ptr_of_array as *const [u32]; let _ = ptr_of_array as *const dyn std::fmt::Debug; // Ensure the lint doesn't produce unnecessary turbofish for inferred types. let _: *const i32 = ptr as *const _; let _: *mut i32 = mut_ptr as _; } fn _msrv_1_37() { #![clippy::msrv = "1.37"] let ptr: *const u32 = &42_u32; let mut_ptr: *mut u32 = &mut 42_u32; // `pointer::cast` was stabilized in 1.38. Do not lint this let _ = ptr as *const i32; let _ = mut_ptr as *mut i32; } fn _msrv_1_38() { #![clippy::msrv = "1.38"] let ptr: *const u32 = &42_u32; let mut_ptr: *mut u32 = &mut 42_u32; let _ = ptr as *const i32; let _ = mut_ptr as *mut i32; }<|fim▁end|>
let ptr: *const u32 = &42_u32; let mut_ptr: *mut u32 = &mut 42_u32;
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os from setuptools import setup README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-tvdb', version='0.1',<|fim▁hole|> license='The MIT License: http://www.opensource.org/licenses/mit-license.php', description='A simple Django app for TV channels DB.', long_description=README, author='Maksym Sokolsky', author_email='[email protected]', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], )<|fim▁end|>
packages=['tvdb'], include_package_data=True,
<|file_name|>media_editing.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ ################################################################################ # # # media_editing # # # ################################################################################ # # # LICENCE INFORMATION # # # # This program provides media editing utilities. # # # # copyright (C) 2018 Will Breaden Madden, [email protected] # # # # This software is released under the terms of the GNU General Public License # # version 3 (GPLv3). # # # # This program is free software: you can redistribute it and/or modify it # # under the terms of the GNU General Public License as published by the Free # # Software Foundation, either version 3 of the License, or (at your option) # # any later version. # # #<|fim▁hole|># FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # # more details. # # # # For a copy of the GNU General Public License, see # # <http://www.gnu.org/licenses/>. # # # ################################################################################ """<|fim▁end|>
# This program is distributed in the hope that it will be useful, but WITHOUT # # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
<|file_name|>test_artificial_32_Anscombe_PolyTrend_7__0.py<|end_file_name|><|fim▁begin|>import pyaf.Bench.TS_datasets as tsds<|fim▁hole|> art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 7, transform = "Anscombe", sigma = 0.0, exog_count = 0, ar_order = 0);<|fim▁end|>
import tests.artificial.process_artificial_dataset as art
<|file_name|>gather.go<|end_file_name|><|fim▁begin|>package main import ( "fmt" "io/ioutil" "os" "path/filepath" "strings" "crypto/md5" "bytes" "compress/gzip" "encoding/json" "log" "github.com/rakyll/magicmime" "github.com/garyburd/redigo/redis" ) var global_hostname string type FileStat struct { Path string Hash string Size int64 Hostname string } func check(err error) { if err != nil { log.Fatal(err) } } func WriteToRedis(key string, value []byte) { // fmt.Printf("wrote key='%s' to tcp:6379\n", key) fmt.Printf(".") conn, err := redis.Dial("tcp", ":6379") check(err) defer conn.Close() _, err = conn.Do("SET", key, value) check(err) } func WriteToRedisCompressed(key string, value []byte) { var b bytes.Buffer w := gzip.NewWriter(&b) w.Write(value) w.Close() WriteToRedis(key, b.Bytes()) } func GrabWalk(dirPath string) { fullPath, err := filepath.Abs(dirPath) check(err) callback := func(path string, fi os.FileInfo, err error) error { if strings.Contains(path, ".git") { return nil<|fim▁hole|> } file_md5sum, fileStatJson, file_contents := getFileInfo(path, fi) hostname_key := fmt.Sprintf("%s:%s", global_hostname, path) WriteToRedis(hostname_key, fileStatJson) WriteToRedisCompressed(file_md5sum, file_contents) return nil } filepath.Walk(fullPath, callback) return } func getFileInfo(path string, fi os.FileInfo) (string, []byte, []byte) { var empty_bytes []byte var empty_string string; if fi.IsDir() { return empty_string, empty_bytes, empty_bytes } mm, _ := magicmime.New(magicmime.MAGIC_MIME_TYPE | magicmime.MAGIC_SYMLINK | magicmime.MAGIC_ERROR) mimetype, _ := mm.TypeByFile(path) if strings.Contains(mimetype, "application"){ return empty_string, empty_bytes, empty_bytes } file, _ := os.Open(path) defer file.Close() file_contents, _ := ioutil.ReadFile(path) file_md5sum := fmt.Sprintf("%x", md5.Sum(file_contents)) file_size := fi.Size() fs := FileStat{ Path: path, Hash: file_md5sum, Size: file_size, Hostname: global_hostname, } fileStatJson, _ := json.Marshal(fs) return file_md5sum, fileStatJson, file_contents } func main() { global_hostname, _ = os.Hostname() GrabWalk("/etc/") fmt.Printf("\nDone\n") }<|fim▁end|>
<|file_name|>comm.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # Copyright (c) 2015 Intel Corporation. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of works must retain the original copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the original copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of Intel Corporation nor the names of its contributors # may be used to endorse or promote products derived from this work without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT, # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY # OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, # EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: # Hongjuan, Wang<[email protected]> # Yun, Liu<[email protected]> import os import sys import commands import shutil import urllib2 SCRIPT_PATH = os.path.realpath(__file__) ConstPath = os.path.dirname(SCRIPT_PATH) def setUp(): global device, XwalkPath, crosswalkVersion, PackTools, ARCH, cachedir #device = "E6OKCY411012" device = os.environ.get('DEVICE_ID') cachedir = os.environ.get('CROSSWALK_APP_TOOLS_CACHE_DIR') if not device: print ("Get env error\n") sys.exit(1) fp = open(ConstPath + "/../arch.txt", 'r') if fp.read().strip("\n\t") != "x86": ARCH = "arm" else: ARCH = "x86" fp.close() vp = open(ConstPath + "/../version.txt", 'r') crosswalkVersion = vp.read().strip("\n\t") vp.close() PackTools = ConstPath + "/../tools/crosswalk-app-tools/src/" XwalkPath = ConstPath + "/../tools/" if "crosswalk-app-tools" not in os.listdir(XwalkPath): print "Please check if the crosswalk-app-tools exists in " + ConstPath + "/../tools/" sys.exit(1) elif "crosswalk-app-tools" in os.listdir(XwalkPath) and len(os.listdir(XwalkPath)) < 2: print "Please check if the Crosswalk Binary exists in " + ConstPath + "/../tools/" sys.exit(1) def clear(pkg): os.chdir(XwalkPath) if os.path.exists(ConstPath + "/../tools/" + pkg): try: shutil.rmtree(XwalkPath + pkg) except Exception as e: os.system("rm -rf " + XwalkPath + pkg + " &>/dev/null") def create(self): clear("org.xwalk.test") setUp() os.chdir(XwalkPath) cmd = PackTools + \ "crosswalk-app create org.xwalk.test --android-crosswalk=" + \ crosswalkVersion packstatus = commands.getstatusoutput(cmd) self.assertEquals(packstatus[0], 0) self.assertIn("org.xwalk.test", os.listdir(os.getcwd())) def build(self, cmd): buildstatus = commands.getstatusoutput(cmd) self.assertEquals(buildstatus[0], 0) self.assertIn("pkg", os.listdir(XwalkPath + "org.xwalk.test")) os.chdir('pkg') apks = os.listdir(os.getcwd()) self.assertNotEquals(len(apks), 0) for i in range(len(apks)): self.assertTrue(apks[i].endswith(".apk")) if "x86" in apks[i]: self.assertIn("x86", apks[i]) if 
i < len(os.listdir(os.getcwd())): self.assertIn("arm", apks[i - 1]) else: self.assertIn("arm", apks[i + 1]) elif "arm" in apks[i]: self.assertIn("arm", apks[i]) if i < len(os.listdir(os.getcwd())): self.assertIn("x86", apks[i - 1]) else: self.assertIn("x86", apks[i + 1]) def update(self, cmd): updatestatus = commands.getstatusoutput(cmd) self.assertEquals(updatestatus[0], 0) self.assertNotIn("ERROR:", updatestatus[1]) version = updatestatus[1].split('\n')[-1].split(' ')[-1][1:-1] if not cachedir: namelist = os.listdir(os.getcwd()) else: newcachedir = os.environ.get('CROSSWALK_APP_TOOLS_CACHE_DIR') os.chdir(newcachedir) namelist = os.listdir(os.getcwd()) os.chdir(XwalkPath + 'org.xwalk.test') crosswalk = 'crosswalk-{}.zip'.format(version) self.assertIn(crosswalk, namelist) return version def run(self): setUp() apks = os.listdir(os.getcwd()) for apk in apks: if ARCH in apk: inststatus = commands.getstatusoutput( 'adb -s ' + device + ' install -r ' + os.getcwd() + '/' + apk) # print inststatus self.assertEquals(inststatus[0], 0) self.assertIn("Success", inststatus[1]) pmstatus = commands.getstatusoutput( 'adb -s ' + device + ' shell pm list package |grep org.xwalk.test') self.assertEquals(pmstatus[0], 0) launstatus = commands.getstatusoutput( 'adb -s ' + device + ' shell am start -n org.xwalk.test/.TestActivity') self.assertEquals(launstatus[0], 0) stopstatus = commands.getstatusoutput( 'adb -s ' + device + ' shell am force-stop org.xwalk.test') self.assertEquals(stopstatus[0], 0) uninstatus = commands.getstatusoutput( 'adb -s ' + device + ' uninstall org.xwalk.test') self.assertEquals(uninstatus[0], 0) def channel(self, channel): createcmd = PackTools + \ "crosswalk-app create org.xwalk.test --android-crosswalk=" + channel packstatus = commands.getstatusoutput(createcmd) self.assertEquals(packstatus[0], 0) self.assertIn(channel, packstatus[1]) crosswalklist = urllib2.urlopen( 'https://download.01.org/crosswalk/releases/crosswalk/android/' + channel + '/').read() fp = open('test', 'w') fp.write(crosswalklist) fp.close() line = commands.getstatusoutput( "cat test|sed -n '/src\=\"\/icons\/folder.gif\"/=' |sed -n '$p'")[1].strip() cmd = "cat test |sed -n '%dp' |awk -F 'href=' '{print $2}' |awk -F '\"|/' '{print $2}'" % int(<|fim▁hole|> version = commands.getstatusoutput(cmd)[1] if not '.' in version: line = commands.getstatusoutput( "tac test|sed -n '/src\=\"\/icons\/folder.gif\"/=' |sed -n '2p'")[1].strip() cmd = "tac test |sed -n '%dp' |awk -F 'href=' '{print $2}' |awk -F '\"|/' '{print $2}'" % int( line) version = commands.getstatusoutput(cmd)[1] commands.getstatusoutput("rm -rf test") crosswalk = 'crosswalk-{}.zip'.format(version) namelist = os.listdir(os.getcwd()) self.assertIn(crosswalk, namelist)<|fim▁end|>
line)
<|file_name|>UnitAnimEvent.java<|end_file_name|><|fim▁begin|>package openra.server; import openra.core.Unit; public class UnitAnimEvent extends ActionEvent { private Unit un; private UnitAnimEvent scheduled; public UnitAnimEvent(int p, Unit un) { super(p); //logger->debug("UAE cons: this:%p un:%p\n",this,un); this.un = un; //un.referTo(); scheduled = null; } void destUnitAnimEvent() { //logger->debug("UAE dest: this:%p un:%p sch:%p\n",this,un,scheduled); if (scheduled != null) { this.getAequeue().scheduleEvent(scheduled); } //un->unrefer(); } protected void setSchedule(UnitAnimEvent e) { //logger->debug("Scheduling an event. (this: %p, e: %p)\n",this,e); if (scheduled != null) { scheduled.setSchedule(null); scheduled.stop(); } scheduled = e; } void stopScheduled() { if (scheduled != null) { scheduled.stop(); <|fim▁hole|> void update() { } }<|fim▁end|>
} }
<|file_name|>test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2 # test.py # nroberts 04/10/2017 # Instead of lighting up a bridge, we light up the terminal from tennis_show import TennisShow import current_bridge from threading import Thread import Queue from colors import Colors thread_continuing = True class OutQueue: def put(self, event): print "Put in outqueue: %s" % str(event) def main(bridge): global thread_continuing print "Usage: Press 1 for player 1 swing, 2 for player 2 swing (followed by Enter)" print "To quit, press Ctrl+C and then Enter" inqueue = Queue.Queue() outqueue = OutQueue() show = TennisShow(bridge(), inqueue=inqueue, outqueue=outqueue) def cause_problems(): global thread_continuing while thread_continuing: inp = raw_input() if inp == "r": inqueue.put(("game_reset", None)) continue try: x = int(inp[0]) if len(inp) > 1: if inp[1] == "s": inqueue.put(("init_color_choice", { "player_num": x, "color": Colors.RED })) elif inp[1] == "t": inqueue.put(("init_color_choice", { "player_num": x, "color": Colors.GREEN })) elif inp[1] == "c": inqueue.put(("init_color_choice", { "player_num": x, "color": Colors.PURPLE })) elif inp[1] == "x": inqueue.put(("init_color_choice", { "player_num": x, "color": Colors.SKY_BLUE })) else: inqueue.put(("game_swing", { "player_num": x, "hand": 1, "strength": 1.0 })) except: pass # put something new on the inqueue every 10 seconds thread = Thread(target = cause_problems)<|fim▁hole|> # run the show try: show.run(framerate=40) finally: thread_continuing = False if __name__ == "__main__": main(current_bridge.bridge)<|fim▁end|>
thread.start()
<|file_name|>project_tree.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Copyright (C) Pootle contributors. # # This file is a part of the Pootle project. It is distributed under the GPL3 # or later license. See the LICENSE file for a copy of the license and the # AUTHORS file for copyright and authorship information. import errno import logging import os import re from django.conf import settings from pootle.core.log import STORE_RESURRECTED, store_log from pootle.core.utils.timezone import datetime_min from pootle_app.models.directory import Directory from pootle_language.models import Language from pootle_store.models import Store from pootle_store.util import absolute_real_path, relative_real_path #: Case insensitive match for language codes LANGCODE_RE = re.compile('^[a-z]{2,3}([_-][a-z]{2,3})?(@[a-z0-9]+)?$', re.IGNORECASE) #: Case insensitive match for language codes as postfix LANGCODE_POSTFIX_RE = re.compile( '^.*?[-_.]([a-z]{2,3}([_-][a-z]{2,3})?(@[a-z0-9]+)?)$', re.IGNORECASE) def direct_language_match_filename(language_code, path_name): name, ext = os.path.splitext(os.path.basename(path_name)) if name == language_code or name.lower() == language_code.lower(): return True # Check file doesn't match another language. if Language.objects.filter(code__iexact=name).count(): return False detect = LANGCODE_POSTFIX_RE.split(name) return (len(detect) > 1 and (detect[1] == language_code or detect[1].lower() == language_code.lower())) def match_template_filename(project, filename): """Test if :param:`filename` might point at a template file for a given :param:`project`. """ name, ext = os.path.splitext(os.path.basename(filename)) # FIXME: is the test for matching extension redundant? if ext == os.path.extsep + project.get_template_filetype(): if ext != os.path.extsep + project.localfiletype: # Template extension is distinct, surely file is a template. return True elif not find_lang_postfix(filename): # File name can't possibly match any language, assume it is a # template. return True return False def get_matching_language_dirs(project_dir, language): return [lang_dir for lang_dir in os.listdir(project_dir) if language.code == lang_dir] def get_non_existant_language_dir(project_dir, language, file_style, make_dirs): if file_style == "gnu": return project_dir elif make_dirs: language_dir = os.path.join(project_dir, language.code) os.mkdir(language_dir) return language_dir else: raise IndexError("Directory not found for language %s, project %s" % (language.code, project_dir)) def get_or_make_language_dir(project_dir, language, file_style, make_dirs): matching_language_dirs = get_matching_language_dirs(project_dir, language) if len(matching_language_dirs) == 0: # If no matching directories can be found, check if it is a GNU-style # project.<|fim▁hole|> return get_non_existant_language_dir(project_dir, language, file_style, make_dirs) else: return os.path.join(project_dir, matching_language_dirs[0]) def get_language_dir(project_dir, language, file_style, make_dirs): language_dir = os.path.join(project_dir, language.code) if not os.path.exists(language_dir): return get_or_make_language_dir(project_dir, language, file_style, make_dirs) else: return language_dir def get_translation_project_dir(language, project_dir, file_style, make_dirs=False): """Returns the base directory containing translations files for the project. :param make_dirs: if ``True``, project and language directories will be created as necessary. 
""" if file_style == 'gnu': return project_dir else: return get_language_dir(project_dir, language, file_style, make_dirs) def is_hidden_file(path): return path[0] == '.' def split_files_and_dirs(ignored_files, ext, real_dir, file_filter): files = [] dirs = [] for child_path in [child_path for child_path in os.listdir(real_dir) if child_path not in ignored_files and not is_hidden_file(child_path)]: full_child_path = os.path.join(real_dir, child_path) if (os.path.isfile(full_child_path) and full_child_path.endswith(ext) and file_filter(full_child_path)): files.append(child_path) elif os.path.isdir(full_child_path): dirs.append(child_path) return files, dirs def add_items(fs_items_set, db_items, create_or_resurrect_db_item, parent): """Add/make obsolete the database items to correspond to the filesystem. :param fs_items_set: items (dirs, files) currently in the filesystem :param db_items: dict (name, item) of items (dirs, stores) currently in the database :create_or_resurrect_db_item: callable that will create a new db item or resurrect an obsolete db item with a given name and parent. :parent: parent db directory for the items :return: list of all items, list of newly added items :rtype: tuple """ items = [] new_items = [] db_items_set = set(db_items) items_to_delete = db_items_set - fs_items_set items_to_create = fs_items_set - db_items_set for name in items_to_delete: db_items[name].makeobsolete() if len(items_to_delete) > 0: parent.update_all_cache() for vfolder_treeitem in parent.vfolder_treeitems: vfolder_treeitem.update_all_cache() for name in db_items_set - items_to_delete: items.append(db_items[name]) for name in items_to_create: item = create_or_resurrect_db_item(name) items.append(item) new_items.append(item) try: item.save() except Exception: logging.exception('Error while adding %s', item) return items, new_items def create_or_resurrect_store(file, parent, name, translation_project): """Create or resurrect a store db item with given name and parent.""" try: store = Store.objects.get(parent=parent, name=name) store.obsolete = False store.file_mtime = datetime_min if store.last_sync_revision is None: store.last_sync_revision = store.get_max_unit_revision() store_log(user='system', action=STORE_RESURRECTED, path=store.pootle_path, store=store.id) except Store.DoesNotExist: store = Store(file=file, parent=parent, name=name, translation_project=translation_project) store.mark_all_dirty() return store def create_or_resurrect_dir(name, parent): """Create or resurrect a directory db item with given name and parent.""" try: dir = Directory.objects.get(parent=parent, name=name) dir.obsolete = False except Directory.DoesNotExist: dir = Directory(name=name, parent=parent) dir.mark_all_dirty() return dir # TODO: rename function or even rewrite it def add_files(translation_project, ignored_files, ext, relative_dir, db_dir, file_filter=lambda _x: True): podir_path = to_podir_path(relative_dir) files, dirs = split_files_and_dirs(ignored_files, ext, podir_path, file_filter) file_set = set(files) dir_set = set(dirs) existing_stores = dict((store.name, store) for store in db_dir.child_stores.live().exclude(file='') .iterator()) existing_dirs = dict((dir.name, dir) for dir in db_dir.child_dirs.live().iterator()) files, new_files = add_items( file_set, existing_stores, lambda name: create_or_resurrect_store( file=os.path.join(relative_dir, name), parent=db_dir, name=name, translation_project=translation_project, ), db_dir, ) db_subdirs, new_db_subdirs = add_items( dir_set, existing_dirs, lambda 
name: create_or_resurrect_dir(name=name, parent=db_dir), db_dir, ) is_empty = len(files) == 0 for db_subdir in db_subdirs: fs_subdir = os.path.join(relative_dir, db_subdir.name) _files, _new_files, _is_empty = \ add_files(translation_project, ignored_files, ext, fs_subdir, db_subdir, file_filter) files += _files new_files += _new_files is_empty &= _is_empty if is_empty: db_dir.makeobsolete() return files, new_files, is_empty def to_podir_path(path): path = relative_real_path(path) return os.path.join(settings.POOTLE_TRANSLATION_DIRECTORY, path) def find_lang_postfix(filename): """Finds the language code at end of a filename.""" name = os.path.splitext(os.path.basename(filename))[0] if LANGCODE_RE.match(name): return name match = LANGCODE_POSTFIX_RE.match(name) if match: return match.groups()[0] for code in Language.objects.values_list('code', flat=True): if (name.endswith('-'+code) or name.endswith('_'+code) or name.endswith('.'+code) or name.lower().endswith('-'+code.lower()) or name.endswith('_'+code) or name.endswith('.'+code)): return code def translation_project_dir_exists(language, project): """Tests if there are translation files corresponding to the given :param:`language` and :param:`project`. """ if project.get_treestyle() == "gnu": # GNU style projects are tricky if language.code == 'templates': # Language is template look for template files for dirpath, dirnames, filenames in os.walk( project.get_real_path()): for filename in filenames: if (project.file_belongs_to_project(filename, match_templates=True) and match_template_filename(project, filename)): return True else: # find files with the language name in the project dir for dirpath, dirnames, filenames in os.walk( project.get_real_path()): for filename in filenames: # FIXME: don't reuse already used file if (project.file_belongs_to_project(filename, match_templates=False) and direct_language_match_filename(language.code, filename)): return True else: # find directory with the language name in the project dir try: dirpath, dirnames, filename = os.walk( project.get_real_path()).next() if language.code in dirnames: return True except StopIteration: pass return False def init_store_from_template(translation_project, template_store): """Initialize a new file for `translation_project` using `template_store`. """ if translation_project.file_style == 'gnu': target_pootle_path, target_path = get_translated_name_gnu( translation_project, template_store) else: target_pootle_path, target_path = get_translated_name( translation_project, template_store) # Create the missing directories for the new TP. target_dir = os.path.dirname(target_path) if not os.path.exists(target_dir): os.makedirs(target_dir) output_file = template_store.file.store output_file.settargetlanguage(translation_project.language.code) output_file.savefile(target_path) def get_translated_name_gnu(translation_project, store): """Given a template :param:`store` and a :param:`translation_project` return target filename. 
""" pootle_path_parts = store.pootle_path.split('/') pootle_path_parts[1] = translation_project.language.code pootle_path = '/'.join(pootle_path_parts[:-1]) if not pootle_path.endswith('/'): pootle_path = pootle_path + '/' suffix = "%s%s%s" % (translation_project.language.code, os.extsep, translation_project.project.localfiletype) # try loading file first try: target_store = translation_project.stores.live().get( parent__pootle_path=pootle_path, name__iexact=suffix, ) return (target_store.pootle_path, target_store.file and target_store.file.path) except Store.DoesNotExist: target_store = None # is this GNU-style with prefix? use_prefix = (store.parent.child_stores.live().exclude(file="").count() > 1 or translation_project.stores.live().exclude( name__iexact=suffix, file='').count()) if not use_prefix: # let's make sure for tp in translation_project.project.translationproject_set.exclude( language__code='templates').iterator(): temp_suffix = \ "%s%s%s" % (tp.language.code, os.extsep, translation_project.project.localfiletype) if tp.stores.live().exclude( name__iexact=temp_suffix).exclude(file="").count(): use_prefix = True break if use_prefix: if store.translation_project.language.code == 'templates': tprefix = os.path.splitext(store.name)[0] # FIXME: we should detect separator prefix = tprefix + '-' else: prefix = os.path.splitext(store.name)[0][:-len( store.translation_project.language.code)] tprefix = prefix[:-1] try: target_store = translation_project.stores.live().filter( parent__pootle_path=pootle_path, name__in=[ tprefix + '-' + suffix, tprefix + '_' + suffix, tprefix + '.' + suffix, tprefix + '-' + suffix.lower(), tprefix + '_' + suffix.lower(), tprefix + '.' + suffix.lower(), ], )[0] return (target_store.pootle_path, target_store.file and target_store.file.path) except (Store.DoesNotExist, IndexError): pass else: prefix = "" if store.file: path_parts = store.file.path.split(os.sep) name = prefix + suffix path_parts[-1] = name pootle_path_parts[-1] = name else: path_parts = store.parent.get_real_path().split(os.sep) path_parts.append(store.name) return '/'.join(pootle_path_parts), os.sep.join(path_parts) def get_translated_name(translation_project, store): name, ext = os.path.splitext(store.name) if store.file: path_parts = store.file.name.split(os.sep) else: path_parts = store.parent.get_real_path().split(os.sep) path_parts.append(store.name) pootle_path_parts = store.pootle_path.split('/') # Replace language code path_parts[1] = translation_project.language.code pootle_path_parts[1] = translation_project.language.code # Replace extension path_parts[-1] = "%s.%s" % (name, translation_project.project.localfiletype) pootle_path_parts[-1] = \ "%s.%s" % (name, translation_project.project.localfiletype) return ('/'.join(pootle_path_parts), absolute_real_path(os.sep.join(path_parts))) def does_not_exist(path): if os.path.exists(path): return False try: os.stat(path) # what the hell? except OSError as e: if e.errno == errno.ENOENT: # explicit no such file or directory return True<|fim▁end|>
<|file_name|>dispatcher.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of the VecNet OpenMalaria Portal.<|fim▁hole|>
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License (MPL), version 2.0. If a copy of the MPL was not distributed
# with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

from django.conf import settings
import subprocess
import sys
import os
import logging

from website.apps.ts_om.models import Simulation

logger = logging.getLogger(__name__)


def submit(simulation):
    logger.debug("dispatcher.submit: simulation id %s" % simulation.id)
    assert isinstance(simulation, Simulation)
    base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    executable = sys.executable
    if hasattr(settings, "PYTHON_EXECUTABLE"):
        executable = settings.PYTHON_EXECUTABLE
    run_script_filename = os.path.join(base_dir, "run.py")
    try:
        logger.debug("dispatcher.submit: before Popen")
        p = subprocess.Popen(
            [executable, run_script_filename, str(simulation.id)],
            cwd=base_dir,
            shell=False
        )
        logger.debug("dispatcher.submit: after Popen")
    except (OSError, IOError) as e:
        logger.exception("subprocess failed: %s", sys.exc_info())
        simulation.status = Simulation.FAILED
        simulation.last_error_message = "Subprocess failed: %s" % e
        simulation.pid = ""
        simulation.save(update_fields=["status", "pid", "last_error_message"])
        return None
    simulation.status = Simulation.QUEUED
    simulation.pid = str(p.pid)
    simulation.last_error_message = ""
    simulation.save(update_fields=["status", "pid", "last_error_message"])
    logger.debug("dispatcher.submit: success, PID: %s" % p.pid)
    return str(p.pid)<|fim▁end|>
# For copyright and licensing information about this package, see the
# NOTICE.txt and LICENSE.txt files in its top-level directory; they are
# available at https://github.com/vecnet/om
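The submit() helper in the row above is a fire-and-forget dispatch pattern: spawn a worker script with the current interpreter, keep the PID, and record failure state if the spawn itself fails. A minimal standalone sketch of just that step, with a hypothetical worker.py and no Django or Simulation model involved:

import os
import subprocess
import sys


def launch_worker(job_id):
    base_dir = os.path.dirname(os.path.abspath(__file__))
    script = os.path.join(base_dir, "worker.py")  # hypothetical worker script
    try:
        proc = subprocess.Popen(
            [sys.executable, script, str(job_id)],
            cwd=base_dir,
            shell=False,
        )
    except OSError as exc:
        # Mirror the dispatcher's failure handling: report and signal "not queued".
        print("subprocess failed: %s" % exc)
        return None
    return str(proc.pid)


print(launch_worker(42))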
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.

import os

from indico.core import signals
from indico.core.db import db

from .logger import logger<|fim▁hole|>


@signals.core.app_created.connect
def _no_ssl_required_on_debug(app, **kwargs):
    if app.debug or app.testing:
        os.environ['AUTHLIB_INSECURE_TRANSPORT'] = '1'


@signals.users.merged.connect
def _delete_merged_user_tokens(target, source, **kwargs):
    target_app_links = {link.application: link for link in target.oauth_app_links}
    for source_link in source.oauth_app_links.all():
        try:
            target_link = target_app_links[source_link.application]
        except KeyError:
            logger.info('merge: reassigning %r to %r', source_link, target)
            source_link.user = target
        else:
            logger.info('merge: merging %r into %r', source_link, target_link)
            target_link.update_scopes(set(source_link.scopes))
            target_link.tokens.extend(source_link.tokens)
            db.session.delete(source_link)<|fim▁end|>
from .oauth2 import require_oauth


__all__ = ['require_oauth']
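The _delete_merged_user_tokens handler in the row above merges OAuth application links by key: links only the source user has are reassigned, links both users share are folded together (scopes unioned, tokens carried over). A simplified, framework-free sketch of the same strategy, using plain dictionaries as stand-ins for the ORM objects:

def merge_links(target_links, source_links):
    """Merge source_links into target_links, keyed by application name."""
    for app, source in source_links.items():
        if app not in target_links:
            # Application only linked to the source user: move it over.
            target_links[app] = source
        else:
            # Both users linked the same application: union scopes, keep tokens.
            target = target_links[app]
            target["scopes"] = sorted(set(target["scopes"]) | set(source["scopes"]))
            target["tokens"].extend(source["tokens"])
    return target_links


target = {"gitlab": {"scopes": ["read"], "tokens": ["t1"]}}
source = {"gitlab": {"scopes": ["write"], "tokens": ["t2"]},
          "wiki": {"scopes": [], "tokens": []}}
print(merge_links(target, source))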
<|file_name|>file_ssl_cert_key.py<|end_file_name|><|fim▁begin|>from a10sdk.common.A10BaseClass import A10BaseClass


class SslCertKey(A10BaseClass):

    """
    :param action: {"optional": true, "enum": ["create", "import", "export", "copy", "rename", "check", "replace", "delete"], "type": "string", "description": "'create': create; 'import': import; 'export': export; 'copy': copy; 'rename': rename; 'check': check; 'replace': replace; 'delete': delete; ", "format": "enum"}
    :param dst_file: {"description": "destination file name for copy and rename action", "format": "string", "minLength": 1, "optional": true, "maxLength": 32, "type": "string"}
    :param file_handle: {"description": "full path of the uploaded file", "format": "string-rlx", "minLength": 1, "optional": true, "maxLength": 255, "type": "string"}
    :param file: {"description": "ssl certificate local file name", "format": "string", "minLength": 1, "optional": true, "maxLength": 255, "type": "string"}
    :param size: {"description": "ssl certificate file size in byte", "format": "number", "type": "number", "maximum": 2147483647, "minimum": 0, "optional": true}
    :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`

    Class Description::
    ssl certificate and key file information and management commands.

    Class ssl-cert-key supports CRUD Operations and inherits from `common/A10BaseClass`.
    This class is the `"PARENT"` class for this module.`

    URL for this object::
    `https://<Hostname|Ip address>//axapi/v3/file/ssl-cert-key`.

    """

    def __init__(self, **kwargs):<|fim▁hole|>
        self.a10_url="/axapi/v3/file/ssl-cert-key"
        self.DeviceProxy = ""
        self.action = ""
        self.dst_file = ""
        self.file_handle = ""
        self.A10WW_file = ""
        self.size = ""

        for keys, value in kwargs.items():
            setattr(self,keys, value)<|fim▁end|>
self.ERROR_MSG = "" self.required=[] self.b_key = "ssl-cert-key"
<|file_name|>fork.py<|end_file_name|><|fim▁begin|>from pwn.internal.shellcode_helper import *

@shellcode_reqs(arch=['i386', 'amd64'], os=['linux', 'freebsd'])
def fork(parent, child = None, os = None, arch = None):
    """Fork this shit."""
    if arch == 'i386':
        if os in ['linux', 'freebsd']:
            return _fork_i386(parent, child)
    elif arch == 'amd64':
        if os in ['linux', 'freebsd']:
            return _fork_amd64(parent, child)
    bug('OS/arch combination (%s, %s) was not supported for fork' % (os, arch))

def _fork_amd64(parent, child):
    code = """
        push SYS_fork
        pop rax
        syscall
        test rax, rax
        jne %s
    """ % parent

    if child is not None:
        code += 'jmp %s\n' % child

    return code

def _fork_i386(parent, child):<|fim▁hole|>
    code = """
        push SYS_fork
        pop eax
        int 0x80
        test eax, eax
        jne %s
    """ % parent

    if child is not None:
        code += 'jmp %s\n' % child

    return code<|fim▁end|>