prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
---|---|
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var elixir = require('laravel-elixir');
var gulp = require("gulp");
require('laravel-elixir-wiredep');
/*
|--------------------------------------------------------------------------
| Elixir Asset Management
|--------------------------------------------------------------------------
|
| Elixir provides a clean, fluent API for defining some basic Gulp tasks
| for your Laravel application. By default, we are compiling the Less
| file for our application, as well as publishing vendor resources.
|
*/<|fim▁hole|> bowerDir + "bootstrap/less/"
]
var templatePaths = [
"resources/assets/js/app/components/*/*.html",
];
elixir.extend("templates", function(src, base, dest) {
gulp.task("templates", function () {
// the base option sets the relative root for the set of files,
// preserving the folder structure
gulp.src(src, {base: base})
.pipe(gulp.dest(dest));
});
// Watch each glob in src
for (idx in src){
var glob = src[idx];
this.registerWatcher("templates", glob);
}
return this.queueTask("templates");
});
elixir(function(mix) {
// Compile LESS into CSS
mix.less("main.less", "public/css/", {paths: lessPaths});
// Inject dependencies into layout (except bootstrap css, since that is compiled into main css)
mix.wiredep({src: "master.blade.php"}, {exclude: "vendor/bootstrap/dist/css/bootstrap.css"});
// Combine app js into one file
mix.scriptsIn("resources/assets/js/", "public/js/main.js");
// Copy angular templates to public folder
mix.templates(templatePaths, "resources/assets/js/app/components/", "public");
});<|fim▁end|> | var bowerDir = "public/vendor/";
var lessPaths = [ |
<|file_name|>missing_inline_executable.rs<|end_file_name|><|fim▁begin|>#![warn(clippy::missing_inline_in_public_items)]
<|fim▁hole|>
fn main() {}<|fim▁end|> | pub fn foo() {} |
<|file_name|>_inbound_nat_rules_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class InboundNatRulesOperations:
"""InboundNatRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
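# A hypothetical usage sketch (not generated code): this operations group is
# normally reached through a service client created elsewhere, e.g. an
# azure.mgmt.network NetworkManagementClient, rather than by instantiating this
# class directly. Names below are illustrative.
#
#   async for rule in network_client.inbound_nat_rules.list("my-rg", "my-lb"):
#       print(rule.name)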
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
load_balancer_name: str,
**kwargs: Any
) -> AsyncIterable["_models.InboundNatRuleListResult"]:
"""Gets all the inbound nat rules in a load balancer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either InboundNatRuleListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_11_01.models.InboundNatRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.InboundNatRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('InboundNatRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/inboundNatRules'} # type: ignore<|fim▁hole|>
async def _delete_initial(
self,
resource_group_name: str,
load_balancer_name: str,
inbound_nat_rule_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'inboundNatRuleName': self._serialize.url("inbound_nat_rule_name", inbound_nat_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/inboundNatRules/{inboundNatRuleName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
load_balancer_name: str,
inbound_nat_rule_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified load balancer inbound nat rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param inbound_nat_rule_name: The name of the inbound nat rule.
:type inbound_nat_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
load_balancer_name=load_balancer_name,
inbound_nat_rule_name=inbound_nat_rule_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'inboundNatRuleName': self._serialize.url("inbound_nat_rule_name", inbound_nat_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/inboundNatRules/{inboundNatRuleName}'} # type: ignore
async def get(
self,
resource_group_name: str,
load_balancer_name: str,
inbound_nat_rule_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.InboundNatRule":
"""Gets the specified load balancer inbound nat rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param inbound_nat_rule_name: The name of the inbound nat rule.
:type inbound_nat_rule_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: InboundNatRule, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_11_01.models.InboundNatRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.InboundNatRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'inboundNatRuleName': self._serialize.url("inbound_nat_rule_name", inbound_nat_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('InboundNatRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/inboundNatRules/{inboundNatRuleName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
load_balancer_name: str,
inbound_nat_rule_name: str,
inbound_nat_rule_parameters: "_models.InboundNatRule",
**kwargs: Any
) -> "_models.InboundNatRule":
cls = kwargs.pop('cls', None) # type: ClsType["_models.InboundNatRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'inboundNatRuleName': self._serialize.url("inbound_nat_rule_name", inbound_nat_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(inbound_nat_rule_parameters, 'InboundNatRule')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('InboundNatRule', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('InboundNatRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/inboundNatRules/{inboundNatRuleName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
load_balancer_name: str,
inbound_nat_rule_name: str,
inbound_nat_rule_parameters: "_models.InboundNatRule",
**kwargs: Any
) -> AsyncLROPoller["_models.InboundNatRule"]:
"""Creates or updates a load balancer inbound nat rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param inbound_nat_rule_name: The name of the inbound nat rule.
:type inbound_nat_rule_name: str
:param inbound_nat_rule_parameters: Parameters supplied to the create or update inbound nat
rule operation.
:type inbound_nat_rule_parameters: ~azure.mgmt.network.v2018_11_01.models.InboundNatRule
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either InboundNatRule or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_11_01.models.InboundNatRule]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.InboundNatRule"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
load_balancer_name=load_balancer_name,
inbound_nat_rule_name=inbound_nat_rule_name,
inbound_nat_rule_parameters=inbound_nat_rule_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('InboundNatRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'inboundNatRuleName': self._serialize.url("inbound_nat_rule_name", inbound_nat_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/inboundNatRules/{inboundNatRuleName}'} # type: ignore<|fim▁end|> | |
<|file_name|>slim-loading-bar.service.spec.ts<|end_file_name|><|fim▁begin|>import { inject, fakeAsync, tick, TestBed }
from '@angular/core/testing';
import {SlimLoadingBarService}
from '../src/slim-loading-bar.service';
describe('SlimLoadingBarService', () => {
let service: SlimLoadingBarService;
let providers = [SlimLoadingBarService];
beforeEach(() => {
TestBed.configureTestingModule({
providers: [providers]
});
});
beforeEach(inject([SlimLoadingBarService], (slbs: SlimLoadingBarService) => {
service = slbs;
}));
it('is defined', () => {
expect(SlimLoadingBarService).toBeDefined();
expect(service instanceof SlimLoadingBarService).toBeTruthy();
});
it('starts at zero when just being injected', () => {
expect(service.progress).toBe(0);
});
it('can change the progress to 30 if you call set progress', () => {
service.progress = 30;
expect(service.progress).toBe(30);
});
it('increments over time after calling start()', <any>fakeAsync((): void => {
// var value, flag;
expect(service.progress).toBe(0);
service.start();
tick(500);
expect(service.progress).toBe(1);
service.stop();
}));<|fim▁hole|> expect(service.progress).toBe(100);
});
it('resets to zero when calling reset() after start() or set()', () => {
service.progress = 30;
service.reset();
expect(service.progress).toBe(0);
});
it('will return 100 after calling complete', () => {
service.progress = 30;
service.complete();
expect(service.progress).toBe(100);
});
it('return current height when calling height() without parameters', () => {
expect(service.height).toBe('2px');
});
it('set the height when calling height() with parameter', () => {
service.height = '5px';
expect(service.height).toBe('5px');
});
it('return current color ', () => {
expect(service.color).toBe('firebrick');
});
it('set the color', () => {
service.color = 'green';
expect(service.color).toBe('green');
});
});<|fim▁end|> |
it('have 100 returned from progress after complete()', () => {
service.start();
service.complete(); |
<|file_name|>builder.go<|end_file_name|><|fim▁begin|>package controllercmd
import (
"fmt"
"github.com/golang/glog"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/leaderelection"
configv1 "github.com/openshift/api/config/v1"
"github.com/openshift/library-go/pkg/config/client"
leaderelectionconverter "github.com/openshift/library-go/pkg/config/leaderelection"
)
// StartFunc is the function to call on leader election start
type StartFunc func(config *rest.Config, stop <-chan struct{}) error
// ControllerBuilder allows the construction of a controller in optional pieces.
type ControllerBuilder struct {
kubeAPIServerConfigFile *string
clientOverrides *client.ClientConnectionOverrides
leaderElection *configv1.LeaderElection
startFunc StartFunc
componentName string
instanceIdentity string
// TODO add serving info, authentication, and authorization
}
// NewController returns a builder struct for constructing the command you want to run
func NewController(componentName string, startFunc StartFunc) *ControllerBuilder {
return &ControllerBuilder{
startFunc: startFunc,
componentName: componentName,
}
}
// WithLeaderElection adds leader election options
func (b *ControllerBuilder) WithLeaderElection(leaderElection configv1.LeaderElection, defaultNamespace, defaultName string) *ControllerBuilder {
if leaderElection.Disable {
return b
}
defaulted := leaderelectionconverter.LeaderElectionDefaulting(leaderElection, defaultNamespace, defaultName)
b.leaderElection = &defaulted
return b
}
// WithKubeConfigFile sets an optional kubeconfig file. The in-cluster config will be used if the filename is empty.
func (b *ControllerBuilder) WithKubeConfigFile(kubeConfigFilename string, defaults *client.ClientConnectionOverrides) *ControllerBuilder {
b.kubeAPIServerConfigFile = &kubeConfigFilename
b.clientOverrides = defaults
return b
}
// WithInstanceIdentity sets the instance identity to use if you need something special. The default is just a UID which is
// usually fine for a pod.
func (b *ControllerBuilder) WithInstanceIdentity(identity string) *ControllerBuilder {
b.instanceIdentity = identity
return b
}
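// A hypothetical wiring sketch (the component name, start function, and namespace
// below are illustrative, not part of this package):
//
//	err := NewController("example-operator", runOperator).
//		WithKubeConfigFile(kubeconfigPath, nil).
//		WithLeaderElection(leaderElectionConfig, "example-namespace", "example-operator-lock").
//		Run()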
// Run starts your controller for you. It uses leader election if you asked, otherwise it directly calls you
func (b *ControllerBuilder) Run() error {
clientConfig, err := b.getClientConfig()
if err != nil {
return err
}
if b.leaderElection == nil {
if err := b.startFunc(clientConfig, wait.NeverStop); err != nil {
return err
}<|fim▁hole|> return fmt.Errorf("exited")
}
leaderElection, err := leaderelectionconverter.ToConfigMapLeaderElection(clientConfig, *b.leaderElection, b.componentName, b.instanceIdentity)
if err != nil {
return err
}
leaderElection.Callbacks.OnStartedLeading = func(stop <-chan struct{}) {
if err := b.startFunc(clientConfig, stop); err != nil {
glog.Fatal(err)
}
}
leaderelection.RunOrDie(leaderElection)
return fmt.Errorf("exited")
}
func (b *ControllerBuilder) getClientConfig() (*rest.Config, error) {
kubeconfig := ""
if b.kubeAPIServerConfigFile != nil {
kubeconfig = *b.kubeAPIServerConfigFile
}
return client.GetKubeConfigOrInClusterConfig(kubeconfig, b.clientOverrides)
}
func (b *ControllerBuilder) getNamespace() (*rest.Config, error) {
kubeconfig := ""
if b.kubeAPIServerConfigFile != nil {
kubeconfig = *b.kubeAPIServerConfigFile
}
return client.GetKubeConfigOrInClusterConfig(kubeconfig, b.clientOverrides)
}<|fim▁end|> | |
<|file_name|>definition-list-fixtures.module.ts<|end_file_name|><|fim▁begin|>import { CommonModule } from '@angular/common';
import { NgModule } from '@angular/core';
import { SkyDefinitionListModule } from '../definition-list.module';
import { SkyDefinitionListTestComponent } from './definition-list.component.fixture';
@NgModule({<|fim▁hole|> declarations: [SkyDefinitionListTestComponent],
imports: [CommonModule, SkyDefinitionListModule],
exports: [SkyDefinitionListTestComponent],
})
export class SkyDefinitionListFixturesModule {}<|fim▁end|> | |
<|file_name|>public.ts<|end_file_name|><|fim▁begin|>import {Router, Response, Request} from "express";
export const publicRouter = Router();
<|fim▁hole|>publicRouter.get("/", (req: Request, res: Response) => {
res.json({
title: "greetings",
text: "hello web"
});
});<|fim▁end|> | |
<|file_name|>repoclosure.py<|end_file_name|><|fim▁begin|># repoclosure.py
# DNF plugin adding a command to display a list of unresolved dependencies
# for repositories.
#
# Copyright (C) 2015 Igor Gnatenko
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from __future__ import unicode_literals
from dnfpluginscore import _
import dnf.cli
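# Example invocations once the plugin is installed (flags are defined in
# RepoClosureCommand.set_argparser below; repository ids are illustrative):
#   dnf repoclosure --newest
#   dnf repoclosure --check my-updates-repo --arch x86_64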
class RepoClosure(dnf.Plugin):
name = "repoclosure"
def __init__(self, base, cli):
super(RepoClosure, self).__init__(base, cli)
if cli is None:
return
cli.register_command(RepoClosureCommand)
class RepoClosureCommand(dnf.cli.Command):
aliases = ("repoclosure",)
summary = _("Display a list of unresolved dependencies for repositories")
def configure(self):
demands = self.cli.demands
demands.sack_activation = True
demands.available_repos = True
if self.opts.repo:
for repo in self.base.repos.all():
if repo.id not in self.opts.repo and repo.id not in self.opts.check:
repo.disable()
else:
repo.enable()
def run(self):
if self.opts.arches:
unresolved = self._get_unresolved(self.opts.arches)
else:
unresolved = self._get_unresolved()
for pkg in sorted(unresolved.keys()):
print("package: {} from {}".format(str(pkg), pkg.reponame))
print(" unresolved deps:")
for dep in unresolved[pkg]:
print(" {}".format(dep))
if len(unresolved) > 0:
msg = _("Repoclosure ended with unresolved dependencies.")
raise dnf.exceptions.Error(msg)
def _get_unresolved(self, arch=None):
unresolved = {}
deps = set()
available = self.base.sack.query().available()
if self.base.conf.best and not self.opts.check:
available = available.latest()
elif self.opts.newest or self.base.conf.best:
available = available.filter(latest=True)
if arch is not None:
available = available.filter(arch=arch)
pkgs = set()
if self.opts.pkglist:
available.apply()
for pkg in self.opts.pkglist:
for pkgs_filtered in available.filter(name=pkg):
pkgs.add(pkgs_filtered)
else:
for pkgs_filtered in available:
pkgs.add(pkgs_filtered)
if self.opts.check:
checkpkgs = set()
available.apply()
for repo in self.opts.check:
for pkgs_filtered in available.filter(reponame=repo):
checkpkgs.add(pkgs_filtered)
pkgs.intersection_update(checkpkgs)
# --best not applied earlier due to --check, so do it now
if self.base.conf.best:
available = available.latest()
for pkg in pkgs:
unresolved[pkg] = set()
for req in pkg.requires:
reqname = str(req)
# XXX: https://bugzilla.redhat.com/show_bug.cgi?id=1186721
if reqname.startswith("solvable:") or \
reqname.startswith("rpmlib("):
continue
deps.add(req)
unresolved[pkg].add(req)
available.apply()
unresolved_deps = set(x for x in deps if not available.filter(provides=x))
unresolved_transition = {k: set(x for x in v if x in unresolved_deps)<|fim▁hole|> @staticmethod
def set_argparser(parser):
parser.add_argument("--arch", default=[], action="append", dest='arches',
help=_("check packages of the given archs, can be "
"specified multiple times"))
parser.add_argument("--check", default=[], action="append",
help=_("Specify repositories to check"))
parser.add_argument("-n", "--newest", action="store_true",
help=_("Check only the newest packages in the "
"repos"))
parser.add_argument("--pkg", default=[], action="append",
help=_("Check closure for this package only"),
dest="pkglist")<|fim▁end|> | for k, v in unresolved.items()}
return {k: v for k, v in unresolved_transition.items() if v}
|
<|file_name|>config.rs<|end_file_name|><|fim▁begin|>pub use crate::tracker::TrackerMode;
use serde::Deserialize;
use std::collections::HashMap;
#[derive(Deserialize)]
pub struct UDPConfig {
bind_address: String,
announce_interval: u32,
}
impl UDPConfig {
pub fn get_address(&self) -> &str {
self.bind_address.as_str()
}
<|fim▁hole|> pub fn get_announce_interval(&self) -> u32 {
self.announce_interval
}
}
#[derive(Deserialize)]
pub struct HTTPConfig {
bind_address: String,
access_tokens: HashMap<String, String>,
}
impl HTTPConfig {
pub fn get_address(&self) -> &str {
self.bind_address.as_str()
}
pub fn get_access_tokens(&self) -> &HashMap<String, String> {
&self.access_tokens
}
}
#[derive(Deserialize)]
pub struct Configuration {
mode: TrackerMode,
udp: UDPConfig,
http: Option<HTTPConfig>,
log_level: Option<String>,
db_path: Option<String>,
cleanup_interval: Option<u64>,
}
#[derive(Debug)]
pub enum ConfigError {
IOError(std::io::Error),
ParseError(toml::de::Error),
}
impl std::fmt::Display for ConfigError {
fn fmt(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
ConfigError::IOError(e) => e.fmt(formatter),
ConfigError::ParseError(e) => e.fmt(formatter),
}
}
}
impl std::error::Error for ConfigError {}
impl Configuration {
pub fn load(data: &[u8]) -> Result<Configuration, toml::de::Error> {
toml::from_slice(data)
}
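// A sketch of the TOML this expects (field names mirror the structs above; the
// exact spelling of `mode` values depends on how TrackerMode derives Deserialize):
//
//   mode = "dynamic"
//   [udp]
//   bind_address = "0.0.0.0:6969"
//   announce_interval = 120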
pub fn load_file(path: &str) -> Result<Configuration, ConfigError> {
match std::fs::read(path) {
Err(e) => Err(ConfigError::IOError(e)),
Ok(data) => {
match Self::load(data.as_slice()) {
Ok(cfg) => Ok(cfg),
Err(e) => Err(ConfigError::ParseError(e)),
}
}
}
}
pub fn get_mode(&self) -> &TrackerMode {
&self.mode
}
pub fn get_udp_config(&self) -> &UDPConfig {
&self.udp
}
pub fn get_log_level(&self) -> &Option<String> {
&self.log_level
}
pub fn get_http_config(&self) -> Option<&HTTPConfig> {
self.http.as_ref()
}
pub fn get_db_path(&self) -> &Option<String> {
&self.db_path
}
pub fn get_cleanup_interval(&self) -> Option<u64> {
self.cleanup_interval
}
}
impl Default for Configuration {
fn default() -> Configuration {
Configuration {
log_level: None,
mode: TrackerMode::Dynamic,
udp: UDPConfig {
announce_interval: 120,
bind_address: String::from("0.0.0.0:6969"),
},
http: None,
db_path: None,
cleanup_interval: None,
}
}
}<|fim▁end|> | |
<|file_name|>range_set.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)]
//! A set library to aid character set manipulation.
//!
//! `range_set` aims to make it easier to handle set manipulation for characters
//! over ranges. For example, a unicode library may expose character ranges such
//! as `('0', '9')` as a sequence of digits. If I was already later state I would
//! like to add the sequence of digits: `('1', '3')`, it would consider them as
//! distinct and store both. This is a nuisance. It should recognize that `1-3`
//! is encased inside `0-9` and leave it as is.
//!
//! It provides the standard set operations: union, intersection, difference,
//! and symmetric difference.
use std::collections::BTreeSet;
use std::fmt::{self, Display};
use parse::NextPrev;
#[derive(Clone, Copy, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub struct Range(pub char, pub char);
impl fmt::Display for Range {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "({}, {})", self.0, self.1)
}
}
<|fim▁hole|> }
}
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Set(BTreeSet<Range>);
impl fmt::Display for Set {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let Set(ref set) = *self;
let len = BTreeSet::len(set);
for (count, s) in set.iter().enumerate() {
if count < len - 1 { try!(write!(f, "{}, ", s)) }
else { return write!(f, "{}", s) }
}
Ok(())
}
}
impl Set {
pub fn contains(&self, c: char) -> bool {
for range in &self.0 {
if range.contains(c) { return true }
}
false
}
pub fn new() -> Self { Set(BTreeSet::new()) }
pub fn insert(&mut self, value: Range) {
let mut ret = BTreeSet::new();
// value is a complete subset of one of the other ranges.
let mut subset = false;
// Borrowing self blocks later operation. Add a new scope.
{ let Set(ref set) = *self;
let Range(mut min_val, mut max_val) = value;
if min_val > max_val { panic!("First value cannot be greater than the second.") }
// Loop over set adding old disjoint pieces and supersets back. When partially
// overlapped or disjoint without a gap, expand value to the union. At the
// end, insert union after it has been fully expanded.
//
// It is important that each branch consider all cases which lead to a specific
// modification. For example, expanding the low side isn't checking for only
// partial overlap, it's checking all cases which result in *only* the left
// side expanding. Previous attempts, for example, checked for partial overlap
// as distinct from subsets/supersets. The result was missing many edge cases.
for &Range(min, max) in &*set {
// value overlaps at the beginning or disjoint w/o gap on the low side.
if min_val < min && max_val >= min.prev() && max_val <= max { max_val = max }
// value overlaps at the end or disjoint w/o gap on the high side.
else if min_val >= min && min_val <= max.next() && max_val > max { min_val = min }
// value is entirely contained between min and max. Insert original
// into new array because new is a subset.
else if min_val >= min && max_val <= max {
ret.insert(Range(min, max));
subset = true;
}
// value is a superset to the current so don't add current.
else if min_val < min && max_val > max {}
// value is disjoint with current and has a gap. Add current.
else { ret.insert(Range(min, max)); }
}
// Insert value only when it's not a subset.
if !subset { ret.insert(Range(min_val, max_val)); }
}
*self = Set(ret);
}
pub fn is_empty(&self) -> bool { self.0.is_empty() }
pub fn remove(&mut self, value: Range) {
let mut ret = BTreeSet::new();
// Borrowing self blocks later modification. Make a new scope to contain it.
{ let Set(ref set) = *self;
let Range(min_val, max_val) = value;
if min_val > max_val { panic!("First value cannot be greater than the second.") }
// Loop over set inserting whatever doesn't intersect.
for &Range(min, max) in &*set {
// value overlaps at the beginning.
if min_val <= min && max_val >= min && max_val < max { ret.insert(Range(max_val.next(), max)); }
// value overlaps at the end.
else if min_val > min && min_val <= max && max_val >= max { ret.insert(Range(min, min_val.prev())); }
// value is entirely contained between min and max. Split set
// into two pieces.
else if min_val > min && max_val < max {
ret.insert(Range(min, min_val.prev()));
ret.insert(Range(max_val.next(), max));
// Current piece was a superset so value cannot be anywhere else.
break;
// value is a superset to the current so don't add current.
} else if min_val <= min && max_val >= max {}
// value is disjoint with current so add current.
else { ret.insert(Range(min, max)); }
}
}
*self = Set(ret)
}
// 123 + 345 = 12345.
pub fn union(&self, value: &Self) -> Self {
let mut ret = self.clone();
// Loop over the btreeset of Range(char, char).
for &x in &value.0 { ret.insert(x) }
ret
}
// Intersection of `A` & `B` is `A - (A - B)`: 123 & 345 = 3.
pub fn intersection(&self, value: &Self) -> Self {
let diff = self.difference(value);
self.difference(&diff)
}
// 123 - 345 = 12.
pub fn difference(&self, value: &Self) -> Self {
let mut ret = self.clone();
for &x in &value.0 { ret.remove(x) }
ret
}
// `A` ^ `B` is `(A + B) - (A & B)`: 123 ^ 345 = 1245.
pub fn symmetric_difference(&self, value: &Self) -> Self {
let union = self.union(value);
let intersection = self.intersection(value);
union.difference(&intersection)
}
}<|fim▁end|> | impl Range {
fn contains(&self, c: char) -> bool {
self.0 <= c && self.1 >= c |
<|file_name|>Roupa.java<|end_file_name|><|fim▁begin|><|fim▁hole|>
/**
* Created by felansu on 03/06/2015.
*/
public interface Roupa {
void vestir();
}<|fim▁end|> | package padroesprojeto.criacional.abstractfactorymethod.outroexemplo.model; |
<|file_name|>SceneReaderBinding.cpp<|end_file_name|><|fim▁begin|>//////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2014, Image Engine Design Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above
// copyright notice, this list of conditions and the following
// disclaimer.
//
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided with
// the distribution.
//
// * Neither the name of John Haddon nor the names of
// any other contributors to this software may be used to endorse or
// promote products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR<|fim▁hole|>// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//////////////////////////////////////////////////////////////////////////
#include "boost/python.hpp"
#include "GafferBindings/DependencyNodeBinding.h"
#include "GafferScene/SceneReader.h"
#include "GafferSceneBindings/SceneReaderBinding.h"
using namespace GafferScene;
static boost::python::list supportedExtensions()
{
std::vector<std::string> e;
SceneReader::supportedExtensions( e );
boost::python::list result;
for( std::vector<std::string>::const_iterator it = e.begin(), eIt = e.end(); it != eIt; ++it )
{
result.append( *it );
}
return result;
}
void GafferSceneBindings::bindSceneReader()
{
GafferBindings::DependencyNodeClass<SceneReader>()
.def( "supportedExtensions", &supportedExtensions )
.staticmethod( "supportedExtensions" )
;
}<|fim▁end|> | |
<|file_name|>options.rs<|end_file_name|><|fim▁begin|>use scaly::containers::Ref;
use scaly::memory::Region;
use scaly::Equal;
use scaly::Page;
use scaly::{Array, String, Vector};
pub struct Options {
pub files: Ref<Vector<String>>,
pub output_name: Option<String>,
pub directory: Option<String>,
pub repl: bool,
}
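// Command-line shape accepted by parse_arguments below (the binary name is illustrative):
//   <compiler> [-o <output name>] [-d <directory>] [-r] <files>...
// Anything that is not one of the flags above is collected into `files`.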
impl Options {
pub fn parse_arguments(
_pr: &Region,
_rp: *mut Page,
_ep: *mut Page,
arguments: Ref<Vector<String>>,
) -> Ref<Options> {
let _r = Region::create(_pr);
let mut output_name: Option<String> = None;
let mut directory: Option<String> = None;
let mut files: Ref<Array<String>> = Ref::new(_r.page, Array::new());
let mut repl = false;
<|fim▁hole|> let mut args = arguments.iter();
loop {
let mut arg = args.next();
if let None = arg {
break;
}
{
let _r_1 = Region::create(&_r);
if (arg.unwrap()).equals(&String::from_string_slice(_r_1.page, "-o")) {
arg = args.next();
output_name = Some(*arg.unwrap());
continue;
}
}
{
let _r_1 = Region::create(&_r);
if (arg.unwrap()).equals(&String::from_string_slice(_r_1.page, "-d")) {
arg = args.next();
directory = Some(*arg.unwrap());
continue;
}
}
{
let _r_1 = Region::create(&_r);
if (arg.unwrap()).equals(&String::from_string_slice(_r_1.page, "-r")) {
repl = true;
continue;
}
}
files.add(*arg.unwrap());
}
Ref::new(
_rp,
Options {
files: Ref::new(_rp, Vector::from_array(_rp, files)),
output_name: output_name,
directory: directory,
repl: repl,
},
)
}
}<|fim▁end|> | |
<|file_name|>more.rs<|end_file_name|><|fim▁begin|>#![crate_name = "uu_more"]
/*
* This file is part of the uutils coreutils package.
*
* (c) Martin Kysel <[email protected]>
*
* For the full copyright and license information, please view the LICENSE file
* that was distributed with this source code.
*/
extern crate getopts;
#[macro_use]
extern crate uucore;
use getopts::Options;
use std::io::{stdout, Write, Read};
use std::fs::File;
#[cfg(unix)]
extern crate nix;
#[cfg(unix)]
use nix::sys::termios;<|fim▁hole|> Help,
Version,
}
static NAME: &'static str = "more";
static VERSION: &'static str = env!("CARGO_PKG_VERSION");
pub fn uumain(args: Vec<String>) -> i32 {
let mut opts = Options::new();
opts.optflag("h", "help", "display this help and exit");
opts.optflag("v", "version", "output version information and exit");
let matches = match opts.parse(&args[1..]) {
Ok(m) => m,
Err(e) => {
show_error!("{}", e);
panic!()
},
};
let usage = opts.usage("more TARGET.");
let mode = if matches.opt_present("version") {
Mode::Version
} else if matches.opt_present("help") {
Mode::Help
} else {
Mode::More
};
match mode {
Mode::More => more(matches),
Mode::Help => help(&usage),
Mode::Version => version(),
}
0
}
fn version() {
println!("{} {}", NAME, VERSION);
}
fn help(usage: &str) {
let msg = format!("{0} {1}\n\n\
Usage: {0} TARGET\n \
\n\
{2}", NAME, VERSION, usage);
println!("{}", msg);
}
#[cfg(unix)]
fn setup_term() -> termios::Termios {
let mut term = termios::tcgetattr(0).unwrap();
// Unset canonical mode, so we get characters immediately
term.c_lflag.remove(termios::ICANON);
// Disable local echo
term.c_lflag.remove(termios::ECHO);
termios::tcsetattr(0, termios::TCSADRAIN, &term).unwrap();
term
}
#[cfg(windows)]
fn setup_term() -> usize {
0
}
#[cfg(unix)]
fn reset_term(term: &mut termios::Termios) {
term.c_lflag.insert(termios::ICANON);
term.c_lflag.insert(termios::ECHO);
termios::tcsetattr(0, termios::TCSADRAIN, &term).unwrap();
}
#[cfg(windows)]
fn reset_term(_: &mut usize) {
}
fn more(matches: getopts::Matches) {
let files = matches.free;
let mut f = File::open(files.first().unwrap()).unwrap();
let mut buffer = [0; 1024];
let mut term = setup_term();
let mut end = false;
while let Ok(sz) = f.read(&mut buffer) {
if sz == 0 { break; }
stdout().write(&buffer[0..sz]).unwrap();
for byte in std::io::stdin().bytes() {
match byte.unwrap() {
b' ' => break,
b'q' | 27 => {
end = true;
break;
},
_ => ()
}
}
if end { break;}
}
reset_term(&mut term);
println!("");
}<|fim▁end|> |
#[derive(Clone, Eq, PartialEq)]
pub enum Mode {
More, |
<|file_name|>1strandbushinga002.py<|end_file_name|><|fim▁begin|>#1strand Bushing Tool
#Standalone program for minimized cruft
import math
print "This program is for printing the best possible circular bushings"
print "Printer config values are hardcoded for ease of use (for me)"
xpath = [] #These are initialized and default values
ypath = []
zpath = []
step = []
epath = []
xstart = 10.0
ystart = 10.0
zstart = 0.5
height = 0.0
LayerHeight = 0.3
ExtrusionWidth = 0.6
FilamentDiameter=3
FilamentArea = FilamentDiameter * FilamentDiameter * 3.14159 / 4.0
GooCoefficient = LayerHeight * ExtrusionWidth / FilamentArea
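#GooCoefficient converts nozzle travel into filament feed: each mm of XY travel
#deposits a bead with cross-section LayerHeight*ExtrusionWidth, which costs that
#area divided by the filament cross-section (FilamentArea) in feed length.
#With the defaults above: 0.3*0.6/(pi*3^2/4) is roughly 0.025 mm of filament per mm of travel.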
configlist = [LayerHeight, ExtrusionWidth, FilamentDiameter, GooCoefficient]
BrimDiameter = 0.0
OuterDiameter = 0.0<|fim▁hole|>
print "Current values are:"
print "LayerHeight =", configlist[0] #This assignment is super important
print "ExtrusionWidth=", configlist[1] #and needs to be consistent with
print "FilamentDiameter=", configlist[2] #with other code blocks related
print "GooCoefficient=", configlist[3] #to these options.
BrimDiameter = float(raw_input("Enter brim diameter in mm:"))
OuterDiameter = float(raw_input("Enter Outer Diameter in mm:"))
InnerDiameter = float(raw_input("Enter Inner Diameter in mm:"))
N = int(raw_input("Enter number of line segments in your alleged circles"))
anglestep = 2 * math.pi / N
print "Angular step is ", anglestep, " radians."
height = float(raw_input("Enter Height"))
centerx = (BrimDiameter / 2.0)+5 #Center is chosen so brim is 5mm from edge
centery = (BrimDiameter / 2.0)+5 #Center is chosen so brim is 5mm from edge
thickness = (OuterDiameter-InnerDiameter)/2
perimeters = thickness/ExtrusionWidth
print "Thickness = ", thickness
print "Needed perimeters = ", perimeters
perimeters = int(perimeters)
ActualExtrusionWidth = thickness/perimeters
print "Revised perimeters = ", perimeters
print "Revised extrusion width = ", ActualExtrusionWidth
BrimThickness = (BrimDiameter-InnerDiameter)/2
BrimPerimeters = int(BrimThickness/ActualExtrusionWidth)
print "Brim Thickness = ", BrimThickness
print "Brim Perimeters = ", BrimPerimeters
#Brim layer is first, and treated separately.
j=0
i=0
radius = BrimDiameter/2 - (j+0.5)*ActualExtrusionWidth
xpath.append(centerx+radius)
ypath.append(centery)
zpath.append(LayerHeight)
while (j<BrimPerimeters):
radius = BrimDiameter/2 - (j+0.5)*ActualExtrusionWidth
j=j+1
i=0
while (i<N):
i=i+1
#print "i=", i, "j=", j, "radius=", radius
xpath.append(centerx+radius*math.cos(i*anglestep))
ypath.append(centery+radius*math.sin(i*anglestep))
zpath.append(LayerHeight)
#
#
#
#Now the actual bushing begins printing.
#
#
#
CurrentLayer=1
CurrentHeight=LayerHeight*CurrentLayer #Technically should be earlier but wutev
#
#
#
#Now the actual bushing begins printing.
#
#
#
#k=0
##Even layers (1st bushing layer is 2) are inside to outside
##odd layers are outside to inside, to maintain strand continuity
#j=0
#i=0
#radius = InnerDiameter/2 + (j-0.5)*ActualExtrusionWidth
#xpath.append(centerx+radius)
#ypath.append(centery)
#zpath.append(CurrentHeight)
#while (j<=perimeters):
# radius = InnerDiameter/2 + (j-0.5)*ActualExtrusionWidth
# j=j+1
# i=0
# while (i<N):
# i=i+1
# #print "i=", i, "j=", j, "radius=", radius
# xpath.append(centerx+radius*math.cos(i*anglestep))
# ypath.append(centery+radius*math.sin(i*anglestep))
# zpath.append(CurrentHeight)
##odd layers are outside to inside, to maintain strand continuity
#CurrentLayer=3
#CurrentHeight=LayerHeight*CurrentLayer
#j=0
#i=0
#radius = OuterDiameter/2 - (j+0.5)*ActualExtrusionWidth
#xpath.append(centerx+radius)
#ypath.append(centery)
#zpath.append(CurrentHeight)
#while (j<perimeters):
# radius = OuterDiameter/2 - (j+0.5)*ActualExtrusionWidth
# j=j+1
# i=0
# while (i<N):
# i=i+1
# #print "i=", i, "j=", j, "radius=", radius
# xpath.append(centerx+radius*math.cos(i*anglestep))
# ypath.append(centery+radius*math.sin(i*anglestep))
# zpath.append(CurrentHeight)
while (CurrentLayer*LayerHeight < height):
CurrentLayer=CurrentLayer+1
CurrentHeight=LayerHeight*CurrentLayer
#Even layers (1st bushing layer is 2) are inside to outside
#odd layers are outside to inside, to maintain strand continuity
j=1
i=0
radius = InnerDiameter/2 + (j-0.5)*ActualExtrusionWidth
xpath.append(centerx+radius)
ypath.append(centery)
zpath.append(CurrentHeight-LayerHeight*0.75)
while (j<=perimeters):
radius = InnerDiameter/2 + (j-0.5)*ActualExtrusionWidth
j=j+1
i=0
while (i<(N-1)): #kludge
i=i+1
#print "i=", i, "j=", j, "layer=", CurrentLayer, "radius=", radius
xpath.append(centerx+radius*math.cos(i*anglestep))
ypath.append(centery+radius*math.sin(i*anglestep))
if (i==1 and j==1):
zpath.append(CurrentHeight-LayerHeight*.25)
else:
zpath.append(CurrentHeight)
#odd layers are outside to inside, to maintain strand continuity
CurrentLayer=CurrentLayer+1
CurrentHeight=LayerHeight*CurrentLayer
j=0
i=0
radius = OuterDiameter/2 - (j+0.5)*ActualExtrusionWidth
xpath.append(centerx+radius)
ypath.append(centery)
zpath.append(CurrentHeight-LayerHeight*.75)
while (j<perimeters):
radius = OuterDiameter/2 - (j+0.5)*ActualExtrusionWidth
j=j+1
i=0
while (i<(N-1)): #Same kludge as the even layers.
i=i+1
#print "i=", i, "j=", j, "layer=", CurrentLayer, "radius=", radius
xpath.append(centerx+radius*math.cos(i*anglestep))
ypath.append(centery+radius*math.sin(i*anglestep))
if (i==1 and j==1):
zpath.append(CurrentHeight-LayerHeight*.25)
else:
zpath.append(CurrentHeight)
#Extrusion is only handled here temporarily for testing
for x in xrange(len(xpath)): # This initializes the arrays so I can
step.append(0.0) #avoid that append() bullshit where I dont
epath.append(0.0) #know where I'm writing.
for x in xrange(2, len(xpath)): # This calculates how much extruder movement per step
distance=((xpath[x]-xpath[x-1])**2+(ypath[x]-ypath[x-1])**2)**0.5
step[x]=distance*GooCoefficient
epath[x]=epath[x-1]+step[x]
#for x in range(len(xpath)): #Human readable raw output
# print xpath[x-1], ypath[x-1], zpath[x-1], step[x-1], epath[x-1]
goutput = open("output1.gcode", "wb") #Now save to output1.gcode
goutput.write("G28 \nG21 \nG90 \nG92 E0 \nM82")
x=0
for x in range(len(xpath)):
goutput.write("G1 X" );
goutput.write( str(xpath[x]) );
goutput.write( " Y" );
goutput.write( str(ypath[x]) );
goutput.write( " Z" );
goutput.write( str(zpath[x]) );
goutput.write( " E" );
goutput.write( str(epath[x]) );
goutput.write( " F2000 \n" );
goutput.close()<|fim▁end|> | InnerDiameter = 0.0
N = 1
ActualExtrusionWidth = ExtrusionWidth |
<|file_name|>RedisDistributedSystemService.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.redis.internal;
<|fim▁hole|>import org.apache.geode.distributed.internal.DistributedSystemService;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.internal.InternalDataSerializer;
import org.apache.geode.internal.classloader.ClassPathLoader;
public class RedisDistributedSystemService implements DistributedSystemService {
@Override
public void init(InternalDistributedSystem internalDistributedSystem) {
}
@Override
public Class getInterface() {
return getClass();
}
@Override
public Collection<String> getSerializationAcceptlist() throws IOException {
URL sanctionedSerializables = ClassPathLoader.getLatest().getResource(getClass(),
"sanctioned-geode-apis-compatible-with-redis-serializables.txt");
return InternalDataSerializer.loadClassNames(sanctionedSerializables);
}
}<|fim▁end|> | import java.io.IOException;
import java.net.URL;
import java.util.Collection;
|
<|file_name|>highlighter.rs<|end_file_name|><|fim▁begin|>//! Iterators and data structures for transforming parsing information into styled text.
// Code based on https://github.com/defuz/sublimate/blob/master/src/core/syntax/highlighter.rs
// released under the MIT license by @defuz
use std::iter::Iterator;
use std::ops::Range;
use crate::parsing::{Scope, ScopeStack, BasicScopeStackOp, ScopeStackOp, MatchPower, ATOM_LEN_BITS};
use super::selector::ScopeSelector;
use super::theme::{Theme, ThemeItem};
use super::style::{Color, FontStyle, Style, StyleModifier};
/// Basically a wrapper around a [`Theme`] preparing it to be used for highlighting.
///
/// This is part of the API to preserve the possibility of caching matches of the
/// selectors of the theme on various scope paths or setting up some kind of
/// accelerator structure.
///
/// So for now this does very little but eventually if you keep it around between
/// highlighting runs it will preserve its cache.
///
/// [`Theme`]: struct.Theme.html
#[derive(Debug)]
pub struct Highlighter<'a> {
theme: &'a Theme,
/// Cache of the selectors in the theme that are only one scope
/// In most themes this is the majority, hence the usefullness
single_selectors: Vec<(Scope, StyleModifier)>,
multi_selectors: Vec<(ScopeSelector, StyleModifier)>,
// TODO single_cache: HashMap<Scope, StyleModifier, BuildHasherDefault<FnvHasher>>,
}
/// Keeps a stack of scopes and styles as state between highlighting different lines.
///
/// If you are highlighting an entire file you create one of these at the start and use it
/// all the way to the end.
///
/// # Caching
///
/// One reason this is exposed is that since it implements `Clone` you can actually cache these
/// (probably along with a [`ParseState`]) and only re-start highlighting from the point of a
/// change. You could also do something fancy like only highlight a bit past the end of a user's
/// screen and resume highlighting when they scroll down on large files.
///
/// Alternatively you can save space by caching only the `path` field of this struct then re-create
/// the `HighlightState` when needed by passing that stack as the `initial_stack` parameter to the
/// [`new`] method. This takes less space but a small amount of time to re-create the style stack.
///
/// **Note:** Caching is for advanced users who have tons of time to maximize performance or want to
/// do so eventually. It is not recommended that you try caching the first time you implement
/// highlighting.
///
/// [`ParseState`]: ../parsing/struct.ParseState.html
/// [`new`]: #method.new
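///
/// A rough sketch of the cache-by-`path` idea described above (`theme` and the
/// previously saved `saved_path` scope stack are assumed to come from elsewhere):
///
/// ```ignore
/// let highlighter = Highlighter::new(&theme);
/// // Rebuild the styles from a cached scope stack instead of starting from empty.
/// let state = HighlightState::new(&highlighter, saved_path);
/// ```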
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct HighlightState {
styles: Vec<Style>,
single_caches: Vec<ScoredStyle>,
pub path: ScopeStack,
}
/// Highlights a line of parsed code given a [`HighlightState`] and line of changes from the parser.<|fim▁hole|>///
/// [`HighlightState`]: struct.HighlightState.html
/// [`Style`]: struct.Style.html
#[derive(Debug)]
pub struct RangedHighlightIterator<'a, 'b> {
index: usize,
pos: usize,
changes: &'a [(usize, ScopeStackOp)],
text: &'b str,
highlighter: &'a Highlighter<'a>,
state: &'a mut HighlightState,
}
/// Highlights a line of parsed code given a [`HighlightState`] and line of changes from the parser.
///
/// This is a backwards compatible shim on top of the [`RangedHighlightIterator`] which only
/// yields the [`Style`] and the text of the token, not the range.
///
/// It splits a line of text into different pieces each with a [`Style`].
///
/// [`HighlightState`]: struct.HighlightState.html
/// [`RangedHighlightIterator`]: struct.RangedHighlightIterator.html
/// [`Style`]: struct.Style.html
#[derive(Debug)]
pub struct HighlightIterator<'a, 'b> {
ranged_iterator: RangedHighlightIterator<'a, 'b>
}
impl HighlightState {
/// Note that the [`Highlighter`] is not stored; it is used to construct the initial stack
/// of styles.
///
/// Most of the time you'll want to pass an empty stack as `initial_stack`, but see the docs for
/// [`HighlightState`] for a discussion of advanced caching use cases.
///
/// [`Highlighter`]: struct.Highlighter.html
/// [`HighlightState`]: struct.HighlightState.html
pub fn new(highlighter: &Highlighter<'_>, initial_stack: ScopeStack) -> HighlightState {
let mut styles = vec![highlighter.get_default()];
let mut single_caches = vec![ScoredStyle::from_style(styles[0])];
for i in 0..initial_stack.len() {
let prefix = initial_stack.bottom_n(i + 1);
let new_cache = highlighter.update_single_cache_for_push(&single_caches[i], prefix);
styles.push(highlighter.finalize_style_with_multis(&new_cache, prefix));
single_caches.push(new_cache);
}
HighlightState {
styles,
single_caches,
path: initial_stack,
}
}
}
impl<'a, 'b> RangedHighlightIterator<'a, 'b> {
pub fn new(state: &'a mut HighlightState,
changes: &'a [(usize, ScopeStackOp)],
text: &'b str,
highlighter: &'a Highlighter<'_>)
-> RangedHighlightIterator<'a, 'b> {
RangedHighlightIterator {
index: 0,
pos: 0,
changes,
text,
highlighter,
state,
}
}
}
impl<'a, 'b> Iterator for RangedHighlightIterator<'a, 'b> {
type Item = (Style, &'b str, Range<usize>);
/// Yields the next token of text and the associated `Style` to render that text with.
    /// The concatenation of the strings in each token will make up the original string.
fn next(&mut self) -> Option<(Style, &'b str, Range<usize>)> {
if self.pos == self.text.len() && self.index >= self.changes.len() {
return None;
}
let (end, command) = if self.index < self.changes.len() {
self.changes[self.index].clone()
} else {
(self.text.len(), ScopeStackOp::Noop)
};
// println!("{} - {:?} {}:{}", self.index, self.pos, self.state.path.len(), self.state.styles.len());
let style = *self.state.styles.last().unwrap_or(&Style::default());
let text = &self.text[self.pos..end];
let range = Range { start: self.pos, end };
{
// closures mess with the borrow checker's ability to see different struct fields
let m_path = &mut self.state.path;
let m_styles = &mut self.state.styles;
let m_caches = &mut self.state.single_caches;
let highlighter = &self.highlighter;
m_path.apply_with_hook(&command, |op, cur_stack| {
// println!("{:?} - {:?}", op, cur_stack);
match op {
BasicScopeStackOp::Push(_) => {
// we can push multiple times so this might have changed
let new_cache = {
if let Some(prev_cache) = m_caches.last() {
highlighter.update_single_cache_for_push(prev_cache, cur_stack)
} else {
highlighter.update_single_cache_for_push(&ScoredStyle::from_style(highlighter.get_default()), cur_stack)
}
};
m_styles.push(highlighter.finalize_style_with_multis(&new_cache, cur_stack));
m_caches.push(new_cache);
}
BasicScopeStackOp::Pop => {
m_styles.pop();
m_caches.pop();
}
}
});
}
self.pos = end;
self.index += 1;
if text.is_empty() {
self.next()
} else {
Some((style, text, range))
}
}
}
impl<'a, 'b> HighlightIterator<'a, 'b> {
pub fn new(state: &'a mut HighlightState,
changes: &'a [(usize, ScopeStackOp)],
text: &'b str,
highlighter: &'a Highlighter<'_>)
-> HighlightIterator<'a, 'b> {
HighlightIterator {
ranged_iterator: RangedHighlightIterator {
index: 0,
pos: 0,
changes,
text,
highlighter,
state
}
}
}
}
impl<'a, 'b> Iterator for HighlightIterator<'a, 'b> {
type Item = (Style, &'b str);
/// Yields the next token of text and the associated `Style` to render that text with.
    /// The concatenation of the strings in each token will make up the original string.
fn next(&mut self) -> Option<(Style, &'b str)> {
self.ranged_iterator.next().map(|e| (e.0, e.1))
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ScoredStyle {
pub foreground: (MatchPower, Color),
pub background: (MatchPower, Color),
pub font_style: (MatchPower, FontStyle),
}
#[inline]
fn update_scored<T: Clone>(scored: &mut (MatchPower, T), update: &Option<T>, score: MatchPower) {
if score > scored.0 {
if let Some(u) = update {
scored.0 = score;
scored.1 = u.clone();
}
}
}
impl ScoredStyle {
fn apply(&mut self, other: &StyleModifier, score: MatchPower) {
update_scored(&mut self.foreground, &other.foreground, score);
update_scored(&mut self.background, &other.background, score);
update_scored(&mut self.font_style, &other.font_style, score);
}
fn to_style(&self) -> Style {
Style {
foreground: self.foreground.1,
background: self.background.1,
font_style: self.font_style.1,
}
}
fn from_style(style: Style) -> ScoredStyle {
ScoredStyle {
foreground: (MatchPower(-1.0), style.foreground),
background: (MatchPower(-1.0), style.background),
font_style: (MatchPower(-1.0), style.font_style),
}
}
}
impl<'a> Highlighter<'a> {
pub fn new(theme: &'a Theme) -> Highlighter<'a> {
let mut single_selectors = Vec::new();
let mut multi_selectors = Vec::new();
for item in &theme.scopes {
for sel in &item.scope.selectors {
if let Some(scope) = sel.extract_single_scope() {
single_selectors.push((scope, item.style));
} else {
multi_selectors.push((sel.clone(), item.style));
}
}
}
// So that deeper matching selectors get checked first
single_selectors.sort_by(|a, b| b.0.len().cmp(&a.0.len()));
Highlighter {
theme,
single_selectors,
multi_selectors,
}
}
/// The default style in the absence of any matched rules.
/// Basically what plain text gets highlighted as.
pub fn get_default(&self) -> Style {
Style {
foreground: self.theme.settings.foreground.unwrap_or(Color::BLACK),
background: self.theme.settings.background.unwrap_or(Color::WHITE),
font_style: FontStyle::empty(),
}
}
fn update_single_cache_for_push(&self, cur: &ScoredStyle, path: &[Scope]) -> ScoredStyle {
let mut new_style = cur.clone();
let last_scope = path[path.len() - 1];
for &(scope, ref modif) in self.single_selectors.iter().filter(|a| a.0.is_prefix_of(last_scope)) {
let single_score = f64::from(scope.len()) *
f64::from(ATOM_LEN_BITS * ((path.len() - 1) as u16)).exp2();
new_style.apply(modif, MatchPower(single_score));
}
new_style
}
fn finalize_style_with_multis(&self, cur: &ScoredStyle, path: &[Scope]) -> Style {
let mut new_style = cur.clone();
let mult_iter = self.multi_selectors
.iter()
.filter_map(|&(ref sel, ref style)| sel.does_match(path).map(|score| (score, style)));
for (score, modif) in mult_iter {
new_style.apply(modif, score);
}
new_style.to_style()
}
/// Returns the fully resolved style for the given stack.
///
/// This operation is convenient but expensive. For reasonable performance,
/// the caller should be caching results.
pub fn style_for_stack(&self, stack: &[Scope]) -> Style {
let mut single_cache = ScoredStyle::from_style(self.get_default());
for i in 0..stack.len() {
single_cache = self.update_single_cache_for_push(&single_cache, &stack[0..i+1]);
}
self.finalize_style_with_multis(&single_cache, stack)
}
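    // Illustrative sketch (not part of the original source): because this method
    // recomputes the style from scratch, callers doing repeated lookups can keep
    // their own memo table. `cache` here is a hypothetical HashMap<Vec<Scope>, Style>.
    //
    //     let style = *cache.entry(stack.to_vec())
    //         .or_insert_with(|| highlighter.style_for_stack(stack));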
/// Returns a [`StyleModifier`] which, if applied to the default style,
/// would generate the fully resolved style for this stack.
///
/// This is made available to applications that are using syntect styles
/// in combination with style information from other sources.
///
/// This operation is convenient but expensive. For reasonable performance,
/// the caller should be caching results. It's likely slower than [`style_for_stack`].
///
/// [`StyleModifier`]: struct.StyleModifier.html
/// [`style_for_stack`]: #method.style_for_stack
pub fn style_mod_for_stack(&self, path: &[Scope]) -> StyleModifier {
let mut matching_items : Vec<(MatchPower, &ThemeItem)> = self.theme
.scopes
.iter()
.filter_map(|item| {
item.scope
.does_match(path)
.map(|score| (score, item))
})
.collect();
matching_items.sort_by_key(|&(score, _)| score);
let sorted = matching_items.iter()
.map(|(_, item)| item);
let mut modifier = StyleModifier {
background: None,
foreground: None,
font_style: None,
};
for item in sorted {
modifier = modifier.apply(item.style);
}
modifier
}
}
#[cfg(all(feature = "default-syntaxes", feature = "default-themes"))]
#[cfg(test)]
mod tests {
use super::*;
use crate::highlighting::{ThemeSet, Style, Color, FontStyle};
use crate::parsing::{ SyntaxSet, ScopeStack, ParseState};
#[test]
fn can_parse() {
let ps = SyntaxSet::load_from_folder("testdata/Packages").unwrap();
let mut state = {
let syntax = ps.find_syntax_by_name("Ruby on Rails").unwrap();
ParseState::new(syntax)
};
let ts = ThemeSet::load_defaults();
let highlighter = Highlighter::new(&ts.themes["base16-ocean.dark"]);
let mut highlight_state = HighlightState::new(&highlighter, ScopeStack::new());
let line = "module Bob::Wow::Troll::Five; 5; end";
let ops = state.parse_line(line, &ps);
let iter = HighlightIterator::new(&mut highlight_state, &ops[..], line, &highlighter);
let regions: Vec<(Style, &str)> = iter.collect();
// println!("{:#?}", regions);
assert_eq!(regions[11],
(Style {
foreground: Color {
r: 208,
g: 135,
b: 112,
a: 0xFF,
},
background: Color {
r: 43,
g: 48,
b: 59,
a: 0xFF,
},
font_style: FontStyle::empty(),
},
"5"));
}
#[test]
fn can_parse_with_highlight_state_from_cache() {
let ps = SyntaxSet::load_from_folder("testdata/Packages").unwrap();
let mut state = {
let syntax = ps.find_syntax_by_scope(
Scope::new("source.python").unwrap()).unwrap();
ParseState::new(syntax)
};
let ts = ThemeSet::load_defaults();
let highlighter = Highlighter::new(&ts.themes["base16-ocean.dark"]);
// We start by parsing a python multiline-comment: """
let mut highlight_state = HighlightState::new(&highlighter, ScopeStack::new());
let line = r#"""""#;
let ops = state.parse_line(line, &ps);
let iter = HighlightIterator::new(&mut highlight_state, &ops[..], line, &highlighter);
assert_eq!(1, iter.count());
let path = highlight_state.path;
// We then parse the next line with a highlight state built from the previous state
let mut highlight_state = HighlightState::new(&highlighter, path);
let line = "multiline comment";
let ops = state.parse_line(line, &ps);
let iter = HighlightIterator::new(&mut highlight_state, &ops[..], line, &highlighter);
let regions: Vec<(Style, &str)> = iter.collect();
// We expect the line to be styled as a comment.
assert_eq!(regions[0],
(Style {
foreground: Color {
// (Comment: #65737E)
r: 101,
g: 115,
b: 126,
a: 0xFF,
},
background: Color {
r: 43,
g: 48,
b: 59,
a: 0xFF,
},
font_style: FontStyle::empty(),
},
"multiline comment"));
}
// see issues #133 and #203, this test tests the fixes for those issues
#[test]
fn tricky_cases() {
use crate::parsing::ScopeStack;
use std::str::FromStr;
use crate::highlighting::{ThemeSettings, ScopeSelectors};
let c1 = Color { r: 1, g: 1, b: 1, a: 255 };
let c2 = Color { r: 2, g: 2, b: 2, a: 255 };
let def_bg = Color { r: 255, g: 255, b: 255, a: 255 };
let test_color_scheme = Theme {
name: None,
author: None,
settings: ThemeSettings::default(),
scopes: vec![
ThemeItem {
scope: ScopeSelectors::from_str("comment.line").unwrap(),
style: StyleModifier {
foreground: Some(c1),
background: None,
font_style: None,
},
},
ThemeItem {
scope: ScopeSelectors::from_str("comment").unwrap(),
style: StyleModifier {
foreground: Some(c2),
background: None,
font_style: Some(FontStyle::ITALIC),
},
},
ThemeItem {
scope: ScopeSelectors::from_str("comment.line.rs - keyword").unwrap(),
style: StyleModifier {
foreground: None,
background: Some(c1),
font_style: None,
},
},
ThemeItem {
scope: ScopeSelectors::from_str("no.match").unwrap(),
style: StyleModifier {
foreground: None,
background: Some(c2),
font_style: Some(FontStyle::UNDERLINE),
},
},
],
};
let highlighter = Highlighter::new(&test_color_scheme);
use crate::parsing::ScopeStackOp::*;
let ops = [
// three rules apply at once here, two singles and one multi
(0, Push(Scope::new("comment.line.rs").unwrap())),
// multi un-applies
(1, Push(Scope::new("keyword.control.rs").unwrap())),
(2, Pop(1)),
];
let mut highlight_state = HighlightState::new(&highlighter, ScopeStack::new());
let iter = HighlightIterator::new(&mut highlight_state, &ops[..], "abcdef", &highlighter);
let regions: Vec<Style> = iter.map(|(s, _)| s).collect();
// println!("{:#?}", regions);
assert_eq!(regions, vec![
Style { foreground: c1, background: c1, font_style: FontStyle::ITALIC },
Style { foreground: c1, background: def_bg, font_style: FontStyle::ITALIC },
Style { foreground: c1, background: c1, font_style: FontStyle::ITALIC },
]);
let full_stack = ScopeStack::from_str("comment.line.rs keyword.control.rs").unwrap();
let full_style = highlighter.style_for_stack(full_stack.as_slice());
assert_eq!(full_style, Style { foreground: c1, background: def_bg, font_style: FontStyle::ITALIC });
let full_mod = highlighter.style_mod_for_stack(full_stack.as_slice());
assert_eq!(full_mod, StyleModifier { foreground: Some(c1), background: None, font_style: Some(FontStyle::ITALIC) });
}
#[test]
fn test_ranges() {
let ps = SyntaxSet::load_from_folder("testdata/Packages").unwrap();
let mut state = {
let syntax = ps.find_syntax_by_name("Ruby on Rails").unwrap();
ParseState::new(syntax)
};
let ts = ThemeSet::load_defaults();
let highlighter = Highlighter::new(&ts.themes["base16-ocean.dark"]);
let mut highlight_state = HighlightState::new(&highlighter, ScopeStack::new());
let line = "module Bob::Wow::Troll::Five; 5; end";
let ops = state.parse_line(line, &ps);
let iter = RangedHighlightIterator::new(&mut highlight_state, &ops[..], line, &highlighter);
let regions: Vec<(Style, &str, Range<usize>)> = iter.collect();
// println!("{:#?}", regions);
assert_eq!(regions[11],
(Style {
foreground: Color {
r: 208,
g: 135,
b: 112,
a: 0xFF,
},
background: Color {
r: 43,
g: 48,
b: 59,
a: 0xFF,
},
font_style: FontStyle::empty(),
},
"5", Range { start: 30, end: 31 }));
}
}<|fim▁end|> | ///
/// Yields the [`Style`], the text and well as the `Range` of the text in the source string.
///
/// It splits a line of text into different pieces each with a [`Style`] |
<|file_name|>main.js<|end_file_name|><|fim▁begin|><|fim▁hole|>app.on('ready', function() {
mainWindow = new BrowserWindow({width: 400, height: 360});
mainWindow.loadUrl('file://' + __dirname + '/manager.html');
});<|fim▁end|> | var app = require('app');
var BrowserWindow = require('browser-window');
var mainWindow = null; |
<|file_name|>csssupportsrule.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, ParserInput};
use dom::bindings::codegen::Bindings::CSSSupportsRuleBinding;
use dom::bindings::codegen::Bindings::WindowBinding::WindowBinding::WindowMethods;
use dom::bindings::js::Root;
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::str::DOMString;
use dom::cssconditionrule::CSSConditionRule;
use dom::cssrule::SpecificCSSRule;
use dom::cssstylesheet::CSSStyleSheet;
use dom::window::Window;
use dom_struct::dom_struct;
use servo_arc::Arc;
use style::parser::ParserContext;
use style::shared_lock::{Locked, ToCssWithGuard};
use style::stylesheets::{CssRuleType, SupportsRule};
use style::stylesheets::supports_rule::SupportsCondition;
use style_traits::{PARSING_MODE_DEFAULT, ToCss};
#[dom_struct]
pub struct CSSSupportsRule {
cssconditionrule: CSSConditionRule,
#[ignore_heap_size_of = "Arc"]
supportsrule: Arc<Locked<SupportsRule>>,
}
impl CSSSupportsRule {
fn new_inherited(parent_stylesheet: &CSSStyleSheet, supportsrule: Arc<Locked<SupportsRule>>)
-> CSSSupportsRule {
let guard = parent_stylesheet.shared_lock().read();
let list = supportsrule.read_with(&guard).rules.clone();
CSSSupportsRule {
cssconditionrule: CSSConditionRule::new_inherited(parent_stylesheet, list),
supportsrule: supportsrule,
}
}
#[allow(unrooted_must_root)]
pub fn new(window: &Window, parent_stylesheet: &CSSStyleSheet,
supportsrule: Arc<Locked<SupportsRule>>) -> Root<CSSSupportsRule> {
reflect_dom_object(box CSSSupportsRule::new_inherited(parent_stylesheet, supportsrule),
window,
CSSSupportsRuleBinding::Wrap)
}
/// https://drafts.csswg.org/css-conditional-3/#the-csssupportsrule-interface
pub fn get_condition_text(&self) -> DOMString {
let guard = self.cssconditionrule.shared_lock().read();
let rule = self.supportsrule.read_with(&guard);
rule.condition.to_css_string().into()
}
/// https://drafts.csswg.org/css-conditional-3/#the-csssupportsrule-interface<|fim▁hole|> let mut input = ParserInput::new(&text);
let mut input = Parser::new(&mut input);
let cond = SupportsCondition::parse(&mut input);
if let Ok(cond) = cond {
let global = self.global();
let win = global.as_window();
let url = win.Document().url();
let quirks_mode = win.Document().quirks_mode();
let context = ParserContext::new_for_cssom(&url, Some(CssRuleType::Supports),
PARSING_MODE_DEFAULT,
quirks_mode);
let enabled = cond.eval(&context);
let mut guard = self.cssconditionrule.shared_lock().write();
let rule = self.supportsrule.write_with(&mut guard);
rule.condition = cond;
rule.enabled = enabled;
}
}
}
impl SpecificCSSRule for CSSSupportsRule {
fn ty(&self) -> u16 {
use dom::bindings::codegen::Bindings::CSSRuleBinding::CSSRuleConstants;
CSSRuleConstants::SUPPORTS_RULE
}
fn get_css(&self) -> DOMString {
let guard = self.cssconditionrule.shared_lock().read();
self.supportsrule.read_with(&guard).to_css_string(&guard).into()
}
}<|fim▁end|> | pub fn set_condition_text(&self, text: DOMString) { |
<|file_name|>org_app.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the views for GSoC Organization Application.
"""
__authors__ = [
'"Madhusudan.C.S" <[email protected]>',
]
from django.conf.urls.defaults import url
from soc.logic.models.org_app_survey import logic as org_app_logic
from soc.views import forms
from soc.views.models.org_app_survey import OrgAppSurveyForm
from soc.modules.gsoc.views.base import RequestHandler
from soc.modules.gsoc.views.helper import url_patterns
class OrgAppForm(OrgAppSurveyForm, forms.ModelForm):
"""Form for Organization Applications inherited from Surveys.
"""
<|fim▁hole|> """Act as a bridge between the new Forms APIs and the existing Survey
Form classes.
"""
kwargs.update({
'survey': kwargs.get('instance', None),
'survey_logic': org_app_logic,
})
super(OrgAppForm, self).__init__(*args, **kwargs)
class OrgApp(RequestHandler):
"""View methods for Organization Application Applications.
"""
def templatePath(self):
return 'v2/modules/gsoc/org_app/apply.html'
def djangoURLPatterns(self):
"""Returns the list of tuples for containing URL to view method mapping.
"""
return [
url(r'^gsoc/org_app/apply/%s$' % url_patterns.SURVEY, self,
name='gsoc_org_app_apply')
]
def checkAccess(self):
"""Access checks for GSoC Organization Application.
"""
pass
def context(self):
"""Handler to for GSoC Organization Application HTTP get request.
"""
org_app_keyfields = {
'prefix': self.kwargs.get('prefix'),
'scope_path': '%s/%s' % (self.kwargs.get('sponsor'),
self.kwargs.get('program')),
'link_id': self.kwargs.get('survey'),
}
org_app_entity = org_app_logic.getFromKeyFieldsOr404(org_app_keyfields)
if self.data.request.method == 'POST':
org_app_form = OrgAppForm(self.data.POST, instance=org_app_entity)
else:
org_app_form = OrgAppForm(instance=org_app_entity)
return {
'page_name': 'Organization Application',
'org_app_form': org_app_form,
}<|fim▁end|> | #TODO: Rewrite this class while refactoring surveys
def __init__(self, *args, **kwargs): |
<|file_name|>RadonBear.py<|end_file_name|><|fim▁begin|>import radon.complexity
import radon.visitors
from coalib.bears.LocalBear import LocalBear
from coalib.results.Result import Result
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.results.SourceRange import SourceRange
from coalib.settings.Setting import typed_list
class RadonBear(LocalBear):
def run(self, filename, file,<|fim▁hole|> Uses radon to compute complexity of a given file.
:param radon_ranks_info: The ranks (given by radon) to
treat as severity INFO.
:param radon_ranks_normal: The ranks (given by radon) to
treat as severity NORMAL.
:param radon_ranks_major: The ranks (given by radon) to
treat as severity MAJOR.
"""
severity_map = {
RESULT_SEVERITY.INFO: radon_ranks_info,
RESULT_SEVERITY.NORMAL: radon_ranks_normal,
RESULT_SEVERITY.MAJOR: radon_ranks_major
}
for visitor in radon.complexity.cc_visit("".join(file)):
rank = radon.complexity.cc_rank(visitor.complexity)
severity = None
for result_severity, rank_list in severity_map.items():
if rank in rank_list:
severity = result_severity
if severity is None:
continue
visitor_range = SourceRange.from_values(
filename, visitor.lineno, visitor.col_offset, visitor.endline)
message = "{} has a cyclomatic complexity of {}".format(
visitor.name, rank)
yield Result(self, message, severity=severity,
affected_code=(visitor_range,))<|fim▁end|> | radon_ranks_info: typed_list(str)=(),
radon_ranks_normal: typed_list(str)=('C', 'D'),
radon_ranks_major: typed_list(str)=('E', 'F')):
""" |
<|file_name|>MVCJComboBox.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2010 Daniel Murphy
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/**
* Created at Aug 20, 2010, 2:58:08 AM
*/
package com.dmurph.mvc.gui.combo;
import java.awt.event.KeyEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedList;
import javax.swing.JComboBox;
import com.dmurph.mvc.model.MVCArrayList;
/**
* This class is for having a combo box that will always reflect the data of
* an MVCArrayList. There is a lot of flexibility provided with filtering
* and sorting the elements.
*
* @author Daniel Murphy
*/
public class MVCJComboBox<E> extends JComboBox {
private static final long serialVersionUID = 1L;
private MVCJComboBoxModel<E> model;
private MVCArrayList<E> data;
private IMVCJComboBoxFilter<E> filter;
private final Object lock = new Object();
private MVCJComboBoxStyle style;
private Comparator<E> comparator = null;
private final PropertyChangeListener plistener = new PropertyChangeListener() {
@SuppressWarnings("unchecked")
public void propertyChange(PropertyChangeEvent argEvt) {
String prop = argEvt.getPropertyName();
if (prop.equals(MVCArrayList.ADDED)) {
add((E)argEvt.getNewValue());
}
else if(prop.equals(MVCArrayList.ADDED_ALL)){
addAll((Collection<E>) argEvt.getNewValue());
}
else if (prop.equals(MVCArrayList.CHANGED)) {
change((E)argEvt.getOldValue(),(E) argEvt.getNewValue());
}
else if (prop.equals(MVCArrayList.REMOVED)) {
remove((E)argEvt.getOldValue());
}
else if(prop.equals(MVCArrayList.REMOVED_ALL)){
synchronized(lock){
model.removeAllElements();
}
}
}
};
/**
* Constructs with no data, no filter, no
* {@link Comparator}, and style set to
	 * {@link MVCJComboBoxStyle#SORT}.
*/
public MVCJComboBox() {
this(null, new IMVCJComboBoxFilter<E>() {
public boolean showItem(E argComponent) {
return true;
};
}, MVCJComboBoxStyle.SORT, null);
}
/**
* Constructs a combo box with the given style. If you want
* the {@link MVCJComboBoxStyle#SORT} style, then you'll want to specify
* a comparator as well.
* @param argData
* @param argStyle
*/
public MVCJComboBox(MVCJComboBoxStyle argStyle) {
this(null, new IMVCJComboBoxFilter<E>() {
public boolean showItem(E argComponent) {
return true;
};
}, argStyle, null);
}
/**
	 * Constructs a dynamic combo box with the given data and
* default style of {@link MVCJComboBoxStyle#SORT}.
* @param argData
*/
public MVCJComboBox(MVCArrayList<E> argData, Comparator<E> argComparator) {
this(argData, new IMVCJComboBoxFilter<E>() {
public boolean showItem(E argComponent) {
return true;
};
}, MVCJComboBoxStyle.SORT, argComparator);
}
/**
* Constructs a combo box with the given data and style. If you want
* the {@link MVCJComboBoxStyle#SORT} style, then you'll want to specify
* a comparator as well.
* @param argData
* @param argStyle
*/
public MVCJComboBox(MVCArrayList<E> argData, MVCJComboBoxStyle argStyle) {
this(argData, new IMVCJComboBoxFilter<E>() {
public boolean showItem(E argComponent) {
return true;
};
}, argStyle, null);
}
/**
* Constructs a dynamic combo box with the given data, filter, and comparator.
* The style will be {@link MVCJComboBoxStyle#SORT} by default.
* @param argData
* @param argFilter
* @param argComparator
*/
public MVCJComboBox(MVCArrayList<E> argData, IMVCJComboBoxFilter<E> argFilter, Comparator<E> argComparator) {
		this(argData, argFilter, MVCJComboBoxStyle.SORT, argComparator);
}
/**
*
* @param argData
* @param argFilter
* @param argStyle
* @param argComparator
*/
public MVCJComboBox(MVCArrayList<E> argData, IMVCJComboBoxFilter<E> argFilter, MVCJComboBoxStyle argStyle, Comparator<E> argComparator) {
data = argData;
style = argStyle;
filter = argFilter;
comparator = argComparator;
model = new MVCJComboBoxModel<E>();
super.setModel(model);
if(data != null){
argData.addPropertyChangeListener(plistener);
// add the data
for (E o : data) {
if(filter.showItem(o)){
model.addElement(o);
}
}
// start with allowing the comparator to be null, in case they intend to set it later. and call refreshData()
if(style == MVCJComboBoxStyle.SORT && comparator != null){
model.sort(comparator);
}
}
}
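	// Illustrative usage sketch (not part of the original source). The String
	// element type and the comparator are assumptions made only for this example:
	//
	//   MVCArrayList<String> data = new MVCArrayList<String>();
	//   MVCJComboBox<String> combo = new MVCJComboBox<String>(data, String.CASE_INSENSITIVE_ORDER);
	//   data.add("zebra"); // picked up automatically through the property change listener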
/**
* Gets the rendering style of this combo box. Default style is
* {@link MVCJComboBoxStyle#SORT}.
* @return
*/
public MVCJComboBoxStyle getStyle(){
return style;
}
/**
* Gets the data list. This is used to access
* data with {@link #refreshData()}, so override
* if you want to customize what the data is (sending
	 * null to the constructor for the data
* is a good idea in that case)
* @return
*/
public ArrayList<E> getData(){
return data;
}
/**
* Sets the data of this combo box. This causes the box
	 * to refresh its model
* @param argData can be null
*/
public void setData(MVCArrayList<E> argData){
synchronized (lock) {
if(data != null){
data.removePropertyChangeListener(plistener);
}
data = argData;
if(data != null){
data.addPropertyChangeListener(plistener);
}
}
refreshData();
}
/**
* Sets the comparator used for the {@link MVCJComboBoxStyle#SORT} style.
* @param argComparator
*/
public void setComparator(Comparator<E> argComparator) {
this.comparator = argComparator;
}
/**
* Gets the comparator that's used for sorting.
* @return
*/
public Comparator<E> getComparator() {
return comparator;
}
/**
* @return the filter
*/
public IMVCJComboBoxFilter<E> getFilter() {
return filter;
}
/**
* @param argFilter the filter to set
*/
public void setFilter(IMVCJComboBoxFilter<E> argFilter) {
filter = argFilter;
}
/**
* @see javax.swing.JComboBox#processKeyEvent(java.awt.event.KeyEvent)
*/
@Override
public void processKeyEvent(KeyEvent argE) {
if(argE.getKeyChar() == KeyEvent.VK_BACK_SPACE || argE.getKeyChar() == KeyEvent.VK_DELETE){
setSelectedItem(null);
super.hidePopup();
}else{
super.processKeyEvent(argE);
}
}
/**
* Sets the style of this combo box
* @param argStyle
*/
public void setStyle(MVCJComboBoxStyle argStyle){
style = argStyle;
if(style == MVCJComboBoxStyle.SORT){
if(comparator == null){
throw new NullPointerException("DynamicJComboBox style is set to Alpha Sort, but the comparator is null.");
}
model.sort(comparator);
}
}
public void refreshData(){
synchronized (lock) {
// remove all elements
model.removeAllElements();
if(getData() == null){
return;
}
for(E e: getData()){
if(filter.showItem(e)){
model.addElement(e);
}
}
if(style == MVCJComboBoxStyle.SORT){
if(comparator == null){
throw new NullPointerException("DynamicJComboBox style is set to Alpha Sort, but the comparator is null.");
}
model.sort(comparator);
}
}
}
private void add(E argNewObj) {
boolean b = filter.showItem(argNewObj);
if (b == false) {
return;
}
synchronized (lock) {
switch(style){
case SORT:{
if(comparator == null){
throw new NullPointerException("DynamicJComboBox style is set to Alpha Sort, but the comparator is null.");
}
boolean inserted = false;
for(int i=0; i<model.getSize(); i++){
E e = model.getElementAt(i);
if(comparator.compare(e, argNewObj) > 0){
model.insertElementAt(argNewObj, i);
inserted = true;
break;
}
}
if(!inserted){
model.addElement(argNewObj);
}
break;
}
case ADD_NEW_TO_BEGINNING:{
model.insertElementAt(argNewObj, 0);
break;
}
case ADD_NEW_TO_END:{
model.addElement(argNewObj);
}
}
}
}
private void addAll(Collection<E> argNewObjects) {
LinkedList<E> filtered = new LinkedList<E>();
Iterator<E> it = argNewObjects.iterator();
while(it.hasNext()){
E e = it.next();
if(filter.showItem(e)){
filtered.add(e);
}
}
if(filtered.size() == 0){
return;
}
synchronized (lock) {
switch(style){
case SORT:{
if(comparator == null){
throw new NullPointerException("DynamicJComboBox style is set to Alpha Sort, but the comparator is null.");
}
model.addElements(filtered);
model.sort(comparator);
break;
}
case ADD_NEW_TO_BEGINNING:{
model.addElements(0, filtered);
break;
}
case ADD_NEW_TO_END:{
model.addElements(filtered);
}
}
}
}
private void change(E argOld, E argNew) {
boolean so = filter.showItem(argOld);
boolean sn = filter.showItem(argNew);
if(!sn){
remove(argOld);
return;
}
if(!so){
if(sn){
add(argNew);
return;
}else{
return;
}
}
synchronized (lock) {
int size = model.getSize();
for (int i = 0; i < size; i++) {
E e = model.getElementAt(i);
if (e == argOld) {
model.setElementAt(argNew, i);
return;
}
}
if(style == MVCJComboBoxStyle.SORT){
if(comparator == null){
throw new NullPointerException("DynamicJComboBox style is set to Alpha Sort, but the comparator is null.");
}
model.sort(comparator);
}
}
}
private void remove(E argVal) {
boolean is = filter.showItem(argVal);
if (!is) {<|fim▁hole|> E e = model.getElementAt(i);
if(e == argVal){
model.removeElementAt(i);
return;
}
}
}
}
}<|fim▁end|> | return;
}
synchronized (lock) {
for(int i=0; i<model.getSize();i ++){ |
<|file_name|>logger.py<|end_file_name|><|fim▁begin|># Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
<|fim▁hole|>import logging
from shotgun.settings import LOG_FILE
def configure_logger():
"""Configures shotgun logger
"""
logger = logging.getLogger('shotgun')
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter(
'%(asctime)s %(levelname)s %(process)d (%(module)s) %(message)s',
"%Y-%m-%d %H:%M:%S")
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.DEBUG)
stream_handler.setFormatter(formatter)
file_handler = logging.FileHandler(LOG_FILE)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
logger.addHandler(file_handler)<|fim▁end|> | |
<|file_name|>test.py<|end_file_name|><|fim▁begin|><|fim▁hole|>../../../../../../share/pyshared/mx/DateTime/mxDateTime/test.py<|fim▁end|> | |
<|file_name|>parsego.go<|end_file_name|><|fim▁begin|>// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"go/ast"
"go/parser"
"go/token"
"os"
"path"
"runtime"
"strings"
)
func isGoFile(dir os.FileInfo) bool {
return !dir.IsDir() &&
!strings.HasPrefix(dir.Name(), ".") && // ignore .files
path.Ext(dir.Name()) == ".go"
}
func isPkgFile(dir os.FileInfo) bool {
return isGoFile(dir) &&
!strings.HasSuffix(dir.Name(), "_test.go") // ignore test files
}
func pkgName(filename string) string {
file, err := parser.ParseFile(token.NewFileSet(), filename, nil, parser.PackageClauseOnly)
if err != nil || file == nil {
return ""
}
return file.Name.Name
}
func parseDir(dirpath string) map[string]*ast.Package {
// the package name is the directory name within its parent.<|fim▁hole|> // (use dirname instead of path because dirname is clean; it
// has no trailing '/')
_, pkgname := path.Split(dirpath)
// filter function to select the desired .go files
filter := func(d os.FileInfo) bool {
if isPkgFile(d) {
// Some directories contain main packages: Only accept
// files that belong to the expected package so that
// parser.ParsePackage doesn't return "multiple packages
// found" errors.
// Additionally, accept the special package name
// fakePkgName if we are looking at cmd documentation.
name := pkgName(dirpath + "/" + d.Name())
return name == pkgname
}
return false
}
// get package AST
pkgs, err := parser.ParseDir(token.NewFileSet(), dirpath, filter, parser.ParseComments)
if err != nil {
println("parse", dirpath, err.Error())
panic("go ParseDir fail: " + err.Error())
}
return pkgs
}
func stressParseGo() {
pkgroot := runtime.GOROOT() + "/src/"
for {
m := make(map[string]map[string]*ast.Package)
for _, pkg := range packages {
m[pkg] = parseDir(pkgroot + pkg)
Println("parsed go package", pkg)
}
}
}
// find . -type d -not -path "./exp" -not -path "./exp/*" -printf "\t\"%p\",\n" | sort | sed "s/\.\///" | grep -v testdata
var packages = []string{
"archive",
"archive/tar",
"archive/zip",
"bufio",
"builtin",
"bytes",
"compress",
"compress/bzip2",
"compress/flate",
"compress/gzip",
"compress/lzw",
"compress/zlib",
"container",
"container/heap",
"container/list",
"container/ring",
"crypto",
"crypto/aes",
"crypto/cipher",
"crypto/des",
"crypto/dsa",
"crypto/ecdsa",
"crypto/elliptic",
"crypto/hmac",
"crypto/md5",
"crypto/rand",
"crypto/rc4",
"crypto/rsa",
"crypto/sha1",
"crypto/sha256",
"crypto/sha512",
"crypto/subtle",
"crypto/tls",
"crypto/x509",
"crypto/x509/pkix",
"database",
"database/sql",
"database/sql/driver",
"debug",
"debug/dwarf",
"debug/elf",
"debug/gosym",
"debug/macho",
"debug/pe",
"encoding",
"encoding/ascii85",
"encoding/asn1",
"encoding/base32",
"encoding/base64",
"encoding/binary",
"encoding/csv",
"encoding/gob",
"encoding/hex",
"encoding/json",
"encoding/pem",
"encoding/xml",
"errors",
"expvar",
"flag",
"fmt",
"go",
"go/ast",
"go/build",
"go/doc",
"go/format",
"go/parser",
"go/printer",
"go/scanner",
"go/token",
"hash",
"hash/adler32",
"hash/crc32",
"hash/crc64",
"hash/fnv",
"html",
"html/template",
"image",
"image/color",
"image/draw",
"image/gif",
"image/jpeg",
"image/png",
"index",
"index/suffixarray",
"io",
"io/ioutil",
"log",
"log/syslog",
"math",
"math/big",
"math/cmplx",
"math/rand",
"mime",
"mime/multipart",
"net",
"net/http",
"net/http/cgi",
"net/http/cookiejar",
"net/http/fcgi",
"net/http/httptest",
"net/http/httputil",
"net/http/pprof",
"net/mail",
"net/rpc",
"net/rpc/jsonrpc",
"net/smtp",
"net/textproto",
"net/url",
"os",
"os/exec",
"os/signal",
"os/user",
"path",
"path/filepath",
"reflect",
"regexp",
"regexp/syntax",
"runtime",
"runtime/cgo",
"runtime/debug",
"runtime/pprof",
"runtime/race",
"sort",
"strconv",
"strings",
"sync",
"sync/atomic",
"syscall",
"testing",
"testing/iotest",
"testing/quick",
"text",
"text/scanner",
"text/tabwriter",
"text/template",
"text/template/parse",
"time",
"unicode",
"unicode/utf16",
"unicode/utf8",
"unsafe",
}<|fim▁end|> | |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.<|fim▁hole|>#
#########################################################################
from django import forms
from django.core.validators import validate_email, ValidationError
from slugify import slugify
from django.utils.translation import ugettext as _
from modeltranslation.forms import TranslationModelForm
from django.contrib.auth import get_user_model
from geonode.groups.models import GroupProfile
class GroupForm(TranslationModelForm):
slug = forms.SlugField(
max_length=20,
help_text=_("a short version of the name consisting only of letters, numbers, underscores and hyphens."),
widget=forms.HiddenInput,
required=False)
def clean_slug(self):
if GroupProfile.objects.filter(
slug__iexact=self.cleaned_data["slug"]).count() > 0:
raise forms.ValidationError(
_("A group already exists with that slug."))
return self.cleaned_data["slug"].lower()
def clean_title(self):
if GroupProfile.objects.filter(
title__iexact=self.cleaned_data["title"]).count() > 0:
raise forms.ValidationError(
_("A group already exists with that name."))
return self.cleaned_data["title"]
def clean(self):
cleaned_data = self.cleaned_data
name = cleaned_data.get("title")
slug = slugify(name)
cleaned_data["slug"] = slug
return cleaned_data
class Meta:
model = GroupProfile
exclude = ['group']
class GroupUpdateForm(forms.ModelForm):
def clean_name(self):
if GroupProfile.objects.filter(
name__iexact=self.cleaned_data["title"]).count() > 0:
if self.cleaned_data["title"] == self.instance.name:
pass # same instance
else:
raise forms.ValidationError(
_("A group already exists with that name."))
return self.cleaned_data["title"]
class Meta:
model = GroupProfile
exclude = ['group']
class GroupMemberForm(forms.Form):
role = forms.ChoiceField(choices=[
("member", "Member"),
("manager", "Manager"),
])
user_identifiers = forms.CharField(
widget=forms.TextInput(
attrs={
'class': 'user-select'}))
def clean_user_identifiers(self):
value = self.cleaned_data["user_identifiers"]
new_members, errors = [], []
for ui in value.split(","):
ui = ui.strip()
try:
validate_email(ui)
try:
new_members.append(get_user_model().objects.get(email=ui))
except get_user_model().DoesNotExist:
new_members.append(ui)
except ValidationError:
try:
new_members.append(
get_user_model().objects.get(
username=ui))
except get_user_model().DoesNotExist:
errors.append(ui)
if errors:
message = (
"The following are not valid email addresses or "
"usernames: %s; not added to the group" %
", ".join(errors))
raise forms.ValidationError(message)
return new_members
class GroupInviteForm(forms.Form):
invite_role = forms.ChoiceField(label="Role", choices=[
("member", "Member"),
("manager", "Manager"),
])
invite_user_identifiers = forms.CharField(
label="E-mail addresses list",
widget=forms.Textarea)
def clean_user_identifiers(self):
value = self.cleaned_data["invite_user_identifiers"]
invitees, errors = [], []
for ui in value.split(","):
ui = ui.strip()
try:
validate_email(ui)
try:
invitees.append(get_user_model().objects.get(email=ui))
except get_user_model().DoesNotExist:
invitees.append(ui)
except ValidationError:
try:
invitees.append(get_user_model().objects.get(username=ui))
except get_user_model().DoesNotExist:
errors.append(ui)
if errors:
message = (
"The following are not valid email addresses or "
"usernames: %s; no invitations sent" %
", ".join(errors))
raise forms.ValidationError(message)
return invitees<|fim▁end|> | #
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. |
<|file_name|>mng_v2_test.go<|end_file_name|><|fim▁begin|>package dao
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
func TestDaoBusinessAllV2(t *testing.T) {
convey.Convey("BusinessAllV2", t, func(convCtx convey.C) {
var (
c = context.Background()
)
convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
list, err := d.BusinessAllV2(c)
convCtx.Convey("Then err should be nil.list should not be nil.", func(convCtx convey.C) {
convCtx.So(err, convey.ShouldBeNil)
convCtx.So(list, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoBusinessInfoV2(t *testing.T) {
convey.Convey("BusinessInfoV2", t, func(convCtx convey.C) {
var (
c = context.Background()
name = "dm"
)
convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
b, err := d.BusinessInfoV2(c, name)
convCtx.Convey("Then err should be nil.b should not be nil.", func(convCtx convey.C) {
convCtx.So(err, convey.ShouldBeNil)
convCtx.So(b, convey.ShouldNotBeNil)
})
})
})
}
//func TestDaoBusinessIns(t *testing.T) {
// convey.Convey("BusinessIns", t, func(convCtx convey.C) {
// var (
// c = context.Background()
// pid = int64(0)
// name = ""
// description = ""
// )
// convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
// rows, err := d.BusinessIns(c, pid, name, description)
// convCtx.Convey("Then err should be nil.rows should not be nil.", func(convCtx convey.C) {
// convCtx.So(err, convey.ShouldBeNil)
// convCtx.So(rows, convey.ShouldNotBeNil)
// })
// })
// })
//}
//func TestDaoBusinessUpdate(t *testing.T) {
// convey.Convey("BusinessUpdate", t, func(convCtx convey.C) {
// var (
// c = context.Background()
// name = ""
// field = ""
// value = ""
// )
// convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
// rows, err := d.BusinessUpdate(c, name, field, value)
// convCtx.Convey("Then err should be nil.rows should not be nil.", func(convCtx convey.C) {
// convCtx.So(err, convey.ShouldBeNil)
// convCtx.So(rows, convey.ShouldNotBeNil)
// })
// })
// })
//}
func TestDaoAssetDBTables(t *testing.T) {
convey.Convey("AssetDBTables", t, func(convCtx convey.C) {
var (
c = context.Background()
)<|fim▁hole|> convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
list, err := d.AssetDBTables(c)
convCtx.Convey("Then err should be nil.list should not be nil.", func(convCtx convey.C) {
convCtx.So(err, convey.ShouldBeNil)
convCtx.So(list, convey.ShouldNotBeNil)
})
})
})
}
//
//func TestDaoAssetDBIns(t *testing.T) {
// convey.Convey("AssetDBIns", t, func(convCtx convey.C) {
// var (
// c = context.Background()
// name = ""
// description = ""
// dsn = ""
// )
// convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
// rows, err := d.AssetDBIns(c, name, description, dsn)
// convCtx.Convey("Then err should be nil.rows should not be nil.", func(convCtx convey.C) {
// convCtx.So(err, convey.ShouldBeNil)
// convCtx.So(rows, convey.ShouldNotBeNil)
// })
// })
// })
//}
//func TestDaoAssetTableIns(t *testing.T) {
// convey.Convey("AssetTableIns", t, func(convCtx convey.C) {
// var (
// c = context.Background()
// name = ""
// db = ""
// regex = ""
// fields = ""
// description = ""
// )
// convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
// rows, err := d.AssetTableIns(c, name, db, regex, fields, description)
// convCtx.Convey("Then err should be nil.rows should not be nil.", func(convCtx convey.C) {
// convCtx.So(err, convey.ShouldBeNil)
// convCtx.So(rows, convey.ShouldNotBeNil)
// })
// })
// })
//}
func TestDaoAsset(t *testing.T) {
convey.Convey("Asset", t, func(convCtx convey.C) {
var (
c = context.Background()
name = "bilibili_article"
)
convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
r, err := d.Asset(c, name)
convCtx.Convey("Then err should be nil.r should not be nil.", func(convCtx convey.C) {
convCtx.So(err, convey.ShouldBeNil)
convCtx.So(r, convey.ShouldNotBeNil)
})
})
})
}<|fim▁end|> | |
<|file_name|>alt_dir_ind.rs<|end_file_name|><|fim▁begin|>use std::io::{self, Read, Write};
use std::fmt;
use instruction::parameter::{AltDirect, Indirect};
use instruction::parameter::{ParamType, ParamTypeOf};
use instruction::parameter::InvalidParamType;
use instruction::parameter::ind_reg::Error;
use instruction::mem_size::MemSize;
use instruction::write_to::WriteTo;<|fim▁hole|>use process::Context;
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum AltDirInd {
AltDirect(AltDirect),
Indirect(Indirect),
}
impl AltDirInd {
pub fn read_from<R: Read>(param_type: ParamType, reader: &mut R) -> Result<Self, Error> {
match param_type {
ParamType::Direct => Ok(AltDirInd::AltDirect(AltDirect::read_from(reader)?)),
ParamType::Indirect => Ok(AltDirInd::Indirect(Indirect::read_from(reader)?)),
_ => Err(Error::InvalidParamType(InvalidParamType(param_type))),
}
}
}
impl GetValue for AltDirInd {
fn get_value(&self, vm: &Machine, context: &Context) -> i32 {
match *self {
AltDirInd::AltDirect(alt_direct) => alt_direct.get_value(vm, context),
AltDirInd::Indirect(indirect) => indirect.get_value(vm, context),
}
}
fn get_value_long(&self, vm: &Machine, context: &Context) -> i32 {
match *self {
AltDirInd::AltDirect(alt_direct) => alt_direct.get_value_long(vm, context),
AltDirInd::Indirect(indirect) => indirect.get_value_long(vm, context),
}
}
}
impl MemSize for AltDirInd {
fn mem_size(&self) -> usize {
match *self {
AltDirInd::AltDirect(alt_direct) => alt_direct.mem_size(),
AltDirInd::Indirect(indirect) => indirect.mem_size(),
}
}
}
impl WriteTo for AltDirInd {
fn write_to<W: Write>(&self, writer: &mut W) -> io::Result<()> {
match *self {
AltDirInd::AltDirect(alt_direct) => alt_direct.write_to(writer),
AltDirInd::Indirect(indirect) => indirect.write_to(writer),
}
}
}
impl ParamTypeOf for AltDirInd {
fn param_type(&self) -> ParamType {
match *self {
AltDirInd::AltDirect(_) => ParamType::Direct,
AltDirInd::Indirect(_) => ParamType::Indirect,
}
}
}
impl fmt::Debug for AltDirInd {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
AltDirInd::AltDirect(alt_direct) => write!(f, "{:?}", alt_direct),
AltDirInd::Indirect(indirect) => write!(f, "{:?}", indirect),
}
}
}
impl fmt::Display for AltDirInd {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
AltDirInd::AltDirect(alt_direct) => alt_direct.fmt(f),
AltDirInd::Indirect(indirect) => indirect.fmt(f),
}
}
}<|fim▁end|> | use instruction::get_value::GetValue;
use machine::Machine; |
<|file_name|>KnownLayerWMS.js<|end_file_name|><|fim▁begin|>/**
* A test for ensuring that the WMS's registered in Known Layers are working as expected
*/
Ext.define('admin.tests.KnownLayerWMS', {
extend : 'admin.tests.SingleAJAXTest',
<|fim▁hole|> },
getDescription : function() {
var baseDescription = 'This tests the backend connection to all web map services that belong to known layers. A simple GetMap and GetFeatureInfo request is with an artificial bounding box.';
baseDescription += this.callParent(arguments);
return baseDescription;
},
/**
* The entirety of our test is making a request to the controller and parsing the resposne
*/
startTest : function() {
//Init our params
var bbox = Ext.create('portal.util.BBox',{
eastBoundLongitude : 116,
westBoundLongitude : 115,
northBoundLatitude : -31,
southBoundLatitude : -32
}); //rough bounds around Perth, WA
var layerNames = [];
var serviceUrls = [];
var onlineResources = this._getKnownLayerOnlineResources('WMS');
if (onlineResources.length == 0) {
this._changeStatus(admin.tests.TestStatus.Unavailable);
return;
}
for (var i = 0; i < onlineResources.length; i++) {
layerNames.push(onlineResources[i].get('name'));
serviceUrls.push(onlineResources[i].get('url'));
}
//Run our test
this._singleAjaxTest('testWMS.diag', {
bbox : Ext.JSON.encode(bbox),
serviceUrls : serviceUrls,
layerNames : layerNames
});
}
});<|fim▁end|> |
getTitle : function() {
return 'Known layer WMS availability';
|
<|file_name|>containers_test.go<|end_file_name|><|fim▁begin|>package main
import (
"bytes"
"encoding/json"
"fmt"
"github.com/cdwlabs/armor/pkg/config"
docker "github.com/fsouza/go-dockerclient"
vaultapi "github.com/hashicorp/vault/api"
"github.com/pborman/uuid"
// "github.com/spf13/viper"
"golang.org/x/net/context"
"io/ioutil"
"os"
"time"
)
type TestContainers struct {
client *docker.Client
vaultID string
vaultDataDir string
}
var (
	// Set to false if you want to keep the test Vault container around for
	// debugging purposes (e.g. to inspect the state left by failing test cases).
testRemoveContainer = true
// Set to true if you want to see logs from Vault container. NOTE: these
// logs are printed after tests have completed (either successfully or
// unsuccessfully).
testDumpContainerLogs = true
ctx = context.Background()
dockerEndpoint string
VaultImageName = "pinterb/vault"
VaultImageTag = "0.6.2"
// VaultDisableMlock if true, this will disable the server from executing the
// mlock syscall to prevent memory from being swapped to disk. This is not
// recommended for production!
VaultDisableMlock = false
VaultDisableCache = false
VaultMaxLeaseTTL = 32 * 24 * time.Hour
VaultMaxLeaseTTLRaw = "720h"
VaultDefaultLeaseTTL = 32 * 24 * time.Hour
VaultDefaultLeaseTTLRaw = "168h"
// A function with no arguments that outputs a valid JSON string to be used
// as the value of the environment variable VAULT_LOCAL_CONFIG.
VaultLocalConfigGen = DefaultVaultLocalConfig
)
func DefaultVaultLocalConfig() (string, error) {
type Backend struct {
Type string `json:"type,omitempty"`
RedirectAddr string `json:"redirect_addr,omitempty"`
ClusterAddr string `json:"cluster_addr,omitempty"`
DisableClustering bool `json:"disable_clustering,omitempty"`
Config map[string]string `json:"config,omitempty"`
}
type FileBackend struct {
Config map[string]string `json:"file,omitempty"`
}
type Listener struct {
Type string `json:"type,omitempty"`
Config map[string]string `json:"config,omitempty"`<|fim▁hole|> Config map[string]string `json:"tcp,omitempty"`
}
type Telemetry struct {
StatsiteAddr string `json:"statsite_address,omitempty"`
StatsdAddr string `json:"statsd_address,omitempty"`
DisableHostname bool `json:"disable_hostname,omitempty"`
CirconusAPIToken string `json:"circonus_api_token,omitempty"`
CirconusAPIApp string `json:"circonus_api_app,omitempty"`
CirconusAPIURL string `json:"circonus_api_url,omitempty"`
CirconusSubmissionInterval string `json:"circonus_submission_interval,omitempty"`
CirconusCheckSubmissionURL string `json:"circonus_submission_url,omitempty"`
CirconusCheckID string `json:"circonus_check_id,omitempty"`
CirconusCheckForceMetricActivation string `json:"circonus_check_force_metric_activation,omitempty"`
CirconusCheckInstanceID string `json:"circonus_check_instance_id,omitempty"`
CirconusCheckSearchTag string `json:"circonus_check_search_tag,omitempty"`
CirconusBrokerID string `json:"circonus_broker_id,omitempty"`
CirconusBrokerSelectTag string `json:"circonus_broker_select_tag,omitempty"`
}
type Config struct {
Listeners *TCPListener `json:"listener,omitempty"`
Backend *FileBackend `json:"backend,omitempty"`
HABackend *Backend `json:"ha_backend,omitempty"`
CacheSize int `json:"cache_size,omitempty"`
DisableCache bool `json:"disable_cache,omitempty"`
DisableMlock bool `json:"disable_mlock,omitempty"`
Telemetry *Telemetry `json:"telemetry,omitempty"`
MaxLeaseTTL time.Duration `json:"-,omitempty"`
MaxLeaseTTLRaw string `json:"max_lease_ttl,omitempty"`
DefaultLeaseTTL time.Duration `json:"-,omitempty"`
DefaultLeaseTTLRaw string `json:"default_lease_ttl,omitempty"`
ClusterName string `json:"cluster_name,omitempty"`
}
vals := &Config{
DisableCache: VaultDisableCache,
DisableMlock: VaultDisableMlock,
Backend: &FileBackend{
Config: map[string]string{
"path": "/vault/file",
},
},
Listeners: &TCPListener{
Config: map[string]string{
"address": "0.0.0.0:8200",
"tls_disable": "false",
"tls_cert_file": "/vault/tls/cert.pem",
"tls_key_file": "/vault/tls/cert-key.pem",
},
},
Telemetry: &Telemetry{},
MaxLeaseTTL: VaultMaxLeaseTTL,
MaxLeaseTTLRaw: VaultMaxLeaseTTLRaw,
DefaultLeaseTTL: VaultDefaultLeaseTTL,
DefaultLeaseTTLRaw: VaultDefaultLeaseTTLRaw,
}
ret, err := json.Marshal(vals)
if err != nil {
return "", err
}
return string(ret), nil
}
// NewTestContainers sets up our test containers.
func NewTestContainers() (*TestContainers, error) {
client, err := docker.NewClient(getDockerEndpoint())
if err != nil {
return nil, fmt.Errorf("Failed to create docker client: %v", err)
}
err = client.Ping()
if err != nil {
return nil, fmt.Errorf("Failed to ping docker w/client: %v", err)
}
// Create a temporary directory for vault data
dataDir, err := ioutil.TempDir("", "vaultdata")
if err != nil {
return nil, fmt.Errorf("Failed to temp directory for vault's data directory: %v", err)
}
// cfg := config.Config()
cwd, _ := os.Getwd()
// Don't think client certs are necessary...especially for testing(?)
//os.Setenv(vaultapi.EnvVaultClientCert, cwd+"/test-fixtures/keys/client.pem")
//os.Setenv(vaultapi.EnvVaultClientKey, cwd+"/test-fixtures/keys/client-key.pem")
// os.Setenv(vaultapi.EnvVaultCACert, cwd+"/test-fixtures/keys/ca-cert.pem")
//viper.Set("vault_ca_cert", cwd+"/test-fixtures/keys/ca-cert.pem")
os.Setenv(config.VaultCACertEnvVar, cwd+"/test-fixtures/keys/ca-cert.pem")
//os.Setenv(vaultapi.EnvVaultCAPath, cwd+"/test-fixtures/keys")
//viper.Set("vault_ca_path", cwd+"/test-fixtures/keys")
os.Setenv(config.VaultCAPathEnvVar, cwd+"/test-fixtures/keys")
//os.Setenv(vaultapi.EnvVaultInsecure, "true")
//viper.Set("vault_skip_verify", true)
os.Setenv(vaultapi.EnvVaultMaxRetries, "5")
// Define our Vault container host config...
mounts := []docker.Mount{
{Name: "data", Source: dataDir, Destination: "/vault/file", Mode: "rxw"},
{Name: "tls", Source: cwd + "/test-fixtures/keys", Destination: "/vault/tls", Mode: "rxw"},
}
vols := make(map[string]struct{})
for _, mount := range mounts {
vols[mount.Source] = struct{}{}
}
binds := make([]string, len(mounts))
for i, mount := range mounts {
binds[i] = fmt.Sprintf("%s:%s", mount.Source, mount.Destination)
}
capadd := make([]string, 1)
capadd[0] = "IPC_LOCK"
//viper.Set("vault_address", "https://127.0.0.1:8200")
portBindings := map[docker.Port][]docker.PortBinding{
"8200/tcp": {{HostIP: "0.0.0.0", HostPort: "8200"}}}
hostConfig := docker.HostConfig{
Binds: binds,
CapAdd: capadd,
PortBindings: portBindings,
PublishAllPorts: false,
Privileged: false,
}
// Define our Vault create container options...
containerName := fmt.Sprintf("vault-test-%s", uuid.New())
exposedVaultPort := map[docker.Port]struct{}{
"8200/tcp": {}}
genVaultConfig, err := VaultLocalConfigGen()
if err != nil {
return nil, fmt.Errorf("Failed to render vault configuration json: %v", err)
}
createOpts := docker.CreateContainerOptions{
Name: containerName,
Config: &docker.Config{
Image: fmt.Sprintf("%s:%s", VaultImageName, VaultImageTag),
Labels: map[string]string{"com.cdw.cdwlabs": "true"},
Hostname: "feh.cdw.com",
Volumes: vols,
Mounts: mounts,
ExposedPorts: exposedVaultPort,
Env: []string{fmt.Sprintf("VAULT_LOCAL_CONFIG=%s", genVaultConfig), "VAULT_CACERT=/vault/tls/ca-cert.pem"},
Cmd: []string{"server", "-log-level=debug"},
},
HostConfig: &hostConfig,
}
// Pull the vault image
opts := docker.PullImageOptions{Repository: VaultImageName, Tag: VaultImageTag, OutputStream: os.Stdout}
err = client.PullImage(opts, docker.AuthConfiguration{})
if err != nil {
return nil, fmt.Errorf("Failed to pull Vault test image: %v", err)
}
// Create the container
cont, err := client.CreateContainer(createOpts)
if err != nil {
return nil, fmt.Errorf("Failed to create Vault test container: %v", err)
}
// Start the container
err = client.StartContainer(cont.ID, nil)
if err != nil {
return nil, fmt.Errorf("Failed to start Vault test container: %v", err)
}
return &TestContainers{
client: client,
vaultID: cont.ID,
vaultDataDir: dataDir,
}, nil
}
// CleanUp removes our test containers.
func (containers *TestContainers) CleanUp() error {
// defer containers.writer.Flush()
defer os.RemoveAll(containers.vaultDataDir)
err := containers.client.StopContainer(containers.vaultID, 10)
if err != nil {
return fmt.Errorf("Failed to stop container: %v", err)
}
// Reading logs from container and sending them to buf.
if testDumpContainerLogs {
fmt.Println("")
fmt.Println("##############################################")
fmt.Println(" Vault Container Logs")
fmt.Println("")
var buf bytes.Buffer
err = containers.client.AttachToContainer(docker.AttachToContainerOptions{
Container: containers.vaultID,
OutputStream: &buf,
Logs: true,
Stdout: true,
Stderr: true,
})
if err != nil {
return fmt.Errorf("Failed to attach to stopped container: %v", err)
}
fmt.Println(buf.String())
fmt.Println("")
fmt.Println("##############################################")
fmt.Println("")
}
if testRemoveContainer {
opts := docker.RemoveContainerOptions{ID: containers.vaultID}
err = containers.client.RemoveContainer(opts)
if err != nil {
return fmt.Errorf("Failed to remove container: %v", err)
}
}
return nil
}
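// getDockerEndpoint returns the Docker daemon endpoint, preferring DOCKER_HOST when it is set.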
func getDockerEndpoint() string {
var endpoint string
if len(os.Getenv("DOCKER_HOST")) > 0 {
endpoint = os.Getenv("DOCKER_HOST")
} else {
endpoint = "unix:///var/run/docker.sock"
}
return endpoint
}
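// containsString reports whether searchString is present in stringSlice.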
func containsString(stringSlice []string, searchString string) bool {
for _, value := range stringSlice {
if value == searchString {
return true
}
}
return false
}<|fim▁end|> | }
type TCPListener struct { |
<|file_name|>ProgressBarBase.ts<|end_file_name|><|fim▁begin|>/*
* The MIT License
*
* Copyright 2017 kuniaki.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:<|fim▁hole|> *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
import {Gauge1DOptions} from './GaugeBase'
import {Gauge1D} from './GaugeBase'
/**
* Progress bar option class.
*/
export class ProgressBarOptions extends Gauge1DOptions
{
/**
     * Texture of progress bar.
*/
public Texture: PIXI.Texture;
constructor()
{
super();
}
}
/**
* Progress bar class.
*/
export abstract class ProgressBar extends Gauge1D
{
private progressBarOptions: ProgressBarOptions;
private sprite : PIXI.Sprite;
private spriteMask : PIXI.Graphics;
/**
* @param options Option to set.
*/
constructor(options?: ProgressBarOptions)
{
let progressBarOptions: ProgressBarOptions;
if (!(options instanceof ProgressBarOptions))
{
progressBarOptions = new ProgressBarOptions();
}
else
{
progressBarOptions = options;
}
super(progressBarOptions);
this.progressBarOptions = progressBarOptions;
this.sprite = new PIXI.Sprite();
this.spriteMask = new PIXI.Graphics();
//Assign mask to sprite
this.sprite.mask = this.spriteMask;
//Assign texture to sprite
this.sprite.texture = this.progressBarOptions.Texture;
        //Assign sprite and mask to container
super.addChild(this.sprite);
super.addChild(this.spriteMask);
}
/**
* Get Options.
* @return Options.
*/
get Options() {return this.progressBarOptions}
/**
* Get sprite mask.
*/
protected get SpriteMask(): PIXI.Graphics { return this.spriteMask; }
/**
* Get sprite.
*/
protected get Sprite(): PIXI.Sprite { return this.sprite; }
}<|fim▁end|> | *
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software. |
<|file_name|>form.js<|end_file_name|><|fim▁begin|>/**
* This file contains JS functionality required by mforms and is included automatically
* when required.
*/
// Namespace for the form bits and bobs
M.form = M.form || {};
/**
* Initialises the show advanced functionality and events.
* This should only ever happen ONCE per page.
*
* @param {YUI} Y
* @param {object} config
*/
M.form.initShowAdvanced = function(Y, config) {
if (M.form.showAdvanced) {
return M.form.showAdvanced;
}
var showAdvanced = function(config) {
showAdvanced.superclass.constructor.apply(this, arguments);
};
showAdvanced.prototype = {
_advButtons : [],
_advAreas : [],
_stateInput : null,
initializer : function() {
this._advAreas = Y.all('form .advanced');
this._advButtons = Y.all('.showadvancedbtn');
if (this._advButtons.size() > 0) {
this._stateInput = new Y.NodeList(document.getElementsByName('mform_showadvanced_last'));
this._advButtons.on('click', this.switchState, this);
}
},
/**
* Toggles between showing advanced items and hiding them.
* Should be fired by an event.
*/
switchState : function(e) {
e.preventDefault();
if (this._stateInput.get('value')=='1') {<|fim▁hole|> this._advButtons.setAttribute('value', M.str.form.showadvanced);
this._advAreas.addClass('hide');
} else {
this._stateInput.set('value', '1');
this._advButtons.setAttribute('value', M.str.form.hideadvanced);
this._advAreas.removeClass('hide');
}
}
};
// Extend it with the YUI widget fw.
Y.extend(showAdvanced, Y.Base, showAdvanced.prototype, {
NAME : 'mform-showAdvanced'
});
M.form.showAdvanced = new showAdvanced(config);
return M.form.showAdvanced;
};
/**
* Initialises a manager for a forms dependencies.
* This should happen once per form.
*/
M.form.initFormDependencies = function(Y, formid, dependencies) {
    // If dependencies isn't an array or an object we don't want to
// know about it
if (!Y.Lang.isArray(dependencies) && !Y.Lang.isObject(dependencies)) {
return false;
}
/**
* Fixes an issue with YUI's processing method of form.elements property
* in Internet Explorer.
* http://yuilibrary.com/projects/yui3/ticket/2528030
*/
Y.Node.ATTRS.elements = {
getter: function() {
return Y.all(new Y.Array(this._node.elements, 0, true));
}
};
// Define the dependency manager if it hasn't already been defined.
M.form.dependencyManager = M.form.dependencyManager || (function(){
var dependencyManager = function(config) {
dependencyManager.superclass.constructor.apply(this, arguments);
};
dependencyManager.prototype = {
_form : null,
_depElements : [],
_nameCollections : [],
initializer : function(config) {
var i = 0, nodeName;
this._form = Y.one('#'+formid);
for (i in dependencies) {
this._depElements[i] = this.elementsByName(i);
if (this._depElements[i].size() == 0) {
continue;
}
this._depElements[i].each(function(node){
nodeName = node.get('nodeName').toUpperCase();
if (nodeName == 'INPUT') {
if (node.getAttribute('type').match(/^(button|submit|radio|checkbox)$/)) {
node.on('click', this.checkDependencies, this);
} else {
node.on('blur', this.checkDependencies, this);
}
node.on('change', this.checkDependencies, this);
} else if (nodeName == 'SELECT') {
node.on('change', this.checkDependencies, this);
} else {
node.on('click', this.checkDependencies, this);
node.on('blur', this.checkDependencies, this);
node.on('change', this.checkDependencies, this);
}
}, this);
}
this._form.get('elements').each(function(input){
if (input.getAttribute('type')=='reset') {
input.on('click', function(){
this._form.reset();
this.checkDependencies();
}, this);
}
}, this);
return this.checkDependencies(null);
},
/**
             * Gets all elements in the form by their name and returns
* a YUI NodeList
* @return Y.NodeList
*/
elementsByName : function(name) {
if (!this._nameCollections[name]) {
var elements = [];
this._form.get('elements').each(function(){
if (this.getAttribute('name') == name) {
elements.push(this);
}
});
this._nameCollections[name] = new Y.NodeList(elements);
}
return this._nameCollections[name];
},
/**
             * Checks the dependencies the form has and makes any changes to the
* form that are required.
*
             * Changes are made by functions titled _dependency_{dependencytype}
* and more can easily be introduced by defining further functions.
*/
checkDependencies : function(e) {
var tolock = [],
tohide = [],
dependon, condition, value,
lock, hide, checkfunction, result;
for (dependon in dependencies) {
if (this._depElements[dependon].size() == 0) {
continue;
}
for (condition in dependencies[dependon]) {
for (value in dependencies[dependon][condition]) {
lock = false;
hide = false;
checkfunction = '_dependency_'+condition;
if (Y.Lang.isFunction(this[checkfunction])) {
result = this[checkfunction].apply(this, [this._depElements[dependon], value, e]);
} else {
result = this._dependency_default(this._depElements[dependon], value, e);
}
lock = result.lock || false;
hide = result.hide || false;
for (var ei in dependencies[dependon][condition][value]) {
var eltolock = dependencies[dependon][condition][value][ei];
if (hide) {
tohide[eltolock] = true;
}
if (tolock[eltolock] != null) {
tolock[eltolock] = lock || tolock[eltolock];
} else {
tolock[eltolock] = lock;
}
}
}
}
}
for (var el in tolock) {
this._disableElement(el, tolock[el]);
if (tohide.propertyIsEnumerable(el)) {
this._hideElement(el, tohide[el]);
}
}
return true;
},
/**
             * Disables all form elements with the given name
*/
_disableElement : function(name, disabled) {
var els = this.elementsByName(name);
els.each(function(){
if (disabled) {
this.setAttribute('disabled', 'disabled');
} else {
this.removeAttribute('disabled');
}
})
},
/**
* Hides all elements with the given name.
*/
_hideElement : function(name, hidden) {
var els = this.elementsByName(name);
els.each(function(){
                    var e = this.ancestor('.fitem');
if (e) {
e.setStyles({
display : (hidden)?'none':''
})
}
});
},
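            /**
             * Each _dependency_* handler below returns an object with
             * "lock" and "hide" booleans, which checkDependencies applies
             * to the dependent elements.
             */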
_dependency_notchecked : function(elements, value) {
var lock = false;
elements.each(function(){
if (this.getAttribute('type').toLowerCase()=='radio' && this.get('value') != value) {
return;
}
lock = lock || !Y.Node.getDOMNode(this).checked;
});
return {
lock : lock,
hide : false
}
},
_dependency_checked : function(elements, value) {
var lock = false;
elements.each(function(){
if (this.getAttribute('type').toLowerCase()=='radio' && this.get('value') != value) {
return;
}
lock = lock || Y.Node.getDOMNode(this).checked;
});
return {
lock : lock,
hide : false
}
},
_dependency_noitemselected : function(elements, value) {
var lock = false;
elements.each(function(){
lock = lock || this.get('selectedIndex') == -1;
});
return {
lock : lock,
hide : false
}
},
_dependency_eq : function(elements, value) {
var lock = false;
elements.each(function(){
if (this.getAttribute('type').toLowerCase()=='radio' && !Y.Node.getDOMNode(this).checked) {
return;
} else if (this.getAttribute('type').toLowerCase() == 'checkbox' && !Y.Node.getDOMNode(this).checked) {
return;
}
lock = lock || this.get('value') == value;
});
return {
lock : lock,
hide : false
}
},
_dependency_hide : function(elements, value) {
return {
lock : false,
hide : true
}
},
_dependency_default : function(elements, value, ev) {
var lock = false;
elements.each(function(){
if (this.getAttribute('type').toLowerCase()=='radio' && !Y.Node.getDOMNode(this).checked) {
return;
} else if (this.getAttribute('type').toLowerCase() == 'checkbox' && !Y.Node.getDOMNode(this).checked) {
return;
}
lock = lock || this.get('value') != value;
});
return {
lock : lock,
hide : false
}
}
};
Y.extend(dependencyManager, Y.Base, dependencyManager.prototype, {
NAME : 'mform-dependency-manager'
});
return dependencyManager;
})();
return new M.form.dependencyManager();
};<|fim▁end|> | this._stateInput.set('value', '0'); |
<|file_name|>sw-KE.js<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*<|fim▁hole|> * Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
export default [
[
['saa sita za usiku', 'saa sita za mchana', 'alfajiri', 'asubuhi', 'mchana', 'jioni', 'usiku'],
,
],
,
[
'00:00', '12:00', ['04:00', '07:00'], ['07:00', '12:00'], ['12:00', '16:00'],
['16:00', '19:00'], ['19:00', '04:00']
]
];
//# sourceMappingURL=sw-KE.js.map<|fim▁end|> | |
<|file_name|>IfExp.py<|end_file_name|><|fim▁begin|>"""
IfExp astroid node
An if statement written in an expression form.
Attributes:<|fim▁hole|> - test (Node)
- Holds a single node such as Compare.
- Body (List[Node])
- A list of nodes that will execute if the condition passes.
- orelse (List[Node])
- The else clause.
Example:
- test -> True
    - Body -> [1]
- orelse -> [0]
"""
x = 1 if True else 0<|fim▁end|> | |
<|file_name|>jquery.blockUI.d.ts<|end_file_name|><|fim▁begin|>// Typescript type definitions for jQuery blockUI 2.55.0 by M. Alsup
// Project: http://malsup.com/jquery/block/
// Definitions by: Naoshi Tabuchi
// Definitions: https://bitbucket.org/ntabee/definitelytyped-clone.git
/// <reference path='../jquery/jquery.d.ts'/>
interface BlockUI {
(opts?: any): void;
defaults: any;
}<|fim▁hole|>}
interface JQuery {
blockUI: BlockUI;
unblockUI : UnblockUI;
}
interface JQueryStatic {
blockUI: BlockUI;
unblockUI: UnblockUI;
}<|fim▁end|> | interface UnblockUI {
(opts?: any): void;
defaults: any; |
<|file_name|>tasks.py<|end_file_name|><|fim▁begin|>import datetime
import httplib2
import itertools
from django.conf import settings
from django.db import connection
from django.db.models import Sum, Max
from apiclient.discovery import build
from elasticsearch.helpers import bulk_index
from oauth2client.client import OAuth2Credentials
import olympia.core.logger
from olympia import amo
from olympia.amo import search as amo_search
from olympia.addons.models import Addon
from olympia.amo.celery import task
from olympia.bandwagon.models import Collection
from olympia.reviews.models import Review
from olympia.users.models import UserProfile
from olympia.versions.models import Version
from . import search
from .models import (
AddonCollectionCount, CollectionCount, CollectionStats, DownloadCount,
ThemeUserCount, UpdateCount)
log = olympia.core.logger.getLogger('z.task')
@task
def update_addons_collections_downloads(data, **kw):
log.info("[%s] Updating addons+collections download totals." %
(len(data)))
query = (
"UPDATE addons_collections SET downloads=%s WHERE addon_id=%s "
"AND collection_id=%s;" * len(data))
with connection.cursor() as cursor:
cursor.execute(
query,
list(itertools.chain.from_iterable(
[var['sum'], var['addon'], var['collection']]
for var in data)))
@task
def update_collections_total(data, **kw):
log.info("[%s] Updating collections' download totals." %
(len(data)))
for var in data:
(Collection.objects.filter(pk=var['collection_id'])
.update(downloads=var['sum']))
def get_profile_id(service, domain):
"""
Fetch the profile ID for the given domain.
"""
accounts = service.management().accounts().list().execute()
account_ids = [a['id'] for a in accounts.get('items', ())]
for account_id in account_ids:
webproperties = service.management().webproperties().list(
accountId=account_id).execute()
webproperty_ids = [p['id'] for p in webproperties.get('items', ())]
for webproperty_id in webproperty_ids:
profiles = service.management().profiles().list(
accountId=account_id,
webPropertyId=webproperty_id).execute()
for p in profiles.get('items', ()):
# sometimes GA includes "http://", sometimes it doesn't.
if '://' in p['websiteUrl']:
name = p['websiteUrl'].partition('://')[-1]
else:
name = p['websiteUrl']
if name == domain:
return p['id']
@task
def update_google_analytics(date, **kw):
creds_data = getattr(settings, 'GOOGLE_ANALYTICS_CREDENTIALS', None)
if not creds_data:
log.critical('Failed to update global stats: '
'GOOGLE_ANALYTICS_CREDENTIALS not set')
return
creds = OAuth2Credentials(
*[creds_data[k] for k in
('access_token', 'client_id', 'client_secret',
'refresh_token', 'token_expiry', 'token_uri',
'user_agent')])
h = httplib2.Http()
creds.authorize(h)
service = build('analytics', 'v3', http=h)
domain = getattr(settings,
'GOOGLE_ANALYTICS_DOMAIN', None) or settings.DOMAIN
profile_id = get_profile_id(service, domain)
if profile_id is None:
log.critical('Failed to update global stats: could not access a Google'
' Analytics profile for ' + domain)
return
datestr = date.strftime('%Y-%m-%d')
try:
data = service.data().ga().get(ids='ga:' + profile_id,
start_date=datestr,
end_date=datestr,
metrics='ga:visits').execute()
# Storing this under the webtrends stat name so it goes on the
# same graph as the old webtrends data.
p = ['webtrends_DailyVisitors', data['rows'][0][0], date]
except Exception, e:
log.critical(
            'Fetching stats data for %s from Google Analytics failed: %s' % (domain, e))
return
try:
cursor = connection.cursor()
cursor.execute('REPLACE INTO global_stats (name, count, date) '
'values (%s, %s, %s)', p)
except Exception, e:
log.critical('Failed to update global stats: (%s): %s' % (p, e))
else:
log.debug('Committed global stats details: (%s) has (%s) for (%s)'
% tuple(p))
finally:
cursor.close()
@task
def update_global_totals(job, date, **kw):
log.info('Updating global statistics totals (%s) for (%s)' % (job, date))
jobs = _get_daily_jobs(date)
jobs.update(_get_metrics_jobs(date))
num = jobs[job]()
q = """REPLACE INTO global_stats (`name`, `count`, `date`)
VALUES (%s, %s, %s)"""
p = [job, num or 0, date]
try:
cursor = connection.cursor()
cursor.execute(q, p)
except Exception, e:
log.critical('Failed to update global stats: (%s): %s' % (p, e))
else:
log.debug('Committed global stats details: (%s) has (%s) for (%s)'
% tuple(p))
finally:
cursor.close()
def _get_daily_jobs(date=None):
"""Return a dictionary of statistics queries.
If a date is specified and applies to the job it will be used. Otherwise
the date will default to the previous day.
"""
if not date:
date = datetime.date.today() - datetime.timedelta(days=1)
# Passing through a datetime would not generate an error,
# but would pass and give incorrect values.
if isinstance(date, datetime.datetime):
raise ValueError('This requires a valid date, not a datetime')
# Testing on lte created date doesn't get you todays date, you need to do
# less than next date. That's because 2012-1-1 becomes 2012-1-1 00:00
next_date = date + datetime.timedelta(days=1)
date_str = date.strftime('%Y-%m-%d')
extra = dict(where=['DATE(created)=%s'], params=[date_str])
# If you're editing these, note that you are returning a function! This
# cheesy hackery was done so that we could pass the queries to celery
# lazily and not hammer the db with a ton of these all at once.
stats = {
# Add-on Downloads
'addon_total_downloads': lambda: DownloadCount.objects.filter(
date__lt=next_date).aggregate(sum=Sum('count'))['sum'],
'addon_downloads_new': lambda: DownloadCount.objects.filter(
date=date).aggregate(sum=Sum('count'))['sum'],
# Listed Add-on counts
'addon_count_new': Addon.objects.valid().extra(**extra).count,
# Listed Version counts
'version_count_new': Version.objects.filter(
channel=amo.RELEASE_CHANNEL_LISTED).extra(**extra).count,
# User counts
'user_count_total': UserProfile.objects.filter(
created__lt=next_date).count,
'user_count_new': UserProfile.objects.extra(**extra).count,
# Review counts
'review_count_total': Review.objects.filter(created__lte=date,
editorreview=0).count,
# We can't use "**extra" here, because this query joins on reviews
# itself, and thus raises the following error:
# "Column 'created' in where clause is ambiguous".
'review_count_new': Review.objects.filter(editorreview=0).extra(
where=['DATE(reviews.created)=%s'], params=[date_str]).count,
# Collection counts
'collection_count_total': Collection.objects.filter(
created__lt=next_date).count,
'collection_count_new': Collection.objects.extra(**extra).count,
'collection_addon_downloads': (
lambda: AddonCollectionCount.objects.filter(
date__lte=date).aggregate(sum=Sum('count'))['sum']),
}
# If we're processing today's stats, we'll do some extras. We don't do
# these for re-processed stats because they change over time (eg. add-ons
    # move from sandbox -> public).
if date == (datetime.date.today() - datetime.timedelta(days=1)):
stats.update({
'addon_count_nominated': Addon.objects.filter(
created__lte=date, status=amo.STATUS_NOMINATED,
disabled_by_user=0).count,
'addon_count_public': Addon.objects.filter(
created__lte=date, status=amo.STATUS_PUBLIC,
disabled_by_user=0).count,
'addon_count_pending': Version.objects.filter(
created__lte=date, files__status=amo.STATUS_PENDING).count,
'collection_count_private': Collection.objects.filter(
created__lte=date, listed=0).count,
'collection_count_public': Collection.objects.filter(
created__lte=date, listed=1).count,
'collection_count_editorspicks': Collection.objects.filter(
created__lte=date, type=amo.COLLECTION_FEATURED).count,
'collection_count_normal': Collection.objects.filter(
created__lte=date, type=amo.COLLECTION_NORMAL).count,
})
return stats
def _get_metrics_jobs(date=None):
"""Return a dictionary of statistics queries.
If a date is specified and applies to the job it will be used. Otherwise
the date will default to the last date metrics put something in the db.
"""
if not date:
date = UpdateCount.objects.aggregate(max=Max('date'))['max']
# If you're editing these, note that you are returning a function!
stats = {
'addon_total_updatepings': lambda: UpdateCount.objects.filter(
date=date).aggregate(sum=Sum('count'))['sum'],
'collector_updatepings': lambda: UpdateCount.objects.get(
addon=settings.ADDON_COLLECTOR_ID, date=date).count,
}
return stats
@task
def index_update_counts(ids, index=None, **kw):
index = index or search.get_alias()
es = amo_search.get_es()
qs = UpdateCount.objects.filter(id__in=ids)
if qs:
log.info('Indexing %s updates for %s.' % (qs.count(), qs[0].date))
data = []
try:
for update in qs:
data.append(search.extract_update_count(update))
bulk_index(es, data, index=index,
doc_type=UpdateCount.get_mapping_type(), refresh=True)
except Exception, exc:
index_update_counts.retry(args=[ids, index], exc=exc, **kw)
raise
@task
def index_download_counts(ids, index=None, **kw):
index = index or search.get_alias()
es = amo_search.get_es()
qs = DownloadCount.objects.filter(id__in=ids)
if qs:
log.info('Indexing %s downloads for %s.' % (qs.count(), qs[0].date))
try:
data = []
for dl in qs:
data.append(search.extract_download_count(dl))
bulk_index(es, data, index=index,
doc_type=DownloadCount.get_mapping_type(), refresh=True)
except Exception, exc:
index_download_counts.retry(args=[ids, index], exc=exc)
raise
@task
def index_collection_counts(ids, index=None, **kw):
index = index or search.get_alias()
es = amo_search.get_es()
qs = CollectionCount.objects.filter(collection__in=ids)
if qs:
log.info('Indexing %s addon collection counts: %s'
% (qs.count(), qs[0].date))
data = []
try:
for collection_count in qs:
collection = collection_count.collection_id
filters = dict(collection=collection,
date=collection_count.date)
data.append(search.extract_addon_collection(
collection_count,
AddonCollectionCount.objects.filter(**filters),
CollectionStats.objects.filter(**filters)))
bulk_index(es, data, index=index,
doc_type=CollectionCount.get_mapping_type(),
refresh=True)
except Exception, exc:
index_collection_counts.retry(args=[ids], exc=exc)
raise
@task
def index_theme_user_counts(ids, index=None, **kw):
index = index or search.get_alias()
es = amo_search.get_es()
qs = ThemeUserCount.objects.filter(id__in=ids)<|fim▁hole|> data = []
try:
for user_count in qs:
data.append(search.extract_theme_user_count(user_count))
bulk_index(es, data, index=index,
doc_type=ThemeUserCount.get_mapping_type(), refresh=True)
except Exception, exc:
index_theme_user_counts.retry(args=[ids], exc=exc, **kw)
raise<|fim▁end|> |
if qs:
log.info('Indexing %s theme user counts for %s.'
% (qs.count(), qs[0].date)) |
<|file_name|>NodeSerializationCodes.py<|end_file_name|><|fim▁begin|>from .Node import error
SYNTAX_NODE_SERIALIZATION_CODES = {
# 0 is 'Token'. Needs to be defined manually
# 1 is 'Unknown'. Needs to be defined manually
'UnknownDecl': 2,
'TypealiasDecl': 3,
'AssociatedtypeDecl': 4,
'IfConfigDecl': 5,
'PoundErrorDecl': 6,
'PoundWarningDecl': 7,
'PoundSourceLocation': 8,
'ClassDecl': 9,
'StructDecl': 10,
'ProtocolDecl': 11,
'ExtensionDecl': 12,
'FunctionDecl': 13,
'InitializerDecl': 14,
'DeinitializerDecl': 15,
'SubscriptDecl': 16,
'ImportDecl': 17,
'AccessorDecl': 18,
'VariableDecl': 19,
'EnumCaseDecl': 20,
'EnumDecl': 21,
'OperatorDecl': 22,
'PrecedenceGroupDecl': 23,
'UnknownExpr': 24,
'InOutExpr': 25,
'PoundColumnExpr': 26,
'TryExpr': 27,
'AwaitExpr': 249,
'IdentifierExpr': 28,
'SuperRefExpr': 29,
'NilLiteralExpr': 30,
'DiscardAssignmentExpr': 31,
'AssignmentExpr': 32,
'SequenceExpr': 33,
'PoundLineExpr': 34,
'PoundFileExpr': 35,
'PoundFunctionExpr': 36,
'PoundDsohandleExpr': 37,
'SymbolicReferenceExpr': 38,
'PrefixOperatorExpr': 39,
'BinaryOperatorExpr': 40,
'ArrowExpr': 41,
'FloatLiteralExpr': 42,
'TupleExpr': 43,
'ArrayExpr': 44,
'DictionaryExpr': 45,
'ImplicitMemberExpr': 46,
'IntegerLiteralExpr': 47,
'StringLiteralExpr': 48,
'BooleanLiteralExpr': 49,
'TernaryExpr': 50,
'MemberAccessExpr': 51,
'DotSelfExpr': 52,
'IsExpr': 53,
'AsExpr': 54,
'TypeExpr': 55,
'ClosureExpr': 56,
'UnresolvedPatternExpr': 57,
'FunctionCallExpr': 58,
'SubscriptExpr': 59,
'OptionalChainingExpr': 60,
'ForcedValueExpr': 61,
'PostfixUnaryExpr': 62,
'SpecializeExpr': 63,
'KeyPathExpr': 65,
'KeyPathBaseExpr': 66,
'ObjcKeyPathExpr': 67,
'ObjcSelectorExpr': 68,
'EditorPlaceholderExpr': 69,
'ObjectLiteralExpr': 70,
'UnknownStmt': 71,
'ContinueStmt': 72,
'WhileStmt': 73,
'DeferStmt': 74,
'ExpressionStmt': 75,
'RepeatWhileStmt': 76,
'GuardStmt': 77,
'ForInStmt': 78,
'SwitchStmt': 79,
'DoStmt': 80,
'ReturnStmt': 81,
'FallthroughStmt': 82,
'BreakStmt': 83,
'DeclarationStmt': 84,
'ThrowStmt': 85,
'IfStmt': 86,
'Decl': 87,
'Expr': 88,
'Stmt': 89,
'Type': 90,
'Pattern': 91,
'CodeBlockItem': 92,
'CodeBlock': 93,
'DeclNameArgument': 94,
'DeclNameArguments': 95,
# removed: 'FunctionCallArgument': 96,
'TupleExprElement': 97,
'ArrayElement': 98,
'DictionaryElement': 99,
'ClosureCaptureItem': 100,
'ClosureCaptureSignature': 101,
'ClosureParam': 102,
'ClosureSignature': 103,
'StringSegment': 104,
'ExpressionSegment': 105,
'ObjcNamePiece': 106,
'TypeInitializerClause': 107,
'ParameterClause': 108,
'ReturnClause': 109,
'FunctionSignature': 110,
'IfConfigClause': 111,
'PoundSourceLocationArgs': 112,
'DeclModifier': 113,
'InheritedType': 114,
'TypeInheritanceClause': 115,
'MemberDeclBlock': 116,
'MemberDeclListItem': 117,
'SourceFile': 118,
'InitializerClause': 119,
'FunctionParameter': 120,
'AccessLevelModifier': 121,<|fim▁hole|> 'AccessPathComponent': 122,
'AccessorParameter': 123,
'AccessorBlock': 124,
'PatternBinding': 125,
'EnumCaseElement': 126,
'OperatorPrecedenceAndTypes': 127,
'PrecedenceGroupRelation': 128,
'PrecedenceGroupNameElement': 129,
'PrecedenceGroupAssignment': 130,
'PrecedenceGroupAssociativity': 131,
'Attribute': 132,
'LabeledSpecializeEntry': 133,
'ImplementsAttributeArguments': 134,
'ObjCSelectorPiece': 135,
'WhereClause': 136,
'ConditionElement': 137,
'AvailabilityCondition': 138,
'MatchingPatternCondition': 139,
'OptionalBindingCondition': 140,
'ElseIfContinuation': 141,
'ElseBlock': 142,
'SwitchCase': 143,
'SwitchDefaultLabel': 144,
'CaseItem': 145,
'SwitchCaseLabel': 146,
'CatchClause': 147,
'GenericWhereClause': 148,
'SameTypeRequirement': 149,
'GenericParameter': 150,
'GenericParameterClause': 151,
'ConformanceRequirement': 152,
'CompositionTypeElement': 153,
'TupleTypeElement': 154,
'GenericArgument': 155,
'GenericArgumentClause': 156,
'TypeAnnotation': 157,
'TuplePatternElement': 158,
'AvailabilityArgument': 159,
'AvailabilityLabeledArgument': 160,
'AvailabilityVersionRestriction': 161,
'VersionTuple': 162,
'CodeBlockItemList': 163,
# removed: 'FunctionCallArgumentList': 164,
'TupleExprElementList': 165,
'ArrayElementList': 166,
'DictionaryElementList': 167,
'StringLiteralSegments': 168,
'DeclNameArgumentList': 169,
'ExprList': 170,
'ClosureCaptureItemList': 171,
'ClosureParamList': 172,
'ObjcName': 173,
'FunctionParameterList': 174,
'IfConfigClauseList': 175,
'InheritedTypeList': 176,
'MemberDeclList': 177,
'ModifierList': 178,
'AccessPath': 179,
'AccessorList': 180,
'PatternBindingList': 181,
'EnumCaseElementList': 182,
'PrecedenceGroupAttributeList': 183,
'PrecedenceGroupNameList': 184,
'TokenList': 185,
'NonEmptyTokenList': 186,
'AttributeList': 187,
'SpecializeAttributeSpecList': 188,
'ObjCSelector': 189,
'SwitchCaseList': 190,
'CatchClauseList': 191,
'CaseItemList': 192,
'ConditionElementList': 193,
'GenericRequirementList': 194,
'GenericParameterList': 195,
'CompositionTypeElementList': 196,
'TupleTypeElementList': 197,
'GenericArgumentList': 198,
'TuplePatternElementList': 199,
'AvailabilitySpecList': 200,
'UnknownPattern': 201,
'EnumCasePattern': 202,
'IsTypePattern': 203,
'OptionalPattern': 204,
'IdentifierPattern': 205,
'AsTypePattern': 206,
'TuplePattern': 207,
'WildcardPattern': 208,
'ExpressionPattern': 209,
'ValueBindingPattern': 210,
'UnknownType': 211,
'SimpleTypeIdentifier': 212,
'MemberTypeIdentifier': 213,
'ClassRestrictionType': 214,
'ArrayType': 215,
'DictionaryType': 216,
'MetatypeType': 217,
'OptionalType': 218,
'ImplicitlyUnwrappedOptionalType': 219,
'CompositionType': 220,
'TupleType': 221,
'FunctionType': 222,
'AttributedType': 223,
'YieldStmt': 224,
'YieldList': 225,
'IdentifierList': 226,
'NamedAttributeStringArgument': 227,
'DeclName': 228,
'PoundAssertStmt': 229,
'SomeType': 230,
'CustomAttribute': 231,
'GenericRequirement': 232,
'DifferentiableAttributeArguments': 233,
'DifferentiabilityParamsClause': 234,
'DifferentiabilityParams': 235,
'DifferentiabilityParamList': 236,
'DifferentiabilityParam': 237,
# removed: 'DifferentiableAttributeFuncSpecifier': 238,
'FunctionDeclName': 239,
'PoundFilePathExpr': 240,
'DerivativeRegistrationAttributeArguments': 241,
'QualifiedDeclName': 242,
'CatchItem': 243,
'CatchItemList': 244,
'MultipleTrailingClosureElementList': 245,
'MultipleTrailingClosureElement': 246,
'PoundFileIDExpr': 247,
'TargetFunctionEntry': 248,
}
def verify_syntax_node_serialization_codes(nodes, serialization_codes):
# Verify that all nodes have serialization codes
for node in nodes:
if not node.is_base() and node.syntax_kind not in serialization_codes:
error('Node %s has no serialization code' % node.syntax_kind)
# Verify that no serialization code is used twice
used_codes = set()
for serialization_code in serialization_codes.values():
if serialization_code in used_codes:
error("Serialization code %d used twice" % serialization_code)
used_codes.add(serialization_code)
def get_serialization_code(syntax_kind):
return SYNTAX_NODE_SERIALIZATION_CODES[syntax_kind]<|fim▁end|> | |
<|file_name|>DisableDynamoDBTest.java<|end_file_name|><|fim▁begin|>package com.playtika.test.dynamodb;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.testcontainers.containers.Container;
public class DisableDynamoDBTest {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()<|fim▁hole|> .withConfiguration(AutoConfigurations.of(
EmbeddedDynamoDBBootstrapConfiguration.class,
EmbeddedDynamoDBDependenciesAutoConfiguration.class));
@Test
public void contextLoads() {
contextRunner
.withPropertyValues(
"embedded.dynamodb.enabled=false"
)
.run((context) -> Assertions.assertThat(context)
.hasNotFailed()
.doesNotHaveBean(Container.class)
.doesNotHaveBean("dynamodbDependencyPostProcessor"));
}
}<|fim▁end|> | |
<|file_name|>config.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package transport
import (
"context"
"crypto/tls"
"net"
"net/http"
"net/url"
)
// Config holds various options for establishing a transport.
type Config struct {
// UserAgent is an optional field that specifies the caller of this
// request.
UserAgent string
// The base TLS configuration for this transport.
TLS TLSConfig
// Username and password for basic authentication
Username string
Password string
// Bearer token for authentication
BearerToken string
// Path to a file containing a BearerToken.
// If set, the contents are periodically read.
// The last successfully read value takes precedence over BearerToken.
BearerTokenFile string
// Impersonate is the config that this Config will impersonate using
Impersonate ImpersonationConfig
// DisableCompression bypasses automatic GZip compression requests to the
// server.
DisableCompression bool
// Transport may be used for custom HTTP behavior. This attribute may
// not be specified with the TLS client certificate options. Use
// WrapTransport for most client level operations.
Transport http.RoundTripper
// WrapTransport will be invoked for custom HTTP behavior after the
// underlying transport is initialized (either the transport created
// from TLSClientConfig, Transport, or http.DefaultTransport). The
// config may layer other RoundTrippers on top of the returned
// RoundTripper.
//
// A future release will change this field to an array. Use config.Wrap()
// instead of setting this value directly.
WrapTransport WrapperFunc
// Dial specifies the dial function for creating unencrypted TCP connections.
Dial func(ctx context.Context, network, address string) (net.Conn, error)
// Proxy is the proxy func to be used for all requests made by this
// transport. If Proxy is nil, http.ProxyFromEnvironment is used. If Proxy
// returns a nil *URL, no proxy is used.
//
// socks5 proxying does not currently support spdy streaming endpoints.
Proxy func(*http.Request) (*url.URL, error)
}
// ImpersonationConfig has all the available impersonation options
type ImpersonationConfig struct {
// UserName matches user.Info.GetName()
UserName string
// Groups matches user.Info.GetGroups()
Groups []string
// Extra matches user.Info.GetExtra()
Extra map[string][]string
}
// HasCA returns whether the configuration has a certificate authority or not.
func (c *Config) HasCA() bool {
return len(c.TLS.CAData) > 0 || len(c.TLS.CAFile) > 0
}
// HasBasicAuth returns whether the configuration has basic authentication or not.
func (c *Config) HasBasicAuth() bool {
return len(c.Username) != 0
}
// HasTokenAuth returns whether the configuration has token authentication or not.
func (c *Config) HasTokenAuth() bool {
return len(c.BearerToken) != 0 || len(c.BearerTokenFile) != 0
}
// HasCertAuth returns whether the configuration has certificate authentication or not.
func (c *Config) HasCertAuth() bool {
return (len(c.TLS.CertData) != 0 || len(c.TLS.CertFile) != 0) && (len(c.TLS.KeyData) != 0 || len(c.TLS.KeyFile) != 0)
}
// HasCertCallbacks returns whether the configuration has certificate callback or not.
func (c *Config) HasCertCallback() bool {
return c.TLS.GetCert != nil
}
// Wrap adds a transport middleware function that will give the caller
// an opportunity to wrap the underlying http.RoundTripper prior to the
// first API call being made. The provided function is invoked after any
// existing transport wrappers are invoked.
func (c *Config) Wrap(fn WrapperFunc) {
c.WrapTransport = Wrappers(c.WrapTransport, fn)
}
<|fim▁hole|> KeyFile string // Path of the PEM-encoded client key.
ReloadTLSFiles bool // Set to indicate that the original config provided files, and that they should be reloaded
Insecure bool // Server should be accessed without verifying the certificate. For testing only.
ServerName string // Override for the server name passed to the server for SNI and used to verify certificates.
CAData []byte // Bytes of the PEM-encoded server trusted root certificates. Supercedes CAFile.
CertData []byte // Bytes of the PEM-encoded client certificate. Supercedes CertFile.
KeyData []byte // Bytes of the PEM-encoded client key. Supercedes KeyFile.
// NextProtos is a list of supported application level protocols, in order of preference.
// Used to populate tls.Config.NextProtos.
// To indicate to the server http/1.1 is preferred over http/2, set to ["http/1.1", "h2"] (though the server is free to ignore that preference).
// To use only http/1.1, set to ["http/1.1"].
NextProtos []string
GetCert func() (*tls.Certificate, error) // Callback that returns a TLS client certificate. CertData, CertFile, KeyData and KeyFile supercede this field.
}<|fim▁end|> | // TLSConfig holds the information needed to set up a TLS transport.
type TLSConfig struct {
CAFile string // Path of the PEM-encoded server trusted root certificates.
CertFile string // Path of the PEM-encoded client certificate. |
<|file_name|>stream.rs<|end_file_name|><|fim▁begin|>use ffi::{sockaddr, socklen_t, AF_UNIX, SOCK_STREAM};
use core::{Endpoint, Protocol};
use socket_listener::SocketListener;
use stream_socket::StreamSocket;
use local::LocalEndpoint;
use std::fmt;
use std::mem;
/// The stream-oriented UNIX domain protocol.
///
/// # Example
/// Create a server and client sockets.
///
/// ```rust,no_run
/// use asyncio::{IoContext, Endpoint};
/// use asyncio::local::{LocalStream, LocalStreamEndpoint, LocalStreamSocket, LocalStreamListener};
///
/// let ctx = &IoContext::new().unwrap();
/// let ep = LocalStreamEndpoint::new("example.sock").unwrap();
///
/// let sv = LocalStreamListener::new(ctx, LocalStream).unwrap();
/// sv.bind(&ep).unwrap();
/// sv.listen().unwrap();
///
/// let cl = LocalStreamSocket::new(ctx, ep.protocol()).unwrap();
/// cl.connect(&ep).unwrap();
/// ```
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct LocalStream;
impl Protocol for LocalStream {
type Endpoint = LocalEndpoint<Self>;
type Socket = LocalStreamSocket;
fn family_type(&self) -> i32 {
AF_UNIX
}
fn socket_type(&self) -> i32 {
SOCK_STREAM
}
fn protocol_type(&self) -> i32 {
0
}
unsafe fn uninitialized(&self) -> Self::Endpoint {
mem::uninitialized()
}
}
impl Endpoint<LocalStream> for LocalEndpoint<LocalStream> {
fn protocol(&self) -> LocalStream {
LocalStream
}
fn as_ptr(&self) -> *const sockaddr {
&self.sun as *const _ as *const _
}
fn as_mut_ptr(&mut self) -> *mut sockaddr {
&mut self.sun as *mut _ as *mut _
}
fn capacity(&self) -> socklen_t {
self.sun.capacity() as socklen_t<|fim▁hole|> fn size(&self) -> socklen_t {
self.sun.size() as socklen_t
}
unsafe fn resize(&mut self, size: socklen_t) {
debug_assert!(size <= self.capacity());
self.sun.resize(size as u8)
}
}
impl fmt::Debug for LocalEndpoint<LocalStream> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}:{:?})", self.protocol(), self.as_pathname())
}
}
/// The stream-oriented UNIX domain endpoint type
pub type LocalStreamEndpoint = LocalEndpoint<LocalStream>;
/// The stream-oriented UNIX domain socket type.
pub type LocalStreamSocket = StreamSocket<LocalStream>;
/// The stream-oriented UNIX domain listener type.
pub type LocalStreamListener = SocketListener<LocalStream>;
#[test]
fn test_getsockname_local() {
use core::IoContext;
use local::*;
use std::fs;
let ctx = &IoContext::new().unwrap();
let ep = LocalStreamEndpoint::new(".asio_foo.sock").unwrap();
println!("{:?}", ep.as_pathname().unwrap());
let _ = fs::remove_file(ep.as_pathname().unwrap());
let soc = LocalStreamSocket::new(ctx, ep.protocol()).unwrap();
soc.bind(&ep).unwrap();
assert_eq!(soc.local_endpoint().unwrap(), ep);
assert_eq!(soc.local_endpoint().unwrap(), ep);
let _ = fs::remove_file(ep.as_pathname().unwrap());
}
#[test]
fn test_format() {
use core::IoContext;
let _ctx = &IoContext::new().unwrap();
println!("{:?}", LocalStream);
println!("{:?}", LocalStreamEndpoint::new("foo/bar").unwrap());
}<|fim▁end|> | }
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>//! A helper tool for generating urls and sha256 checksums of cargo-bazel binaries and writing them to a module.
use std::collections::HashMap;
use std::io::{BufRead, BufReader};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::{env, fs};
use clap::Parser;
use hex::ToHex;
use sha2::{Digest, Sha256};
#[derive(Parser, Debug)]
struct Options {
/// The path to an artifacts directory expecting to contain directories
    /// named after platform triples with binaries inside.
#[clap(long)]
pub artifacts_dir: PathBuf,
/// A url prefix where the artifacts can be found
#[clap(long)]
pub url_prefix: String,
    /// The path to a buildifier binary. If set, it will be run on the module
#[clap(long)]
pub buildifier: Option<PathBuf>,
}
struct Artifact {
pub url: String,
pub triple: String,
pub sha256: String,
}
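/// Computes the SHA-256 digest of the file at `file_path`, returned as a lowercase hex string.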
fn calculate_sha256(file_path: &Path) -> String {
let file = fs::File::open(file_path).unwrap();
let mut reader = BufReader::new(file);<|fim▁hole|> let buffer = reader.fill_buf().unwrap();
if buffer.is_empty() {
break;
}
hasher.update(buffer);
buffer.len()
};
reader.consume(consummed);
}
let digest = hasher.finalize();
digest.encode_hex::<String>()
}
fn locate_artifacts(artifacts_dir: &Path, url_prefix: &str) -> Vec<Artifact> {
let artifact_dirs: Vec<PathBuf> = artifacts_dir
.read_dir()
.unwrap()
.flatten()
.filter(|entry| entry.path().is_dir())
.map(|entry| entry.path())
.collect();
artifact_dirs
.iter()
.map(|path| {
let triple = path.file_name().unwrap().to_string_lossy();
let mut artifacts: Vec<Artifact> = path
.read_dir()
.unwrap()
.flatten()
.map(|f_entry| {
let f_path = f_entry.path();
let stem = f_path.file_stem().unwrap().to_string_lossy();
Artifact {
url: format!("{}/{}-{}", url_prefix, stem, triple),
triple: triple.to_string(),
sha256: calculate_sha256(&f_entry.path()),
}
})
.collect();
if artifacts.len() > 1 {
panic!("Too many artifacts given for {}", triple)
}
artifacts.pop().unwrap()
})
.collect()
}
const TEMPLATE: &str = r#""""A file containing urls and associated sha256 values for cargo-bazel binaries
This file is auto-generated for each release to match the urls and sha256s of
the binaries produced for it.
"""
# Example:
# {
# "x86_64-unknown-linux-gnu": "https://domain.com/downloads/cargo-bazel-x86_64-unknown-linux-gnu",
# "x86_64-apple-darwin": "https://domain.com/downloads/cargo-bazel-x86_64-apple-darwin",
# "x86_64-pc-windows-msvc": "https://domain.com/downloads/cargo-bazel-x86_64-pc-windows-msvc",
# }
CARGO_BAZEL_URLS = {}
# Example:
# {
# "x86_64-unknown-linux-gnu": "1d687fcc860dc8a1aa6198e531f0aee0637ed506d6a412fe2b9884ff5b2b17c0",
# "x86_64-apple-darwin": "0363e450125002f581d29cf632cc876225d738cfa433afa85ca557afb671eafa",
# "x86_64-pc-windows-msvc": "f5647261d989f63dafb2c3cb8e131b225338a790386c06cf7112e43dd9805882",
# }
CARGO_BAZEL_SHA256S = {}
# Example:
# Label("//crate_universe:cargo_bazel_bin")
CARGO_BAZEL_LABEL = Label("@cargo_bazel_bootstrap//:binary")
"#;
fn render_module(artifacts: &[Artifact]) -> String {
let urls: HashMap<&String, &String> = artifacts
.iter()
.map(|artifact| (&artifact.triple, &artifact.url))
.collect();
let sha256s: HashMap<&String, &String> = artifacts
.iter()
.map(|artifact| (&artifact.triple, &artifact.sha256))
.collect();
TEMPLATE
.replace(
"CARGO_BAZEL_URLS = {}",
&format!(
"CARGO_BAZEL_URLS = {}",
serde_json::to_string_pretty(&urls).unwrap()
),
)
.replace(
"CARGO_BAZEL_SHA256S = {}",
&format!(
"CARGO_BAZEL_SHA256S = {}",
serde_json::to_string_pretty(&sha256s).unwrap()
),
)
.replace(
"CARGO_BAZEL_LABEL = Label(\"@cargo_bazel_bootstrap//:binary\")",
"CARGO_BAZEL_LABEL = Label(\"//crate_universe:cargo_bazel_bin\")",
)
}
fn write_module(content: &str) -> PathBuf {
let dest = PathBuf::from(
env::var("BUILD_WORKSPACE_DIRECTORY").expect("This binary is required to run under Bazel"),
)
.join(env!("MODULE_ROOT_PATH"));
fs::write(&dest, content).unwrap();
dest
}
fn run_buildifier(buildifier_path: &Path, module: &Path) {
Command::new(buildifier_path)
.arg("-lint=fix")
.arg("-mode=fix")
.arg("-warnings=all")
.arg(module)
.output()
.unwrap();
}
fn main() {
let opt = Options::parse();
let artifacts = locate_artifacts(&opt.artifacts_dir, &opt.url_prefix);
let content = render_module(&artifacts);
let path = write_module(&content);
if let Some(buildifier_path) = opt.buildifier {
run_buildifier(&buildifier_path, &path);
}
}<|fim▁end|> | let mut hasher = Sha256::new();
loop {
let consummed = { |
<|file_name|>withPropsOnChange.tsx<|end_file_name|><|fim▁begin|>/* tslint:disable: no-console */
/* tslint:disable: no-var-requires */
/* tslint:disable: only-arrow-functions */
import * as Benchmark from "benchmark";
import * as recompose from "recompose";
import * as recompact from "recompact";
import * as React from "react";
import * as ReactDOM from "react-dom";
import * as reassemble from "../src";
require("../test/setupDOM");
const suite = new Benchmark.Suite();
const Component: React.StatelessComponent<any> = () => null;
const container = document.createElement("div");
document.body.appendChild(container);
function render(node: React.ReactElement<any>) {
ReactDOM.render(node, container);
}
function cleanup() {
ReactDOM.unmountComponentAtNode(container);
}
const create = (lib: any) => {
const { withPropsOnChange } = lib;
return lib.compose(
...Array(10).fill(withPropsOnChange(["foo"], () => ({ foo: "bar" }))),
);
};
const Composed = create(recompose)(Component);
const Compacted = create(recompact)(Component);
const Assembled = create(reassemble)(Component);<|fim▁hole|> render(<Composed />);
cleanup();
})
.add("recompact", () => {
render(<Compacted />);
cleanup();
})
.add("reassemble", () => {
render(<Assembled />);
cleanup();
})
// add listeners
.on("cycle", (event: any) => {
console.log(String(event.target));
})
.on("complete", function() {
console.log("Fastest is " + this.filter("fastest").map("name"));
})
// run async
.run({ async: true });<|fim▁end|> |
// add tests
suite
.add("recompose", () => { |
<|file_name|>trusts.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from oslo.config import cfg
from sahara import conductor as c
from sahara import context
from sahara.utils.openstack import keystone
conductor = c.API
CONF = cfg.CONF<|fim▁hole|>
def create_trust(cluster):
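    """Create a Keystone trust letting the admin user act on behalf of the cluster owner."""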
client = keystone.client()
ctx = context.current()
trustee_id = keystone.client_for_admin().user_id
trust = client.trusts.create(trustor_user=client.user_id,
trustee_user=trustee_id,
impersonation=True,
role_names=ctx.roles,
project=client.tenant_id)
conductor.cluster_update(ctx,
cluster,
{'trust_id': trust.id})
def use_os_admin_auth_token(cluster):
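    """Switch the current context to an admin auth token obtained via the cluster's trust."""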
if cluster.trust_id:
ctx = context.current()
ctx.username = CONF.keystone_authtoken.admin_user
ctx.tenant_id = cluster.tenant_id
client = keystone.client_for_trusts(cluster.trust_id)
ctx.token = client.auth_token
ctx.service_catalog = json.dumps(
client.service_catalog.catalog['catalog'])
def delete_trust(cluster):
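    """Delete the trust associated with the cluster, if one exists."""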
if cluster.trust_id:
keystone_client = keystone.client_for_trusts(cluster.trust_id)
keystone_client.trusts.delete(cluster.trust_id)<|fim▁end|> | |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>import analytics
import anyjson
from channels import Group
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from lily.accounts.api.serializers import RelatedAccountSerializer
from lily.api.fields import SanitizedHtmlCharField
from lily.api.nested.mixins import RelatedSerializerMixin
from lily.api.nested.serializers import WritableNestedSerializer
from lily.api.serializers import ContentTypeSerializer
from lily.contacts.api.serializers import RelatedContactSerializer
from lily.contacts.models import Function
from lily.users.api.serializers import RelatedLilyUserSerializer, RelatedTeamSerializer
from lily.utils.api.serializers import RelatedTagSerializer
from lily.utils.request import is_external_referer
from ..models import Case, CaseStatus, CaseType
class CaseStatusSerializer(serializers.ModelSerializer):
"""
Serializer for case status model.
"""
class Meta:
model = CaseStatus
fields = (
'id',
'name',
)
class RelatedCaseStatusSerializer(RelatedSerializerMixin, CaseStatusSerializer):
pass
class CaseTypeSerializer(serializers.ModelSerializer):
"""
Serializer for case type model.
"""
class Meta:
model = CaseType
fields = (
'id',
'is_archived',
'name',
'use_as_filter',
)
class RelatedCaseTypeSerializer(RelatedSerializerMixin, CaseTypeSerializer):
pass
class CaseSerializer(WritableNestedSerializer):
"""
Serializer for the case model.
"""
# Set non mutable fields.
created_by = RelatedLilyUserSerializer(read_only=True)
content_type = ContentTypeSerializer(
read_only=True,
help_text='This is what the object is identified as in the back-end.',
)
# Related fields.
account = RelatedAccountSerializer(
required=False,
allow_null=True,
help_text='Account for which the case is being created.',
)
contact = RelatedContactSerializer(
required=False,
allow_null=True,
help_text='Contact for which the case is being created.',
)
assigned_to = RelatedLilyUserSerializer(
required=False,
allow_null=True,
assign_only=True,
help_text='Person which the case is assigned to.',
)
assigned_to_teams = RelatedTeamSerializer(
many=True,
required=False,
assign_only=True,
help_text='List of teams the case is assigned to.',
)
type = RelatedCaseTypeSerializer(
assign_only=True,
help_text='The type of case.',
)
status = RelatedCaseStatusSerializer(
assign_only=True,
help_text='Status of the case.',
)
tags = RelatedTagSerializer(
many=True,
required=False,
create_only=True,
help_text='Any tags used to further categorize the case.',
)
description = SanitizedHtmlCharField(
help_text='Any extra text to describe the case (supports Markdown).',
)
# Show string versions of fields.
priority_display = serializers.CharField(
source='get_priority_display',
read_only=True,
help_text='Human readable value of the case\'s priority.',
)
def validate(self, data):
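        """Require an account and/or contact and check that the given contact works at the account."""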
contact_id = data.get('contact', {})
if isinstance(contact_id, dict):
contact_id = contact_id.get('id')
account_id = data.get('account', {})
if isinstance(account_id, dict):
account_id = account_id.get('id')
if contact_id and account_id:
if not Function.objects.filter(contact_id=contact_id, account_id=account_id).exists():
raise serializers.ValidationError({'contact': _('Given contact must work at the account.')})
# Check if we are related and if we only passed in the id, which means user just wants new reference.
errors = {
'account': _('Please enter an account and/or contact.'),
'contact': _('Please enter an account and/or contact.'),
}
if not self.partial:
# For POST or PUT we always want to check if either is set.
if not (account_id or contact_id):
raise serializers.ValidationError(errors)
else:
# For PATCH only check the data if both account and contact are passed.
if ('account' in data and 'contact' in data) and not (account_id or contact_id):
raise serializers.ValidationError(errors)
return super(CaseSerializer, self).validate(data)
def create(self, validated_data):
user = self.context.get('request').user
assigned_to = validated_data.get('assigned_to')
validated_data.update({
'created_by_id': user.pk,
})
if assigned_to:
Group('tenant-%s' % user.tenant.id).send({
'text': anyjson.dumps({
'event': 'case-assigned',
}),
})
if assigned_to.get('id') != user.pk:
validated_data.update({
'newly_assigned': True,
})
else:
Group('tenant-%s' % user.tenant.id).send({
'text': anyjson.dumps({
'event': 'case-unassigned',
}),
})
instance = super(CaseSerializer, self).create(validated_data)
        # Track newly created cases in segment.
if not settings.TESTING:
analytics.track(
user.id,
'case-created', {
'expires': instance.expires,
'assigned_to_id': instance.assigned_to_id if instance.assigned_to else '',
'creation_type': 'automatic' if is_external_referer(self.context.get('request')) else 'manual',
},
)
return instance
def update(self, instance, validated_data):
user = self.context.get('request').user
status_id = validated_data.get('status', instance.status_id)
assigned_to = validated_data.get('assigned_to')
if assigned_to:
assigned_to = assigned_to.get('id')
if isinstance(status_id, dict):
status_id = status_id.get('id')
status = CaseStatus.objects.get(pk=status_id)
# Automatically archive the case if the status is set to 'Closed'.
if status.name == 'Closed' and 'is_archived' not in validated_data:
validated_data.update({
'is_archived': True
})
# Check if the case being reassigned. If so we want to notify that user.
if assigned_to and assigned_to != user.pk:
validated_data.update({
'newly_assigned': True,
})
elif 'assigned_to' in validated_data and not assigned_to:
# Case is unassigned, so clear newly assigned flag.
validated_data.update({
'newly_assigned': False,
})
if (('status' in validated_data and status.name == 'Open') or
('is_archived' in validated_data and not validated_data.get('is_archived'))):
# Case is reopened or unarchived, so we want to notify the user again.
validated_data.update({
'newly_assigned': True,
})
if 'assigned_to' in validated_data or instance.assigned_to_id:
Group('tenant-%s' % user.tenant.id).send({
'text': anyjson.serialize({
'event': 'case-assigned',
}),
})
if (not instance.assigned_to_id or
instance.assigned_to_id and
'assigned_to' in validated_data and
not validated_data.get('assigned_to')):
Group('tenant-%s' % user.tenant.id).send({
'text': anyjson.serialize({
'event': 'case-unassigned',
}),
})
return super(CaseSerializer, self).update(instance, validated_data)
class Meta:
model = Case
fields = (
'id',
'account',
'assigned_to',
'assigned_to_teams',
'contact',
'content_type',
'created',
'created_by',
'description',
'expires',
'is_archived',
'modified',
'newly_assigned',
'priority',
'priority_display',
'status',
'tags',
'subject',
'type',
)
extra_kwargs = {
'created': {
'help_text': 'Shows the date and time when the deal was created.',
},
'expires': {<|fim▁hole|> 'help_text': 'Shows the date and time when the case was last modified.',
},
'newly_assigned': {
'help_text': 'True if the assignee was changed and that person hasn\'t accepted yet.',
},
'subject': {
'help_text': 'A short description of the case.',
},
}
class RelatedCaseSerializer(RelatedSerializerMixin, CaseSerializer):
"""
Serializer for the case model when used as a relation.
"""
class Meta:
model = Case
# Override the fields because we don't want related fields in this serializer.
fields = (
'id',
'assigned_to',
'assigned_to_teams',
'created',
'created_by',
'description',
'expires',
'is_archived',
'modified',
'priority',
'priority_display',
'subject',
)<|fim▁end|> | 'help_text': 'Shows the date and time for when the case should be completed.',
},
'modified': { |
<|file_name|>autoscaling-plans.rs<|end_file_name|><|fim▁begin|>#![cfg(feature = "autoscaling-plans")]
extern crate rusoto_autoscaling_plans;
extern crate rusoto_core;<|fim▁hole|>
use rusoto_autoscaling_plans::{
AutoscalingPlans, AutoscalingPlansClient, DescribeScalingPlansRequest,
};
use rusoto_core::Region;
#[tokio::test]
async fn should_describe_scaling_plans() {
let client = AutoscalingPlansClient::new(Region::UsEast1);
let request = DescribeScalingPlansRequest::default();
let res = client.describe_scaling_plans(request).await;
match res {
Err(e) => panic!("Error getting scaling plans: {:?}", e),
Ok(response) => println!("Got this response: {:?}", response),
}
}<|fim▁end|> | |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>#!/usr/pkg/bin/python
import os, sys, time
from reportlab.graphics.barcode.common import *
from reportlab.graphics.barcode.code39 import *
from reportlab.graphics.barcode.code93 import *
from reportlab.graphics.barcode.code128 import *
from reportlab.graphics.barcode.usps import *
from reportlab.graphics.barcode.usps4s import USPS_4State
<|fim▁hole|>from reportlab.lib.units import inch, cm
from reportlab.lib import colors
from reportlab.pdfgen.canvas import Canvas
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.platypus.paragraph import Paragraph
from reportlab.platypus.frames import Frame
from reportlab.platypus.flowables import XBox, KeepTogether
from reportlab.graphics.shapes import Drawing
from reportlab.graphics.barcode import getCodes, getCodeNames, createBarcodeDrawing, createBarcodeImageInMemory
def run():
styles = getSampleStyleSheet()
styleN = styles['Normal']
styleH = styles['Heading1']
story = []
#for codeNames in code
story.append(Paragraph('I2of5', styleN))
story.append(I2of5(1234, barWidth = inch*0.02, checksum=0))
story.append(Paragraph('MSI', styleN))
story.append(MSI(1234))
story.append(Paragraph('Codabar', styleN))
story.append(Codabar("A012345B", barWidth = inch*0.02))
story.append(Paragraph('Code 11', styleN))
story.append(Code11("01234545634563"))
story.append(Paragraph('Code 39', styleN))
story.append(Standard39("A012345B%R"))
story.append(Paragraph('Extended Code 39', styleN))
story.append(Extended39("A012345B}"))
story.append(Paragraph('Code93', styleN))
story.append(Standard93("CODE 93"))
story.append(Paragraph('Extended Code93', styleN))
story.append(Extended93("L@@K! Code 93 :-)")) #, barWidth=0.005 * inch))
story.append(Paragraph('Code 128', styleN))
c=Code128("AB-12345678") #, barWidth=0.005 * inch)
#print 'WIDTH =', (c.width / inch), 'barWidth =', (c.barWidth / inch)
#print 'LQ =', (c.lquiet / inch), 'RQ =', (c.rquiet / inch)
story.append(c)
story.append(Paragraph('USPS FIM', styleN))
story.append(FIM("A"))
story.append(Paragraph('USPS POSTNET', styleN))
story.append(POSTNET('78247-1043'))
story.append(Paragraph('USPS 4 State', styleN))
story.append(USPS_4State('01234567094987654321','01234567891'))
from reportlab.graphics.barcode import createBarcodeDrawing
story.append(Paragraph('EAN13', styleN))
bcd = createBarcodeDrawing('EAN13', value='123456789012')
story.append(bcd)
story.append(Paragraph('EAN8', styleN))
bcd = createBarcodeDrawing('EAN8', value='1234567')
story.append(bcd)
story.append(Paragraph('UPCA', styleN))
bcd = createBarcodeDrawing('UPCA', value='03600029145')
story.append(bcd)
story.append(Paragraph('USPS_4State', styleN))
bcd = createBarcodeDrawing('USPS_4State', value='01234567094987654321',routing='01234567891')
story.append(bcd)
story.append(Paragraph('Label Size', styleN))
story.append(XBox((2.0 + 5.0/8.0)*inch, 1 * inch, '1x2-5/8"'))
story.append(Paragraph('Label Size', styleN))
story.append(XBox((1.75)*inch, .5 * inch, '1/2x1-3/4"'))
c = Canvas('out.pdf')
f = Frame(inch, inch, 6*inch, 9*inch, showBoundary=1)
f.addFromList(story, c)
c.save()
print 'saved out.pdf'
def fullTest(fileName="test_full.pdf"):
"""Creates large-ish test document with a variety of parameters"""
story = []
styles = getSampleStyleSheet()
styleN = styles['Normal']
styleH = styles['Heading1']
styleH2 = styles['Heading2']
story = []
story.append(Paragraph('ReportLab Barcode Test Suite - full output', styleH))
story.append(Paragraph('Generated on %s' % time.ctime(time.time()), styleN))
story.append(Paragraph('', styleN))
story.append(Paragraph('Repository information for this build:', styleN))
#see if we can figure out where it was built, if we're running in source
if os.path.split(os.getcwd())[-1] == 'barcode' and os.path.isdir('.svn'):
        #running in a filesystem svn copy
infoLines = os.popen('svn info').read()
story.append(Preformatted(infoLines, styles["Code"]))
story.append(Paragraph('About this document', styleH2))
story.append(Paragraph('History and Status', styleH2))
story.append(Paragraph("""
    This is the test suite and documentation for the ReportLab open source barcode API,
being re-released as part of the forthcoming ReportLab 2.0 release.
""", styleN))
story.append(Paragraph("""
Several years ago Ty Sarna contributed a barcode module to the ReportLab community.
    Several of the codes were used by him in his work and to the best of our knowledge
this was correct. These were written as flowable objects and were available in PDFs,
but not in our graphics framework. However, we had no knowledge of barcodes ourselves
and did not advertise or extend the package.
""", styleN))
story.append(Paragraph("""
We "wrapped" the barcodes to be usable within our graphics framework; they are now available
as Drawing objects which can be rendered to EPS files or bitmaps. For the last 2 years this
has been available in our Diagra and Report Markup Language products. However, we did not
charge separately and use was on an "as is" basis.
""", styleN))
story.append(Paragraph("""
A major licensee of our technology has kindly agreed to part-fund proper productisation
of this code on an open source basis in Q1 2006. This has involved addition of EAN codes
as well as a proper testing program. Henceforth we intend to publicise the code more widely,
gather feedback, accept contributions of code and treat it as "supported".
""", styleN))
story.append(Paragraph("""
This involved making available both downloads and testing resources. This PDF document
is the output of the current test suite. It contains codes you can scan (if you use a nice sharp
laser printer!), and will be extended over coming weeks to include usage examples and notes on
each barcode and how widely tested they are. This is being done through documentation strings in
the barcode objects themselves so should always be up to date.
""", styleN))
story.append(Paragraph('Usage examples', styleH2))
story.append(Paragraph("""
To be completed
""", styleN))
story.append(Paragraph('The codes', styleH2))
story.append(Paragraph("""
Below we show a scannable code from each barcode, with and without human-readable text.
These are magnified about 2x from the natural size done by the original author to aid
inspection. This will be expanded to include several test cases per code, and to add
explanations of checksums. Be aware that (a) if you enter numeric codes which are too
short they may be prefixed for you (e.g. "123" for an 8-digit code becomes "00000123"),
and that the scanned results and readable text will generally include extra checksums
at the end.
""", styleN))
codeNames = getCodeNames()
from reportlab.lib.utils import flatten
width = [float(x[8:]) for x in sys.argv if x.startswith('--width=')]
height = [float(x[9:]) for x in sys.argv if x.startswith('--height=')]
isoScale = [int(x[11:]) for x in sys.argv if x.startswith('--isoscale=')]
options = {}
if width: options['width'] = width[0]
if height: options['height'] = height[0]
if isoScale: options['isoScale'] = isoScale[0]
scales = [x[8:].split(',') for x in sys.argv if x.startswith('--scale=')]
scales = map(float,scales and flatten(scales) or [1])
for scale in scales:
story.append(PageBreak())
story.append(Paragraph('Scale = %.1f'%scale, styleH2))
story.append(Spacer(36, 12))
for codeName in codeNames:
s = [Paragraph('Code: ' + codeName, styleH2)]
for hr in (0,1):
s.append(Spacer(36, 12))
dr = createBarcodeDrawing(codeName, humanReadable=hr,**options)
dr.renderScale = scale
s.append(dr)
s.append(Spacer(36, 12))
s.append(Paragraph('Barcode should say: ' + dr._bc.value, styleN))
story.append(KeepTogether(s))
SimpleDocTemplate(fileName).build(story)
print 'created', fileName
if __name__=='__main__':
run()
fullTest()
def createSample(name,memory):
f = open(name,'wb')
f.write(memory)
f.close()
createSample('test_cbcim.png',createBarcodeImageInMemory('EAN13', value='123456789012'))
createSample('test_cbcim.gif',createBarcodeImageInMemory('EAN8', value='1234567', format='gif'))
createSample('test_cbcim.pdf',createBarcodeImageInMemory('UPCA', value='03600029145',format='pdf'))
createSample('test_cbcim.tiff',createBarcodeImageInMemory('USPS_4State', value='01234567094987654321',routing='01234567891',format='tiff'))<|fim▁end|> |
from reportlab.platypus import Spacer, SimpleDocTemplate, Table, TableStyle, Preformatted, PageBreak |
<|file_name|>ASTPath.cpp<|end_file_name|><|fim▁begin|>/**************************************************************************
**
** This file is part of Qt Creator
**
** Copyright (c) 2012 Nokia Corporation and/or its subsidiary(-ies).
**
** Contact: Nokia Corporation ([email protected])
**
**
** GNU Lesser General Public License Usage
**
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this file.
** Please review the following information to ensure the GNU Lesser General
** Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** Other Usage
**
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
** If you have questions regarding the use of this file, please contact
** Nokia at [email protected].
**
**************************************************************************/
#include "ASTPath.h"
#include <AST.h>
#include <TranslationUnit.h>
#ifdef DEBUG_AST_PATH
# include <QDebug>
# include <typeinfo>
#endif // DEBUG_AST_PATH
using namespace CPlusPlus;
QList<AST *> ASTPath::operator()(int line, int column)
{
_nodes.clear();
_line = line;
_column = column;
if (_doc) {
if (TranslationUnit *unit = _doc->translationUnit())
accept(unit->ast());
}
return _nodes;
}
#ifdef DEBUG_AST_PATH
void ASTPath::dump(const QList<AST *> nodes)
{
qDebug() << "ASTPath dump," << nodes.size() << "nodes:";
for (int i = 0; i < nodes.size(); ++i)
qDebug() << qPrintable(QString(i + 1, QLatin1Char('-'))) << typeid(*nodes.at(i)).name();
}
#endif // DEBUG_AST_PATH
bool ASTPath::preVisit(AST *ast)<|fim▁hole|> unsigned firstToken = ast->firstToken();
unsigned lastToken = ast->lastToken();
if (firstToken > 0) {
if (lastToken <= firstToken)
return false;
unsigned startLine, startColumn;
getTokenStartPosition(firstToken, &startLine, &startColumn);
if (_line > startLine || (_line == startLine && _column >= startColumn)) {
unsigned endLine, endColumn;
getTokenEndPosition(lastToken - 1, &endLine, &endColumn);
if (_line < endLine || (_line == endLine && _column <= endColumn)) {
_nodes.append(ast);
return true;
}
}
}
return false;
}<|fim▁end|> | { |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export * from './themes'; |
<|file_name|>filter_dates.py<|end_file_name|><|fim▁begin|>'''
Filter text output by date ranges
'''
import os
import csv
import sys
import dateutil.parser
import argparse
import metadata
settings = None
def get_settings():
''' Return command-line settings '''
parser = argparse.ArgumentParser(description='Filter text corpus by date range. Only updates the metadata file.')
parser.add_argument('-i', dest='input', required=True, help='Input CSV of metadata describing files')<|fim▁hole|> parser.add_argument('-s', '--start', dest='start', help='Start date, YYYY-MM-DD format')
parser.add_argument('-e', '--end', dest='end', help='End date, YYYY-MM-DD format')
return parser.parse_args()
def filter_dates(metadata, start, end):
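    ''' Keep only the metadata rows whose date falls within the optional start/end range '''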
results = list()
if start is not None:
start = dateutil.parser.parse(start)
if end is not None:
end = dateutil.parser.parse(end)
for row in metadata:
try:
date = dateutil.parser.parse(row['date'])
except ValueError as err:
print('No date found in row: {}'.format(row))
else:
# if date is None:
# continue
if (start is None or start <= date) and (end is None or date <= end):
results.append(row)
return results
def main():
global settings
settings = get_settings()
md = metadata.read_csv(settings.input)
filtered = filter_dates(md, settings.start, settings.end)
metadata.write_csv(settings.output, filtered)
if __name__ == '__main__':
if sys.version_info < (3,0):
print("This script requires Python 3")
exit(-1)
main()<|fim▁end|> | parser.add_argument('-o', dest='output', required=True,
help='Output CSV for filtered results') |
<|file_name|>KPassivePopupMessageHandler.py<|end_file_name|><|fim▁begin|># encoding: utf-8
# module PyKDE4.kdeui
# from /usr/lib/python3/dist-packages/PyKDE4/kdeui.cpython-34m-x86_64-linux-gnu.so<|fim▁hole|>import PyKDE4.kdecore as __PyKDE4_kdecore
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
import PyQt4.QtSvg as __PyQt4_QtSvg
class KPassivePopupMessageHandler(__PyQt4_QtCore.QObject, __PyKDE4_kdecore.KMessageHandler):
# no doc
def message(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass<|fim▁end|> | # by generator 1.135
# no doc
# imports |
<|file_name|>MovementPacketBuilder.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2013-2015 DeathCore <http://www.noffearrdeathproject.net/>
* Copyright (C) 2005-2011 MaNGOS <http://getmangos.com/>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "MovementPacketBuilder.h"
#include "MoveSpline.h"
#include "ByteBuffer.h"
namespace Movement
{
inline void operator << (ByteBuffer& b, const Vector3& v)
{
b << v.x << v.y << v.z;
}
inline void operator >> (ByteBuffer& b, Vector3& v)
{
b >> v.x >> v.y >> v.z;
}
enum MonsterMoveType
{
MonsterMoveNormal = 0,
MonsterMoveStop = 1,
MonsterMoveFacingSpot = 2,
MonsterMoveFacingTarget = 3,
MonsterMoveFacingAngle = 4
};
void PacketBuilder::WriteCommonMonsterMovePart(const MoveSpline& move_spline, ByteBuffer& data)
{
MoveSplineFlag splineflags = move_spline.splineflags;
data << uint8(0); // sets/unsets MOVEMENTFLAG2_UNK7 (0x40)
data << move_spline.spline.getPoint(move_spline.spline.first());
data << move_spline.GetId();
switch (splineflags & MoveSplineFlag::Mask_Final_Facing)
{
case MoveSplineFlag::Final_Target:
data << uint8(MonsterMoveFacingTarget);
data << move_spline.facing.target;
break;
case MoveSplineFlag::Final_Angle:
data << uint8(MonsterMoveFacingAngle);
data << move_spline.facing.angle;
break;
case MoveSplineFlag::Final_Point:
data << uint8(MonsterMoveFacingSpot);
data << move_spline.facing.f.x << move_spline.facing.f.y << move_spline.facing.f.z;
break;
default:
data << uint8(MonsterMoveNormal);
break;
}
// add fake Enter_Cycle flag - needed for client-side cyclic movement (client will erase first spline vertex after first cycle done)
splineflags.enter_cycle = move_spline.isCyclic();
data << uint32(splineflags & uint32(~MoveSplineFlag::Mask_No_Monster_Move));
if (splineflags.animation)
{
data << splineflags.getAnimationId();
data << move_spline.effect_start_time;
}
data << move_spline.Duration();
if (splineflags.parabolic)
{
data << move_spline.vertical_acceleration;
data << move_spline.effect_start_time;
}
}
void PacketBuilder::WriteStopMovement(Vector3 const& pos, uint32 splineId, ByteBuffer& data)
{
data << uint8(0); // sets/unsets MOVEMENTFLAG2_UNK7 (0x40)
data << pos;
data << splineId;
data << uint8(MonsterMoveStop);
}
void WriteLinearPath(const Spline<int32>& spline, ByteBuffer& data)
{
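        // Intermediate vertices are sent as packed offsets from the midpoint of the path to keep the packet small.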
uint32 last_idx = spline.getPointCount() - 3;
const Vector3 * real_path = &spline.getPoint(1);<|fim▁hole|> if (last_idx > 1)
{
Vector3 middle = (real_path[0] + real_path[last_idx]) / 2.f;
Vector3 offset;
// first and last points already appended
for (uint32 i = 1; i < last_idx; ++i)
{
offset = middle - real_path[i];
data.appendPackXYZ(offset.x, offset.y, offset.z);
}
}
}
void WriteCatmullRomPath(const Spline<int32>& spline, ByteBuffer& data)
{
uint32 count = spline.getPointCount() - 3;
data << count;
data.append<Vector3>(&spline.getPoint(2), count);
}
void WriteCatmullRomCyclicPath(const Spline<int32>& spline, ByteBuffer& data)
{
uint32 count = spline.getPointCount() - 3;
data << uint32(count + 1);
data << spline.getPoint(1); // fake point, client will erase it from the spline after first cycle done
data.append<Vector3>(&spline.getPoint(1), count);
}
void PacketBuilder::WriteMonsterMove(const MoveSpline& move_spline, ByteBuffer& data)
{
WriteCommonMonsterMovePart(move_spline, data);
const Spline<int32>& spline = move_spline.spline;
MoveSplineFlag splineflags = move_spline.splineflags;
if (splineflags & MoveSplineFlag::Mask_CatmullRom)
{
if (splineflags.cyclic)
WriteCatmullRomCyclicPath(spline, data);
else
WriteCatmullRomPath(spline, data);
}
else
WriteLinearPath(spline, data);
}
void PacketBuilder::WriteCreate(const MoveSpline& move_spline, ByteBuffer& data)
{
//WriteClientStatus(mov, data);
//data.append<float>(&mov.m_float_values[SpeedWalk], SpeedMaxCount);
//if (mov.SplineEnabled())
{
MoveSplineFlag const& splineFlags = move_spline.splineflags;
data << splineFlags.raw();
if (splineFlags.final_angle)
{
data << move_spline.facing.angle;
}
else if (splineFlags.final_target)
{
data << move_spline.facing.target;
}
else if (splineFlags.final_point)
{
data << move_spline.facing.f.x << move_spline.facing.f.y << move_spline.facing.f.z;
}
data << move_spline.timePassed();
data << move_spline.Duration();
data << move_spline.GetId();
data << float(1.f); // splineInfo.duration_mod; added in 3.1
data << float(1.f); // splineInfo.duration_mod_next; added in 3.1
data << move_spline.vertical_acceleration; // added in 3.1
data << move_spline.effect_start_time; // added in 3.1
uint32 nodes = move_spline.getPath().size();
data << nodes;
data.append<Vector3>(&move_spline.getPath()[0], nodes);
data << uint8(move_spline.spline.mode()); // added in 3.1
data << (move_spline.isCyclic() ? Vector3::zero() : move_spline.FinalDestination());
}
}
}<|fim▁end|> |
data << last_idx;
data << real_path[last_idx]; // destination |
<|file_name|>partials.js<|end_file_name|><|fim▁begin|>var partialsTemp = [
"login",
"profile"
];
exports.partialRender = function (req, res) {
var pageIndex = req.params[0];
if (partialsTemp.indexOf("" + pageIndex) > -1) {
res.render("partials/" + pageIndex, {});
} else {<|fim▁hole|> }
};<|fim▁end|> | res.render("common/404", {}); |
<|file_name|>OBC_main.py<|end_file_name|><|fim▁begin|>__author__ = 'Debha'
from read_data import read_data, read_map, gene_muts, make_GO_map
from cluster import make_cluster_mat, make_sub_mut_map, make_GO_cluster_mat, cluster_main, clean, pairwiseDist, clustered_GOs, sim_matrix
import numpy as np
data_file = "../inputs/somatic.csv"
map_file = "../inputs/mart_export.csv"
GO_file = "../inputs/c5.all.v5.0.symbols.gmt"
#data = read_data(data_file)
#gene_map = read_map(map_file)
#gene_mut = gene_muts(data, gene_map)
#GO_map = make_GO_map(GO_file)
#[cluster_mat, cluster_mat_nonbinary, subjects, genes, gene_length_map] = make_cluster_mat(data, list(gene_mut.keys()))
cluster_mat = np.load("../intermediates/cluster_mat.npy")
subjects = np.load("../intermediates/subjects.npy"); genes = np.load("../intermediates/genes.npy")
GO_terms = np.load("../intermediates/GO_terms.npy")
# sub_mut_map = make_sub_mut_map(cluster_mat, subjects, genes)
# GO_cluster_mat = make_GO_cluster_mat(sub_mut_map, GO_map, subjects)
GO_cluster_mat = np.load("../intermediates/GO_cluster_mat.npy")
[c_GO_cluster_mat, c_GO_terms] = clean(GO_cluster_mat, GO_terms)<|fim▁hole|>runs = 5
for i in range(runs):
print "Run " + str(i+1) + " out of " + str(runs)
[mu, clusters, distortion, subject_clusters] = cluster_main(c_GO_cluster_mat.tolist(), k, iterations, subjects)
agg_sub_clusters.append(subject_clusters)
#for key in clusters:
# print key, len(clusters[key]), len(subject_clusters[key])
#clustered_GOs(clusters, c_GO_terms, mu)
#pairwiseDist(c_GO_cluster_mat)
#sim_matrix(c_GO_cluster_mat)<|fim▁end|> |
agg_sub_clusters = []
k = 3
iterations = 20 |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import sys
import traceback
from StringIO import StringIO
import re
import datetime
from urllib import urlencode
from collections import defaultdict
from django import http
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import redirect_to_login
from django.db import transaction
from django.core.urlresolvers import reverse
from django.conf import settings
from django.shortcuts import redirect, get_object_or_404
from django.contrib import messages
from django.db.models import Q
from django.template import Context, loader
from django.core.mail import get_connection, EmailMessage
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
from django.shortcuts import render
from django.views.decorators.http import require_POST
from django.contrib.sites.models import RequestSite
from django.core.cache import cache
from django.db.models import Min, Count
import vobject
from .models import Entry, Hours, BlacklistedUser, FollowingUser, UserKey
from pto.apps.users.models import UserProfile, User
from pto.apps.users.utils import ldap_lookup
from .utils import parse_datetime, DatetimeParseError
from .utils.countrytotals import UnrecognizedCountryError, get_country_totals
import utils
import forms
from .decorators import json_view
from .csv_export import UnicodeWriter as CSVUnicodeWriter
def valid_email(value):
try:
validate_email(value)
return True
except ValidationError:
return False
def handler500(request):
data = {}
if settings.TRACEBACKS_ON_500:
err_type, err_value, err_traceback = sys.exc_info()
out = StringIO()
traceback.print_exc(file=out)
traceback_formatted = out.getvalue()
data['err_type'] = err_type
data['err_value'] = err_value
data['err_traceback'] = traceback_formatted
data['_report_traceback'] = True
else:
data['_report_traceback'] = False
return render(request, '500.html', data, status=500)
def home(request): # aka dashboard
data = {}
data['mobile'] = request.MOBILE # thank you django-mobility (see settings)
if data['mobile']:
# unless an explicit cookie it set, redirect to /mobile/
if not request.COOKIES.get('no-mobile', False):
return redirect(reverse('mobile.home'))
# now do what the login_required would usually do
if not request.user.is_authenticated():
path = request.get_full_path()
return redirect_to_login(path)
data['page_title'] = "Dashboard"
profile = request.user.get_profile()
if profile and profile.country in ('GB', 'FR', 'DE'):
first_day = 1 # 1=Monday
else:
first_day = 0 # default to 0=Sunday
data['first_day'] = first_day
if 'all-rightnow' in request.GET:
MAX_RIGHT_NOWS = 9999
else:
MAX_RIGHT_NOWS = 20
## Commented out whilst we decide whether to keep it at all
#right_nows, right_now_users = get_right_nows()
#data['right_nows'] = right_nows
#data['right_now_users'] = right_now_users
#if len(right_now_users) > MAX_RIGHT_NOWS:
# data['right_now_too_many'] = (len(data['right_now_users'])
# - MAX_RIGHT_NOWS)
# data['right_now_users'] = data['right_now_users'][:MAX_RIGHT_NOWS]
#else:
# data['right_now_too_many'] = None
data.update(get_taken_info(request.user))
data['calendar_url'] = _get_user_calendar_url(request)
cache_key = 'recently_created_%s' % request.user.pk
recently_created = cache.get(cache_key)
if recently_created:
data['recently_created'] = recently_created
cache.delete(cache_key)
return render(request, 'dates/home.html', data)
def _get_user_calendar_url(request):
user_key, __ = UserKey.objects.get_or_create(user=request.user)
base_url = '%s://%s' % (request.is_secure() and 'https' or 'http',
RequestSite(request).domain)
return base_url + reverse('dates.calendar_vcal', args=(user_key.key,))
def get_taken_info(user):
data = {}
profile = user.get_profile()
if profile.country:
data['country'] = profile.country
try:
data['country_totals'] = get_country_totals(profile.country)
except UnrecognizedCountryError:
data['unrecognized_country'] = True
today = datetime.date.today()
start_date = datetime.date(today.year, 1, 1)
last_date = datetime.date(today.year + 1, 1, 1)
from django.db.models import Sum
qs = Entry.objects.filter(
user=user,
start__gte=start_date,
end__lt=last_date
)
agg = qs.aggregate(Sum('total_hours'))
total_hours = agg['total_hours__sum']
if total_hours is None:
total_hours = 0
data['taken'] = _friendly_format_hours(total_hours)
return data
def _friendly_format_hours(total_hours):
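    """Turn a raw hour count into a readable days/hours string based on the configured work day."""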
days = 1.0 * total_hours / settings.WORK_DAY
hours = total_hours % settings.WORK_DAY
if not total_hours:
return '0 days'
elif total_hours < settings.WORK_DAY:
return '%s hours' % total_hours
elif total_hours == settings.WORK_DAY:
return '1 day'
else:
if not hours:
return '%d days' % days
else:
return '%s days' % days
def get_right_nows():
right_now_users = []
right_nows = defaultdict(list)
_today = datetime.date.today()
for entry in (Entry.objects
.filter(start__lte=_today,
end__gte=_today,
total_hours__gte=0)
.order_by('user__first_name',
'user__last_name',
'user__username')):
if entry.user not in right_now_users:
right_now_users.append(entry.user)
left = (entry.end - _today).days + 1
right_nows[entry.user].append((left, entry))
return right_nows, right_now_users
def get_upcomings(max_days=14):
users = []
upcoming = defaultdict(list)
today = datetime.date.today()
max_future = today + datetime.timedelta(days=max_days)
for entry in (Entry.objects
.filter(start__gt=today,
start__lt=max_future,
total_hours__gte=0)
.order_by('user__first_name',
'user__last_name',
'user__username')):
if entry.user not in users:
users.append(entry.user)
days = (entry.start - today).days + 1
upcoming[entry.user].append((days, entry))
return upcoming, users
def make_entry_title(entry, this_user, include_details=True):
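    """Build the calendar event title: whose entry it is, how long it is and, optionally, its details."""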
if entry.user != this_user:
if entry.user.first_name:
title = '%s %s - ' % (entry.user.first_name,
entry.user.last_name)
else:
title = '%s - ' % entry.user.username
else:
title = ''
days = 0
for hour in Hours.objects.filter(entry=entry):
if hour.hours == 8:
days += 1
elif hour.hours == 4:
days += 0.5
if days > 1:
if int(days) == days:
title += '%d days' % days
else:
title += '%s days' % days
if Hours.objects.filter(entry=entry, birthday=True).exists():
title += ' (includes birthday)'
elif (days == 1 and entry.total_hours == 0 and
Hours.objects.filter(entry=entry, birthday=True)):
title += 'Birthday!'
elif days == 1 and entry.total_hours == 8:
title += '1 day'
else:
title += '%s hours' % entry.total_hours
if entry.details:
if days == 1:
max_length = 20
else:
max_length = 40
if include_details:
title += ', '
if len(entry.details) > max_length:
title += entry.details[:max_length] + '...'
else:
title += entry.details
return title
@json_view
def calendar_events(request):
if not request.user.is_authenticated():
return http.HttpResponseForbidden('Must be logged in')
if not request.GET.get('start'):
return http.HttpResponseBadRequest('Argument start missing')
if not request.GET.get('end'):
return http.HttpResponseBadRequest('Argument end missing')
try:
start = parse_datetime(request.GET['start'])
except DatetimeParseError:
return http.HttpResponseBadRequest('Invalid start')
try:
end = parse_datetime(request.GET['end'])
except DatetimeParseError:
return http.HttpResponseBadRequest('Invalid end')
entries = []
COLORS = ("#EAA228", "#c5b47f", "#579575", "#839557", "#958c12",
"#953579", "#4b5de4", "#d8b83f", "#ff5800", "#0085cc",
"#c747a3", "#cddf54", "#FBD178", "#26B4E3", "#bd70c7")
user_ids = [request.user.pk]
colors = {}
colors_fullnames = []
colors[request.user.pk] = None
colors_fullnames.append((request.user.pk, 'Me myself and I', '#3366CC'))
for i, user_ in enumerate(get_observed_users(request.user, max_depth=2)):
user_ids.append(user_.pk)
colors[user_.pk] = COLORS[i]
full_name = user_.get_full_name()
if not full_name:
full_name = user_.username
colors_fullnames.append((
user_.pk,
full_name,
colors[user_.pk]
))
_managers = {}
def can_see_details(user):
if request.user.is_superuser:
return True
if request.user.pk == user.pk:
return True
if user.pk not in _managers:
_profile = user.get_profile()
_manager = None
if _profile and _profile.manager_user:
_manager = _profile.manager_user.pk
_managers[user.pk] = _manager
return _managers[user.pk] == request.user.pk
visible_user_ids = set()
for entry in (Entry.objects
.filter(user__in=user_ids,
total_hours__gte=0,
total_hours__isnull=False)
.select_related('user')
.exclude(Q(end__lt=start) | Q(start__gt=end))):
visible_user_ids.add(entry.user.pk)
entries.append({
'id': entry.pk,
'title': make_entry_title(entry, request.user,
include_details=can_see_details(entry.user)),
'start': entry.start.strftime('%Y-%m-%d'),
'end': entry.end.strftime('%Y-%m-%d'),
'color': colors[entry.user.pk],
'mine': entry.user.pk == request.user.pk,
})
colors = [dict(name=x, color=y) for (pk, x, y) in colors_fullnames
if pk in visible_user_ids]
return {'events': entries, 'colors': colors}
def get_minions(user, depth=1, max_depth=2):
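    """Return the users managed by this user, recursing into indirect reports up to max_depth levels."""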
minions = []
for minion in (UserProfile.objects.filter(manager_user=user)
.select_related('manager_user')
.order_by('manager_user')):
minions.append(minion.user)
if depth < max_depth:
minions.extend(get_minions(minion.user,
depth=depth + 1,
max_depth=max_depth))
return minions
def get_siblings(user):
profile = user.get_profile()
if not profile.manager_user:
return []
users = []
for profile in (UserProfile.objects
.filter(manager_user=profile.manager_user)
.exclude(pk=user.pk)
.select_related('user')):
users.append(profile.user)
return users
def get_followed_users(user):
users = []
for each in (FollowingUser.objects
.filter(follower=user)
.select_related('following')):
users.append(each.following)
return users
def get_observed_users(this_user, depth=1, max_depth=2):
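    """Collect minions, siblings, the manager and followed users; blacklisted users are skipped unless explicitly followed."""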
users = []
def is_blacklisted(user):
return (BlacklistedUser.objects
.filter(observer=this_user, observable=user)
.exists())
for user in get_minions(this_user, depth=depth, max_depth=max_depth):
if user not in users:
if not is_blacklisted(user):
users.append(user)
for user in get_siblings(this_user):
if user not in users:
if not is_blacklisted(user):
users.append(user)
profile = this_user.get_profile()
manager = profile.manager_user
if manager and manager not in users:
if not is_blacklisted(manager):
users.append(manager)
for user in get_followed_users(this_user):
if user not in users:
users.append(user)
return users
@transaction.commit_on_success
@login_required
def notify(request):
data = {}
data['page_title'] = "Notify about new vacation"
if request.method == 'POST':
form = forms.AddForm(request.user, data=request.POST)
if form.is_valid():
start = form.cleaned_data['start']
end = form.cleaned_data['end']
details = form.cleaned_data['details'].strip()
notify = form.cleaned_data['notify']
entry = Entry.objects.create(
user=request.user,
start=start,
end=end,
details=details,
)
clean_unfinished_entries(entry)
messages.info(request, 'Entry added, now specify hours')
url = reverse('dates.hours', args=[entry.pk])
request.session['notify_extra'] = notify
return redirect(url)
else:
initial = {}
if request.GET.get('start'):
try:
initial['start'] = parse_datetime(request.GET['start'])
except DatetimeParseError:
pass
if request.GET.get('end'):
try:
initial['end'] = parse_datetime(request.GET['end'])
except DatetimeParseError:
pass
form = forms.AddForm(request.user, initial=initial)
profile = request.user.get_profile()
manager = None
if profile and profile.manager:
manager = ldap_lookup.fetch_user_details(profile.manager)
data['hr_managers'] = [x.user for x in
(UserProfile.objects
.filter(hr_manager=True)
.select_related('user'))]
data['manager'] = manager
data['all_managers'] = [x for x in data['hr_managers'] if x]
if manager:
data['all_managers'].append(manager)
data['form'] = form
return render(request, 'dates/notify.html', data)
@transaction.commit_on_success
@login_required
def cancel_notify(request):
Entry.objects.filter(user=request.user, total_hours__isnull=True).delete()
return redirect(reverse('dates.home'))
def clean_unfinished_entries(good_entry):
# delete all entries that don't have total_hours and touch on the
# same dates as this good one
bad_entries = (Entry.objects
.filter(user=good_entry.user,
total_hours__isnull=True)
.exclude(pk=good_entry.pk))
for entry in bad_entries:
entry.delete()
@transaction.commit_on_success
@login_required
def hours(request, pk):
data = {}
entry = get_object_or_404(Entry, pk=pk)
if entry.user != request.user:
if not (request.user.is_staff or request.user.is_superuser):
return http.HttpResponseForbidden('insufficient access')
if request.method == 'POST':
form = forms.HoursForm(entry, data=request.POST)
if form.is_valid():
total_hours, is_edit = save_entry_hours(entry, form)
extra_users = request.session.get('notify_extra', '')
extra_users = [x.strip() for x
in extra_users.split(';')
if x.strip()]
success, email_addresses = send_email_notification(
entry,
extra_users,
is_edit=is_edit,
)
assert success
#messages.info(request,
# '%s hours of vacation logged.' % total_hours
#)
recently_created = make_entry_title(entry, request.user)
cache_key = 'recently_created_%s' % request.user.pk
cache.set(cache_key, recently_created, 60)
url = reverse('dates.emails_sent', args=[entry.pk])
url += '?' + urlencode({'e': email_addresses}, True)
return redirect(url)
else:
initial = {}
for date in utils.get_weekday_dates(entry.start, entry.end):
try:
#hours_ = Hours.objects.get(entry=entry, date=date)
hours_ = Hours.objects.get(date=date, entry__user=entry.user)
initial[date.strftime('d-%Y%m%d')] = hours_.hours
except Hours.DoesNotExist:
initial[date.strftime('d-%Y%m%d')] = settings.WORK_DAY
form = forms.HoursForm(entry, initial=initial)
data['form'] = form
if entry.total_hours:
data['total_hours'] = entry.total_hours
else:
total_days = 0
for date in utils.get_weekday_dates(entry.start, entry.end):
try:
hours_ = Hours.objects.get(entry=entry, date=date)
print hours_.hours
if hours_.hours == settings.WORK_DAY:
total_days += 1
elif hours_.hours:
total_days += .5
except Hours.DoesNotExist:
total_days += 1
data['total_days'] = total_days
notify = request.session.get('notify_extra', [])
data['notify'] = notify
return render(request, 'dates/hours.html', data)
def save_entry_hours(entry, form):
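    """Store per-day hours for the entry, reversing any hours previously logged on the same dates; returns (total_hours, is_edit)."""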
assert form.is_valid()
total_hours = 0
for date in utils.get_weekday_dates(entry.start, entry.end):
hours = int(form.cleaned_data[date.strftime('d-%Y%m%d')])
birthday = False
if hours == -1:
birthday = True
hours = 0
assert hours >= 0 and hours <= settings.WORK_DAY, hours
try:
hours_ = Hours.objects.get(entry__user=entry.user,
date=date)
if hours_.hours:
# this nullifies the previous entry on this date
reverse_entry = Entry.objects.create(
user=hours_.entry.user,
start=date,
end=date,
details=hours_.entry.details,
total_hours=hours_.hours * -1,
)
Hours.objects.create(
entry=reverse_entry,
hours=hours_.hours * -1,
date=date,
)
#hours_.hours = hours # nasty stuff!
#hours_.birthday = birthday
#hours_.save()
except Hours.DoesNotExist:
# nothing to credit
pass
Hours.objects.create(
entry=entry,
hours=hours,
date=date,
birthday=birthday,
)
total_hours += hours
#raise NotImplementedError
is_edit = entry.total_hours is not None
#if entry.total_hours is not None:
entry.total_hours = total_hours
entry.save()
return total_hours, is_edit
def send_email_notification(entry, extra_users, is_edit=False):
email_addresses = []
for profile in (UserProfile.objects
.filter(hr_manager=True,
user__email__isnull=False)):
email_addresses.append(profile.user.email)
profile = entry.user.get_profile()
if profile and profile.manager:
manager = ldap_lookup.fetch_user_details(profile.manager)
if manager.get('mail'):
email_addresses.append(manager['mail'])
if extra_users:
email_addresses.extend(extra_users)
email_addresses = list(set(email_addresses)) # get rid of dupes
if not email_addresses:
email_addresses = [settings.FALLBACK_TO_ADDRESS]
if is_edit:
subject = settings.EMAIL_SUBJECT_EDIT
else:
subject = settings.EMAIL_SUBJECT
subject = subject % dict(
first_name=entry.user.first_name,
last_name=entry.user.last_name,
username=entry.user.username,
email=entry.user.email,
)
message = template = loader.get_template('dates/notification.txt')
context = {
'entry': entry,
'user': entry.user,
'is_edit': is_edit,
'settings': settings,
'start_date': entry.start.strftime(settings.DEFAULT_DATE_FORMAT),
}
body = template.render(Context(context)).strip()
connection = get_connection()
message = EmailMessage(
subject=subject,
body=body,
from_email=entry.user.email,
to=email_addresses,
cc=entry.user.email and [entry.user.email] or None,
connection=connection
)
success = message.send()
return success, email_addresses
@login_required
def emails_sent(request, pk):
data = {}
entry = get_object_or_404(Entry, pk=pk)
if entry.user != request.user:
if not (request.user.is_staff or request.user.is_superuser):
return http.HttpResponseForbidden('insufficient access')
emails = request.REQUEST.getlist('e')
if isinstance(emails, basestring):
emails = [emails]
data['emails'] = emails
data['emailed_users'] = []
for email in emails:
record = ldap_lookup.fetch_user_details(email)
if record:
data['emailed_users'].append(record)
else:
data['emailed_users'].append(email)
show_fireworks = not request.COOKIES.get('no_fw', False)
data['show_fireworks'] = show_fireworks
return render(request, 'dates/emails_sent.html', data)
@login_required
def list_(request):
data = {}
form = forms.ListFilterForm(date_format='%d %B %Y',
data=request.GET)
if form.is_valid():
data['filters'] = form.cleaned_data
data['today'] = datetime.date.today()
entries_base = Entry.objects.all()
try:
data['first_date'] = entries_base.order_by('start')[0].start
data['last_date'] = entries_base.order_by('-end')[0].end
data['first_filed_date'] = (entries_base
.order_by('add_date')[0]
.add_date)
except IndexError:
# first run, not so important
data['first_date'] = datetime.date(2000, 1, 1)
data['last_date'] = datetime.date(2000, 1, 1)
data['first_filed_date'] = datetime.date(2000, 1, 1)
data['form'] = form
data['query_string'] = request.META.get('QUERY_STRING')
return render(request, 'dates/list.html', data)
@login_required
def list_csv(request):
entries = get_entries_from_request(request.GET)
response = http.HttpResponse(mimetype='text/csv')
writer = CSVUnicodeWriter(response)
writer.writerow((
'ID',
'EMAIL',
'FIRST NAME',
'LAST NAME',
'ADDED',
'START',
'END',
'DAYS',
'DETAILS',
'CITY',
'COUNTRY',
'START DATE',
))
profiles = {} # basic memoization
for entry in entries:
if entry.user.pk not in profiles:
profiles[entry.user.pk] = entry.user.get_profile()
profile = profiles[entry.user.pk]
writer.writerow((
str(entry.pk),
entry.user.email,
entry.user.first_name,
entry.user.last_name,
entry.add_date.strftime('%Y-%m-%d'),
entry.start.strftime('%Y-%m-%d'),
entry.end.strftime('%Y-%m-%d'),
str(entry.total_days),
entry.details,
profile.city,
profile.country,
(profile.start_date and
profile.start_date.strftime('%Y-%m-%d') or ''),
))
return response
@json_view
@login_required
def list_json(request):
entries = get_entries_from_request(request.GET)
_managers = {}
def can_see_details(user):
if request.user.is_superuser:
return True
if request.user.pk == user.pk:
return True
if user.pk not in _managers:
_profile = user.get_profile()
_manager = None
if _profile and _profile.manager_user:
_manager = _profile.manager_user.pk
_managers[user.pk] = _manager
return _managers[user.pk] == request.user.pk
data = []
profiles = {}
for entry in entries:
if entry.user.pk not in profiles:
profiles[entry.user.pk] = entry.user.get_profile()
profile = profiles[entry.user.pk]
if entry.total_hours < 0:
details = '*automatic edit*'
elif can_see_details(entry.user):
details = entry.details
else:
details = ''
row = [entry.user.email,
entry.user.first_name,
entry.user.last_name,
entry.add_date.strftime('%Y-%m-%d'),
entry.total_days,
entry.start.strftime('%Y-%m-%d'),
entry.end.strftime('%Y-%m-%d'),
profile.city,
profile.country,
details,
#edit_link,
#hours_link
]
data.append(row)
return {'aaData': data}
def get_entries_from_request(data):
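    """Return the Entry queryset filtered by the date, name and country criteria in the request data."""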
form = forms.ListFilterForm(date_format='%d %B %Y', data=data)
if not form.is_valid():
return Entry.objects.none()
fdata = form.cleaned_data
entries = (Entry.objects.exclude(total_hours=None)
.select_related('user'))
if fdata.get('date_from'):
entries = entries.filter(end__gte=fdata.get('date_from'))
if fdata.get('date_to'):
entries = entries.filter(start__lte=fdata.get('date_to'))
if fdata.get('date_filed_from'):
entries = entries.filter(
add_date__gte=fdata.get('date_filed_from'))
if fdata.get('date_filed_to'):
entries = entries.filter(
add_date__lt=fdata.get('date_filed_to') +
datetime.timedelta(days=1))
if fdata.get('name'):
name = fdata['name'].strip()
if valid_email(name):
entries = entries.filter(user__email__iexact=name)
else:
entries = entries.filter(
Q(user__first_name__istartswith=name.split()[0]) |
Q(user__last_name__iendswith=name.split()[-1])
)
if fdata.get('country'):
country = fdata['country'].strip()
_users = UserProfile.objects.filter(country=country).values('user_id')
entries = entries.filter(user__id__in=_users)
return entries
@login_required
def following(request):
data = {}
observed = []
_followed = get_followed_users(request.user)
_minions_1 = get_minions(request.user, depth=1, max_depth=1)
_minions_2 = get_minions(request.user, depth=1, max_depth=2)
_manager = request.user.get_profile().manager_user
for user in sorted(get_observed_users(request.user, max_depth=2),
lambda x, y: cmp(x.first_name.lower(),
y.first_name.lower())):
if user in _minions_1:
reason = 'direct manager of'
elif user in _minions_2:
reason = 'indirect manager of'
elif user == _manager:
reason = 'your manager'
elif user in _followed:
reason = 'curious'
else:
reason = 'teammate'
observed.append((user, reason))
not_observed = (BlacklistedUser.objects
.filter(observer=request.user)
.order_by('observable__first_name'))
data['observed'] = observed
data['not_observed'] = [x.observable for x in not_observed]
return render(request, 'dates/following.html', data)
@json_view
@login_required
@transaction.commit_on_success
@require_POST
def save_following(request):
search = request.POST.get('search')
if not search:
return http.HttpResponseBadRequest('Missing search')
if (-1 < search.rfind('<') < search.rfind('@') < search.rfind('>')):
try:
email = re.findall('<([\w\.\-]+@[\w\.\-]+)>', search)[0]
email = email.strip()
validate_email(email)
except (ValidationError, IndexError):
email = None
elif search.isdigit():
try:
email = User.objects.get(pk=search).email
except User.DoesNotExist:
email = None # will deal with this later
else:
found = []
result = ldap_lookup.search_users(search, 30, autocomplete=True)
for each in result:
try:
found.append(User.objects.get(email__iexact=each['mail']))
except User.DoesNotExist:
pass
if len(found) > 1:
return http.HttpResponseBadRequest('More than one user found')
elif not found:
return http.HttpResponseBadRequest('No user found')
else:
email = found[0].email
# if no email is found in the search, it's an error
if not email:
return http.HttpResponseBadRequest('No email found')
try:
user = User.objects.get(email__iexact=email)
except User.DoesNotExist:
return http.HttpResponseBadRequest('No user by that email found')
FollowingUser.objects.get_or_create(
follower=request.user,
following=user,
)
# find a reason why we're following this user
_minions_1 = get_minions(request.user, depth=1, max_depth=1)
_minions_2 = get_minions(request.user, depth=1, max_depth=2)
if user in _minions_1:
reason = 'direct manager of'
elif user in _minions_2:
reason = 'indirect manager of'
elif user == request.user.get_profile().manager_user:
reason = 'your manager'
elif (request.user.get_profile().manager_user
and user in _minions_1):
reason = 'teammate'
else:
reason = 'curious'
name = ('%s %s' % (user.first_name,
user.last_name)).strip()
if not name:
name = user.username
data = {
'id': user.pk,
'name': name,
'reason': reason,
}
return data
@json_view
@login_required
@transaction.commit_on_success
@require_POST
def save_unfollowing(request):
remove = request.POST.get('remove')
try:
user = User.objects.get(pk=remove)
except (ValueError, User.DoesNotExist):
return http.HttpResponseBadRequest('Invalid user ID')
for f in (FollowingUser.objects
.filter(follower=request.user, following=user)):
f.delete()
data = {}
if user in get_observed_users(request.user, max_depth=2):
# if not blacklisted, this user will automatically re-appear
BlacklistedUser.objects.get_or_create(
observer=request.user,
observable=user
)
data['id'] = user.pk
name = ('%s %s' % (user.first_name,
user.last_name)).strip()
if not name:
name = user.username
data['name'] = name
return data
def calendar_vcal(request, key):
base_url = '%s://%s' % (request.is_secure() and 'https' or 'http',
RequestSite(request).domain)
home_url = base_url + '/'
cal = vobject.iCalendar()
cal.add('x-wr-calname').value = 'Mozilla Vacation'
try:
user = UserKey.objects.get(key=key).user
except UserKey.DoesNotExist:
# instead of raising a HTTP error, respond a calendar
# that urges the user to update the stale URL
event = cal.add('vevent')
event.add('summary').value = (
"Calendar expired. Visit %s#calendarurl to get the "
"new calendar URL" % home_url
)
today = datetime.date.today()
event.add('dtstart').value = today
event.add('dtend').value = today
event.add('url').value = '%s#calendarurl' % (home_url,)
event.add('description').value = ("The calendar you used has expired "
"and is no longer associated with any user")
return _render_vcalendar(cal, key)
# always start on the first of this month
today = datetime.date.today()
#first = datetime.date(today.year, today.month, 1)
user_ids = [user.pk]
for user_ in get_observed_users(user, max_depth=2):
user_ids.append(user_.pk)
entries = (Entry.objects
.filter(user__in=user_ids,
total_hours__gte=0,
total_hours__isnull=False,
end__gte=today)
.select_related('user')
)
_list_base_url = base_url + reverse('dates.list')
def make_list_url(entry):
name = entry.user.get_full_name()
if not name:
name = entry.user.username
data = {
'date_from': entry.start.strftime('%d %B %Y'),
'date_to': entry.end.strftime('%d %B %Y'),
'name': name
}
return _list_base_url + '?' + urlencode(data, True)
for entry in entries:
event = cal.add('vevent')<|fim▁hole|> include_details=False)
event.add('dtstart').value = entry.start
event.add('dtend').value = entry.end
#url = (home_url + '?cal_y=%d&cal_m=%d' %
# (slot.date.year, slot.date.month))
event.add('url').value = make_list_url(entry)
#event.add('description').value = entry.details
event.add('description').value = "Log in to see the details"
return _render_vcalendar(cal, key)
def _render_vcalendar(cal, key):
#return http.HttpResponse(cal.serialize(),
# mimetype='text/plain;charset=utf-8'
# )
resp = http.HttpResponse(cal.serialize(),
mimetype='text/calendar;charset=utf-8'
)
filename = '%s.ics' % (key,)
resp['Content-Disposition'] = 'inline; filename="%s"' % filename
return resp
@login_required
@transaction.commit_on_success
def reset_calendar_url(request):
for each in UserKey.objects.filter(user=request.user):
each.delete()
return redirect(reverse('dates.home') + '#calendarurl')
@login_required
def about_calendar_url(request):
data = {}
data['calendar_url'] = _get_user_calendar_url(request)
return render(request, 'dates/about-calendar-url.html', data)
@login_required
def duplicate_report(request):
data = {
'filter_errors': None,
}
if request.method == 'POST':
raise NotImplementedError
else:
form = forms.DuplicateReportFilterForm(date_format='%d %B %Y',
data=request.GET)
user = request.user
filter_ = dict(user=user)
if form.is_valid():
if form.cleaned_data['user']:
user = form.cleaned_data['user']
if user != request.user:
if not (request.user.is_superuser
or request.user.is_staff):
if user != request.user:
return http.HttpResponse(
"Only available for admins")
filter_['user'] = user
if form.cleaned_data['since']:
filter_['start__gte'] = form.cleaned_data['since']
data['since'] = form.cleaned_data['since']
else:
data['filter_errors'] = form.errors
data['first_date'] = (Entry.objects
.filter(user=user)
.aggregate(Min('start'))
['start__min'])
start_dates = (Entry.objects
.filter(**filter_)
.values("start")
.annotate(Count("start"))
.order_by('-start__count'))
groups = []
for each in start_dates:
if each['start__count'] <= 1:
break
entries = Entry.objects.filter(user=user, start=each['start'])
details = [x.details for x in entries]
note = "Probably not a mistake"
if len(set(details)) == 1:
note = ("Probably a duplicate! "
"The details are the same for each entry")
else:
note = "Possibly not a duplicate since the details different"
groups.append((entries, note))
data['groups'] = groups
if 'since' not in data:
data['since'] = data['first_date']
return render(request, 'dates/duplicate-report.html', data)<|fim▁end|> | event.add('summary').value = '%s Vacation' % make_entry_title(entry, user, |
<|file_name|>lottery_v3.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
""" Example program for JDEV Mercurial tutorial """
from optparse import OptionParser
def calculate_result(white_balls, power_ball):
""" Computation is lauched here """
for ball in white_balls:
if ball < 1 or ball > 59:
return -1
if power_ball < 1 or power_ball > 39:
return -1
return 0
def main():
""" Program used to compute the integer percent of chance
of winning at the lottery.
Five white balls and a power ball are drawn"""
usage = "Usage: %prog power_ball (5 white balls)"
parser = OptionParser(usage)
<|fim▁hole|>
power_ball = int(args[0])
white_balls = [int(arg) for arg in args[1:]]
result = calculate_result(white_balls, power_ball)
print "White balls : %s" % white_balls
print "Chance ball : %s" % power_ball
print "%d percent chance of winning" % result
return 0
if __name__ == "__main__":
main()<|fim▁end|> | (_, args) = parser.parse_args()
if len(args) != 6:
parser.error("incorrect number of arguments") |
<|file_name|>vec-matching-legal-tail-element-borrow.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
let x = &[1i, 2, 3, 4, 5];
let x: &[int] = &[1, 2, 3, 4, 5];
if !x.is_empty() {
let el = match x {
[1, ..ref tail] => &tail[0],
_ => unreachable!()
};
println!("{}", *el);<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>snmp_mib_remote.py<|end_file_name|><|fim▁begin|>from ..remote import RemoteModel
class SnmpMibRemote(RemoteModel):
"""
| ``id:`` none
| ``attribute type:`` string
| ``mib:`` none<|fim▁hole|> | ``attribute type:`` string
| ``version:`` none
| ``attribute type:`` string
| ``source:`` none
| ``attribute type:`` string
| ``vendor:`` none
| ``attribute type:`` string
"""
properties = ("id",
"mib",
"version",
"source",
"vendor",
)<|fim▁end|> | |
<|file_name|>MigrationRunner.java<|end_file_name|><|fim▁begin|>package com.github.mmonkey.Automator.Migrations;
import com.github.mmonkey.Automator.Automator;
<|fim▁hole|> protected Automator plugin;
protected int version;
protected int latest = 0;
public void run() {
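        // Apply migrations one version at a time until the schema reaches the latest version.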
while (this.version != this.latest) {
MigrationInterface migration = this.getMigration(this.version);
if (migration != null) {
migration.up();
this.version++;
}
}
}
abstract MigrationInterface getMigration(int version);
public MigrationRunner(Automator plugin, int version) {
this.plugin = plugin;
this.version = version;
}
}<|fim▁end|> | public abstract class MigrationRunner {
|
<|file_name|>issue-17913.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern: too big for the current architecture
#![feature(box_syntax)]
#[cfg(target_pointer_width = "64")]
fn main() {
let n = 0_usize;
let a: Box<_> = box [&n; 0xF000000000000000_usize];
println!("{}", a[0xFFFFFF_usize]);
}
#[cfg(target_pointer_width = "32")]
fn main() {
let n = 0_usize;
let a: Box<_> = box [&n; 0xFFFFFFFF_usize];<|fim▁hole|><|fim▁end|> | println!("{}", a[0xFFFFFF_usize]);
} |
<|file_name|>ami2stomp.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# vim: set expandtab shiftwidth=4:
# http://www.voip-info.org/wiki/view/asterisk+manager+events
import asterisk.manager
import sys,os,time
import simplejson as json
from stompy.simple import Client
import ConfigParser
from sqlobject import *
from handlers.command_handler_factory import CommandHandlerFactory
from handlers.command_constants import Protocol
#sys.stdout = open("/var/log/requests/connector2.log","a")
#sys.stderr = open("/var/log/requests/connector-err2.log","a")
<|fim▁hole|> datefmt='%a, %d %b %Y %H:%M:%S',
filename='/tmp/myapp.log',
filemode='a+')
import fcntl
lockfile = os.path.normpath('/tmp/' + os.path.basename(__file__) + '.lock')
exclusive_lock = open(lockfile, 'w')
try:
fcntl.lockf(exclusive_lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print "Another instance is already running, quitting."
time.sleep(1)
sys.exit(-1)
config = ConfigParser.ConfigParser()
config.read('/opt/ucall/etc/config.ini')
stomp_host = config.get('STOMP', 'host')
stomp_username = config.get('STOMP', 'username')
stomp_password = config.get('STOMP', 'password')
print '='*80
print 'Stomp host:', stomp_host
print 'Stomp username:', stomp_username
print 'Stomp password:', stomp_password
print '='*80
ami_host = config.get('AMI', 'host')
ami_username = config.get('AMI', 'username')
ami_password = config.get('AMI', 'password')
print 'AMI host:', ami_host
print 'AMI username:', ami_username
print 'AMI password:', ami_password
print '='*80
sql_dsn = config.get('SQL', 'dsn')
print 'SQL:', sql_dsn
print '='*80
stomp = Client(stomp_host)
stomp.connect(stomp_username, stomp_password)
stomp.agent_channel = 'jms.queue.msg.'
connection = connectionForURI(sql_dsn)
sqlhub.processConnection = connection
manager = asterisk.manager.Manager()
#try:
#try:
manager.connect(ami_host)
manager.login(ami_username, ami_password)
manager.destination = stomp
asteriskProtocolVersion = None
if manager.version == '1.0':
asteriskProtocolVersion = Protocol.ASTERISK_1_0
elif manager.version == '1.1':
asteriskProtocolVersion = Protocol.ASTERISK_1_1
else:
sys.exit()
command_handler = CommandHandlerFactory(asteriskProtocolVersion).create_command_handler()
manager.register_event('Shutdown', command_handler.handle_Shutdown)
manager.register_event('Hangup', command_handler.handle_Hangup)
manager.register_event('Link', command_handler.handle_Link)
manager.register_event('Bridge', command_handler.handle_Bridge)
manager.register_event('Dial', command_handler.handle_Dial)
manager.register_event('Newstate', command_handler.handle_Newstate)
manager.register_event('QueueMemberAdded', command_handler.handle_QueueMemberAdded)
manager.register_event('QueueMemberRemoved', command_handler.handle_QueueMemberRemoved)
manager.register_event('QueueMemberPaused', command_handler.handle_QueueMemberPaused)
manager.register_event('QueueMember', command_handler.handle_QueueMember)
manager.message_loop()
manager.logoff()
#except asterisk.manager.ManagerSocketException, (errno, reason):
# print "Error connecting to the manager: %s" % reason
#except asterisk.manager.ManagerAuthException, reason:
# print "Error logging in to the manager: %s" % reason
#except asterisk.manager.ManagerException, reason:
# print "Error: %s" % reason
#except:
# sys.exit()
#finally:
manager.close()<|fim▁end|> | import logging
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)-8s %(message)s', |
<|file_name|>showCSV.js<|end_file_name|><|fim▁begin|>function processData(allText) {
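    // Parse the CSV text: split it into lines, read the header row, then
    // turn every following row into an array of "header:value" strings.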
var allTextLines = allText.split(/\r\n|\n/);
var headers = allTextLines[0].split(',');
var lines = [];
for (var i=1; i<allTextLines.length; i++) {
var data = allTextLines[i].split(',');
if (data.length == headers.length) {
var tarr = [];
for (var j=0; j<headers.length; j++) {
tarr.push(headers[j]+":"+data[j]);
}
lines.push(tarr);
}
}
console.log(lines);
}
csv = "heading1,heading2,heading3,heading4,heading5\nvalue1_1,value2_1,value3_1,value4_1,value5_1\nvalue1_2,value2_2,value3_2,value4_2,value5_2"
$(document).ready(function() {
$.ajax({
type: "POST",
url: "/echo/html/",
dataType: "text",
data: {
html: csv<|fim▁hole|> success: function(data) {
processData(data);
}
});
});<|fim▁end|> | }, |
<|file_name|>CassandraEndpointUserTest.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.common.nosql.cassandra.dao.model;
import nl.jqno.equalsverifier.EqualsVerifier;
import nl.jqno.equalsverifier.Warning;
import org.junit.Test;<|fim▁hole|> EqualsVerifier.forClass(CassandraEndpointUser.class).suppress(Warning.NONFINAL_FIELDS).verify();
}
}<|fim▁end|> |
public class CassandraEndpointUserTest {
@Test
public void hashCodeEqualsTest(){ |
<|file_name|>Checkbox.stories.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { action } from '@storybook/addon-actions';
import Checkbox from '.';
const onChange = action('onChange');
const defaultProps = {
id: 'id1',
onChange,
};
const intermediate = {
id: 'id2',
onChange,
intermediate: true,
};
const checked = {
id: 'id3',
onChange,
checked: true,
};
const disabled = {
id: 'id4',
onChange,
disabled: true,
};
const withLabel = {
id: 'id5',
onChange,
label: 'Some label',
};
<|fim▁hole|>export default {
title: 'Form/Controls/Checkbox',
};
export const Default = () => (
<div style={{ padding: 30 }}>
<h1>Checkbox</h1>
<h2>Definition</h2>
      <p>The Checkbox component is basically a fancy checkbox, like the one you have on your iPhone</p>
<h2>Examples</h2>
<form>
<h3>Default Checkbox</h3>
<Checkbox {...defaultProps} />
<h3>
Checkbox with <code>intermediate: true</code>
</h3>
<Checkbox {...intermediate} />
<h3>
Checkbox with <code>checked: true</code>
</h3>
<Checkbox {...checked} />
<h3>
Checkbox with <code>disabled: true</code>
</h3>
<Checkbox {...disabled} />
<h3>
Checkbox with <code>label: Some label</code>
</h3>
<Checkbox {...withLabel} />
</form>
</div>
);<|fim▁end|> | |
<|file_name|>files.js<|end_file_name|><|fim▁begin|>"use strict";
var chokidar = require("chokidar"),
shell = require("shelljs"),
files = {
"./src/pages/schema-edit/parse-schema.js" : "./gen/parse-schema.js"
};
exports.watch = function() {
// Make sure files stay up to date in the /gen folder
chokidar.watch(Object.keys(files)).on("all", function(event, file) {
if(event !== "add" && event !== "change") {
return;
}
file = "./" + file;
shell.cp(file, files[file]);
});
};
<|fim▁hole|> shell.cp(file, files[file]);
});
};<|fim▁end|> | exports.copy = function() {
Object.keys(files).forEach(function(file) { |
<|file_name|>postcount.py<|end_file_name|><|fim▁begin|>"""<|fim▁hole|>
Post count use case
"""
import niascape
from niascape.repository import postcount
from niascape.utility.database import get_db
def day(option: dict) -> list:
with get_db(niascape.ini['database']) as db: # type: ignore # XXX セクションぶっこむとmypyさんにおこられ 辞書化すべきか
return postcount.day(db, **option)
def month(option: dict) -> list:
with get_db(niascape.ini['database']) as db: # type: ignore # XXX セクションぶっこむとmypyさんにおこられ 辞書化すべきか
return postcount.month(db, **option)
def hour(option: dict) -> list:
with get_db(niascape.ini['database']) as db: # type: ignore # XXX セクションぶっこむとmypyさんにおこられ 辞書化すべきか
return postcount.hour(db, **option)
def week(option: dict) -> list:
with get_db(niascape.ini['database']) as db: # type: ignore # XXX セクションぶっこむとmypyさんにおこられ 辞書化すべきか
return postcount.week(db, **option)
def tag(option: dict) -> list:
with get_db(niascape.ini['database']) as db: # type: ignore # XXX セクションぶっこむとmypyさんにおこられ 辞書化すべきか
return postcount.tag(db, **option)<|fim▁end|> | niascape.usecase.postcount |
<|file_name|>AccountApplyPaymentActionForm.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2005-2011 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*<|fim▁hole|> *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.accounts.struts.actionforms;
import static org.mifos.framework.util.helpers.DateUtils.dateFallsBeforeDate;
import static org.mifos.framework.util.helpers.DateUtils.getDateAsSentFromBrowser;
import java.sql.Date;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.Locale;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.apache.commons.lang.StringUtils;
import org.apache.struts.Globals;
import org.apache.struts.action.ActionErrors;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.joda.time.LocalDate;
import org.mifos.accounts.servicefacade.AccountTypeDto;
import org.mifos.accounts.util.helpers.AccountConstants;
import org.mifos.application.admin.servicefacade.InvalidDateException;
import org.mifos.application.master.business.MifosCurrency;
import org.mifos.config.AccountingRules;
import org.mifos.framework.business.util.helpers.MethodNameConstants;
import org.mifos.framework.struts.actionforms.BaseActionForm;
import org.mifos.framework.util.helpers.Constants;
import org.mifos.framework.util.helpers.DateUtils;
import org.mifos.framework.util.helpers.DoubleConversionResult;
import org.mifos.framework.util.helpers.SessionUtils;
import org.mifos.security.login.util.helpers.LoginConstants;
import org.mifos.security.util.ActivityMapper;
import org.mifos.security.util.UserContext;
public class AccountApplyPaymentActionForm extends BaseActionForm {
private String input;
private String transactionDateDD;
private String transactionDateMM;
private String transactionDateYY;
private String amount;
private Short currencyId;
private String receiptId;
private String receiptDateDD;
private String receiptDateMM;
private String receiptDateYY;
/*
* Among other things, this field holds the PaymentTypes value for disbursements.
*/
private String paymentTypeId;
private String waiverInterest;
private String globalAccountNum;
private String accountId;
private String prdOfferingName;
private boolean amountCannotBeZero = true;
private java.util.Date lastPaymentDate;
private String accountForTransfer;
private Short transferPaymentTypeId;
public boolean amountCannotBeZero() {
return this.amountCannotBeZero;
}
public void setAmountCannotBeZero(boolean amountCannotBeZero) {
this.amountCannotBeZero = amountCannotBeZero;
}
public String getPrdOfferingName() {
return prdOfferingName;
}
public void setPrdOfferingName(String prdOfferingName) {
this.prdOfferingName = prdOfferingName;
}
public String getAmount() {
return amount;
}
public void setAmount(String amount) {
this.amount = amount;
}
public String getInput() {
return input;
}
@Override
public ActionErrors validate(ActionMapping mapping, HttpServletRequest request) {
String methodCalled = request.getParameter(MethodNameConstants.METHOD);
ActionErrors errors = new ActionErrors();
if (methodCalled != null && methodCalled.equals("preview")) {
validateTransfer(errors);
validateTransactionDate(errors);
validatePaymentType(errors);
validateReceiptDate(errors);
String accountType = (String) request.getSession().getAttribute(Constants.ACCOUNT_TYPE);
validateAccountType(errors, accountType);
validateAmount(errors);
validateModeOfPaymentSecurity(request, errors);
}
if (!errors.isEmpty()) {
request.setAttribute(Globals.ERROR_KEY, errors);
request.setAttribute("methodCalled", methodCalled);
}
return errors;
}
private void validateModeOfPaymentSecurity(HttpServletRequest request, ActionErrors errors){
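        // Mode of payment 4 (transfer) is only allowed for users that hold the
        // mode-of-payment security permission.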
UserContext userContext = (UserContext) SessionUtils.getAttribute(Constants.USER_CONTEXT_KEY, request.getSession());
if(getPaymentTypeId().equals("4") && !ActivityMapper.getInstance().isModeOfPaymentSecurity(userContext)){
errors.add(AccountConstants.LOAN_TRANSFER_PERMISSION, new ActionMessage(AccountConstants.LOAN_TRANSFER_PERMISSION,
getLocalizedMessage("accounts.mode_of_payment_permission")));
}
}
private void validateTransfer(ActionErrors errors) {
if (paymentTypeId.equals(String.valueOf(transferPaymentTypeId))
&& StringUtils.isBlank(accountForTransfer)) {
errors.add(AccountConstants.NO_ACCOUNT_FOR_TRANSFER, new ActionMessage(AccountConstants.NO_ACCOUNT_FOR_TRANSFER));
}
}
private void validateAccountType(ActionErrors errors, String accountType) {
if (accountType != null && accountType.equals(AccountTypeDto.LOAN_ACCOUNT.name())) {
if (getAmount() == null || getAmount().equals("")) {
errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY,
getLocalizedMessage("accounts.amt")));
}
}
}
private void validatePaymentType(ActionErrors errors) {
if (StringUtils.isEmpty(getPaymentTypeId())) {
errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY,
getLocalizedMessage("accounts.mode_of_payment")));
}
}
private void validateReceiptDate(ActionErrors errors) {
if (getReceiptDate() != null && !getReceiptDate().equals("")) {
ActionErrors validationErrors = validateDate(getReceiptDate(), getLocalizedMessage("accounts.receiptdate"));
if (null != validationErrors && !validationErrors.isEmpty()) {
errors.add(validationErrors);
}
}
}
private void validateTransactionDate(ActionErrors errors) {
String fieldName = "accounts.date_of_trxn";
ActionErrors validationErrors = validateDate(getTransactionDate(), getLocalizedMessage(fieldName));
if (null != validationErrors && !validationErrors.isEmpty()) {
errors.add(validationErrors);
}
if (null != getTransactionDate()){
validationErrors = validatePaymentDate(getTransactionDate(), getLocalizedMessage(fieldName));
if (validationErrors != null && !validationErrors.isEmpty()) {
errors.add(validationErrors);
}
}
}
//exposed for testing
public ActionErrors validatePaymentDate(String transactionDate, String fieldName) {
ActionErrors errors = null;
try {
if (lastPaymentDate != null && dateFallsBeforeDate(getDateAsSentFromBrowser(transactionDate), lastPaymentDate)) {
errors = new ActionErrors();
errors.add(AccountConstants.ERROR_PAYMENT_DATE_BEFORE_LAST_PAYMENT,
new ActionMessage(AccountConstants.ERROR_PAYMENT_DATE_BEFORE_LAST_PAYMENT,
fieldName));
}
} catch (InvalidDateException ide) {
errors = new ActionErrors(); //dont add a message, since it was already added in validateDate()
}
return errors;
}
protected ActionErrors validateDate(String date, String fieldName) {
ActionErrors errors = null;
java.sql.Date sqlDate = null;
if (date != null && !date.equals("")) {
try {
sqlDate = getDateAsSentFromBrowser(date);
if (DateUtils.whichDirection(sqlDate) > 0) {
errors = new ActionErrors();
errors.add(AccountConstants.ERROR_FUTUREDATE, new ActionMessage(AccountConstants.ERROR_FUTUREDATE,
fieldName));
}
} catch (InvalidDateException ide) {
errors = new ActionErrors();
errors.add(AccountConstants.ERROR_INVALIDDATE, new ActionMessage(AccountConstants.ERROR_INVALIDDATE,
fieldName));
}
} else {
errors = new ActionErrors();
errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY,
fieldName));
}
return errors;
}
protected Locale getUserLocale(HttpServletRequest request) {
Locale locale = null;
HttpSession session = request.getSession();
if (session != null) {
UserContext userContext = (UserContext) session.getAttribute(LoginConstants.USERCONTEXT);
if (null != userContext) {
locale = userContext.getCurrentLocale();
}
}
return locale;
}
protected void validateAmount(ActionErrors errors) {
MifosCurrency currency = null;
if (getCurrencyId() != null && AccountingRules.isMultiCurrencyEnabled()) {
currency = AccountingRules.getCurrencyByCurrencyId(getCurrencyId());
}
DoubleConversionResult conversionResult = validateAmount(getAmount(), currency , AccountConstants.ACCOUNT_AMOUNT, errors, "");
if (amountCannotBeZero() && conversionResult.getErrors().size() == 0 && !(conversionResult.getDoubleValue() > 0.0)) {
addError(errors, AccountConstants.ACCOUNT_AMOUNT, AccountConstants.ERRORS_MUST_BE_GREATER_THAN_ZERO,
getLocalizedMessage(AccountConstants.ACCOUNT_AMOUNT));
}
}
public void setInput(String input) {
this.input = input;
}
public String getPaymentTypeId() {
return paymentTypeId;
}
public void setPaymentTypeId(String paymentTypeId) {
this.paymentTypeId = paymentTypeId;
}
public String getReceiptDate() {
return compileDateString(receiptDateDD, receiptDateMM, receiptDateYY);
}
public void setReceiptDate(String receiptDate) throws InvalidDateException {
if (StringUtils.isBlank(receiptDate)) {
receiptDateDD = null;
receiptDateMM = null;
receiptDateYY = null;
} else {
Calendar cal = new GregorianCalendar();
java.sql.Date date = getDateAsSentFromBrowser(receiptDate);
cal.setTime(date);
receiptDateDD = Integer.toString(cal.get(Calendar.DAY_OF_MONTH));
receiptDateMM = Integer.toString(cal.get(Calendar.MONTH) + 1);
receiptDateYY = Integer.toString(cal.get(Calendar.YEAR));
}
}
public String getReceiptId() {
return receiptId;
}
public void setReceiptId(String receiptId) {
this.receiptId = receiptId;
}
public String getTransactionDate() {
return compileDateString(transactionDateDD, transactionDateMM, transactionDateYY);
}
public void setTransactionDate(String receiptDate) throws InvalidDateException {
if (StringUtils.isBlank(receiptDate)) {
transactionDateDD = null;
transactionDateMM = null;
transactionDateYY = null;
} else {
Calendar cal = new GregorianCalendar();
java.sql.Date date = getDateAsSentFromBrowser(receiptDate);
cal.setTime(date);
transactionDateDD = Integer.toString(cal.get(Calendar.DAY_OF_MONTH));
transactionDateMM = Integer.toString(cal.get(Calendar.MONTH) + 1);
transactionDateYY = Integer.toString(cal.get(Calendar.YEAR));
}
}
public String getAccountId() {
return accountId;
}
public void setAccountId(String accountId) {
this.accountId = accountId;
}
public String getGlobalAccountNum() {
return globalAccountNum;
}
public void setGlobalAccountNum(String globalAccountNum) {
this.globalAccountNum = globalAccountNum;
}
protected void clear() throws InvalidDateException {
this.amount = null;
this.paymentTypeId = null;
setReceiptDate(null);
this.receiptId = null;
}
public String getReceiptDateDD() {
return receiptDateDD;
}
public void setReceiptDateDD(String receiptDateDD) {
this.receiptDateDD = receiptDateDD;
}
public String getReceiptDateMM() {
return receiptDateMM;
}
public void setReceiptDateMM(String receiptDateMM) {
this.receiptDateMM = receiptDateMM;
}
public String getReceiptDateYY() {
return receiptDateYY;
}
public void setReceiptDateYY(String receiptDateYY) {
this.receiptDateYY = receiptDateYY;
}
public String getTransactionDateDD() {
return transactionDateDD;
}
public void setTransactionDateDD(String transactionDateDD) {
this.transactionDateDD = transactionDateDD;
}
public String getTransactionDateMM() {
return transactionDateMM;
}
public void setTransactionDateMM(String transactionDateMM) {
this.transactionDateMM = transactionDateMM;
}
public String getTransactionDateYY() {
return transactionDateYY;
}
public void setTransactionDateYY(String transactionDateYY) {
this.transactionDateYY = transactionDateYY;
}
public Short getCurrencyId() {
return this.currencyId;
}
public void setCurrencyId(Short currencyId) {
this.currencyId = currencyId;
}
public String getWaiverInterest() {
return waiverInterest;
}
public void setWaiverInterest(String waiverInterest) {
this.waiverInterest = waiverInterest;
}
public LocalDate getReceiptDateAsLocalDate() throws InvalidDateException {
Date receiptDateStr = getDateAsSentFromBrowser(getReceiptDate());
return (receiptDateStr != null) ? new LocalDate(receiptDateStr.getTime()) : null;
}
public LocalDate getTrxnDateAsLocalDate() throws InvalidDateException {
return new LocalDate(getTrxnDate().getTime());
}
public Date getTrxnDate() throws InvalidDateException {
return getDateAsSentFromBrowser(getTransactionDate());
}
public void setLastPaymentDate(java.util.Date lastPaymentDate) {
this.lastPaymentDate = lastPaymentDate;
}
public String getAccountForTransfer() {
return accountForTransfer;
}
public void setAccountForTransfer(String accountForTransfer) {
this.accountForTransfer = accountForTransfer;
}
public Short getTransferPaymentTypeId() {
return transferPaymentTypeId;
}
public void setTransferPaymentTypeId(Short transferPaymentTypeId) {
this.transferPaymentTypeId = transferPaymentTypeId;
}
}<|fim▁end|> | * http://www.apache.org/licenses/LICENSE-2.0 |
<|file_name|>base_graph.py<|end_file_name|><|fim▁begin|>import tensorflow as tf
class BaseGraph:
"""
    A generic computation graph class, with only two constraints:
    1. The requirements for the net instance passed in:
        - net should have a define_net method
        - net should have a get_placeholders method
    2. Losses can only be calculated with x_pl and y_pl (consider moving this out to the network module)
"""
graph = None
init_graph = None
losses = {}
optimizers = {}
predictions = None
loss = None
optimizer = None
def __init__(self, net, config, logger):
self.learning_rate = config.LEARNING_RATE
self.momentum = config.MOMENTUM
self.loss_type = config.LOSS_TYPE
self.optimizer_type = config.OPTIMIZER_TYPE
self.config = config
self.logger = logger
self.net = net
self._define_graph()
def _define_graph(self):
self.logger.info('[Graph] Constructing graph now...')
self.graph = tf.Graph()
with self.graph.as_default():
self.predictions = self.net.define_net()
self.x_pl, self.y_pl, self.is_training_pl = self.net.get_placeholders()
<|fim▁hole|> tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.y_pl, logits=self.predictions))
except ValueError as e:
self.logger.warn(e)
self.loss = self.losses[self.loss_type]
tf.summary.scalar("loss", self.loss)
tf.summary.scalar("lr", self.learning_rate)
# Optimizer
self.optimizers['momentum'] = tf.train.MomentumOptimizer(learning_rate=self.learning_rate,
momentum=self.momentum,
use_nesterov=True).minimize(self.loss)
self.optimizers['adam'] = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.loss)
self.optimizer = self.optimizers[self.optimizer_type]
# Init nodes initializer
self.init_graph = tf.global_variables_initializer()
self.logger.info('[Graph] Graph constructed!')
def get_placeholders(self):
return self.x_pl, self.y_pl, self.is_training_pl
def get_graph(self):
return self.graph, self.init_graph
def get_optimizer(self):
return self.optimizer<|fim▁end|> | # Loss
try:
self.losses['mse'] = tf.reduce_mean(tf.square(self.predictions - self.y_pl))
self.losses['cross_entropy'] = tf.reduce_mean( |
<|file_name|>matrix_updates.rs<|end_file_name|><|fim▁begin|>#![doc="Implementation of matrix updates
"]
/// std imports
// external imports
use num::traits::Num;
/// local imports
use algebra::structure::{MagmaBase};
use error::SRResult;
use error::SRError;
use matrix::traits::{Shape, MatrixBuffer, Strided};
use matrix::update::traits::{InPlaceUpdates, CopyUpdates};
use matrix::eo::eo_traits::{ERO, ECO};
use matrix::matrix::Matrix;
/// Implementation of Matrix general update operations.
impl<T:MagmaBase + Num> InPlaceUpdates<T> for Matrix<T> {
fn add_scalar(&mut self, rhs: T) -> &mut Matrix<T> {
let rows = self.num_rows();
let cols = self.num_cols();
let pa = self.as_mut_ptr();
let stride = self.stride() as isize;
let mut offset = self.start_offset();
for _ in 0..cols{
for r in 0..rows as isize{
unsafe {
let v = *pa.offset(offset + r);
*pa.offset(offset + r) = v + rhs;
}
}
offset += stride;
}
self
}
fn mul_scalar(&mut self, rhs: T) -> &mut Matrix<T> {
let rows = self.num_rows();
let cols = self.num_cols();
let pa = self.as_mut_ptr();
let stride = self.stride() as isize;
let mut offset = self.start_offset();
for _ in 0..cols{
for r in 0..rows as isize{
unsafe {
let v = *pa.offset(offset + r);
*pa.offset(offset + r) = v * rhs;
}<|fim▁hole|> offset += stride;
}
self
}
fn div_scalar(&mut self, rhs: T) -> SRResult<&mut Matrix<T>> {
if rhs.is_zero(){
return Err(SRError::DivideByZero);
}
let rows = self.num_rows();
let cols = self.num_cols();
let pa = self.as_mut_ptr();
let stride = self.stride() as isize;
let mut offset = self.start_offset();
for _ in 0..cols{
for r in 0..rows as isize{
unsafe {
let v = *pa.offset(offset + r);
*pa.offset(offset + r) = v / rhs;
}
}
offset += stride;
}
Ok(self)
}
fn scale_row_lt(&mut self, r : usize, scale_factor : T)-> &mut Matrix<T>{
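        // Scales only the lower triangular part of row r, i.e. the entries in columns 0..=r.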
debug_assert!(r < self.num_rows());
let stride = self.stride() as isize;
let ptr = self.as_mut_ptr();
let mut offset = self.cell_to_offset(r, 0);
for _ in 0..(r + 1){
debug_assert!(offset < self.capacity() as isize);
unsafe{
let v = *ptr.offset(offset);
*ptr.offset(offset) = scale_factor * v;
offset += stride;
}
}
self
}
fn scale_col_lt(&mut self, c : usize, scale_factor : T)-> &mut Matrix<T>{
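        // Scales only the lower triangular part of column c, i.e. the entries from the diagonal downwards.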
debug_assert!(c < self.num_cols());
let ptr = self.as_mut_ptr();
let mut offset = self.cell_to_offset(c, c);
for _ in c..self.num_cols(){
unsafe{
let v = *ptr.offset(offset);
*ptr.offset(offset) = scale_factor * v;
offset += 1;
}
}
self
}
fn scale_row_ut(&mut self, r : usize, scale_factor : T)-> &mut Matrix<T>{
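        // Scales only the upper triangular part of row r, i.e. the entries from the diagonal to the last column.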
debug_assert!(r < self.num_rows());
let stride = self.stride() as isize;
let ptr = self.as_mut_ptr();
let mut offset = self.cell_to_offset(r, r);
for _ in r..self.num_cols(){
unsafe{
let v = *ptr.offset(offset);
*ptr.offset(offset) = scale_factor * v;
offset += stride;
}
}
self
}
fn scale_col_ut(&mut self, c : usize, scale_factor : T)-> &mut Matrix<T>{
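        // Scales only the upper triangular part of column c, i.e. the entries in rows 0..=c.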
debug_assert!(c < self.num_cols());
let ptr = self.as_mut_ptr();
let mut offset = self.cell_to_offset(0, c);
for _ in 0..(c + 1){
unsafe{
let v = *ptr.offset(offset);
*ptr.offset(offset) = scale_factor * v;
offset += 1;
}
}
self
}
fn scale_rows(&mut self, scale_factors : &Matrix<T>)-> &mut Matrix<T>{
assert!(scale_factors.is_col());
assert_eq!(scale_factors.num_cells(), self.num_rows());
for r in 0..self.num_rows(){
let factor = scale_factors[r];
self.ero_scale(r, factor);
}
self
}
fn scale_cols(&mut self, scale_factors : &Matrix<T>)-> &mut Matrix<T>{
assert!(scale_factors.is_col());
assert_eq!(scale_factors.num_cells(), self.num_cols());
for c in 0..self.num_cols(){
let factor = scale_factors[c];
self.eco_scale(c, factor);
}
self
}
/// Subtract a vector from each column
fn sub_vec_from_cols(&mut self, vec: &Matrix<T>)->SRResult<()>{
if ! vec.is_col() {
return Err(SRError::IsNotAColVector);
}
let rows = self.num_rows();
if vec.num_rows() != rows {
return Err(SRError::RowsMismatch);
}
let cols = self.num_cols();
let pd = self.as_mut_ptr();
let ps = vec.as_ptr();
let stride = self.stride() as isize;
let mut offset = self.start_offset();
for _ in 0..cols{
for r in 0..rows as isize{
unsafe{
let v1 = *pd.offset(offset + r);
let v2 = *ps.offset(r);
*pd.offset(offset + r) = v1 - v2;
}
}
offset += stride;
}
Ok(())
}
/// Subtract a vector from each row
fn sub_vec_from_rows(&mut self, vec: &Matrix<T>)->SRResult<()>{
if ! vec.is_row() {
return Err(SRError::IsNotARowVector);
}
let cols = self.num_cols();
if vec.num_cols() != cols {
return Err(SRError::ColsMismatch);
}
let rows = self.num_rows();
let pd = self.as_mut_ptr();
let ps = vec.as_ptr();
let stride = self.stride() as isize;
let mut offset = self.start_offset();
for c in 0..cols{
let v2 = unsafe{*ps.offset(c as isize)};
for r in 0..rows as isize{
unsafe{
let v1 = *pd.offset(offset + r);
*pd.offset(offset + r) = v1 - v2;
}
}
offset += stride;
}
Ok(())
}
    /// Add a vector to each column
fn add_vec_to_cols(&mut self, vec: &Matrix<T>)->SRResult<()>{
if ! vec.is_col() {
return Err(SRError::IsNotAColVector);
}
let rows = self.num_rows();
if vec.num_rows() != rows {
return Err(SRError::RowsMismatch);
}
let cols = self.num_cols();
let pd = self.as_mut_ptr();
let ps = vec.as_ptr();
let stride = self.stride() as isize;
let mut offset = self.start_offset();
for _ in 0..cols{
for r in 0..rows as isize{
unsafe{
let v1 = *pd.offset(offset + r);
let v2 = *ps.offset(r);
*pd.offset(offset + r) = v1 + v2;
}
}
offset += stride;
}
Ok(())
}
    /// Add a vector to each row
fn add_vec_to_rows(&mut self, vec: &Matrix<T>)->SRResult<()>{
if ! vec.is_row() {
return Err(SRError::IsNotARowVector);
}
let cols = self.num_cols();
if vec.num_cols() != cols {
return Err(SRError::ColsMismatch);
}
let rows = self.num_rows();
let pd = self.as_mut_ptr();
let ps = vec.as_ptr();
let stride = self.stride() as isize;
let mut offset = self.start_offset();
for c in 0..cols{
let v2 = unsafe{*ps.offset(c as isize)};
for r in 0..rows as isize{
unsafe{
let v1 = *pd.offset(offset + r);
*pd.offset(offset + r) = v1 + v2;
}
}
offset += stride;
}
Ok(())
}
    /// Multiply each column element-wise by a vector
fn mul_vec_to_cols(&mut self, vec: &Matrix<T>)->SRResult<()>{
if ! vec.is_col() {
return Err(SRError::IsNotAColVector);
}
let rows = self.num_rows();
if vec.num_rows() != rows {
return Err(SRError::RowsMismatch);
}
let cols = self.num_cols();
let pd = self.as_mut_ptr();
let ps = vec.as_ptr();
let stride = self.stride() as isize;
let mut offset = self.start_offset();
for _ in 0..cols{
for r in 0..rows as isize{
unsafe{
let v1 = *pd.offset(offset + r);
let v2 = *ps.offset(r);
*pd.offset(offset + r) = v1 * v2;
}
}
offset += stride;
}
Ok(())
}
    /// Multiply each row element-wise by a vector
fn mul_vec_to_rows(&mut self, vec: &Matrix<T>)->SRResult<()>{
if ! vec.is_row() {
return Err(SRError::IsNotARowVector);
}
let cols = self.num_cols();
if vec.num_cols() != cols {
return Err(SRError::ColsMismatch);
}
let rows = self.num_rows();
let pd = self.as_mut_ptr();
let ps = vec.as_ptr();
let stride = self.stride() as isize;
let mut offset = self.start_offset();
for c in 0..cols{
let v2 = unsafe{*ps.offset(c as isize)};
for r in 0..rows as isize{
unsafe{
let v1 = *pd.offset(offset + r);
*pd.offset(offset + r) = v1 * v2;
}
}
offset += stride;
}
Ok(())
}
fn ut_to_lt(&mut self)->&mut Matrix<T>{
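        // Mirrors the upper triangular entries into the lower triangle so that
        // the matrix becomes symmetric about its main diagonal.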
let p = self.smaller_dim();
let ptr = self.as_mut_ptr();
let stride = self.stride() as isize;
let mut psrc_col = ptr;
unsafe{
for c in 1..p{
psrc_col = psrc_col.offset(stride);
let mut pdst_row = ptr.offset(c as isize);
for r in 0..c{
*pdst_row = *psrc_col.offset(r as isize);
pdst_row = pdst_row.offset(stride);
}
}
}
self
}
}
/// Implementation of Matrix general copy and update operations.
/// TODO Optimize implementations.
impl<T:MagmaBase + Num> CopyUpdates<T> for Matrix<T> {
/// Add the matrix by a scalar
/// Returns a new matrix
fn copy_add_scalar(&self, rhs: T) -> Matrix<T> {
let rows = self.num_rows();
let cols = self.num_cols();
let mut result : Matrix<T> = Matrix::new(rows, cols);
let pa = self.as_ptr();
let pc = result.as_mut_ptr();
for r in 0..rows{
for c in 0..cols{
let offset = self.cell_to_offset(r, c);
unsafe {
*pc.offset(offset) = *pa.offset(offset) + rhs;
}
}
}
result
}
/// Multiply the matrix by a scalar
/// Returns a new matrix
fn copy_mul_scalar(&self, rhs: T) -> Matrix<T> {
let rows = self.num_rows();
let cols = self.num_cols();
let mut result : Matrix<T> = Matrix::new(rows, cols);
let pa = self.as_ptr();
let pc = result.as_mut_ptr();
for r in 0..rows{
for c in 0..cols{
let offset = self.cell_to_offset(r, c);
unsafe {
*pc.offset(offset) = *pa.offset(offset) * rhs;
}
}
}
result
}
/// Divide the matrix by a scalar
/// Returns a new matrix
fn copy_div_scalar(&self, rhs: T) -> Matrix<T> {
let rows = self.num_rows();
let cols = self.num_cols();
let mut result : Matrix<T> = Matrix::new(rows, cols);
let pa = self.as_ptr();
let pc = result.as_mut_ptr();
for r in 0..rows{
for c in 0..cols{
let offset = self.cell_to_offset(r, c);
unsafe {
*pc.offset(offset) = *pa.offset(offset) / rhs;
}
}
}
result
}
/// Subtract a vector from each column
fn copy_sub_vec_from_cols(&self, vec: &Matrix<T>)->SRResult<Matrix<T>>{
let mut m = self.clone();
let result = m.sub_vec_from_cols(vec);
match result {
Err(code) => Err(code),
Ok(_) => Ok(m)
}
}
/// Subtract a vector from each row
fn copy_sub_vec_from_rows(&self, vec: &Matrix<T>)->SRResult<Matrix<T>>{
let mut m = self.clone();
let result = m.sub_vec_from_rows(vec);
match result {
Err(code) => Err(code),
Ok(_) => Ok(m)
}
}
    /// Add a vector to each column, returning a new matrix
fn copy_add_vec_to_cols(&self, vec: &Matrix<T>)->SRResult<Matrix<T>>{
let mut m = self.clone();
let result = m.add_vec_to_cols(vec);
match result {
Err(code) => Err(code),
Ok(_) => Ok(m)
}
}
    /// Add a vector to each row, returning a new matrix
fn copy_add_vec_to_rows(&self, vec: &Matrix<T>)->SRResult<Matrix<T>>{
let mut m = self.clone();
let result = m.add_vec_to_rows(vec);
match result {
Err(code) => Err(code),
Ok(_) => Ok(m)
}
}
    /// Multiply each column element-wise by a vector, returning a new matrix
fn copy_mul_vec_to_cols(&self, vec: &Matrix<T>)->SRResult<Matrix<T>>{
let mut m = self.clone();
let result = m.mul_vec_to_cols(vec);
match result {
Err(code) => Err(code),
Ok(_) => Ok(m)
}
}
    /// Multiply each row element-wise by a vector, returning a new matrix
fn copy_mul_vec_to_rows(&self, vec: &Matrix<T>)->SRResult<Matrix<T>>{
let mut m = self.clone();
let result = m.mul_vec_to_rows(vec);
match result {
Err(code) => Err(code),
Ok(_) => Ok(m)
}
}
}
/******************************************************
*
* Unit tests
*
*******************************************************/
#[cfg(test)]
mod test{
use api::*;
#[test]
fn test_set_diag(){
let mut m = from_range_rw_f64(4, 4, 1., 100.);
m.set_diagonal(0.);
let m2 = matrix_rw_f64(4, 4, &[
0., 2., 3., 4.,
5., 0., 7., 8.,
9., 10., 0., 12.,
13., 14., 15., 0.,
]);
assert_eq!(m, m2);
m.set_row(0, 1.);
let m2 = matrix_rw_f64(4, 4, &[
1., 1., 1., 1.,
5., 0., 7., 8.,
9., 10., 0., 12.,
13., 14., 15., 0.,
]);
assert_eq!(m, m2);
m.set_col(2, 20.);
let m2 = matrix_rw_f64(4, 4, &[
1., 1., 20., 1.,
5., 0., 20., 8.,
9., 10., 20., 12.,
13., 14., 20., 0.,
]);
assert_eq!(m, m2);
m.set_block(2, 2, 2, 2, 30.);
let m2 = matrix_rw_f64(4, 4, &[
1., 1., 20., 1.,
5., 0., 20., 8.,
9., 10., 30., 30.,
13., 14., 30., 30.,
]);
assert_eq!(m, m2);
}
#[test]
fn test_scale_row_lt(){
let mut m = matrix_rw_i64(3, 3, &[
1, 2, 3,
4, 5, 6,
7, 8, 9
]);
let m2 = matrix_rw_i64(3, 3, &[
1, 2, 3,
4, 5, 6,
21, 24, 27
]);
m.scale_row_lt(2, 3);
assert_eq!(m, m2);
}
#[test]
fn test_scale_col_lt(){
let mut m = matrix_rw_i64(3, 3, &[
1, 2, 3,
4, 5, 6,
7, 8, 9
]);
let m2 = matrix_rw_i64(3, 3, &[
1, 2, 3,
4, 5, 6,
7, 8, 27
]);
m.scale_col_lt(2, 3);
assert_eq!(m, m2);
let m2 = matrix_rw_i64(3, 3, &[
1, 2, 3,
4, 10, 6,
7, 16, 27
]);
m.scale_col_lt(1, 2);
assert_eq!(m, m2);
}
#[test]
fn test_scale_row_ut(){
let mut m = matrix_rw_i64(3, 3, &[
1, 2, 3,
4, 5, 6,
7, 8, 9
]);
let m2 = matrix_rw_i64(3, 3, &[
1, 2, 3,
4, 5, 6,
7, 8, 27
]);
m.scale_row_ut(2, 3);
assert_eq!(m, m2);
m.scale_row_ut(1, 2);
let m2 = matrix_rw_i64(3, 3, &[
1, 2, 3,
4, 10, 12,
7, 8, 27
]);
assert_eq!(m, m2);
}
#[test]
fn test_scale_col_ut(){
let mut m = matrix_rw_i64(3, 3, &[
1, 2, 3,
4, 5, 6,
7, 8, 9
]);
let m2 = matrix_rw_i64(3, 3, &[
1, 2, 9,
4, 5, 18,
7, 8, 27
]);
m.scale_col_ut(2, 3);
assert_eq!(m, m2);
let m2 = matrix_rw_i64(3, 3, &[
1, 4, 9,
4, 10, 18,
7, 8, 27
]);
m.scale_col_ut(1, 2);
assert_eq!(m, m2);
}
#[test]
fn test_scale_rows(){
let mut m = matrix_rw_i64(4, 3, &[
1, 2, 3,
4, 5, 6,
7, 8, 9,
10, 11, 12
]);
let factors = vector_i64(&[1, 2, 3, 4]);
m.scale_rows(&factors);
let m2 = matrix_rw_i64(4, 3, &[
1, 2, 3,
8, 10, 12,
21, 24, 27,
40, 44, 48
]);
assert_eq!(m, m2);
}
#[test]
fn test_scale_cols(){
let mut m = matrix_rw_i64(2, 3, &[
1, 2, 3,
4, 5, 6,
]);
let factors = vector_i64(&[1, 2, 3]);
m.scale_cols(&factors);
let m2 = matrix_rw_i64(2, 3, &[
1, 4, 9,
4, 10, 18,
]);
assert_eq!(m, m2);
}
#[test]
fn test_add_scalar (){
let m : MatrixI64 = Matrix::from_iter_cw(2, 2, (0..4));
let mut m2 = m.copy_add_scalar(2);
assert_eq!(m2.to_std_vec(), vec![2, 3, 4, 5]);
m2.add_scalar(3);
assert_eq!(m2.to_std_vec(), vec![5, 6, 7, 8]);
}
#[test]
fn test_mul_scalar (){
let m : MatrixI64 = Matrix::from_iter_cw(2, 2, (0..4));
let mut m2 = m.copy_mul_scalar(2);
assert_eq!(m2.to_std_vec(), vec![0, 2, 4, 6]);
m2.mul_scalar(3);
assert_eq!(m2.to_std_vec(), vec![0, 6, 12, 18]);
}
#[test]
fn test_div_scalar (){
let m : MatrixI64 = Matrix::from_iter_cw(2, 2, (0..4).map(|x| x * 3));
let mut m2 = m.copy_div_scalar(3);
assert_eq!(m2.to_std_vec(), vec![0, 1, 2, 3]);
m2.mul_scalar(3);
m2.div_scalar(3).unwrap();
assert_eq!(m2.to_std_vec(), vec![0, 1, 2, 3]);
}
#[test]
fn test_sub_vec_from_cols(){
let mut m = matrix_rw_i64(2, 3, &[
1, 2, 3,
4, 5, 6,
]);
let v = vector_i64(&[1, 2]);
assert!(m.sub_vec_from_cols(&v).is_ok());
let m2 = matrix_rw_i64(2, 3, &[
0, 1, 2,
2, 3, 4,
]);
assert_eq!(m, m2);
}
#[test]
fn test_copy_sub_vec_from_cols(){
let m = matrix_rw_i64(2, 3, &[
1, 2, 3,
4, 5, 6,
]);
let v = vector_i64(&[1, 2]);
let m = m.copy_sub_vec_from_cols(&v).unwrap();
let m2 = matrix_rw_i64(2, 3, &[
0, 1, 2,
2, 3, 4,
]);
assert_eq!(m, m2);
}
#[test]
fn test_sub_vec_from_rows(){
let mut m = matrix_rw_i64(2, 3, &[
1, 2, 3,
4, 5, 6,
]);
let v = vector_i64(&[-1, -2, 3]);
assert!(m.sub_vec_from_rows(&v.transpose()).is_ok());
let m2 = matrix_rw_i64(2, 3, &[
2, 4, 0,
5, 7, 3,
]);
assert_eq!(m, m2);
}
#[test]
fn test_copy_sub_vec_from_rows(){
let m = matrix_rw_i64(2, 3, &[
1, 2, 3,
4, 5, 6,
]);
let v = vector_i64(&[-1, -2, 3]);
let m = m.copy_sub_vec_from_rows(&v.transpose()).unwrap();
let m2 = matrix_rw_i64(2, 3, &[
2, 4, 0,
5, 7, 3,
]);
assert_eq!(m, m2);
}
#[test]
fn test_add_vec_to_cols(){
let mut m = matrix_rw_i64(2, 3, &[
1, 2, 3,
4, 5, 6,
]);
let v = vector_i64(&[-1, -2]);
assert!(m.add_vec_to_cols(&v).is_ok());
let m2 = matrix_rw_i64(2, 3, &[
0, 1, 2,
2, 3, 4,
]);
assert_eq!(m, m2);
}
#[test]
fn test_copy_add_vec_to_cols(){
let m = matrix_rw_i64(2, 3, &[
1, 2, 3,
4, 5, 6,
]);
let v = vector_i64(&[-1, -2]);
let m = m.copy_add_vec_to_cols(&v).unwrap();
let m2 = matrix_rw_i64(2, 3, &[
0, 1, 2,
2, 3, 4,
]);
assert_eq!(m, m2);
}
#[test]
fn test_add_vec_to_rows(){
let mut m = matrix_rw_i64(2, 3, &[
1, 2, 3,
4, 5, 6,
]);
let v = vector_i64(&[-1, -2, 3]);
assert!(m.add_vec_to_rows(&v.transpose()).is_ok());
let m2 = matrix_rw_i64(2, 3, &[
0, 0, 6,
3, 3, 9,
]);
assert_eq!(m, m2);
}
#[test]
fn test_copy_add_vec_to_rows(){
let m = matrix_rw_i64(2, 3, &[
1, 2, 3,
4, 5, 6,
]);
let v = vector_i64(&[-1, -2, 3]);
let m = m.copy_add_vec_to_rows(&v.transpose()).unwrap();
let m2 = matrix_rw_i64(2, 3, &[
0, 0, 6,
3, 3, 9,
]);
assert_eq!(m, m2);
}
#[test]
fn test_mul_vec_to_cols(){
let mut m = matrix_rw_i64(2, 3, &[
1, 2, 3,
4, 5, 6,
]);
let v = vector_i64(&[-1, -2]);
assert!(m.mul_vec_to_cols(&v).is_ok());
let m2 = matrix_rw_i64(2, 3, &[
-1, -2, -3,
-8, -10, -12,
]);
assert_eq!(m, m2);
}
#[test]
fn test_copy_mul_vec_to_cols(){
let m = matrix_rw_i64(2, 3, &[
1, 2, 3,
4, 5, 6,
]);
let v = vector_i64(&[-1, -2]);
let m = m.copy_mul_vec_to_cols(&v).unwrap();
let m2 = matrix_rw_i64(2, 3, &[
-1, -2, -3,
-8, -10, -12,
]);
assert_eq!(m, m2);
}
#[test]
fn test_mul_vec_to_rows(){
let mut m = matrix_rw_i64(2, 3, &[
1, 2, 3,
4, 5, 6,
]);
let v = vector_i64(&[-1, -2, 3]);
assert!(m.mul_vec_to_rows(&v.transpose()).is_ok());
let m2 = matrix_rw_i64(2, 3, &[
-1, -4, 9,
-4, -10, 18
]);
assert_eq!(m, m2);
}
#[test]
fn test_copy_mul_vec_to_rows(){
let m = matrix_rw_i64(2, 3, &[
1, 2, 3,
4, 5, 6,
]);
let v = vector_i64(&[-1, -2, 3]);
let m = m.copy_mul_vec_to_rows(&v.transpose()).unwrap();
let m2 = matrix_rw_i64(2, 3, &[
-1, -4, 9,
-4, -10, 18
]);
assert_eq!(m, m2);
}
#[test]
fn test_ut_to_lt(){
let mut m = matrix_rw_i64(3, 3, &[
1, 2, 3,
0, 4, 5,
0, 0, 6
]);
println!("{}", m);
m.ut_to_lt();
println!("{}", m);
assert!(m.is_symmetric());
}
}
/******************************************************
*
* Bench marks
*
*******************************************************/
#[cfg(test)]
mod bench{
extern crate test;
use self::test::Bencher;
use matrix::constructors::*;
use matrix::traits::*;
#[bench]
fn bench_ut_to_lt(b: &mut Bencher){
let a = hadamard(4096).unwrap();
let mut ut = a.ut_matrix();
b.iter(|| {
ut.ut_to_lt();
});
}
}<|fim▁end|> | } |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013-2014 Will Thames <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import glob
import imp
import ansible.utils
from ansible.playbook.task import Task
import ansible.constants as C
from ansible.module_utils.splitter import split_args
import yaml
from yaml.composer import Composer
from yaml.constructor import Constructor
LINE_NUMBER_KEY = '__line__'
def load_plugins(directory):
result = []
fh = None
for pluginfile in glob.glob(os.path.join(directory, '[A-Za-z]*.py')):
pluginname = os.path.basename(pluginfile.replace('.py', ''))
try:
fh, filename, desc = imp.find_module(pluginname, [directory])
mod = imp.load_module(pluginname, fh, filename, desc)
obj = getattr(mod, pluginname)()
result.append(obj)
finally:
if fh:
fh.close()
return result
def tokenize(line):
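    # Split an action line such as "command arg k=v" into the module name,
    # positional arguments and keyword arguments.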
result = list()
tokens = line.lstrip().split(" ")
if tokens[0] == '-':
tokens = tokens[1:]
if tokens[0] == 'action:' or tokens[0] == 'local_action:':
tokens = tokens[1:]
command = tokens[0].replace(":", "")
args = list()
kwargs = dict()
for arg in tokens[1:]:
if "=" in arg:
kv = arg.split("=", 1)
kwargs[kv[0]] = kv[1]
else:
args.append(arg)
return (command, args, kwargs)
def _playbook_items(pb_data):
if isinstance(pb_data, dict):
return pb_data.items()
elif not pb_data:
return []
else:
return [item for play in pb_data for item in play.items()]
def find_children(playbook):
if not os.path.exists(playbook[0]):
return []
results = []
basedir = os.path.dirname(playbook[0])
pb_data = ansible.utils.parse_yaml_from_file(playbook[0])
items = _playbook_items(pb_data)
for item in items:
for child in play_children(basedir, item, playbook[1]):
if "$" in child['path'] or "{{" in child['path']:
continue
valid_tokens = list()
for token in split_args(child['path']):
if '=' in token:
break
valid_tokens.append(token)
path = ' '.join(valid_tokens)
results.append({
'path': ansible.utils.path_dwim(basedir, path),
'type': child['type']
})
return results
def play_children(basedir, item, parent_type):
delegate_map = {
'tasks': _taskshandlers_children,
'pre_tasks': _taskshandlers_children,
'post_tasks': _taskshandlers_children,
'include': _include_children,
'roles': _roles_children,
'dependencies': _roles_children,
'handlers': _taskshandlers_children,
}
(k, v) = item
if k in delegate_map:
if v:
return delegate_map[k](basedir, k, v, parent_type)
return []
def _include_children(basedir, k, v, parent_type):
return [{'path': ansible.utils.path_dwim(basedir, v), 'type': parent_type}]
def _taskshandlers_children(basedir, k, v, parent_type):
return [{'path': ansible.utils.path_dwim(basedir, th['include']),
'type': 'tasks'}
for th in v if 'include' in th]
def _roles_children(basedir, k, v, parent_type):
results = []
for role in v:
if isinstance(role, dict):
results.extend(_look_for_role_files(basedir, role['role']))
else:
results.extend(_look_for_role_files(basedir, role))
return results
def _rolepath(basedir, role):
role_path = None
possible_paths = [
# if included from a playbook
ansible.utils.path_dwim(basedir, os.path.join('roles', role)),
ansible.utils.path_dwim(basedir, role),
# if included from roles/[role]/meta/main.yml
ansible.utils.path_dwim(
basedir, os.path.join('..', '..', '..', 'roles', role)
),
ansible.utils.path_dwim(basedir,
os.path.join('..', '..', role))
]
if C.DEFAULT_ROLES_PATH:
search_locations = C.DEFAULT_ROLES_PATH.split(os.pathsep)
for loc in search_locations:
loc = os.path.expanduser(loc)
possible_paths.append(ansible.utils.path_dwim(loc, role))
for path_option in possible_paths:
if os.path.isdir(path_option):
role_path = path_option
break
return role_path
def _look_for_role_files(basedir, role):
role_path = _rolepath(basedir, role)
if not role_path:
return []
results = []
for th in ['tasks', 'handlers', 'meta']:
for ext in ('.yml', '.yaml'):
thpath = os.path.join(role_path, th, 'main' + ext)
if os.path.exists(thpath):
results.append({'path': thpath, 'type': th})
break
return results
def rolename(filepath):
idx = filepath.find('roles/')
if idx < 0:
return ''
role = filepath[idx+6:]
role = role[:role.find('/')]
return role
def _kv_to_dict(v):
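    # Convert a "module arg k=v" action string into a normalized dict with
    # 'module' and 'module_arguments' keys.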
(command, args, kwargs) = tokenize(v)
return (dict(module=command, module_arguments=args, **kwargs))
def normalize_task(task):
''' ensures that all tasks have an action key<|fim▁hole|> result = dict()
for (k, v) in task.items():
if k in Task.VALID_KEYS or k.startswith('with_'):
if k == 'local_action' or k == 'action':
if not isinstance(v, dict):
v = _kv_to_dict(v)
v['module_arguments'] = v.get('module_arguments', list())
result['action'] = v
else:
result[k] = v
else:
if isinstance(v, basestring):
v = _kv_to_dict(k + ' ' + v)
elif not v:
v = dict(module=k)
else:
if isinstance(v, dict):
v.update(dict(module=k))
else:
if k == '__line__':
# Keep the line number stored
result[k] = v
continue
else:
# Should not get here!
print "Was not expecting value %s of type %s for key %s" % (str(v), type(v), k)
print "Task: %s" % str(task)
exit(1)
v['module_arguments'] = v.get('module_arguments', list())
result['action'] = v
return result
def task_to_str(task):
name = task.get("name")
if name:
return name
action = task.get("action")
args = " ".join(["k=v" for (k, v) in action.items() if k != "module_arguments"] +
action.get("module_arguments"))
return "{0} {1}".format(action["module"], args)
def get_action_tasks(yaml, file):
tasks = list()
if file['type'] in ['tasks', 'handlers']:
tasks = yaml
else:
for block in yaml:
for section in ['tasks', 'handlers', 'pre_tasks', 'post_tasks']:
if section in block:
block_tasks = block.get(section) or []
tasks.extend(block_tasks)
return [normalize_task(task) for task in tasks
if 'include' not in task.keys()]
def parse_yaml_linenumbers(data):
"""Parses yaml as ansible.utils.parse_yaml but with linenumbers.
The line numbers are stored in each node's LINE_NUMBER_KEY key"""
loader = yaml.Loader(data)
def compose_node(parent, index):
# the line number where the previous token has ended (plus empty lines)
line = loader.line
node = Composer.compose_node(loader, parent, index)
node.__line__ = line + 1
return node
def construct_mapping(node, deep=False):
mapping = Constructor.construct_mapping(loader, node, deep=deep)
mapping[LINE_NUMBER_KEY] = node.__line__
return mapping
loader.compose_node = compose_node
loader.construct_mapping = construct_mapping
data = loader.get_single_data()
return data<|fim▁end|> | and that string values are converted to python objects '''
|
<|file_name|>plotter2.py<|end_file_name|><|fim▁begin|>import matplotlib.pyplot as pl
import numpy as np
import math
from matplotlib.collections import LineCollection
from matplotlib.colors import colorConverter
def plot(X, m, x_star, t, z_t):
fig = pl.figure(figsize=(10,10))
# Draw the grid first
ax = pl.axes()
ax.set_xlim(-4,20)
ax.set_ylim(-4,20)
ax.xaxis.set_major_locator(pl.MultipleLocator(5.0))
ax.xaxis.set_minor_locator(pl.MultipleLocator(1.0))
ax.yaxis.set_major_locator(pl.MultipleLocator(5.0))
ax.yaxis.set_minor_locator(pl.MultipleLocator(1.0))
ax.grid(which='major', axis='x', linewidth=0.75, linestyle='-', color='0.75')
ax.grid(which='minor', axis='x', linewidth=0.25, linestyle='-', color='0.75')
ax.grid(which='major', axis='y', linewidth=0.75, linestyle='-', color='0.75')
ax.grid(which='minor', axis='y', linewidth=0.25, linestyle='-', color='0.75')
# Draw map
for y, row in enumerate(m):
for x, cell in enumerate(row):
if (cell == 'W'):
rect = pl.Rectangle((x,y), 1, 1, fill=True,color='#cacaca')
ax.add_patch(rect)
# Draw the robot and its direction
x,y,theta = x_star['x'], x_star['y'], x_star['theta']
dx = 1 * math.cos(theta)
dy = 1 * math.sin(theta)
ax.arrow(x,y,dx,dy, head_width=.4, head_length=0.5, length_includes_head=True)
circle = pl.Circle((x, y), radius=0.35, fc='y')
ax.add_patch(circle)
# Draw information
directions = 'n nw w sw s se e ne'.split()
title_arr = []
#print z_t<|fim▁hole|> for direction in directions:
#print z_t[direction]
title_arr.append("%s: %4.2f" % (direction, z_t[direction]))
ax.set_title('; '.join(title_arr))
#print X
xs = [xx[0]['x'] for xx in X]
ys = [xx[0]['y'] for xx in X]
pl.scatter(xs, ys)
return fig<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.contrib.auth.models import User, Group
import uuid
import davvy
import davvy.exceptions
from lxml import etree
# Create your models here.
class Resource(models.Model):
def generate_uuid():
return str(uuid.uuid4())
user = models.ForeignKey(User)
groups = models.ManyToManyField(Group, null=True, blank=True)
parent = models.ForeignKey('Resource', null=True, blank=True)
name = models.CharField(max_length=255)
collection = models.BooleanField(default=False)
uuid = models.CharField(max_length=36, default=generate_uuid)
content_type = models.CharField(max_length=255, blank=True, null=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
size = models.BigIntegerField(default=0)
protected = models.BooleanField(default=False)
# pretty ugly, but should help viewing the full names
def __unicode__(self):
parts = []
parent = self.parent
while True:
if not parent:<|fim▁hole|> return '/' + '/'.join(parts)
def del_prop(self, dav, request, name):
try:
model_prop = self.prop_set.get(name=name)
model_prop.delete()
except Prop.DoesNotExist:
# removing a non existent property is not an error
pass
def get_prop(self, dav, request, name):
if name in davvy.props_get:
value = davvy.props_get[name](dav, request, self)
if value is not None:
return value
raise davvy.exceptions.Forbidden()
try:
model_prop = self.prop_set.get(name=name)
if model_prop.is_xml:
return etree.fromstring(model_prop.value)
return model_prop.value
except Prop.DoesNotExist:
raise davvy.exceptions.NotFound()
def set_prop(self, dav, request, name, value):
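        # Store the property: registered setters get first chance, otherwise the
        # value is persisted, serializing any XML children back to a string.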
if name in davvy.props_set:
e = davvy.props_set[name](dav, request, self, value)
if isinstance(e, Exception):
raise e
else:
try:
prop = self.prop_set.get(name=name)
except Prop.DoesNotExist:
prop = self.prop_set.create(name=name)
if len(value):
prop.value = '\n'.join(
[etree.tostring(children, pretty_print=True)
for children
in value]
)
prop.is_xml = True
elif value.text is not None:
prop.value = value.text
prop.is_xml = False
prop.save()
return self.get_prop(dav, request, name)
@property
def displayname(self):
try:
prop = self.prop_set.get(name='{DAV:}displayname')
return prop.value
except:
return ''
@property
def progenitor(self):
parent = self.parent
while parent and parent.parent:
parent = parent.parent
return parent
def properties(self, dav, request, requested_props):
propstat = []
for prop in requested_props:
try:
value = self.get_prop(dav, request, prop)
status = '200 OK'
except Exception as e:
value = None
if hasattr(e, 'status'):
status = e.status
else:
status = '500 Internal Server Error'
propstat.append((prop, ) + (value, status))
return propstat
class Meta:
unique_together = ('user', 'parent', 'name')
class Prop(models.Model):
resource = models.ForeignKey(Resource)
name = models.CharField(max_length=255)
value = models.TextField(blank=True, null=True)
is_xml = models.BooleanField(default=False)
def __unicode__(self):
return self.name
class Meta:
unique_together = ('resource', 'name')<|fim▁end|> | break
parts.insert(0, Resource.objects.get(pk=parent.id).name)
parent = parent.parent
parts.append(self.name) |
<|file_name|>win.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Windows console handling
// FIXME (#13400): this is only a tiny fraction of the win32 console api
extern crate libc;
use std::io::IoResult;
use attr;
use color;
use Terminal;
/// A Terminal implementation which uses the Win32 Console API.
pub struct WinConsole<T> {
buf: T,
foreground: color::Color,
background: color::Color,
}
#[allow(non_snake_case_functions)]
#[link(name = "kernel32")]
extern "system" {
fn SetConsoleTextAttribute(handle: libc::HANDLE, attr: libc::WORD) -> libc::BOOL;
fn GetStdHandle(which: libc::DWORD) -> libc::HANDLE;
}
fn color_to_bits(color: color::Color) -> u16 {
// magic numbers from mingw-w64's wincon.h
let bits = match color % 8 {
color::BLACK => 0,
color::BLUE => 0x1,
color::GREEN => 0x2,
color::RED => 0x4,
color::YELLOW => 0x2 | 0x4,
color::MAGENTA => 0x1 | 0x4,
color::CYAN => 0x1 | 0x2,
color::WHITE => 0x1 | 0x2 | 0x4,<|fim▁hole|> if color >= 8 {
bits | 0x8
} else {
bits
}
}
impl<T: Writer> WinConsole<T> {
fn apply(&mut self) {
let _unused = self.buf.flush();
let mut accum: libc::WORD = 0;
accum |= color_to_bits(self.foreground);
accum |= color_to_bits(self.background) << 4;
unsafe {
// Magic -11 means stdout, from
// http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231%28v=vs.85%29.aspx
//
// You may be wondering, "but what about stderr?", and the answer
// to that is that setting terminal attributes on the stdout
// handle also sets them for stderr, since they go to the same
// terminal! Admittedly, this is fragile, since stderr could be
// redirected to a different console. This is good enough for
// rustc though. See #13400.
let out = GetStdHandle(-11);
SetConsoleTextAttribute(out, accum);
}
}
}
impl<T: Writer> Writer for WinConsole<T> {
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
self.buf.write(buf)
}
fn flush(&mut self) -> IoResult<()> {
self.buf.flush()
}
}
impl<T: Writer> Terminal<T> for WinConsole<T> {
fn new(out: T) -> Option<WinConsole<T>> {
Some(WinConsole { buf: out, foreground: color::WHITE, background: color::BLACK })
}
fn fg(&mut self, color: color::Color) -> IoResult<bool> {
self.foreground = color;
self.apply();
Ok(true)
}
fn bg(&mut self, color: color::Color) -> IoResult<bool> {
self.background = color;
self.apply();
Ok(true)
}
fn attr(&mut self, attr: attr::Attr) -> IoResult<bool> {
match attr {
attr::ForegroundColor(f) => {
self.foreground = f;
self.apply();
Ok(true)
},
attr::BackgroundColor(b) => {
self.background = b;
self.apply();
Ok(true)
},
_ => Ok(false)
}
}
fn supports_attr(&self, attr: attr::Attr) -> bool {
// it claims support for underscore and reverse video, but I can't get
// it to do anything -cmr
match attr {
attr::ForegroundColor(_) | attr::BackgroundColor(_) => true,
_ => false
}
}
fn reset(&mut self) -> IoResult<()> {
self.foreground = color::WHITE;
self.background = color::BLACK;
self.apply();
Ok(())
}
fn unwrap(self) -> T { self.buf }
fn get_ref<'a>(&'a self) -> &'a T { &self.buf }
fn get_mut<'a>(&'a mut self) -> &'a mut T { &mut self.buf }
}<|fim▁end|> | _ => unreachable!()
};
|
<|file_name|>winPhpAdd.js<|end_file_name|><|fim▁begin|>Ext.define('Rd.view.i18n.winPhpAdd', {
extend: 'Ext.window.Window',
alias : 'widget.winPhpAdd',
title : i18n('sAdd_Msgid'),
layout: 'fit',
autoShow: true,
width: 300,
iconCls: 'add',
glyph: Rd.config.icnAdd,
initComponent: function() {
var me = this;
me.items = [
{
xtype: 'form',
border: false,
layout: 'anchor',
width: '100%',
flex: 1,
defaults: {
anchor: '100%'
},
fieldDefaults: {
msgTarget: 'under',
labelClsExtra: 'lblRd',
labelAlign: 'top',
labelSeparator: '',
margin: 15
},
defaultType: 'textfield',
items: [
{
xtype: 'textfield',
fieldLabel: i18n('sMsgid'),
name : "msgid",
allowBlank:false,
blankText: i18n('sSpecify_a_valid_name_please')
},
{<|fim▁hole|> xtype: 'textfield',
fieldLabel: i18n('sMsgstr'),
name : "msgstr",
allowBlank:true
},
{
xtype : 'textfield',
name : "comment",
fieldLabel: i18n('sOptional_Comment')
}],
buttons: [
{
itemId: 'save',
text: i18n('sOK'),
scale: 'large',
iconCls: 'b-btn_ok',
glyph: Rd.config.icnYes,
formBind: true,
margin: '0 20 40 0'
}
]
}
];
me.callParent(arguments);
}
});<|fim▁end|> | |
<|file_name|>shadowmaps.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright 2016 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include "gm.h"
#include "SkPathEffect.h"
#include "SkPictureRecorder.h"
#include "SkShadowPaintFilterCanvas.h"
#include "SkShadowShader.h"
#include "SkSurface.h"
#ifdef SK_EXPERIMENTAL_SHADOWING
static sk_sp<SkPicture> make_test_picture(int width, int height) {
SkPictureRecorder recorder;
// LONG RANGE TODO: eventually add SkBBHFactory (bounding box factory)
SkCanvas* canvas = recorder.beginRecording(SkRect::MakeIWH(width, height));
SkASSERT(canvas->getTotalMatrix().isIdentity());
SkPaint paint;
paint.setColor(SK_ColorGRAY);
// LONG RANGE TODO: tag occluders
// LONG RANGE TODO: track number of IDs we need (hopefully less than 256)
// and determinate the mapping from z to id
// universal receiver, "ground"
canvas->drawRect(SkRect::MakeIWH(width, height), paint);
// TODO: Maybe add the ID here along with the depth
paint.setColor(0xFFEE8888);
canvas->translateZ(80);
canvas->drawRect(SkRect::MakeLTRB(200,150,350,300), paint);
paint.setColor(0xFF88EE88);
canvas->translateZ(80);
canvas->drawRect(SkRect::MakeLTRB(150,200,300,350), paint);
paint.setColor(0xFF8888EE);
canvas->translateZ(80);
canvas->drawRect(SkRect::MakeLTRB(100,100,250,250), paint);
// TODO: Add an assert that Z order matches painter's order
// TODO: think about if the Z-order always matching painting order is too strict
return recorder.finishRecordingAsPicture();
}
namespace skiagm {
class ShadowMapsGM : public GM {
public:
ShadowMapsGM() {
this->setBGColor(sk_tool_utils::color_to_565(0xFFCCCCCC));
}
void onOnceBeforeDraw() override {
// Create a light set consisting of
// - bluish directional light pointing more right than down
// - reddish directional light pointing more down than right
// - soft white ambient light
SkLights::Builder builder;
builder.add(SkLights::Light::MakeDirectional(SkColor3f::Make(0.2f, 0.3f, 0.4f),<|fim▁hole|> fLights = builder.finish();
fShadowParams.fShadowRadius = 4.0f;
fShadowParams.fBiasingConstant = 0.3f;
fShadowParams.fMinVariance = 1024;
fShadowParams.fType = SkShadowParams::kVariance_ShadowType;
}
protected:
static constexpr int kWidth = 400;
static constexpr int kHeight = 400;
SkString onShortName() override {
return SkString("shadowmaps");
}
SkISize onISize() override {
return SkISize::Make(kWidth, kHeight);
}
void onDraw(SkCanvas* canvas) override {
// This picture stores the picture of the scene.
// It's used to generate the depth maps.
sk_sp<SkPicture> pic(make_test_picture(kWidth, kHeight));
canvas->setLights(fLights);
canvas->drawShadowedPicture(pic, nullptr, nullptr, fShadowParams);
}
private:
sk_sp<SkLights> fLights;
SkShadowParams fShadowParams;
typedef GM INHERITED;
};
//////////////////////////////////////////////////////////////////////////////
DEF_GM(return new ShadowMapsGM;)
}
#endif<|fim▁end|> | SkVector3::Make(0.2f, 0.1f, 1.0f)));
builder.add(SkLights::Light::MakeDirectional(SkColor3f::Make(0.4f, 0.3f, 0.2f),
SkVector3::Make(0.1f, 0.2f, 1.0f)));
builder.setAmbientLightColor(SkColor3f::Make(0.4f, 0.4f, 0.4f)); |
<|file_name|>NQueens.java<|end_file_name|><|fim▁begin|>import net.sf.javabdd.*;
/**
* @author John Whaley
*/
public class NQueens {
static BDDFactory B;
static boolean TRACE;
static int N; /* Size of the chess board */
static BDD[][] X; /* BDD variable array */
static BDD queen; /* N-queen problem expressed as a BDD */
static BDD solution; /* One solution */
public static void main(String[] args) {
if (args.length != 1) {
System.err.println("USAGE: java NQueens N");
return;
}
N = Integer.parseInt(args[0]);
if (N <= 0) {
System.err.println("USAGE: java NQueens N");
return;
}
TRACE = true;
long time = System.currentTimeMillis();
runTest();
freeAll();
time = System.currentTimeMillis() - time;
System.out.println("Time: "+time/1000.+" seconds");
BDDFactory.CacheStats cachestats = B.getCacheStats();
if (cachestats != null && cachestats.uniqueAccess > 0) {
System.out.println(cachestats);
}
B.done();
B = null;
}
public static double runTest() {
if (B == null) {
/* Initialize with reasonable nodes and cache size and NxN variables */
String numOfNodes = System.getProperty("bddnodes");
int numberOfNodes;
if (numOfNodes == null)
numberOfNodes = (int) (Math.pow(4.42, N-6))*1000;
else
numberOfNodes = Integer.parseInt(numOfNodes);
String cache = System.getProperty("bddcache");
int cacheSize;
if (cache == null)
cacheSize = 1000;
else
cacheSize = Integer.parseInt(cache);
numberOfNodes = Math.max(1000, numberOfNodes);
B = BDDFactory.init(numberOfNodes, cacheSize);
}
if (B.varNum() < N * N) B.setVarNum(N * N);
queen = B.universe();
int i, j;
/* Build variable array */
X = new BDD[N][N];
for (i = 0; i < N; i++)
for (j = 0; j < N; j++)
X[i][j] = B.ithVar(i * N + j);
/* Place a queen in each row */
for (i = 0; i < N; i++) {
BDD e = B.zero();
for (j = 0; j < N; j++) {
e.orWith(X[i][j].id());
}
queen.andWith(e);
}
/* Build requirements for each variable(field) */
for (i = 0; i < N; i++)
for (j = 0; j < N; j++) {
if (TRACE) System.out.print("Adding position " + i + "," + j+" \r");
build(i, j);
}
solution = queen.satOne();
double result = queen.satCount();
/* Print the results */
if (TRACE) {
System.out.println("There are " + (long) result + " solutions.");
double result2 = solution.satCount();
System.out.println("Here is "+(long) result2 + " solution:");
solution.printSet();
System.out.println();
}
return result;
}
public static void freeAll() {
for (int i = 0; i < N; i++)
for (int j = 0; j < N; j++)<|fim▁hole|> }
static void build(int i, int j) {
BDD a = B.universe(), b = B.universe(), c = B.universe(), d = B.universe();
int k, l;
/* No one in the same column */
for (l = 0; l < N; l++) {
if (l != j) {
BDD u = X[i][l].apply(X[i][j], BDDFactory.nand);
a.andWith(u);
}
}
/* No one in the same row */
for (k = 0; k < N; k++) {
if (k != i) {
BDD u = X[i][j].apply(X[k][j], BDDFactory.nand);
b.andWith(u);
}
}
/* No one in the same up-right diagonal */
for (k = 0; k < N; k++) {
int ll = k - i + j;
if (ll >= 0 && ll < N) {
if (k != i) {
BDD u = X[i][j].apply(X[k][ll], BDDFactory.nand);
c.andWith(u);
}
}
}
/* No one in the same down-right diagonal */
for (k = 0; k < N; k++) {
int ll = i + j - k;
if (ll >= 0 && ll < N) {
if (k != i) {
BDD u = X[i][j].apply(X[k][ll], BDDFactory.nand);
d.andWith(u);
}
}
}
c.andWith(d);
b.andWith(c);
a.andWith(b);
queen.andWith(a);
}
}<|fim▁end|> | X[i][j].free();
queen.free();
solution.free(); |
<|file_name|>middleware.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from tempfile import NamedTemporaryFile
from django.contrib.sites.models import Site
from django.template.loader import render_to_string
from django.utils import translation
import re, subprocess
import cStringIO as StringIO
from django.conf import settings
WETO_REQUEST_FORMAT_NAME = getattr(settings, 'WETO_REQUEST_FORMAT_NAME', 'format')
WETO_REQUEST_FORMAT_PDF_VALUE = getattr(settings, 'WETO_REQUEST_FORMAT_PDF_VALUE', 'pdf')
WETO_LIB_PATH = getattr(settings, 'WETO_LIB_PATH', '/usr/bin/wkhtmltopdf')
WETO_OPTS = getattr(settings, 'WETO_OPTS', ["--dpi", "600", "--page-size", "A4"])
DEBUG = getattr(settings, 'DEBUG', False)
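# Illustrative override (an assumption, not part of the original module): the
# getattr() defaults above can be replaced from the Django settings module, e.g.
#   WETO_LIB_PATH = '/usr/local/bin/wkhtmltopdf'
#   WETO_OPTS = ["--dpi", "300", "--page-size", "Letter"]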
def replace_relative_with_absolute_links(site_url, content):
# replace urls with absolute urls including site and ssl/non-ssl
content = re.sub(r'href="/', r'href="%s/' % site_url, content)
content = re.sub(r'src="/', r'src="%s/' % site_url, content)
# replace relative urls with absolute urls including site and ssl/non-ssl,
# not sure if this really works this way...
content = re.sub(r'href="!http', r'href="%s/' % site_url, content)
content = re.sub(r'src="!http', r'src="%s/' % site_url, content)
return content
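# Rough usage sketch for the helper above (illustrative values only):
#   replace_relative_with_absolute_links('https://example.com', '<a href="/foo">')
#   # -> '<a href="https://example.com/foo">'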
def transform_to_pdf(response, request):
toc = request.GET.get("toc", None)
footer = request.GET.get("footer", None)<|fim▁hole|> pdf_name = request.GET.get("pdf_name", "report.pdf")
response['mimetype'] = 'application/pdf'
response['Content-Disposition'] = 'attachment; filename=%s.pdf' % pdf_name
content = response.content
# TODO: Make this more stable and less a hack
site_url = u"https://" if request.is_secure() else u"http://"
current_site = Site.objects.get_current()
site_url += current_site.domain
site_url = str(site_url)
content = replace_relative_with_absolute_links(site_url, content)
string_content = StringIO.StringIO(content)
popen_command = [WETO_LIB_PATH,] + WETO_OPTS
language = translation.get_language()
if header:
header_file = NamedTemporaryFile(suffix='.html')
header = render_to_string('weto/pdf_header.html', request)
header_file.write(replace_relative_with_absolute_links(site_url, header))
header_file.flush()
header_file.seek(0)
popen_command += ['--header-html', header_file.name]
if footer:
footer_file = NamedTemporaryFile(suffix='.html')
footer = render_to_string('weto/pdf_footer.html', request)
footer_file.write(replace_relative_with_absolute_links(site_url, footer))
footer_file.flush()
footer_file.seek(0)
popen_command += ['--footer-html', footer_file.name]
if toc:
toc_file = NamedTemporaryFile()
popen_command += ["toc"]
if toc != "default":
rendered = render_to_string('weto/toc_xsl.xml', request)
if getattr(settings, 'USE_I18N'):
toc_file.write(rendered.translate(language))
else:
toc_file.write(rendered)
toc_file.flush()
toc_file.seek(0)
popen_command += ['--xsl-style-sheet', toc_file.name]
popen_command += [ "-", "-"]
if DEBUG: # show errors on stdout
sub = subprocess.Popen(popen_command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
else:
sub = subprocess.Popen(popen_command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
string_content.flush()
string_content.seek(0)
pdf = sub.communicate(input=string_content.read())
string_content.close()
# don't know why, but I need to first remove the content, before writing to it, else it appends the content
response.content = ''
response.write(pdf[0])
if header:
header_file.close()
if toc:
toc_file.close()
if footer:
footer_file.close()
return response
class PdfMiddleware(object):
"""
Converts the response to a pdf one.
"""
def process_response(self, request, response):
format = request.GET.get(WETO_REQUEST_FORMAT_NAME, None)
if format == WETO_REQUEST_FORMAT_PDF_VALUE:
response = transform_to_pdf(response, request)
return response<|fim▁end|> | header = request.GET.get("header", None) |
<|file_name|>test_MatplotlibTimelines.py<|end_file_name|><|fim▁begin|>import unittest
<|fim▁hole|>from PyFoam.Basics.MatplotlibTimelines import MatplotlibTimelines
theSuite=unittest.TestSuite()<|fim▁end|> | |
<|file_name|>ui_live.py<|end_file_name|><|fim▁begin|><|fim▁hole|># proxy module
from traitsui.qt4.ui_live import *<|fim▁end|> | |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md>
fn main() {<|fim▁hole|><|fim▁end|> | println!("cargo:rustc-flags=-l eappprxy");
} |
<|file_name|>client.tpl.js<|end_file_name|><|fim▁begin|>/* globals options */
/* eslint-disable comma-dangle */
var opt = options;
var socket = new WebSocket('ws://localhost:' + opt.port);
socket.addEventListener('message', function(event) {
var ngHotReloadCore = (opt.root || window)[opt.ns].ngHotReloadCore;
var data = event.data ? JSON.parse(event.data) : {};
if (data.message !== 'reload') {
return;
}
<|fim▁hole|> // and loading the updated file from the ng-hot-reload server.
var script = document.createElement('script');
// Prevent browser from using a cached version of the file.
var query = '?t=' + Date.now();
script.src = 'http://localhost:' + opt.port + '/' + data.src + query;
document.body.appendChild(script);
} else if (data.fileType === 'template') {
ngHotReloadCore.template.update(data.filePath, data.file);
} else {
var errorMsg = 'Unknown file type ' + data.filePath;
ngHotReloadCore.manualReload(errorMsg);
}
});<|fim▁end|> | if (data.fileType === 'script') {
// If this is a js file, update by creating a script tag
|
<|file_name|>ComponentDescriptorValidator.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2018 The Dagger Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger.internal.codegen.validation;
import static com.google.auto.common.MoreTypes.asDeclared;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Predicates.in;
import static com.google.common.collect.Collections2.transform;
import static com.google.common.collect.Iterables.getOnlyElement;
import static dagger.internal.codegen.base.ComponentAnnotation.rootComponentAnnotation;
import static dagger.internal.codegen.base.DiagnosticFormatting.stripCommonTypePrefixes;
import static dagger.internal.codegen.base.Formatter.INDENT;
import static dagger.internal.codegen.base.Scopes.getReadableSource;
import static dagger.internal.codegen.base.Scopes.scopesOf;
import static dagger.internal.codegen.base.Scopes.singletonScope;
import static dagger.internal.codegen.base.Util.reentrantComputeIfAbsent;
import static dagger.internal.codegen.extension.DaggerStreams.toImmutableSet;
import static dagger.internal.codegen.extension.DaggerStreams.toImmutableSetMultimap;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static javax.tools.Diagnostic.Kind.ERROR;
import com.google.auto.common.MoreElements;
import com.google.auto.common.MoreTypes;
import com.google.common.base.Equivalence.Wrapper;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.Sets;
import dagger.internal.codegen.binding.ComponentCreatorDescriptor;
import dagger.internal.codegen.binding.ComponentDescriptor;
import dagger.internal.codegen.binding.ComponentRequirement;
import dagger.internal.codegen.binding.ComponentRequirement.NullPolicy;
import dagger.internal.codegen.binding.ContributionBinding;
import dagger.internal.codegen.binding.ErrorMessages;
import dagger.internal.codegen.binding.ErrorMessages.ComponentCreatorMessages;
import dagger.internal.codegen.binding.MethodSignatureFormatter;
import dagger.internal.codegen.binding.ModuleDescriptor;
import dagger.internal.codegen.compileroption.CompilerOptions;
import dagger.internal.codegen.compileroption.ValidationType;
import dagger.internal.codegen.langmodel.DaggerElements;
import dagger.internal.codegen.langmodel.DaggerTypes;
import dagger.model.Scope;
import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Deque;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.StringJoiner;
import javax.inject.Inject;
import javax.lang.model.element.Element;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.ExecutableType;
import javax.lang.model.type.TypeMirror;
import javax.tools.Diagnostic;
/**
* Reports errors in the component hierarchy.
*
* <ul>
* <li>Validates scope hierarchy of component dependencies and subcomponents.
* <li>Reports errors if there are component dependency cycles.
* <li>Reports errors if any abstract modules have non-abstract instance binding methods.
* <li>Validates component creator types.
* </ul>
*/
// TODO(dpb): Combine with ComponentHierarchyValidator.
public final class ComponentDescriptorValidator {
private final DaggerElements elements;
private final DaggerTypes types;
private final CompilerOptions compilerOptions;
private final MethodSignatureFormatter methodSignatureFormatter;
private final ComponentHierarchyValidator componentHierarchyValidator;
@Inject
ComponentDescriptorValidator(
DaggerElements elements,
DaggerTypes types,
CompilerOptions compilerOptions,
MethodSignatureFormatter methodSignatureFormatter,
ComponentHierarchyValidator componentHierarchyValidator) {
this.elements = elements;
this.types = types;
this.compilerOptions = compilerOptions;
this.methodSignatureFormatter = methodSignatureFormatter;
this.componentHierarchyValidator = componentHierarchyValidator;
}
public ValidationReport<TypeElement> validate(ComponentDescriptor component) {
ComponentValidation validation = new ComponentValidation(component);
validation.visitComponent(component);
validation.report(component).addSubreport(componentHierarchyValidator.validate(component));
return validation.buildReport();
}
private final class ComponentValidation {
final ComponentDescriptor rootComponent;
final Map<ComponentDescriptor, ValidationReport.Builder<TypeElement>> reports =
new LinkedHashMap<>();
ComponentValidation(ComponentDescriptor rootComponent) {
this.rootComponent = checkNotNull(rootComponent);
}
/** Returns a report that contains all validation messages found during traversal. */
ValidationReport<TypeElement> buildReport() {
ValidationReport.Builder<TypeElement> report =
ValidationReport.about(rootComponent.typeElement());
reports.values().forEach(subreport -> report.addSubreport(subreport.build()));
return report.build();
}
/** Returns the report builder for a (sub)component. */
private ValidationReport.Builder<TypeElement> report(ComponentDescriptor component) {
return reentrantComputeIfAbsent(
reports, component, descriptor -> ValidationReport.about(descriptor.typeElement()));
}
private void reportComponentItem(
Diagnostic.Kind kind, ComponentDescriptor component, String message) {
report(component)
.addItem(message, kind, component.typeElement(), component.annotation().annotation());
}
private void reportComponentError(ComponentDescriptor component, String error) {
reportComponentItem(ERROR, component, error);
}
void visitComponent(ComponentDescriptor component) {
validateDependencyScopes(component);
validateComponentDependencyHierarchy(component);
validateModules(component);
validateCreators(component);
component.childComponents().forEach(this::visitComponent);
}
/** Validates that component dependencies do not form a cycle. */
private void validateComponentDependencyHierarchy(ComponentDescriptor component) {
validateComponentDependencyHierarchy(component, component.typeElement(), new ArrayDeque<>());
}
/** Recursive method to validate that component dependencies do not form a cycle. */
private void validateComponentDependencyHierarchy(
ComponentDescriptor component, TypeElement dependency, Deque<TypeElement> dependencyStack) {
if (dependencyStack.contains(dependency)) {
// Current component has already appeared in the component chain.
StringBuilder message = new StringBuilder();
message.append(component.typeElement().getQualifiedName());
message.append(" contains a cycle in its component dependencies:\n");
dependencyStack.push(dependency);
appendIndentedComponentsList(message, dependencyStack);
dependencyStack.pop();
reportComponentItem(
compilerOptions.scopeCycleValidationType().diagnosticKind().get(),
component,
message.toString());
} else {
rootComponentAnnotation(dependency)
.ifPresent(
componentAnnotation -> {
dependencyStack.push(dependency);
for (TypeElement nextDependency : componentAnnotation.dependencies()) {
validateComponentDependencyHierarchy(
component, nextDependency, dependencyStack);
}
dependencyStack.pop();
});
}
}
/**
* Validates that among the dependencies are at most one scoped dependency, that there are no
* cycles within the scoping chain, and that singleton components have no scoped dependencies.
*/
private void validateDependencyScopes(ComponentDescriptor component) {
ImmutableSet<Scope> scopes = component.scopes();
ImmutableSet<TypeElement> scopedDependencies =
scopedTypesIn(
component
.dependencies()
.stream()
.map(ComponentRequirement::typeElement)
.collect(toImmutableSet()));
if (!scopes.isEmpty()) {
Scope singletonScope = singletonScope(elements);
// Dagger 1.x scope compatibility requires this be suppress-able.
if (compilerOptions.scopeCycleValidationType().diagnosticKind().isPresent()
&& scopes.contains(singletonScope)) {
// Singleton is a special-case representing the longest lifetime, and therefore
// @Singleton components may not depend on scoped components
if (!scopedDependencies.isEmpty()) {
StringBuilder message =
new StringBuilder(
"This @Singleton component cannot depend on scoped components:\n");
appendIndentedComponentsList(message, scopedDependencies);
reportComponentItem(
compilerOptions.scopeCycleValidationType().diagnosticKind().get(),
component,
message.toString());
}
} else if (scopedDependencies.size() > 1) {
// Scoped components may depend on at most one scoped component.
StringBuilder message = new StringBuilder();
for (Scope scope : scopes) {
message.append(getReadableSource(scope)).append(' ');
}
message
.append(component.typeElement().getQualifiedName())
.append(" depends on more than one scoped component:\n");
appendIndentedComponentsList(message, scopedDependencies);
reportComponentError(component, message.toString());
} else {
// Dagger 1.x scope compatibility requires this be suppress-able.
if (!compilerOptions.scopeCycleValidationType().equals(ValidationType.NONE)) {
validateDependencyScopeHierarchy(
component, component.typeElement(), new ArrayDeque<>(), new ArrayDeque<>());
}
}
} else {
// Scopeless components may not depend on scoped components.
if (!scopedDependencies.isEmpty()) {
StringBuilder message =
new StringBuilder(component.typeElement().getQualifiedName())
.append(" (unscoped) cannot depend on scoped components:\n");
appendIndentedComponentsList(message, scopedDependencies);
reportComponentError(component, message.toString());
}
}
}
private void validateModules(ComponentDescriptor component) {
for (ModuleDescriptor module : component.modules()) {
if (module.moduleElement().getModifiers().contains(Modifier.ABSTRACT)) {
for (ContributionBinding binding : module.bindings()) {
if (binding.requiresModuleInstance()) {
report(component).addError(abstractModuleHasInstanceBindingMethodsError(module));
break;
}
}
}
}
}
private String abstractModuleHasInstanceBindingMethodsError(ModuleDescriptor module) {
String methodAnnotations;
switch (module.kind()) {
case MODULE:
methodAnnotations = "@Provides";
break;
case PRODUCER_MODULE:
methodAnnotations = "@Provides or @Produces";
break;
default:
throw new AssertionError(module.kind());
}
return String.format(
"%s is abstract and has instance %s methods. Consider making the methods static or "
+ "including a non-abstract subclass of the module instead.",
module.moduleElement(), methodAnnotations);
}
private void validateCreators(ComponentDescriptor component) {
if (!component.creatorDescriptor().isPresent()) {
// If no builder, nothing to validate.
return;
}
ComponentCreatorDescriptor creator = component.creatorDescriptor().get();
ComponentCreatorMessages messages = ErrorMessages.creatorMessagesFor(creator.annotation());
// Requirements for modules and dependencies that the creator can set
Set<ComponentRequirement> creatorModuleAndDependencyRequirements =
creator.moduleAndDependencyRequirements();
// Modules and dependencies the component requires
Set<ComponentRequirement> componentModuleAndDependencyRequirements =
component.dependenciesAndConcreteModules();
// Requirements that the creator can set that don't match any requirements that the component
// actually has.
Set<ComponentRequirement> inapplicableRequirementsOnCreator =
Sets.difference(
creatorModuleAndDependencyRequirements, componentModuleAndDependencyRequirements);
DeclaredType container = asDeclared(creator.typeElement().asType());
if (!inapplicableRequirementsOnCreator.isEmpty()) {
Collection<Element> excessElements =
Multimaps.filterKeys(
creator.unvalidatedRequirementElements(), in(inapplicableRequirementsOnCreator))
.values();
String formatted =
excessElements.stream()
.map(element -> formatElement(element, container))
.collect(joining(", ", "[", "]"));
report(component)
.addError(String.format(messages.extraSetters(), formatted), creator.typeElement());
}
// Component requirements that the creator must be able to set
Set<ComponentRequirement> mustBePassed =
Sets.filter(
componentModuleAndDependencyRequirements,
input -> input.nullPolicy(elements, types).equals(NullPolicy.THROW));
// Component requirements that the creator must be able to set, but can't
Set<ComponentRequirement> missingRequirements =
Sets.difference(mustBePassed, creatorModuleAndDependencyRequirements);
if (!missingRequirements.isEmpty()) {
report(component)
.addError(
String.format(
messages.missingSetters(),
missingRequirements.stream().map(ComponentRequirement::type).collect(toList())),
creator.typeElement());<|fim▁hole|>
// Validate that declared creator requirements (modules, dependencies) have unique types.
ImmutableSetMultimap<Wrapper<TypeMirror>, Element> declaredRequirementsByType =
Multimaps.filterKeys(
creator.unvalidatedRequirementElements(),
creatorModuleAndDependencyRequirements::contains)
.entries().stream()
.collect(
toImmutableSetMultimap(entry -> entry.getKey().wrappedType(), Entry::getValue));
declaredRequirementsByType
.asMap()
.forEach(
(typeWrapper, elementsForType) -> {
if (elementsForType.size() > 1) {
TypeMirror type = typeWrapper.get();
// TODO(cgdecker): Attach this error message to the factory method rather than
// the component type if the elements are factory method parameters AND the
// factory method is defined by the factory type itself and not by a supertype.
report(component)
.addError(
String.format(
messages.multipleSettersForModuleOrDependencyType(),
type,
transform(
elementsForType, element -> formatElement(element, container))),
creator.typeElement());
}
});
// TODO(cgdecker): Duplicate binding validation should handle the case of multiple elements
// that set the same bound-instance Key, but validating that here would make it fail faster
// for subcomponents.
}
private String formatElement(Element element, DeclaredType container) {
// TODO(cgdecker): Extract some or all of this to another class?
// But note that it does different formatting for parameters than
// DaggerElements.elementToString(Element).
switch (element.getKind()) {
case METHOD:
return methodSignatureFormatter.format(
MoreElements.asExecutable(element), Optional.of(container));
case PARAMETER:
return formatParameter(MoreElements.asVariable(element), container);
default:
// This method shouldn't be called with any other type of element.
throw new AssertionError();
}
}
private String formatParameter(VariableElement parameter, DeclaredType container) {
// TODO(cgdecker): Possibly leave the type (and annotations?) off of the parameters here and
// just use their names, since the type will be redundant in the context of the error message.
StringJoiner joiner = new StringJoiner(" ");
parameter.getAnnotationMirrors().stream().map(Object::toString).forEach(joiner::add);
TypeMirror parameterType = resolveParameterType(parameter, container);
return joiner
.add(stripCommonTypePrefixes(parameterType.toString()))
.add(parameter.getSimpleName())
.toString();
}
private TypeMirror resolveParameterType(VariableElement parameter, DeclaredType container) {
ExecutableElement method =
MoreElements.asExecutable(parameter.getEnclosingElement());
int parameterIndex = method.getParameters().indexOf(parameter);
ExecutableType methodType = MoreTypes.asExecutable(types.asMemberOf(container, method));
return methodType.getParameterTypes().get(parameterIndex);
}
/**
* Validates that scopes do not participate in a scoping cycle - that is to say, scoped
* components are in a hierarchical relationship terminating with Singleton.
*
* <p>As a side-effect, this means scoped components cannot have a dependency cycle between
* themselves, since a component's presence within its own dependency path implies a cyclical
* relationship between scopes. However, cycles in component dependencies are explicitly checked
* in {@link #validateComponentDependencyHierarchy(ComponentDescriptor)}.
*/
private void validateDependencyScopeHierarchy(
ComponentDescriptor component,
TypeElement dependency,
Deque<ImmutableSet<Scope>> scopeStack,
Deque<TypeElement> scopedDependencyStack) {
ImmutableSet<Scope> scopes = scopesOf(dependency);
if (stackOverlaps(scopeStack, scopes)) {
scopedDependencyStack.push(dependency);
// Current scope has already appeared in the component chain.
StringBuilder message = new StringBuilder();
message.append(component.typeElement().getQualifiedName());
message.append(" depends on scoped components in a non-hierarchical scope ordering:\n");
appendIndentedComponentsList(message, scopedDependencyStack);
if (compilerOptions.scopeCycleValidationType().diagnosticKind().isPresent()) {
reportComponentItem(
compilerOptions.scopeCycleValidationType().diagnosticKind().get(),
component,
message.toString());
}
scopedDependencyStack.pop();
} else {
// TODO(beder): transitively check scopes of production components too.
rootComponentAnnotation(dependency)
.filter(componentAnnotation -> !componentAnnotation.isProduction())
.ifPresent(
componentAnnotation -> {
ImmutableSet<TypeElement> scopedDependencies =
scopedTypesIn(componentAnnotation.dependencies());
if (scopedDependencies.size() == 1) {
// empty can be ignored (base-case), and > 1 is a separately-reported error.
scopeStack.push(scopes);
scopedDependencyStack.push(dependency);
validateDependencyScopeHierarchy(
component,
getOnlyElement(scopedDependencies),
scopeStack,
scopedDependencyStack);
scopedDependencyStack.pop();
scopeStack.pop();
}
}); // else: we skip component dependencies which are not components
}
}
private <T> boolean stackOverlaps(Deque<ImmutableSet<T>> stack, ImmutableSet<T> set) {
for (ImmutableSet<T> entry : stack) {
if (!Sets.intersection(entry, set).isEmpty()) {
return true;
}
}
return false;
}
/** Appends and formats a list of indented component types (with their scope annotations). */
private void appendIndentedComponentsList(StringBuilder message, Iterable<TypeElement> types) {
for (TypeElement scopedComponent : types) {
message.append(INDENT);
for (Scope scope : scopesOf(scopedComponent)) {
message.append(getReadableSource(scope)).append(' ');
}
message
.append(stripCommonTypePrefixes(scopedComponent.getQualifiedName().toString()))
.append('\n');
}
}
/**
* Returns a set of type elements containing only those found in the input set that have a
* scoping annotation.
*/
private ImmutableSet<TypeElement> scopedTypesIn(Collection<TypeElement> types) {
return types.stream().filter(type -> !scopesOf(type).isEmpty()).collect(toImmutableSet());
}
}
}<|fim▁end|> | } |
<|file_name|>MyForm.cpp<|end_file_name|><|fim▁begin|>#include "MyForm.h"
<|fim▁hole|>}<|fim▁end|> | int main(){
|
<|file_name|>from_model.py<|end_file_name|><|fim▁begin|># Authors: Gilles Louppe, Mathieu Blondel, Maheshakya Wijewardena
# License: BSD 3 clause
import numpy as np
from .base import SelectorMixin
from ..base import BaseEstimator, clone
from ..externals import six
from ..exceptions import NotFittedError
from ..utils.fixes import norm
def _get_feature_importances(estimator, norm_order=1):
"""Retrieve or aggregate feature importances from estimator"""
importances = getattr(estimator, "feature_importances_", None)
if importances is None and hasattr(estimator, "coef_"):
if estimator.coef_.ndim == 1:
importances = np.abs(estimator.coef_)
else:
importances = norm(estimator.coef_, axis=0, ord=norm_order)
elif importances is None:
raise ValueError(
"The underlying estimator %s has no `coef_` or "
"`feature_importances_` attribute. Either pass a fitted estimator"
" to SelectFromModel or call fit before calling transform."
% estimator.__class__.__name__)
return importances
def _calculate_threshold(estimator, importances, threshold):
"""Interpret the threshold value"""
if threshold is None:
# determine default from estimator
est_name = estimator.__class__.__name__
if ((hasattr(estimator, "penalty") and estimator.penalty == "l1") or
"Lasso" in est_name):
# the natural default threshold is 0 when l1 penalty was used
threshold = 1e-5
else:
threshold = "mean"
if isinstance(threshold, six.string_types):
if "*" in threshold:
scale, reference = threshold.split("*")
scale = float(scale.strip())
reference = reference.strip()
if reference == "median":
reference = np.median(importances)
elif reference == "mean":
reference = np.mean(importances)
else:
raise ValueError("Unknown reference: " + reference)
threshold = scale * reference
elif threshold == "median":
threshold = np.median(importances)
elif threshold == "mean":
threshold = np.mean(importances)
else:
raise ValueError("Expected threshold='mean' or threshold='median' "
"got %s" % threshold)
else:
threshold = float(threshold)
return threshold
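# Illustrative sketch (not from scikit-learn itself): with importances
# [0.1, 0.2, 0.3], threshold="1.25*mean" resolves to 1.25 * 0.2 = 0.25,
# so only the feature with importance 0.3 would be kept.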
class SelectFromModel(BaseEstimator, SelectorMixin):
"""Meta-transformer for selecting features based on importance weights.
.. versionadded:: 0.17<|fim▁hole|> The base estimator from which the transformer is built.
This can be both a fitted (if ``prefit`` is set to True)
or a non-fitted estimator.
threshold : string, float, optional default None
The threshold value to use for feature selection. Features whose
importance is greater or equal are kept while the others are
discarded. If "median" (resp. "mean"), then the ``threshold`` value is
the median (resp. the mean) of the feature importances. A scaling
factor (e.g., "1.25*mean") may also be used. If None and if the
estimator has a parameter penalty set to l1, either explicitly
or implicitly (e.g, Lasso), the threshold used is 1e-5.
Otherwise, "mean" is used by default.
prefit : bool, default False
Whether a prefit model is expected to be passed into the constructor
directly or not. If True, ``transform`` must be called directly
and SelectFromModel cannot be used with ``cross_val_score``,
``GridSearchCV`` and similar utilities that clone the estimator.
Otherwise train the model using ``fit`` and then ``transform`` to do
feature selection.
norm_order : non-zero int, inf, -inf, default 1
Order of the norm used to filter the vectors of coefficients below
``threshold`` in the case where the ``coef_`` attribute of the
estimator is of dimension 2.
Attributes
----------
estimator_ : an estimator
The base estimator from which the transformer is built.
This is stored only when a non-fitted estimator is passed to the
``SelectFromModel``, i.e when prefit is False.
threshold_ : float
The threshold value used for feature selection.
"""
def __init__(self, estimator, threshold=None, prefit=False, norm_order=1):
self.estimator = estimator
self.threshold = threshold
self.prefit = prefit
self.norm_order = norm_order
def _get_support_mask(self):
# SelectFromModel can directly call on transform.
if self.prefit:
estimator = self.estimator
elif hasattr(self, 'estimator_'):
estimator = self.estimator_
else:
raise ValueError(
'Either fit the model before transform or set "prefit=True"'
' while passing the fitted estimator to the constructor.')
scores = _get_feature_importances(estimator, self.norm_order)
self.threshold_ = _calculate_threshold(estimator, scores,
self.threshold)
return scores >= self.threshold_
def fit(self, X, y=None, **fit_params):
"""Fit the SelectFromModel meta-transformer.
Parameters
----------
X : array-like of shape (n_samples, n_features)
The training input samples.
y : array-like, shape (n_samples,)
The target values (integers that correspond to classes in
classification, real numbers in regression).
**fit_params : Other estimator specific parameters
Returns
-------
self : object
Returns self.
"""
if self.prefit:
raise NotFittedError(
"Since 'prefit=True', call transform directly")
self.estimator_ = clone(self.estimator)
self.estimator_.fit(X, y, **fit_params)
return self
def partial_fit(self, X, y=None, **fit_params):
"""Fit the SelectFromModel meta-transformer only once.
Parameters
----------
X : array-like of shape (n_samples, n_features)
The training input samples.
y : array-like, shape (n_samples,)
The target values (integers that correspond to classes in
classification, real numbers in regression).
**fit_params : Other estimator specific parameters
Returns
-------
self : object
Returns self.
"""
if self.prefit:
raise NotFittedError(
"Since 'prefit=True', call transform directly")
if not hasattr(self, "estimator_"):
self.estimator_ = clone(self.estimator)
self.estimator_.partial_fit(X, y, **fit_params)
return self<|fim▁end|> |
Parameters
----------
estimator : object |
<|file_name|>autodiff.py<|end_file_name|><|fim▁begin|>import numpy as np
try:
from aurora.ndarray import gpu_op, ndarray
except ImportError:
pass
class Node(object):
""" Node object represents a node in the computational graph"""
def __init__(self):
""" New node will be created by Op objects __call__ method"""
# list of inputs to this node
self.inputs = []
# operator
self.op = None
# constants
self.const = None
# name of the node mainly use for debugging
self.name = ""
def __add__(self, other):
""" Adding two nodes and returns a new node"""
if isinstance(other, Node):
return add(self, other)
else:
return add_const(self, other)
def __sub__(self, other):
if isinstance(other, Node):
return sub(self, other)
else:
return sub_const(self, other)
def __rsub__(self, other):
return ref_sub_const(self, other)
def __mul__(self, other):
if isinstance(other, Node):
return mul(self, other)
else:
return mul_const(self, other)
def __truediv__(self, other):
if isinstance(other, Node):
return div(self, other)
else:
return div_const(self, other)
# Allow left-hand-side add and multiply.
__radd__ = __add__
__rmul__ = __mul__
__rdiv__ = __truediv__
class Op(object):
""" Op class represents operations perform on nodes"""
def __call__(self):
"""
Create a new node which represents operations perform on the graph
Parameters
----------
None
Returns
-------
Node
The new node object
"""
new_node = Node()
new_node.op = self
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
"""
Given the values of input nodes, compute the output value
Parameters
----------
:type use_numpy: object
:param use_numpy:
:param node: Node that performs the computation
:param input_vals: Values of input node
Returns
-------
:return: The output value of the node
"""
raise NotImplementedError
def gradient(self, node, output_grads):
"""
Given the value of output gradients, this operation calculates the
gradient contribution of each input node
Parameters
----------
:param node:
:param output_grads:
Returns
-------
:return: A list of gradient contribution to each input node respectively
"""
raise NotImplementedError
def infer_shape(self, node, input_shapes):
raise NotImplementedError
class AddOp(Op):
"""
"""
def __call__(self, nodeA, nodeB):
"""
This operator adds two nodes element-wise
Parameters
----------
:param nodeA: LHS operand
:param nodeB: RHS operand
Returns
-------
:return: A new Node which represents the element-wise plus operation
"""
new_node = Op.__call__(self)
new_node.inputs = [nodeA, nodeB]
new_node.name = '({}+{})'.format(nodeA.name, nodeB.name)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
"""
Given values of two input nodes, return result of element-wise addition.
Parameters
----------
:param node:
:param input_vals: List of two input nodes
Returens
--------
:return: The result of the element-wise addition operation
"""
assert len(input_vals) == 2
# return input_vals[0] + input_vals[1]
if use_numpy:
output_val[:] = input_vals[0] + input_vals[1]
else:
if input_vals[0].shape == input_vals[1].shape:
gpu_op.matrix_elementwise_add(input_vals[0], input_vals[1], output_val)
elif input_vals[0].shape == (1,):
const = input_vals[0].asnumpy()[0] # TODO: (upul) do we need this ? check it?
gpu_op.matrix_elementwise_add_by_const(input_vals[1], const, output_val)
elif input_vals[1].shape == (1,):
const = input_vals[1].asnumpy()[0] # TODO: (upul) do we need this ? check it?
gpu_op.matrix_elementwise_add_by_const(input_vals[0], const, output_val)
else:
pass # TODO: (upul) handle input[0] and input[1] in different shapes
def gradient(self, node, output_grads):
"""
Given the values of output gradients, calculate the gradients of input nodes
Parameters
----------
:param node:
:param output_grads: Gradient contribution of output nodes
Returns
-------
:return: A list of gradient contribution of output nodes
"""
return [output_grads, output_grads]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 2
assert input_shapes[0] == input_shapes[1]
return input_shapes[0]
class AddByConstOp(Op):
"""
Operator represents the element-wise addition of a node and a const
"""
def __call__(self, node_A, const_val):
"""
:param node:
:param const_val:
:return:
"""
new_node = Op.__call__(self)
new_node.const = const_val
new_node.inputs = [node_A]
new_node.name = '({0:s}+{1:f})'.format(node_A.name, const_val)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
"""
:param node:
:param input_vals:
:return:
"""
assert len(input_vals) == 1
if use_numpy:
output_val[:] = node.const + input_vals[0]
else:
gpu_op.matrix_elementwise_add_by_const(
input_vals[0], node.const, output_val)
def gradient(self, node, output_grads):
"""
:param node:
:param output_grads:
:return:
"""
return [output_grads]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 1
# assert node.const.shape == input_shapes[0]
return input_shapes[0]
class SubOp(Op):
def __call__(self, node_A, node_B):
new_node = Op.__call__(self)
new_node.inputs = [node_A, node_B]
new_node.name = '({0:s}-{1:s})'.format(node_A.name, node_B.name)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
assert len(input_vals) == 2
if use_numpy:
output_val[:] = input_vals[0] - input_vals[1]
else:
gpu_op.matrix_elementwise_subtract(input_vals[0], input_vals[1], output_val)
def gradient(self, node, output_grads):
return [output_grads, -1 * output_grads]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 2
assert input_shapes[0] == input_shapes[1]
return input_shapes[0]
class SubByConstOp(Op):
def __call__(self, node_A, const_val):
new_node = Op.__call__(self)
new_node.inputs = [node_A]
new_node.const = const_val
new_node.name = '({0:s}-{1:f})'.format(node_A.name, const_val)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
assert len(input_vals) == 1
if use_numpy:
output_val[:] = input_vals[0] - node.const
else:
gpu_op.matrix_elementwise_subtract_by_const(input_vals[0], node.const, output_val)
def gradient(self, node, output_grads):
return [output_grads]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 1
return input_shapes[0]
class ReflectedSubByConstOp(Op):
def __call__(self, node_A, const_val):
new_node = Op.__call__(self)
new_node.inputs = [node_A]
new_node.const = const_val
new_node.name = '({0:f}-{1:s})'.format(const_val, node_A.name)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
assert len(input_vals) == 1
output_val[:] = node.const - input_vals[0]
def gradient(self, node, output_grads):
return [-1 * output_grads]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 1
return input_shapes[0]
class OnesLikeOp(Op):
def __call__(self, node_A):
new_node = Op.__call__(self)
new_node.inputs = [node_A]
new_node.name = 'Oneslike({})'.format(node_A.name)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
assert len(input_vals) == 1
if use_numpy:
assert isinstance(input_vals[0], np.ndarray)
output_val[:] = np.ones(input_vals[0].shape)
else:
gpu_op.array_set(output_val, 1)
def gradient(self, node, output_grads):
return [zeros_like(node.inputs[0])]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 1
if input_shapes[0] == 1: # TODO (upul) do we need this if ?
return (1,)
else:
return input_shapes[0]
class ZerosLikeOp(Op):
def __call__(self, node_A):
new_node = Op.__call__(self)
new_node.inputs = [node_A]
new_node.name = 'Zeroslike({})'.format(node_A.name)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
assert len(input_vals) == 1
if use_numpy:
assert isinstance(input_vals[0], np.ndarray)
output_val[:] = np.zeros(input_vals[0].shape)
else:
gpu_op.array_set(output_val, 0)
def gradient(self, node, output_grads):
return [zeros_like(node.inputs[0])]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 1
if input_shapes[0] == 1: # TODO (upul) do we need this if ?
return (1,)
else:
return input_shapes[0]
class ReshapeOp(Op):
def __call__(self, node_A, newshape):
new_node = Op.__call__(self)
new_node.inputs = [node_A]
new_node.newshape = newshape
new_node.name = 'Reshape({})'.format(node_A.name)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
assert len(input_vals) == 1
if use_numpy:
assert isinstance(input_vals[0], np.ndarray)
output_val[:] = np.reshape(input_vals[0], newshape=node.newshape)
else:
# TODO: (upul) changing shape is not an expensive operation. But looks
# : bit ugly. Can't we find out an alternative approach?
input_shape = input_vals[0].shape
ndarray.reshape(output_val, input_shape)
input_vals[0].copyto(output_val)
ndarray.reshape(output_val, node.newshape)
def gradient(self, node, output_grads):
return [reshape_grad(node.inputs[0], output_grads)]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 1
return node.newshape
class ReshapeGradientOp(Op):
def __call__(self, node_A, node_B):
new_node = Op.__call__(self)
new_node.inputs = [node_A, node_B]
new_node.name = 'ReshapeGradientOp({0:s})'.format(node_A.name)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
assert len(input_vals) == 2
if use_numpy:
output_val[:] = input_vals[1].reshape(input_vals[0].shape)
else:
# TODO: (upul) changing shape is not an expensive operation. But looks
# : bit ugly. Can't we find out an alternative approach?
ndarray.reshape(output_val, input_vals[0].shape)
input_vals[1].copyto(output_val)
def gradient(self, node, output_grads):
raise NotImplementedError('Gradient of ReshapeGradientOp not supported')
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 2
return input_shapes[0]
class MulOp(Op):
def __call__(self, node_A, node_B):
new_node = Op.__call__(self)
new_node.inputs = [node_A, node_B]
new_node.name = '({0:s}*{1:s})'.format(node_A.name, node_B.name)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
assert len(input_vals) == 2
if use_numpy:
output_val[:] = input_vals[0] * input_vals[1]
else:
ip_1_shape = input_vals[0].shape
ip_2_shape = input_vals[1].shape
if ip_1_shape == ip_2_shape:
gpu_op.matrix_elementwise_multiply(input_vals[0], input_vals[1], output_val)
elif ip_1_shape == (1,):
const_val = input_vals[0].asnumpy()[0]
gpu_op.matrix_elementwise_multiply_by_const(input_vals[1], const_val, output_val)
elif ip_2_shape == (1,):
const_val = input_vals[1].asnumpy()[0]
gpu_op.matrix_elementwise_multiply_by_const(input_vals[0], const_val, output_val)
else:
pass # TODO (upul) handle ip_1_shape != ip_2_shape
def gradient(self, node, output_grads):
return [node.inputs[1] * output_grads, node.inputs[0] * output_grads]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 2
if input_shapes[0] == (1,):
return input_shapes[1]
elif input_shapes[1] == (1,):
return input_shapes[0]
elif input_shapes[0] == input_shapes[1]:
return input_shapes[0]
else:
stmt = 'Invalid dimensions {0:s}, (1:s)'.format(input_shapes[0], input_shapes[1])
raise RuntimeError(stmt)
class MulByConstOp(Op):
def __call__(self, node_A, const_val):
new_node = Op.__call__(self)
new_node.inputs = [node_A]
new_node.const = const_val
new_node.name = '({0:s}*{1:f})'.format(node_A.name, const_val)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
assert len(input_vals) == 1
if use_numpy:
output_val[:] = node.const * input_vals[0]
else:
gpu_op.matrix_elementwise_multiply_by_const(
input_vals[0], node.const, output_val)
def gradient(self, node, output_grads):
return [node.const * output_grads]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 1
return input_shapes[0]
class DivOp(Op):
def __call__(self, node_A, node_B):
new_node = Op.__call__(self)
new_node.inputs = [node_A, node_B]
new_node.name = '({0:s}/{1:s})'.format(node_A.name, node_B.name)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
assert len(input_vals) == 2
if use_numpy:
output_val[:] = input_vals[0] / input_vals[1]
else:
gpu_op.matrix_elementwise_division(input_vals[0], input_vals[1], output_val)
def gradient(self, node, output_grads):
grad_A = output_grads / node.inputs[1]
grad_B = -1.0 * output_grads * node.inputs[0] / (node.inputs[1] * node.inputs[1])
return [grad_A, grad_B]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 2
assert input_shapes[0] == input_shapes[1]
return input_shapes[0]
class DivByConstOp(Op):
def __call__(self, node_A, const_val):
new_node = Op.__call__(self)
new_node.inputs = [node_A]
new_node.const = const_val
new_node.name = '({0:s}/{1:f})'.format(node_A.name, const_val)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
assert len(input_vals) == 1
if use_numpy:
output_val[:] = input_vals[0] / node.const
else:
gpu_op.matrix_elementwise_div_by_const(input_vals[0], node.const, output_val)
def gradient(self, node, output_grads):
return [output_grads / node.const]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 1
return input_shapes[0]
class PlaceholderOp(Op):
"""Op to feed value to a nodes."""
def __call__(self):
"""Creates a variable node."""
new_node = Op.__call__(self)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
"""No compute function since node value is fed directly in Executor."""
assert False, "placeholder values provided by feed_dict"
def gradient(self, node, output_grad):
"""No gradient function since node has no inputs."""
return None
class ReduceSumOp(Op):
"""
"""
def __call__(self, node_A):
new_node = Op.__call__(self)
new_node.inputs = [node_A]
new_node.name = 'ReduceSum({0:s})'.format(node_A.name)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
"""
:param node:
:param input_vals:
:param output_val:
:param use_numpy:
:return:
"""
assert len(input_vals) == 1
if use_numpy:
assert isinstance(output_val, np.ndarray)
output_val[:] = np.sum(input_vals[0], axis=0)
else:
gpu_op.reduce_sum_axis_zero(input_vals[0], output_val)
def gradient(self, node, output_grads):
return [output_grads]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 1
if len(input_shapes[0]) == 1:
return (1,)
else:
return tuple(input_shapes[0][i]
for i in range(1, len(input_shapes[0])))
class BroadcastToOp(Op):
def __call__(self, node_A, node_B):
new_node = Op.__call__(self)
new_node.inputs = [node_A, node_B]
new_node.name = 'BroadcastTo({0:s}, {1:s}.shape)'.format(node_A.name, node_B.name)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
assert len(input_vals) == 2
if use_numpy:
output_val[:] = np.broadcast_to(input_vals[0], input_vals[1].shape)
else:
gpu_op.broadcast_to(input_vals[0], output_val)
def gradient(self, node, output_grads):
grad_A = reduce_sum(output_grads)
grad_B = zeros_like(node.inputs[1])
return [grad_A, grad_B]
def infer_shape(self, node, input_shapes):
assert len(input_shapes) == 2
return input_shapes[1]
class MatMulOp(Op): # TODO: (upul) double check what this class is doing
def __call__(self, node_A, node_B, trans_A=False, trans_B=False):
new_node = Op.__call__(self)
new_node.inputs = [node_A, node_B]
new_node.trans_A = trans_A
new_node.trans_B = trans_B
new_node.name = 'MatMul({0:s}, {1:s}'.format(node_A.name, node_B.name)
return new_node
def compute(self, node, input_vals, output_val, use_numpy=True):
assert len(input_vals) == 2
if use_numpy:
if node.trans_A:
input_vals[0] = input_vals[0].T
if node.trans_B:
input_vals[1] = input_vals[1].T
output_val[:] = np.dot(input_vals[0], input_vals[1])
else:
gpu_op.matrix_multiply(
input_vals[0], node.trans_A,
input_vals[1], node.trans_B,
output_val)
def gradient(self, node, output_grads):
grad_A = matmul(output_grads, node.inputs[1], trans_A=False, trans_B=True)
grad_B = matmul(node.inputs[0], output_grads, trans_A=True, trans_B=False)
return [grad_A, grad_B]
def infer_shape(self, node, input_shapes):
"""Need to handle input_vals[0].shape != input_vals[1].shape"""
assert len(input_shapes) == 2
(row_A, col_A) = input_shapes[0]<|fim▁hole|> if node.trans_B:
row_B, col_B = col_B, row_B
assert col_A == row_B
return (row_A, col_B)
def Variable(name):
"""User defined variables in an expression.
e.g. x = Variable(name = "x")
"""
placeholder_node = placeholder()
placeholder_node.name = name
return placeholder_node
def Parameter(name, init):
"""
example: w = Parameter(name='w', state=...)
:param name:
:param init:
:return:
"""
parameter_node = placeholder()
parameter_node.name = name
parameter_node.const = init
return parameter_node
# Global singleton operations
add = AddOp()
add_const = AddByConstOp()
sub = SubOp()
sub_const = SubByConstOp()
ref_sub_const = ReflectedSubByConstOp()
mul = MulOp()
mul_const = MulByConstOp()
div = DivOp()
div_const = DivByConstOp()
zeros_like = ZerosLikeOp()
ones_like = OnesLikeOp()
reduce_sum = ReduceSumOp()
broadcast_to = BroadcastToOp()
reshape = ReshapeOp()
reshape_grad = ReshapeGradientOp()
matmul = MatMulOp()
placeholder = PlaceholderOp()<|fim▁end|> | if node.trans_A:
row_A, col_A = col_A, row_A
(row_B, col_B) = input_shapes[1] |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import json
import logging
from typing import List, Optional
from uuid import uuid4
from django import http
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from django.db.utils import OperationalError
from django.views.decorators.http import require_GET, require_POST, \
require_http_methods
from jsonschema import validate # type: ignore
from jsonschema.exceptions import ValidationError # type: ignore
from specifyweb.specify.api import create_obj, get_object_or_404, obj_to_data, \
toJson, uri_for_model
from specifyweb.specify.views import apply_access_control, login_maybe_required, \
openapi
from specifyweb.specify import models as specify_models
from ..notifications.models import Message
from . import models, tasks
from .upload import upload as uploader, upload_plan_schema
logger = logging.getLogger(__name__)
def regularize_rows(ncols: int, rows: List[List]) -> List[List[str]]:
n = ncols + 1 # extra row info such as disambiguation in hidden col at end
def regularize(row: List) -> Optional[List]:
data = (row + ['']*n)[:n] # pad / trim row length to match columns
cleaned = ['' if v is None else str(v).strip() for v in data] # convert values to strings
return None if all(v == '' for v in cleaned[0:ncols]) else cleaned # skip empty rows
return [r for r in map(regularize, rows) if r is not None]
open_api_components = {
'schemas': {
'wb_uploadresult': {
"oneOf": [
{
"type": "string",
"example": "null"
},
{
"type": "object",
"properties": {
"success": {
"type": "boolean",
},
"timestamp": {
"type": "string",
"format": "datetime",
"example": "2021-04-28T22:28:20.033117+00:00",
}
}
}
]
},
"wb_uploaderstatus": {
"oneOf": [
{
"type": "string",
"example": "null",
"description": "Nothing to report"
}, {
"type": "object",
"properties": {
"taskinfo": {
"type": "object",
"properties": {
"current": {
"type": "number",
"example": 4,
},
"total": {
"type": "number",
"example": 20,
}
}
},
"taskstatus": {
"type": "string",
"enum": [
"PROGRESS",
"PENDING",
"FAILURE",
]
},
"uploaderstatus": {
"type": "object",
"properties": {
"operation": {
"type": "string",
"enum": [
'validating',
'uploading',
'unuploading'
]
},
"taskid": {
"type": "string",
"maxLength": 36,
"example": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca",
}
}
},
},
"description": "Status of the " +
"upload / un-upload / validation process",
}
]
},
"wb_rows": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string",
"description": "Cell's value or null"
}
},
"description": "2D array of values",
},
"wb_visualorder": {
"oneOf": [
{
"type": "string",
"description": "null",
},
{
"type": "array",
"items": {
"type": "number",
},
"description": "The order to show columns in",
}
]
},
"wb_uploadplan": {
"type": "object",
"properties": {
},
"description": "Upload Plan. Schema - " +
"https://github.com/specify/specify7/blob/5fb51a7d25d549248505aec141ae7f7cdc83e414/specifyweb/workbench/upload/upload_plan_schema.py#L14"
},
"wb_validation_results": {
"type": "object",
"properties": {},
"description": "Schema: " +
"https://github.com/specify/specify7/blob/19ebde3d86ef4276799feb63acec275ebde9b2f4/specifyweb/workbench/upload/validation_schema.py",
},
"wb_upload_results": {
"type": "object",
"properties": {},
"description": "Schema: " +
"https://github.com/specify/specify7/blob/19ebde3d86ef4276799feb63acec275ebde9b2f4/specifyweb/workbench/upload/upload_results_schema.py",
}
}
}
@openapi(schema={
"get": {
"parameters": [
{
"name": "with_plan",
"in": "query",
"required": False,
"schema": {
"type": "string"
},
"description": "If parameter is present, limit results to data sets with upload plans."
}
],
"responses": {
"200": {
"description": "Data fetched successfully",
"content": {
"application/json": {
"schema": {
"type": "array",
"items": {
"type": "object",
"properties": {
"id": {
"type": "number",
"minimum": 0,
"description": "Data Set ID",
},
"name": {
"type": "string",
"description": "Data Set Name",
},
"uploadresult": {
"$ref": "#/components/schemas/wb_uploadresult"
},
"uploaderstatus": {
"$ref": "#/components/schemas/wb_uploaderstatus",
},
"timestampcreated": {
"type": "string",
"format": "datetime",
"example": "2021-04-28T13:16:07.774"
},
"timestampmodified": {
"type": "string",
"format": "datetime",
"example": "2021-04-28T13:50:41.710",
}
},
'required': ['id', 'name', 'uploadresult', 'uploaderstatus', 'timestampcreated', 'timestampmodified'],
'additionalProperties': False
}
}
}
}
}
}
},
'post': {
"requestBody": {
"required": True,
"description": "A JSON representation of a new Data Set",
"content": {
"application/json": {
"schema": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "Data Set name",
},
"columns": {
"type": "array",
"items": {
"type": "string",
"description": "A name of the column",
},
"description": "A unique array of strings",
},
"rows": {
"$ref": "#/components/schemas/wb_rows",
},
"importedfilename": {
"type": "string",
"description": "The name of the original file",
}
},
'required': ['name', 'columns', 'rows', 'importedfilename'],
'additionalProperties': False
}
}
}
},
"responses": {
"201": {
"description": "Data created successfully",
"content": {
"application/json": {
"schema": {
"type": "object",
"properties": {
"id": {
"type": "number",
"description": "Data Set ID",
},
"name": {
"type": "string",
"description":
"Data Set name (may differ from the one " +
"in the request object as part of " +
"ensuring names are unique)"
},
},
'required': ['name', 'id'],
'additionalProperties': False
}
}
}
}
}
}
}, components=open_api_components)
@login_maybe_required
@apply_access_control
@require_http_methods(["GET", "POST"])
@transaction.atomic
def datasets(request) -> http.HttpResponse:
"""RESTful list of user's WB datasets. POSTing will create a new dataset."""
if request.method == "POST":
data = json.load(request)
columns = data['columns']
        if not isinstance(columns, list) or any(not isinstance(c, str) for c in columns):
return http.HttpResponse(f"all column headers must be strings: {columns}", status=400)
if len(set(columns)) != len(columns):
return http.HttpResponse(f"all column headers must be unique: {columns}", status=400)
rows = regularize_rows(len(columns), data['rows'])
ds = models.Spdataset.objects.create(
specifyuser=request.specify_user,
collection=request.specify_collection,
name=data['name'],
columns=columns,
data=rows,
importedfilename=data['importedfilename'],
createdbyagent=request.specify_user_agent,
modifiedbyagent=request.specify_user_agent,
)
return http.JsonResponse({"id": ds.id, "name": ds.name}, status=201)
else:
attrs = ('name', 'uploadresult', 'uploaderstatus', 'timestampcreated', 'timestampmodified')
dss = models.Spdataset.objects.filter(specifyuser=request.specify_user, collection=request.specify_collection).only(*attrs)
if 'with_plan' in request.GET:
dss = dss.filter(uploadplan__isnull=False)
return http.JsonResponse([{'id': ds.id, **{attr: getattr(ds, attr) for attr in attrs}} for ds in dss], safe=False)
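# Illustrative sketch (not part of the original module): how an HTTP client might
# call the POST handler above to create a Data Set. The URL prefix and the
# authenticated-session handling are assumptions; routing is configured elsewhere.
#
#     import requests
#
#     resp = requests.post(
#         "https://example.org/api/workbench/dataset/",        # hypothetical route
#         json={
#             "name": "Spring survey",
#             "columns": ["Catalog #", "Genus", "Species"],
#             "rows": [["123", "Carex", "aquatilis"]],
#             "importedfilename": "spring_survey.csv",
#         },
#         cookies=session_cookies,                              # hypothetical session
#     )
#     assert resp.status_code == 201   # body: {"id": <number>, "name": <string>}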
@openapi(schema={
"get": {
"responses": {
"200": {
"description": "Successful response",
"content": {
"application/json": {
"schema": {
"type": "object",
"properties": {
"id": {
"type": "number",
"description": "Data Set ID",
},
"name": {
"type": "string",
"description": "Data Set name",
},
"columns": {
"type": "array",
"items": {
"type": "string",
"description": "A name of the column",
},
"description": "A unique array of strings",
},
"visualorder": {
"$ref": "#/components/schemas/wb_visualorder"
},
"rows": {
"$ref": "#/components/schemas/wb_rows"
},
"uploadplan": {
"$ref": "#/components/schemas/wb_uploadplan"
},
"uploadresult": {
"$ref": "#/components/schemas/wb_uploadresult"
},
"uploaderstatus": {
"$ref": "#/components/schemas/wb_uploaderstatus"
},
"importedfilename": {
"type": "string",
"description": "The name of the original file",
},
"remarks": {
"type": "string",
},
"timestampcreated": {
"type": "string",
"format": "datetime",
"example": "2021-04-28T13:16:07.774"
},
"timestampmodified": {
"type": "string",
"format": "datetime",
"example": "2021-04-28T13:50:41.710",
}
},
'required': ['id', 'name', 'columns', 'visualorder', 'rows', 'uploadplan', 'uploadresult',
'uploaderstatus', 'importedfilename', 'remarks', 'timestampcreated', 'timestampmodified'],
'additionalProperties': False
}
}
}
}
}
},
'put': {
"requestBody": {
"required": True,
"description": "A JSON representation of updates to the data set",
"content": {
"application/json": {
"schema": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "Data Set name",
},
"remarks": {
"type": "string",
},
"visualorder": {
"$ref": "#/components/schemas/wb_visualorder"
},
"uploadplan": {
"$ref": "#/components/schemas/wb_uploadplan"
},
},
'additionalProperties': False
}
}
}
},
"responses": {
"204": {"description": "Data set updated."},
"409": {"description": "Dataset in use by uploader."}
}
},
"delete": {
"responses": {
"204": {"description": "Data set deleted."},
"409": {"description": "Dataset in use by uploader"}
}
}
}, components=open_api_components)
@login_maybe_required
@apply_access_control
@require_http_methods(["GET", "PUT", "DELETE"])
@transaction.atomic
def dataset(request, ds_id: str) -> http.HttpResponse:
"""RESTful endpoint for dataset <ds_id>. Supports GET PUT and DELETE."""
try:
ds = models.Spdataset.objects.get(id=ds_id)
except ObjectDoesNotExist:
return http.HttpResponseNotFound()
if ds.specifyuser != request.specify_user:
return http.HttpResponseForbidden()
if request.method == "GET":
return http.JsonResponse(dict(
id=ds.id,
name=ds.name,
columns=ds.columns,
visualorder=ds.visualorder,
rows=ds.data,
uploadplan=ds.uploadplan and json.loads(ds.uploadplan),
uploaderstatus=ds.uploaderstatus,
uploadresult=ds.uploadresult,
rowresults=ds.rowresults and json.loads(ds.rowresults),
remarks=ds.remarks,
importedfilename=ds.importedfilename,
timestampcreated=ds.timestampcreated,
timestampmodified=ds.timestampmodified,
createdbyagent=uri_for_model('agent', ds.createdbyagent_id) if ds.createdbyagent_id is not None else None,
modifiedbyagent=uri_for_model('agent', ds.modifiedbyagent_id) if ds.modifiedbyagent_id is not None else None,
))
with transaction.atomic():
ds = models.Spdataset.objects.select_for_update().get(id=ds_id)
if request.method == "PUT":
attrs = json.load(request)
if 'name' in attrs:
ds.name = attrs['name']
if 'remarks' in attrs:
ds.remarks = attrs['remarks']
if 'visualorder' in attrs:
ds.visualorder = attrs['visualorder']
assert ds.visualorder is None or (isinstance(ds.visualorder, list) and len(ds.visualorder) == len(ds.columns))
if 'uploadplan' in attrs:
plan = attrs['uploadplan']
try:
validate(plan, upload_plan_schema.schema)
except ValidationError as e:
return http.HttpResponse(f"upload plan is invalid: {e}", status=400)
                if ds.uploaderstatus is not None:
return http.HttpResponse('dataset in use by uploader', status=409)
if ds.was_uploaded():
return http.HttpResponse('dataset has been uploaded. changing upload plan not allowed.', status=400)
new_cols = upload_plan_schema.parse_plan(request.specify_collection, plan).get_cols() - set(ds.columns)
if new_cols:
ncols = len(ds.columns)
ds.columns += list(new_cols)
for i, row in enumerate(ds.data):
ds.data[i] = row[:ncols] + [""]*len(new_cols) + row[ncols:]
ds.uploadplan = json.dumps(plan)
ds.rowresults = None
ds.uploadresult = None
ds.save()
return http.HttpResponse(status=204)
if request.method == "DELETE":
            if ds.uploaderstatus is not None:
return http.HttpResponse('dataset in use by uploader', status=409)
ds.delete()
return http.HttpResponse(status=204)
assert False, "Unexpected HTTP method"
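# Illustrative sketch (not part of the original module): updating the upload plan
# of an existing Data Set through the PUT handler above. The URL prefix, ds_id,
# plan_dict and session handling are assumptions for the example.
#
#     resp = requests.put(
#         f"https://example.org/api/workbench/dataset/{ds_id}/",   # hypothetical route
#         json={"uploadplan": plan_dict},   # must satisfy upload_plan_schema.schema
#         cookies=session_cookies,
#     )
#     assert resp.status_code == 204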
@openapi(schema={
"get": {
"responses": {
"200": {
"description": "Successful response",
"content": {
"application/json": {
"schema": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string",
"description": "Cell value"
}
},
"description":
"2d array of cells. NOTE: last column would contain " +
"disambiguation results as a JSON object or be an " +
"empty string"
}
}
}
}
}
},
'put': {
"requestBody": {
"required": True,
"description": "A JSON representation of a spreadsheet",
"content": {
"application/json": {
"schema": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string",
"description": "Cell value"
}
},
"description":
"2d array of cells. NOTE: last column should contain " +
"disambiguation results as a JSON object or be an " +
"empty string"
}
}
}
},
"responses": {
"204": {"description": "Data set rows updated."},
"409": {"description": "Dataset in use by uploader"}
}
},
}, components=open_api_components)
@login_maybe_required
@apply_access_control
@require_http_methods(["GET", "PUT"])
@transaction.atomic
def rows(request, ds_id: str) -> http.HttpResponse:
"""Returns (GET) or sets (PUT) the row data for dataset <ds_id>."""
try:
ds = models.Spdataset.objects.select_for_update().get(id=ds_id)
except ObjectDoesNotExist:
return http.HttpResponseNotFound()
if ds.specifyuser != request.specify_user:
return http.HttpResponseForbidden()
if request.method == "PUT":
if ds.uploaderstatus is not None:
return http.HttpResponse('dataset in use by uploader.', status=409)
if ds.was_uploaded():
return http.HttpResponse('dataset has been uploaded. changing data not allowed.', status=400)
rows = regularize_rows(len(ds.columns), json.load(request))
ds.data = rows
ds.rowresults = None
ds.uploadresult = None
ds.modifiedbyagent = request.specify_user_agent
ds.save()
return http.HttpResponse(status=204)
else: # GET
return http.JsonResponse(ds.data, safe=False)
@openapi(schema={
'post': {
"responses": {
"200": {
"description": "Returns a GUID (job ID)",
"content": {
"text/plain": {
"schema": {
"type": "string",
"maxLength": 36,
"example": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca",
}
}
}
},
"409": {"description": "Dataset in use by uploader"}
}
},
}, components=open_api_components)
@login_maybe_required
@apply_access_control
@require_POST
def upload(request, ds_id, no_commit: bool, allow_partial: bool) -> http.HttpResponse:
"Initiates an upload or validation of dataset <ds_id>."
ds = get_object_or_404(models.Spdataset, id=ds_id)
if ds.specifyuser != request.specify_user:
return http.HttpResponseForbidden()
if request.specify_user.usertype != 'Manager' and not no_commit:
return http.HttpResponseForbidden("Only manager users may upload data sets.")
with transaction.atomic():
ds = models.Spdataset.objects.select_for_update().get(id=ds_id)
if ds.uploaderstatus is not None:
return http.HttpResponse('dataset in use by uploader.', status=409)
if ds.collection != request.specify_collection:
return http.HttpResponse('dataset belongs to a different collection.', status=400)
if ds.was_uploaded():
return http.HttpResponse('dataset has already been uploaded.', status=400)
taskid = str(uuid4())
async_result = tasks.upload.apply_async([
request.specify_collection.id,
request.specify_user_agent.id,
ds_id,
no_commit,
allow_partial
], task_id=taskid)
ds.uploaderstatus = {
'operation': "validating" if no_commit else "uploading",
'taskid': taskid
}
ds.save(update_fields=['uploaderstatus'])
return http.JsonResponse(async_result.id, safe=False)
@openapi(schema={
'post': {
"responses": {
"200": {
"description": "Returns a GUID (job ID)",
"content": {
"text/plain": {
"schema": {
"type": "string",
"maxLength": 36,
"example": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca",
}
}
}
},
"409": {"description": "Dataset in use by uploader"}
}
},
}, components=open_api_components)
@login_maybe_required
@apply_access_control
@require_POST
def unupload(request, ds_id: int) -> http.HttpResponse:
"Initiates an unupload of dataset <ds_id>."
ds = get_object_or_404(models.Spdataset, id=ds_id)
if ds.specifyuser != request.specify_user:
return http.HttpResponseForbidden()
if request.specify_user.usertype != 'Manager':
return http.HttpResponseForbidden("Only manager users may un-upload data sets.")
with transaction.atomic():
ds = models.Spdataset.objects.select_for_update().get(id=ds_id)
if ds.uploaderstatus is not None:
return http.HttpResponse('dataset in use by uploader.', status=409)
if not ds.was_uploaded():
return http.HttpResponse('dataset has not been uploaded.', status=400)
taskid = str(uuid4())
async_result = tasks.unupload.apply_async([ds.id, request.specify_user_agent.id], task_id=taskid)
ds.uploaderstatus = {
'operation': "unuploading",
'taskid': taskid
}
ds.save(update_fields=['uploaderstatus'])
return http.JsonResponse(async_result.id, safe=False)
# @login_maybe_required
@openapi(schema={
'get': {
"responses": {
"200": {
"description": "Data fetched successfully",
"content": {
"text/plain": {
"schema": {
"$ref": "#/components/schemas/wb_uploaderstatus",
}
}
}
},
}
},
}, components=open_api_components)
@require_GET
def status(request, ds_id: int) -> http.HttpResponse:
"Returns the uploader status for the dataset <ds_id>."
ds = get_object_or_404(models.Spdataset, id=ds_id)
# if (wb.specifyuser != request.specify_user):
# return http.HttpResponseForbidden()
if ds.uploaderstatus is None:
return http.JsonResponse(None, safe=False)
task = {
'uploading': tasks.upload,
'validating': tasks.upload,
'unuploading': tasks.unupload,
}[ds.uploaderstatus['operation']]
result = task.AsyncResult(ds.uploaderstatus['taskid'])
status = {
'uploaderstatus': ds.uploaderstatus,
'taskstatus': result.state,
'taskinfo': result.info if isinstance(result.info, dict) else repr(result.info)
}
return http.JsonResponse(status)
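# For reference (illustrative values only), a response from the view above might
# look like the following; "taskstatus" and "taskinfo" are whatever Celery reports:
#
#     {
#         "uploaderstatus": {"operation": "validating",
#                            "taskid": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca"},
#         "taskstatus": "PENDING",
#         "taskinfo": "None"
#     }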
@openapi(schema={
'post': {
"responses": {
"200": {
"description": "Returns either 'ok' if a task is aborted " +
" or 'not running' if no task exists.",
"content": {
"text/plain": {
"schema": {
"type": "string",
"enum": [
"ok",
"not running"
]
}
}
}
},
"503": {
"description": "Indicates the process could not be terminated.",
"content": {
"text/plain": {
"schema": {
"type": "string",
"enum": [
'timed out waiting for requested task to terminate'
]
}
}
}
},
}
},
}, components=open_api_components)
@login_maybe_required
@apply_access_control
@require_POST
def abort(request, ds_id: int) -> http.HttpResponse:
"Aborts any ongoing uploader operation for dataset <ds_id>."
ds = get_object_or_404(models.Spdataset, id=ds_id)
if ds.specifyuser != request.specify_user:
return http.HttpResponseForbidden()
if ds.uploaderstatus is None:
return http.HttpResponse('not running', content_type='text/plain')
task = {
'uploading': tasks.upload,
'validating': tasks.upload,
'unuploading': tasks.unupload,
}[ds.uploaderstatus['operation']]
    task.AsyncResult(ds.uploaderstatus['taskid']).revoke(terminate=True)
try:
models.Spdataset.objects.filter(id=ds_id).update(uploaderstatus=None)
except OperationalError as e:
if e.args[0] == 1205: # (1205, 'Lock wait timeout exceeded; try restarting transaction')
return http.HttpResponse(
'timed out waiting for requested task to terminate',
status=503,
content_type='text/plain'
)
else:
raise
return http.HttpResponse('ok', content_type='text/plain')
@openapi(schema={
'get': {
"responses": {
"200": {
"description": "Successful operation",
"content": {
"text/plain": {
"schema": {
"type": "array",
"items": {
"$ref": "#/components/schemas/wb_upload_results",
}
}
}
}
},
}
},
}, components=open_api_components)<|fim▁hole|> "Returns the detailed upload/validation results if any for the dataset <ds_id>."
ds = get_object_or_404(models.Spdataset, id=ds_id)
if ds.specifyuser != request.specify_user:
return http.HttpResponseForbidden()
if ds.rowresults is None:
return http.JsonResponse(None, safe=False)
results = json.loads(ds.rowresults)
if settings.DEBUG:
from .upload.upload_results_schema import schema
validate(results, schema)
return http.JsonResponse(results, safe=False)
@openapi(schema={
'post': {
"requestBody": {
"required": True,
"description": "A row to validate",
"content": {
"application/json": {
"schema": {
"type": "array",
"items": {
"type": "string",
"description": "Cell value"
},
}
}
}
},
"responses": {
"200": {
"description": "Returns upload results for a single row.",
"content": {
"text/plain": {
"schema": {
"type": "object",
"properties": {
"results": {
"$ref": "#/components/schemas/wb_upload_results"
},
},
'required': ['results'],
'additionalProperties': False
}
}
}
},
}
},
}, components=open_api_components)
@login_maybe_required
@apply_access_control
@require_POST
def validate_row(request, ds_id: str) -> http.HttpResponse:
"Validates a single row for dataset <ds_id>. The row data is passed as POST parameters."
ds = get_object_or_404(models.Spdataset, id=ds_id)
collection = request.specify_collection
bt, upload_plan = uploader.get_ds_upload_plan(collection, ds)
row = json.loads(request.body)
ncols = len(ds.columns)
rows = regularize_rows(ncols, [row])
if not rows:
return http.JsonResponse(None, safe=False)
row = rows[0]
da = uploader.get_disambiguation_from_row(ncols, row)
result = uploader.validate_row(collection, upload_plan, request.specify_user_agent.id, dict(zip(ds.columns, row)), da)
return http.JsonResponse({'result': result.to_json()})
@openapi(schema={
'get': {
"responses": {
"200": {
"description": "Returns the upload plan schema, like defined here: " +
"https://github.com/specify/specify7/blob/19ebde3d86ef4276799feb63acec275ebde9b2f4/specifyweb/workbench/upload/upload_plan_schema.py",
"content": {
"text/plain": {
"schema": {
"type": "object",
"properties": {},
}
}
}
},
}
},
}, components=open_api_components)
@require_GET
def up_schema(request) -> http.HttpResponse:
"Returns the upload plan schema."
return http.JsonResponse(upload_plan_schema.schema)
@openapi(schema={
'post': {
"requestBody": {
"required": True,
"description": "User ID of the new owner",
"content": {
"application/x-www-form-urlencoded": {
"schema": {
"type": "object",
"properties": {
"specifyuserid": {
"type": "number",
"description": "User ID of the new owner"
},
},
'required': ['specifyuserid'],
'additionalProperties': False
}
}
}
},
"responses": {
"204": {"description": "Dataset transfer succeeded."},
}
},
}, components=open_api_components)
@login_maybe_required
@apply_access_control
@require_POST
def transfer(request, ds_id: int) -> http.HttpResponse:
"""Transfer dataset's ownership to a different user."""
if 'specifyuserid' not in request.POST:
return http.HttpResponseBadRequest("missing parameter: specifyuserid")
ds = get_object_or_404(models.Spdataset, id=ds_id)
if ds.specifyuser != request.specify_user:
return http.HttpResponseForbidden()
Specifyuser = getattr(specify_models, 'Specifyuser')
try:
ds.specifyuser = Specifyuser.objects.get(id=request.POST['specifyuserid'])
except Specifyuser.DoesNotExist:
return http.HttpResponseBadRequest("the user does not exist")
Message.objects.create(user=ds.specifyuser, content=json.dumps({
'type': 'dataset-ownership-transferred',
'previous-owner-name': request.specify_user.name,
'dataset-name': ds.name,
'dataset-id': ds_id,
}))
ds.save()
return http.HttpResponse(status=204)<|fim▁end|> | @login_maybe_required
@apply_access_control
@require_GET
def upload_results(request, ds_id: int) -> http.HttpResponse: |
<|file_name|>containers.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# This file is part of PARPG.
# PARPG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# PARPG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
<|fim▁hole|>
"""Containes classes defining concrete container game objects like crates,
barrels, chests, etc."""
__all__ = ["WoodenCrate",]
from composed import ImmovableContainer
class WoodenCrate (ImmovableContainer):
def __init__ (self, ID, name = 'Wooden Crate', \
text = 'A battered crate', gfx = 'crate', **kwargs):
ImmovableContainer.__init__(self, ID = ID, name = name, gfx = gfx, \
text = text, **kwargs)<|fim▁end|> | # You should have received a copy of the GNU General Public License
# along with PARPG. If not, see <http://www.gnu.org/licenses/>. |
<|file_name|>RoboticMiddlewareImpl.java<|end_file_name|><|fim▁begin|>/**
*/
package org.eclipse.papyrus.RobotML.impl;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.papyrus.RobotML.RobotMLPackage;
import org.eclipse.papyrus.RobotML.RoboticMiddleware;
import org.eclipse.papyrus.RobotML.RoboticMiddlewareKind;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Robotic Middleware</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.eclipse.papyrus.RobotML.impl.RoboticMiddlewareImpl#getKind <em>Kind</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class RoboticMiddlewareImpl extends PlatformImpl implements RoboticMiddleware {
/**
* The default value of the '{@link #getKind() <em>Kind</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getKind()
* @generated
* @ordered
*/
protected static final RoboticMiddlewareKind KIND_EDEFAULT = RoboticMiddlewareKind.RT_MAPS;
/**
* The cached value of the '{@link #getKind() <em>Kind</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getKind()
* @generated
* @ordered
*/
protected RoboticMiddlewareKind kind = KIND_EDEFAULT;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected RoboticMiddlewareImpl() {
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
return RobotMLPackage.Literals.ROBOTIC_MIDDLEWARE;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public RoboticMiddlewareKind getKind() {
return kind;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setKind(RoboticMiddlewareKind newKind) {
RoboticMiddlewareKind oldKind = kind;
kind = newKind == null ? KIND_EDEFAULT : newKind;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, RobotMLPackage.ROBOTIC_MIDDLEWARE__KIND, oldKind, kind));
}
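	// Illustrative sketch (not generated code): typical client usage, assuming the
	// generated RobotMLFactory is available; the factory call is hypothetical here.
	//
	//     RoboticMiddleware mw = RobotMLFactory.eINSTANCE.createRoboticMiddleware();
	//     mw.setKind(RoboticMiddlewareKind.RT_MAPS);
	//     RoboticMiddlewareKind kind = mw.getKind();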
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case RobotMLPackage.ROBOTIC_MIDDLEWARE__KIND:
return getKind();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated<|fim▁hole|> case RobotMLPackage.ROBOTIC_MIDDLEWARE__KIND:
setKind((RoboticMiddlewareKind)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case RobotMLPackage.ROBOTIC_MIDDLEWARE__KIND:
setKind(KIND_EDEFAULT);
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case RobotMLPackage.ROBOTIC_MIDDLEWARE__KIND:
return kind != KIND_EDEFAULT;
}
return super.eIsSet(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString() {
if (eIsProxy()) return super.toString();
StringBuffer result = new StringBuffer(super.toString());
result.append(" (kind: ");
result.append(kind);
result.append(')');
return result.toString();
}
} //RoboticMiddlewareImpl<|fim▁end|> | */
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) { |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
if __name__ == "__main__":
settings_name = "settings.local" if os.name == 'nt' else "settings.remote"<|fim▁hole|> os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_name)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)<|fim▁end|> | |
<|file_name|>InputUtils.java<|end_file_name|><|fim▁begin|>package com.buddycloud.utils;
import android.app.Activity;
import android.content.Context;
import android.view.inputmethod.InputMethodManager;
public class InputUtils {
public static void hideKeyboard(Activity activity) {
InputMethodManager imm = (InputMethodManager) activity.getSystemService(Context.INPUT_METHOD_SERVICE);
<|fim▁hole|> InputMethodManager imm = (InputMethodManager) activity.getSystemService(Context.INPUT_METHOD_SERVICE);
return imm.isActive();
}
}<|fim▁end|> | imm.hideSoftInputFromWindow(activity.getCurrentFocus().getWindowToken(), 0);
}
public static boolean isActive(Activity activity) {
|
<|file_name|>qquickrectangle.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the QtQuick module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qquickrectangle_p.h"
#include "qquickrectangle_p_p.h"
#include <QtQuick/private/qsgcontext_p.h>
#include <private/qsgadaptationlayer_p.h>
#include <QtGui/qpixmapcache.h>
#include <QtCore/qstringbuilder.h>
#include <QtCore/qmath.h>
#include <QtCore/qmetaobject.h>
QT_BEGIN_NAMESPACE
// XXX todo - should we change rectangle to draw entirely within its width/height?
/*!
\internal
\class QQuickPen<|fim▁hole|>
By default, the pen is invalid and nothing is drawn. You must either set a color (then the default
width is 1) or a width (then the default color is black).
    A width of 1 indicates a single-pixel line on the border of the item being painted.
Example:
\qml
Rectangle {
border.width: 2
border.color: "red"
}
\endqml
*/
QQuickPen::QQuickPen(QObject *parent)
: QObject(parent)
, m_width(1)
, m_color("#000000")
, m_aligned(true)
, m_valid(false)
{
}
qreal QQuickPen::width() const
{
return m_width;
}
void QQuickPen::setWidth(qreal w)
{
if (m_width == w && m_valid)
return;
m_width = w;
m_valid = m_color.alpha() && (qRound(m_width) >= 1 || (!m_aligned && m_width > 0));
emit penChanged();
}
QColor QQuickPen::color() const
{
return m_color;
}
void QQuickPen::setColor(const QColor &c)
{
m_color = c;
m_valid = m_color.alpha() && (qRound(m_width) >= 1 || (!m_aligned && m_width > 0));
emit penChanged();
}
bool QQuickPen::pixelAligned() const
{
return m_aligned;
}
void QQuickPen::setPixelAligned(bool aligned)
{
if (aligned == m_aligned)
return;
m_aligned = aligned;
m_valid = m_color.alpha() && (qRound(m_width) >= 1 || (!m_aligned && m_width > 0));
emit penChanged();
}
bool QQuickPen::isValid() const
{
return m_valid;
}
/*!
\qmltype GradientStop
\instantiates QQuickGradientStop
\inqmlmodule QtQuick
\ingroup qtquick-visual-utility
\brief Defines the color at a position in a Gradient
\sa Gradient
*/
/*!
\qmlproperty real QtQuick::GradientStop::position
\qmlproperty color QtQuick::GradientStop::color
The position and color properties describe the color used at a given
position in a gradient, as represented by a gradient stop.
The default position is 0.0; the default color is black.
\sa Gradient
*/
QQuickGradientStop::QQuickGradientStop(QObject *parent)
: QObject(parent)
{
}
qreal QQuickGradientStop::position() const
{
return m_position;
}
void QQuickGradientStop::setPosition(qreal position)
{
m_position = position; updateGradient();
}
QColor QQuickGradientStop::color() const
{
return m_color;
}
void QQuickGradientStop::setColor(const QColor &color)
{
m_color = color; updateGradient();
}
void QQuickGradientStop::updateGradient()
{
if (QQuickGradient *grad = qobject_cast<QQuickGradient*>(parent()))
grad->doUpdate();
}
/*!
\qmltype Gradient
\instantiates QQuickGradient
\inqmlmodule QtQuick
\ingroup qtquick-visual-utility
\brief Defines a gradient fill
A gradient is defined by two or more colors, which will be blended seamlessly.
The colors are specified as a set of GradientStop child items, each of
which defines a position on the gradient from 0.0 to 1.0 and a color.
The position of each GradientStop is defined by setting its
\l{GradientStop::}{position} property; its color is defined using its
\l{GradientStop::}{color} property.
A gradient without any gradient stops is rendered as a solid white fill.
Note that this item is not a visual representation of a gradient. To display a
gradient, use a visual item (like \l Rectangle) which supports the use
of gradients.
\section1 Example Usage
\div {class="float-right"}
\inlineimage qml-gradient.png
\enddiv
The following example declares a \l Rectangle item with a gradient starting
with red, blending to yellow at one third of the height of the rectangle,
and ending with green:
\snippet qml/gradient.qml code
\clearfloat
\section1 Performance and Limitations
Calculating gradients can be computationally expensive compared to the use
of solid color fills or images. Consider using gradients for static items
in a user interface.
In Qt 5.0, only vertical, linear gradients can be applied to items. If you
need to apply different orientations of gradients, a combination of rotation
and clipping will need to be applied to the relevant items. This can
introduce additional performance requirements for your application.
The use of animations involving gradient stops may not give the desired
result. An alternative way to animate gradients is to use pre-generated
images or SVG drawings containing gradients.
\sa GradientStop
*/
/*!
\qmlproperty list<GradientStop> QtQuick::Gradient::stops
\default
This property holds the gradient stops describing the gradient.
By default, this property contains an empty list.
To set the gradient stops, define them as children of the Gradient.
*/
QQuickGradient::QQuickGradient(QObject *parent)
: QObject(parent)
{
}
QQuickGradient::~QQuickGradient()
{
}
QQmlListProperty<QQuickGradientStop> QQuickGradient::stops()
{
return QQmlListProperty<QQuickGradientStop>(this, m_stops);
}
QGradientStops QQuickGradient::gradientStops() const
{
QGradientStops stops;
for (int i = 0; i < m_stops.size(); ++i){
int j = 0;
while (j < stops.size() && stops.at(j).first < m_stops[i]->position())
j++;
stops.insert(j, QGradientStop(m_stops.at(i)->position(), m_stops.at(i)->color()));
}
return stops;
}
void QQuickGradient::doUpdate()
{
emit updated();
}
int QQuickRectanglePrivate::doUpdateSlotIdx = -1;
/*!
\qmltype Rectangle
\instantiates QQuickRectangle
\inqmlmodule QtQuick
\inherits Item
\ingroup qtquick-visual
\brief Paints a filled rectangle with an optional border
Rectangle items are used to fill areas with solid color or gradients, and/or
to provide a rectangular border.
\section1 Appearance
Each Rectangle item is painted using either a solid fill color, specified using
the \l color property, or a gradient, defined using a Gradient type and set
using the \l gradient property. If both a color and a gradient are specified,
the gradient is used.
You can add an optional border to a rectangle with its own color and thickness
by setting the \l border.color and \l border.width properties. Set the color
to "transparent" to paint a border without a fill color.
You can also create rounded rectangles using the \l radius property. Since this
introduces curved edges to the corners of a rectangle, it may be appropriate to
set the \l Item::antialiasing property to improve its appearance.
\section1 Example Usage
\div {class="float-right"}
\inlineimage declarative-rect.png
\enddiv
The following example shows the effects of some of the common properties on a
Rectangle item, which in this case is used to create a square:
\snippet qml/rectangle/rectangle.qml document
\clearfloat
\section1 Performance
Using the \l Item::antialiasing property improves the appearance of a rounded rectangle at
the cost of rendering performance. You should consider unsetting this property
for rectangles in motion, and only set it when they are stationary.
\sa Image
*/
QQuickRectangle::QQuickRectangle(QQuickItem *parent)
: QQuickItem(*(new QQuickRectanglePrivate), parent)
{
setFlag(ItemHasContents);
}
void QQuickRectangle::doUpdate()
{
update();
}
/*!
\qmlproperty bool QtQuick::Rectangle::antialiasing
Used to decide if the Rectangle should use antialiasing or not.
\l {Antialiasing} provides information on the performance implications
of this property.
The default is true for Rectangles with a radius, and false otherwise.
*/
/*!
\qmlpropertygroup QtQuick::Rectangle::border
\qmlproperty int QtQuick::Rectangle::border.width
\qmlproperty color QtQuick::Rectangle::border.color
The width and color used to draw the border of the rectangle.
A width of 1 creates a thin line. For no line, use a width of 0 or a transparent color.
\note The width of the rectangle's border does not affect the geometry of the
rectangle itself or its position relative to other items if anchors are used.
The border is rendered within the rectangle's boundaries.
*/
QQuickPen *QQuickRectangle::border()
{
Q_D(QQuickRectangle);
return d->getPen();
}
/*!
\qmlproperty Gradient QtQuick::Rectangle::gradient
The gradient to use to fill the rectangle.
This property allows for the construction of simple vertical gradients.
    Other gradients may be formed by adding rotation to the rectangle.
\div {class="float-left"}
\inlineimage declarative-rect_gradient.png
\enddiv
\snippet qml/rectangle/rectangle-gradient.qml rectangles
\clearfloat
If both a gradient and a color are specified, the gradient will be used.
\sa Gradient, color
*/
QQuickGradient *QQuickRectangle::gradient() const
{
Q_D(const QQuickRectangle);
return d->gradient;
}
void QQuickRectangle::setGradient(QQuickGradient *gradient)
{
Q_D(QQuickRectangle);
if (d->gradient == gradient)
return;
static int updatedSignalIdx = -1;
if (updatedSignalIdx < 0)
updatedSignalIdx = QMetaMethod::fromSignal(&QQuickGradient::updated).methodIndex();
if (d->doUpdateSlotIdx < 0)
d->doUpdateSlotIdx = QQuickRectangle::staticMetaObject.indexOfSlot("doUpdate()");
if (d->gradient)
QMetaObject::disconnect(d->gradient, updatedSignalIdx, this, d->doUpdateSlotIdx);
d->gradient = gradient;
if (d->gradient)
QMetaObject::connect(d->gradient, updatedSignalIdx, this, d->doUpdateSlotIdx);
update();
}
void QQuickRectangle::resetGradient()
{
setGradient(0);
}
/*!
\qmlproperty real QtQuick::Rectangle::radius
This property holds the corner radius used to draw a rounded rectangle.
If radius is non-zero, the rectangle will be painted as a rounded rectangle, otherwise it will be
painted as a normal rectangle. The same radius is used by all 4 corners; there is currently
no way to specify different radii for different corners.
*/
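// Illustrative QML usage of the radius property documented above (example only,
// not one of this file's qdoc snippets):
//
//     Rectangle {
//         width: 100; height: 100
//         radius: 10
//         antialiasing: true
//         color: "steelblue"
//     }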
qreal QQuickRectangle::radius() const
{
Q_D(const QQuickRectangle);
return d->radius;
}
void QQuickRectangle::setRadius(qreal radius)
{
Q_D(QQuickRectangle);
if (d->radius == radius)
return;
d->radius = radius;
d->setImplicitAntialiasing(radius != 0.0);
update();
emit radiusChanged();
}
/*!
\qmlproperty color QtQuick::Rectangle::color
This property holds the color used to fill the rectangle.
The default color is white.
\div {class="float-right"}
\inlineimage rect-color.png
\enddiv
The following example shows rectangles with colors specified
using hexadecimal and named color notation:
\snippet qml/rectangle/rectangle-colors.qml rectangles
\clearfloat
If both a gradient and a color are specified, the gradient will be used.
\sa gradient
*/
QColor QQuickRectangle::color() const
{
Q_D(const QQuickRectangle);
return d->color;
}
void QQuickRectangle::setColor(const QColor &c)
{
Q_D(QQuickRectangle);
if (d->color == c)
return;
d->color = c;
update();
emit colorChanged();
}
QSGNode *QQuickRectangle::updatePaintNode(QSGNode *oldNode, UpdatePaintNodeData *data)
{
Q_UNUSED(data);
Q_D(QQuickRectangle);
if (width() <= 0 || height() <= 0
|| (d->color.alpha() == 0 && (!d->pen || d->pen->width() == 0 || d->pen->color().alpha() == 0))) {
delete oldNode;
return 0;
}
QSGRectangleNode *rectangle = static_cast<QSGRectangleNode *>(oldNode);
if (!rectangle) rectangle = d->sceneGraphContext()->createRectangleNode();
rectangle->setRect(QRectF(0, 0, width(), height()));
rectangle->setColor(d->color);
if (d->pen && d->pen->isValid()) {
rectangle->setPenColor(d->pen->color());
rectangle->setPenWidth(d->pen->width());
rectangle->setAligned(d->pen->pixelAligned());
} else {
rectangle->setPenWidth(0);
}
rectangle->setRadius(d->radius);
rectangle->setAntialiasing(antialiasing());
QGradientStops stops;
if (d->gradient) {
stops = d->gradient->gradientStops();
}
rectangle->setGradientStops(stops);
rectangle->update();
return rectangle;
}
QT_END_NAMESPACE<|fim▁end|> | \brief For specifying a pen used for drawing rectangle borders on a QQuickView |
<|file_name|>ExifToolsScan.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
import os
import sys
import subprocess
import re
from common import list2cmdline
from common import sshexec
from common import SSH
subprocess.list2cmdline = list2cmdline
__author__ = "Drew Bonasera"
__license__ = "MPL 2.0"
TYPE = "Metadata"
NAME = "ExifTool"
# These are overwritten by the config file
HOST = ("MultiScanner", 22, "User")
KEY = os.path.join(os.path.realpath(os.path.dirname(sys.argv[0])), 'etc', 'id_rsa')
PATHREPLACE = "X:\\"
# Entries to be removed from the final results
REMOVEENTRY = ["ExifTool Version Number", "File Name", "Directory", "File Modification Date/Time",
"File Creation Date/Time", "File Access Date/Time", "File Permissions"]
DEFAULTCONF = {
'cmdline': ["-t"],
"path": "C:\\exiftool.exe",
"key": KEY,
'host': HOST,
"replacement path": PATHREPLACE,
'remove-entry': REMOVEENTRY,
'ENABLED': True
}
def check(conf=DEFAULTCONF):
if not conf['ENABLED']:
return False
if os.path.isfile(conf["path"]):
if 'replacement path' in conf:
del conf['replacement path']
return True
if SSH:
return True
else:
return False
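# Illustrative sketch (not part of the original module): how a driver might call
# into this module; the sample paths below are assumptions.
#
#     conf = dict(DEFAULTCONF)
#     if check(conf):
#         results, metadata = scan([r"C:\samples\a.exe", r"C:\samples\b.pdf"], conf)
#         # results  -> [(filename, {tag: value, ...}), ...]
#         # metadata -> {"Name": "ExifTool", "Type": "Metadata", ...}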
def scan(filelist, conf=DEFAULTCONF):
if os.path.isfile(conf["path"]):
local = True
elif SSH:
local = False
cmdline = conf["cmdline"]
results = []
output = ""
cmd = cmdline
for item in filelist:
cmd.append('"' + item + '" ')
cmd.insert(0, conf["path"])
host, port, user = conf["host"]
if local:
try:
output = subprocess.check_output(cmd)<|fim▁hole|> output = e.output
e.returncode
else:
try:
output = sshexec(host, list2cmdline(cmd), port=port, username=user, key_filename=conf["key"])
except Exception as e:
# TODO: log exception
return None
output = output.decode("utf-8", errors="ignore")
output = output.replace('\r', '')
reader = output.split('\n')
data = {}
fname = filelist[0]
for row in reader:
row = row.split('\t')
try:
if row[0].startswith('======== '):
if data:
results.append((fname, data))
data = {}
fname = row[0][9:]
if re.match('[A-Za-z]:/', fname):
# why exif tools, whyyyyyyyy
fname = fname.replace('/', '\\')
continue
except Exception as e:
# TODO: log exception
pass
try:
if row[0] not in conf['remove-entry']:
data[row[0]] = row[1]
except Exception as e:
# TODO: log exception
continue
if data:
results.append((fname, data))
data = {}
reader = None
# Gather metadata
metadata = {}
output = output.replace('\r', '')
reader = output.split('\n')
for row in reader:
row = row.split('\t')
if row and row[0] == "ExifTool Version Number":
metadata["Program version"] = row[1]
break
metadata["Name"] = NAME
metadata["Type"] = TYPE
return (results, metadata)<|fim▁end|> | except subprocess.CalledProcessError as e: |
<|file_name|>events.py<|end_file_name|><|fim▁begin|># sqlalchemy/events.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Core event interfaces."""
from . import event, exc
from .pool import Pool
from .engine import Connectable, Engine, Dialect
from .sql.base import SchemaEventTarget
class DDLEvents(event.Events):
"""
Define event listeners for schema objects,
that is, :class:`.SchemaItem` and other :class:`.SchemaEventTarget`
subclasses, including :class:`.MetaData`, :class:`.Table`,
:class:`.Column`.
:class:`.MetaData` and :class:`.Table` support events
specifically regarding when CREATE and DROP
DDL is emitted to the database.
Attachment events are also provided to customize
behavior whenever a child schema element is associated
with a parent, such as, when a :class:`.Column` is associated
with its :class:`.Table`, when a :class:`.ForeignKeyConstraint`
is associated with a :class:`.Table`, etc.
Example using the ``after_create`` event::
from sqlalchemy import event
        from sqlalchemy import Table, Column, MetaData, Integer
m = MetaData()
some_table = Table('some_table', m, Column('data', Integer))
def after_create(target, connection, **kw):
connection.execute("ALTER TABLE %s SET name=foo_%s" %
(target.name, target.name))
event.listen(some_table, "after_create", after_create)
DDL events integrate closely with the
:class:`.DDL` class and the :class:`.DDLElement` hierarchy
of DDL clause constructs, which are themselves appropriate
as listener callables::
from sqlalchemy import DDL
event.listen(
some_table,
"after_create",
DDL("ALTER TABLE %(table)s SET name=foo_%(table)s")
)
The methods here define the name of an event as well
as the names of members that are passed to listener
functions.
See also:
:ref:`event_toplevel`
:class:`.DDLElement`
:class:`.DDL`
:ref:`schema_ddl_sequences`
"""
_target_class_doc = "SomeSchemaClassOrObject"
_dispatch_target = SchemaEventTarget
def before_create(self, target, connection, **kw):
"""Called before CREATE statements are emitted.
:param target: the :class:`.MetaData` or :class:`.Table`
object which is the target of the event.
:param connection: the :class:`.Connection` where the
CREATE statement or statements will be emitted.
:param \**kw: additional keyword arguments relevant
to the event. The contents of this dictionary
may vary across releases, and include the
list of tables being generated for a metadata-level
event, the checkfirst flag, and other
elements used by internal events.
"""
def after_create(self, target, connection, **kw):
"""Called after CREATE statements are emitted.
:param target: the :class:`.MetaData` or :class:`.Table`
object which is the target of the event.
:param connection: the :class:`.Connection` where the
CREATE statement or statements have been emitted.
:param \**kw: additional keyword arguments relevant
to the event. The contents of this dictionary
may vary across releases, and include the
list of tables being generated for a metadata-level
event, the checkfirst flag, and other
elements used by internal events.
"""
def before_drop(self, target, connection, **kw):
"""Called before DROP statements are emitted.
:param target: the :class:`.MetaData` or :class:`.Table`
object which is the target of the event.
:param connection: the :class:`.Connection` where the
DROP statement or statements will be emitted.
:param \**kw: additional keyword arguments relevant
to the event. The contents of this dictionary
may vary across releases, and include the
list of tables being generated for a metadata-level
event, the checkfirst flag, and other
elements used by internal events.
"""
def after_drop(self, target, connection, **kw):
"""Called after DROP statements are emitted.
:param target: the :class:`.MetaData` or :class:`.Table`
object which is the target of the event.
:param connection: the :class:`.Connection` where the
DROP statement or statements have been emitted.
:param \**kw: additional keyword arguments relevant
to the event. The contents of this dictionary
may vary across releases, and include the
list of tables being generated for a metadata-level
event, the checkfirst flag, and other
elements used by internal events.
"""
def before_parent_attach(self, target, parent):
"""Called before a :class:`.SchemaItem` is associated with
a parent :class:`.SchemaItem`.
:param target: the target object
:param parent: the parent to which the target is being attached.
:func:`.event.listen` also accepts a modifier for this event:
:param propagate=False: When True, the listener function will
be established for any copies made of the target object,
i.e. those copies that are generated when
:meth:`.Table.tometadata` is used.
"""
def after_parent_attach(self, target, parent):
"""Called after a :class:`.SchemaItem` is associated with
a parent :class:`.SchemaItem`.
:param target: the target object
:param parent: the parent to which the target is being attached.
:func:`.event.listen` also accepts a modifier for this event:
:param propagate=False: When True, the listener function will
be established for any copies made of the target object,
i.e. those copies that are generated when
:meth:`.Table.tometadata` is used.
"""
def column_reflect(self, inspector, table, column_info):
"""Called for each unit of 'column info' retrieved when
a :class:`.Table` is being reflected.
The dictionary of column information as returned by the
dialect is passed, and can be modified. The dictionary
is that returned in each element of the list returned
by :meth:`.reflection.Inspector.get_columns`.
The event is called before any action is taken against
this dictionary, and the contents can be modified.
The :class:`.Column` specific arguments ``info``, ``key``,
and ``quote`` can also be added to the dictionary and
will be passed to the constructor of :class:`.Column`.
Note that this event is only meaningful if either
associated with the :class:`.Table` class across the
board, e.g.::
from sqlalchemy.schema import Table
from sqlalchemy import event
def listen_for_reflect(inspector, table, column_info):
"receive a column_reflect event"
# ...
event.listen(
Table,
'column_reflect',
listen_for_reflect)
...or with a specific :class:`.Table` instance using
the ``listeners`` argument::
def listen_for_reflect(inspector, table, column_info):
"receive a column_reflect event"
# ...
t = Table(
'sometable',
autoload=True,
listeners=[
('column_reflect', listen_for_reflect)
])
This because the reflection process initiated by ``autoload=True``
completes within the scope of the constructor for :class:`.Table`.
"""
class PoolEvents(event.Events):
"""Available events for :class:`.Pool`.
The methods here define the name of an event as well
as the names of members that are passed to listener
functions.
e.g.::
from sqlalchemy import event
def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
"handle an on checkout event"
event.listen(Pool, 'checkout', my_on_checkout)
In addition to accepting the :class:`.Pool` class and
:class:`.Pool` instances, :class:`.PoolEvents` also accepts
:class:`.Engine` objects and the :class:`.Engine` class as
targets, which will be resolved to the ``.pool`` attribute of the
given engine or the :class:`.Pool` class::
engine = create_engine("postgresql://scott:tiger@localhost/test")
# will associate with engine.pool
event.listen(engine, 'checkout', my_on_checkout)
"""
_target_class_doc = "SomeEngineOrPool"
_dispatch_target = Pool
@classmethod
def _accept_with(cls, target):
if isinstance(target, type):
if issubclass(target, Engine):
return Pool
elif issubclass(target, Pool):
return target
elif isinstance(target, Engine):
return target.pool
else:
return target
def connect(self, dbapi_connection, connection_record):
"""Called at the moment a particular DBAPI connection is first
created for a given :class:`.Pool`.
This event allows one to capture the point directly after which
the DBAPI module-level ``.connect()`` method has been used in order
to produce a new DBAPI connection.
:param dbapi_connection: a DBAPI connection.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
"""
def first_connect(self, dbapi_connection, connection_record):
"""Called exactly once for the first time a DBAPI connection is
checked out from a particular :class:`.Pool`.
The rationale for :meth:`.PoolEvents.first_connect` is to determine
information about a particular series of database connections based
on the settings used for all connections. Since a particular
:class:`.Pool` refers to a single "creator" function (which in terms
of a :class:`.Engine` refers to the URL and connection options used),
it is typically valid to make observations about a single connection
that can be safely assumed to be valid about all subsequent
connections, such as the database version, the server and client
encoding settings, collation settings, and many others.
:param dbapi_connection: a DBAPI connection.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
"""
def checkout(self, dbapi_connection, connection_record, connection_proxy):
"""Called when a connection is retrieved from the Pool.
:param dbapi_connection: a DBAPI connection.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
:param connection_proxy: the :class:`._ConnectionFairy` object which
will proxy the public interface of the DBAPI connection for the
lifespan of the checkout.
If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the current
connection will be disposed and a fresh connection retrieved.
Processing of all checkout listeners will abort and restart
using the new connection.
.. seealso:: :meth:`.ConnectionEvents.engine_connect` - a similar event
which occurs upon creation of a new :class:`.Connection`.
"""
def checkin(self, dbapi_connection, connection_record):
"""Called when a connection returns to the pool.
Note that the connection may be closed, and may be None if the
connection has been invalidated. ``checkin`` will not be called
for detached connections. (They do not return to the pool.)
:param dbapi_connection: a DBAPI connection.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
"""
    def reset(self, dbapi_connection, connection_record):
"""Called before the "reset" action occurs for a pooled connection.
This event represents
when the ``rollback()`` method is called on the DBAPI connection
before it is returned to the pool. The behavior of "reset" can
be controlled, including disabled, using the ``reset_on_return``
pool argument.
The :meth:`.PoolEvents.reset` event is usually followed by the
        :meth:`.PoolEvents.checkin` event, except in those
cases where the connection is discarded immediately after reset.
:param dbapi_connection: a DBAPI connection.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
.. versionadded:: 0.8
.. seealso::
:meth:`.ConnectionEvents.rollback`
:meth:`.ConnectionEvents.commit`
"""
def invalidate(self, dbapi_connection, connection_record, exception):
"""Called when a DBAPI connection is to be "invalidated".
This event is called any time the :meth:`._ConnectionRecord.invalidate`
method is invoked, either from API usage or via "auto-invalidation".
The event occurs before a final attempt to call ``.close()`` on the
connection occurs.
:param dbapi_connection: a DBAPI connection.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
:param exception: the exception object corresponding to the reason
for this invalidation, if any. May be ``None``.
.. versionadded:: 0.9.2 Added support for connection invalidation
listening.
.. seealso::
:ref:`pool_connection_invalidation`
"""
class ConnectionEvents(event.Events):
"""Available events for :class:`.Connectable`, which includes
:class:`.Connection` and :class:`.Engine`.
The methods here define the name of an event as well as the names of
members that are passed to listener functions.
An event listener can be associated with any :class:`.Connectable`
class or instance, such as an :class:`.Engine`, e.g.::
from sqlalchemy import event, create_engine
def before_cursor_execute(conn, cursor, statement, parameters, context,
executemany):
log.info("Received statement: %s" % statement)
engine = create_engine('postgresql://scott:tiger@localhost/test')
event.listen(engine, "before_cursor_execute", before_cursor_execute)
or with a specific :class:`.Connection`::
with engine.begin() as conn:
@event.listens_for(conn, 'before_cursor_execute')
def before_cursor_execute(conn, cursor, statement, parameters,
context, executemany):
log.info("Received statement: %s" % statement)
When the methods are called with a `statement` parameter, such as in
:meth:`.after_cursor_execute`, :meth:`.before_cursor_execute` and
:meth:`.dbapi_error`, the statement is the exact SQL string that was
prepared for transmission to the DBAPI ``cursor`` in the connection's
:class:`.Dialect`.
The :meth:`.before_execute` and :meth:`.before_cursor_execute`
events can also be established with the ``retval=True`` flag, which
allows modification of the statement and parameters to be sent
to the database. The :meth:`.before_cursor_execute` event is
particularly useful here to add ad-hoc string transformations, such
as comments, to all executions::
from sqlalchemy.engine import Engine
from sqlalchemy import event
@event.listens_for(Engine, "before_cursor_execute", retval=True)
def comment_sql_calls(conn, cursor, statement, parameters,
context, executemany):
statement = statement + " -- some comment"
return statement, parameters
.. note:: :class:`.ConnectionEvents` can be established on any
combination of :class:`.Engine`, :class:`.Connection`, as well
as instances of each of those classes. Events across all
four scopes will fire off for a given instance of
:class:`.Connection`. However, for performance reasons, the
:class:`.Connection` object determines at instantiation time
whether or not its parent :class:`.Engine` has event listeners
established. Event listeners added to the :class:`.Engine`
class or to an instance of :class:`.Engine` *after* the instantiation
of a dependent :class:`.Connection` instance will usually
*not* be available on that :class:`.Connection` instance. The newly
added listeners will instead take effect for :class:`.Connection`
instances created subsequent to those event listeners being
established on the parent :class:`.Engine` class or instance.
:param retval=False: Applies to the :meth:`.before_execute` and
:meth:`.before_cursor_execute` events only. When True, the
user-defined event function must have a return value, which
is a tuple of parameters that replace the given statement
and parameters. See those methods for a description of
specific return arguments.
.. versionchanged:: 0.8 :class:`.ConnectionEvents` can now be associated
with any :class:`.Connectable` including :class:`.Connection`,
in addition to the existing support for :class:`.Engine`.
"""
_target_class_doc = "SomeEngine"
_dispatch_target = Connectable
@classmethod
def _listen(cls, event_key, retval=False):
target, identifier, fn = \
event_key.dispatch_target, event_key.identifier, \
event_key._listen_fn
target._has_events = True
if not retval:
if identifier == 'before_execute':
orig_fn = fn
def wrap_before_execute(conn, clauseelement,
multiparams, params):
orig_fn(conn, clauseelement, multiparams, params)
return clauseelement, multiparams, params
fn = wrap_before_execute
elif identifier == 'before_cursor_execute':
orig_fn = fn
def wrap_before_cursor_execute(conn, cursor, statement,
parameters, context,
executemany):
orig_fn(conn, cursor, statement,
parameters, context, executemany)
return statement, parameters
fn = wrap_before_cursor_execute
elif retval and \
identifier not in ('before_execute',
'before_cursor_execute', 'handle_error'):
raise exc.ArgumentError(
"Only the 'before_execute', "
"'before_cursor_execute' and 'handle_error' engine "
"event listeners accept the 'retval=True' "
"argument.")
event_key.with_wrapper(fn).base_listen()
def before_execute(self, conn, clauseelement, multiparams, params):
"""Intercept high level execute() events, receiving uncompiled
SQL constructs and other objects prior to rendering into SQL.
This event is good for debugging SQL compilation issues as well
as early manipulation of the parameters being sent to the database,
as the parameter lists will be in a consistent format here.
This event can be optionally established with the ``retval=True``
flag. The ``clauseelement``, ``multiparams``, and ``params``
arguments should be returned as a three-tuple in this case::
@event.listens_for(Engine, "before_execute", retval=True)
            def before_execute(conn, clauseelement, multiparams, params):
# do something with clauseelement, multiparams, params
return clauseelement, multiparams, params
:param conn: :class:`.Connection` object
:param clauseelement: SQL expression construct, :class:`.Compiled`
instance, or string statement passed to :meth:`.Connection.execute`.
:param multiparams: Multiple parameter sets, a list of dictionaries.
:param params: Single parameter set, a single dictionary.
See also:
:meth:`.before_cursor_execute`
"""
def after_execute(self, conn, clauseelement, multiparams, params, result):
"""Intercept high level execute() events after execute.
:param conn: :class:`.Connection` object
:param clauseelement: SQL expression construct, :class:`.Compiled`
instance, or string statement passed to :meth:`.Connection.execute`.
:param multiparams: Multiple parameter sets, a list of dictionaries.
:param params: Single parameter set, a single dictionary.
:param result: :class:`.ResultProxy` generated by the execution.
"""
def before_cursor_execute(self, conn, cursor, statement,
parameters, context, executemany):
"""Intercept low-level cursor execute() events before execution,
receiving the string SQL statement and DBAPI-specific parameter list to
be invoked against a cursor.
This event is a good choice for logging as well as late modifications
to the SQL string. It's less ideal for parameter modifications except
for those which are specific to a target backend.
This event can be optionally established with the ``retval=True``
flag. The ``statement`` and ``parameters`` arguments should be
returned as a two-tuple in this case::
@event.listens_for(Engine, "before_cursor_execute", retval=True)
def before_cursor_execute(conn, cursor, statement,
parameters, context, executemany):
# do something with statement, parameters
return statement, parameters
See the example at :class:`.ConnectionEvents`.
:param conn: :class:`.Connection` object
:param cursor: DBAPI cursor object
:param statement: string SQL statement, as to be passed to the DBAPI
:param parameters: Dictionary, tuple, or list of parameters being
passed to the ``execute()`` or ``executemany()`` method of the
DBAPI ``cursor``. In some cases may be ``None``.
:param context: :class:`.ExecutionContext` object in use. May
be ``None``.
:param executemany: boolean, if ``True``, this is an ``executemany()``
call, if ``False``, this is an ``execute()`` call.
See also:
:meth:`.before_execute`
:meth:`.after_cursor_execute`
"""
def after_cursor_execute(self, conn, cursor, statement,
parameters, context, executemany):
"""Intercept low-level cursor execute() events after execution.
:param conn: :class:`.Connection` object
:param cursor: DBAPI cursor object. Will have results pending
if the statement was a SELECT, but these should not be consumed
as they will be needed by the :class:`.ResultProxy`.
:param statement: string SQL statement, as passed to the DBAPI
:param parameters: Dictionary, tuple, or list of parameters being
passed to the ``execute()`` or ``executemany()`` method of the
DBAPI ``cursor``. In some cases may be ``None``.
:param context: :class:`.ExecutionContext` object in use. May
be ``None``.
:param executemany: boolean, if ``True``, this is an ``executemany()``
call, if ``False``, this is an ``execute()`` call.
"""
def dbapi_error(self, conn, cursor, statement, parameters,
context, exception):
"""Intercept a raw DBAPI error.
This event is called with the DBAPI exception instance
received from the DBAPI itself, *before* SQLAlchemy wraps the
        exception with its own exception wrappers, and before any
other operations are performed on the DBAPI cursor; the
existing transaction remains in effect as well as any state
on the cursor.
The use case here is to inject low-level exception handling
into an :class:`.Engine`, typically for logging and
debugging purposes.
.. warning::
Code should **not** modify
any state or throw any exceptions here as this will
interfere with SQLAlchemy's cleanup and error handling
routines. For exception modification, please refer to the
new :meth:`.ConnectionEvents.handle_error` event.
Subsequent to this hook, SQLAlchemy may attempt any
number of operations on the connection/cursor, including
closing the cursor, rolling back of the transaction in the
case of connectionless execution, and disposing of the entire
connection pool if a "disconnect" was detected. The
exception is then wrapped in a SQLAlchemy DBAPI exception
wrapper and re-thrown.
:param conn: :class:`.Connection` object
:param cursor: DBAPI cursor object
:param statement: string SQL statement, as passed to the DBAPI
:param parameters: Dictionary, tuple, or list of parameters being
passed to the ``execute()`` or ``executemany()`` method of the
DBAPI ``cursor``. In some cases may be ``None``.
:param context: :class:`.ExecutionContext` object in use. May
be ``None``.
:param exception: The **unwrapped** exception emitted directly from the
DBAPI. The class here is specific to the DBAPI module in use.
.. deprecated:: 0.9.7 - replaced by
:meth:`.ConnectionEvents.handle_error`
"""
def handle_error(self, exception_context):
"""Intercept all exceptions processed by the :class:`.Connection`.
This includes all exceptions emitted by the DBAPI as well as
within SQLAlchemy's statement invocation process, including
encoding errors and other statement validation errors. Other areas
in which the event is invoked include transaction begin and end,
result row fetching, cursor creation.
Note that :meth:`.handle_error` may support new kinds of exceptions
and new calling scenarios at *any time*. Code which uses this
event must expect new calling patterns to be present in minor
releases.
To support the wide variety of members that correspond to an exception,
as well as to allow extensibility of the event without backwards
incompatibility, the sole argument received is an instance of
:class:`.ExceptionContext`. This object contains data members
representing detail about the exception.
Use cases supported by this hook include:
* read-only, low-level exception handling for logging and
debugging purposes
* exception re-writing
The hook is called while the cursor from the failed operation
(if any) is still open and accessible. Special cleanup operations
can be called on this cursor; SQLAlchemy will attempt to close
this cursor subsequent to this hook being invoked. If the connection
is in "autocommit" mode, the transaction also remains open within
the scope of this hook; the rollback of the per-statement transaction
also occurs after the hook is called.
The user-defined event handler has two options for replacing
the SQLAlchemy-constructed exception into one that is user
defined. It can either raise this new exception directly, in
which case all further event listeners are bypassed and the
        exception will be raised, after appropriate cleanup has taken
place::
@event.listens_for(Engine, "handle_error")
def handle_exception(context):
if isinstance(context.original_exception,
psycopg2.OperationalError) and \\
"failed" in str(context.original_exception):
raise MySpecialException("failed operation")
Alternatively, a "chained" style of event handling can be
used, by configuring the handler with the ``retval=True``
modifier and returning the new exception instance from the
function. In this case, event handling will continue onto the
next handler. The "chained" exception is available using
:attr:`.ExceptionContext.chained_exception`::
@event.listens_for(Engine, "handle_error", retval=True)
def handle_exception(context):
if context.chained_exception is not None and \\
"special" in context.chained_exception.message:
return MySpecialException("failed",
cause=context.chained_exception)
Handlers that return ``None`` may remain within this chain; the
last non-``None`` return value is the one that continues to be
passed to the next handler.
When a custom exception is raised or returned, SQLAlchemy raises
this new exception as-is, it is not wrapped by any SQLAlchemy
object. If the exception is not a subclass of
:class:`sqlalchemy.exc.StatementError`,
certain features may not be available; currently this includes
the ORM's feature of adding a detail hint about "autoflush" to
exceptions raised within the autoflush process.
:param context: an :class:`.ExceptionContext` object. See this
class for details on all available members.
.. versionadded:: 0.9.7 Added the
:meth:`.ConnectionEvents.handle_error` hook.
"""
def engine_connect(self, conn, branch):
"""Intercept the creation of a new :class:`.Connection`.
This event is called typically as the direct result of calling
the :meth:`.Engine.connect` method.
It differs from the :meth:`.PoolEvents.connect` method, which
refers to the actual connection to a database at the DBAPI level;
a DBAPI connection may be pooled and reused for many operations.
In contrast, this event refers only to the production of a higher level
:class:`.Connection` wrapper around such a DBAPI connection.
It also differs from the :meth:`.PoolEvents.checkout` event
in that it is specific to the :class:`.Connection` object, not the
DBAPI connection that :meth:`.PoolEvents.checkout` deals with, although
this DBAPI connection is available here via the
:attr:`.Connection.connection` attribute. But note there can in fact
be multiple :meth:`.PoolEvents.checkout` events within the lifespan
of a single :class:`.Connection` object, if that :class:`.Connection`
is invalidated and re-established. There can also be multiple
:class:`.Connection` objects generated for the same already-checked-out
DBAPI connection, in the case that a "branch" of a :class:`.Connection`
is produced.
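        For example, a listener that initializes per-connection state while
        skipping such branched connections might look like::
            @event.listens_for(Engine, "engine_connect")
            def receive_engine_connect(conn, branch):
                if branch:
                    return
                # ... set up state tied to this new Connection here ...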
:param conn: :class:`.Connection` object.
:param branch: if True, this is a "branch" of an existing
:class:`.Connection`. A branch is generated within the course
of a statement execution to invoke supplemental statements, most
typically to pre-execute a SELECT of a default value for the purposes
of an INSERT statement.
.. versionadded:: 0.9.0
.. seealso::
:meth:`.PoolEvents.checkout` the lower-level pool checkout event
for an individual DBAPI connection
:meth:`.ConnectionEvents.set_connection_execution_options` - a copy
of a :class:`.Connection` is also made when the
:meth:`.Connection.execution_options` method is called.
"""
def set_connection_execution_options(self, conn, opts):
"""Intercept when the :meth:`.Connection.execution_options`
method is called.
This method is called after the new :class:`.Connection` has been
produced, with the newly updated execution options collection, but
before the :class:`.Dialect` has acted upon any of those new options.
Note that this method is not called when a new :class:`.Connection`
is produced which is inheriting execution options from its parent
:class:`.Engine`; to intercept this condition, use the
:meth:`.ConnectionEvents.engine_connect` event.
:param conn: The newly copied :class:`.Connection` object
:param opts: dictionary of options that were passed to the
:meth:`.Connection.execution_options` method.
.. versionadded:: 0.9.0
.. seealso::
:meth:`.ConnectionEvents.set_engine_execution_options` - event
which is called when :meth:`.Engine.execution_options` is called.
"""
def set_engine_execution_options(self, engine, opts):
"""Intercept when the :meth:`.Engine.execution_options`
method is called.
The :meth:`.Engine.execution_options` method produces a shallow
copy of the :class:`.Engine` which stores the new options. That new
:class:`.Engine` is passed here. A particular application of this
method is to add a :meth:`.ConnectionEvents.engine_connect` event
handler to the given :class:`.Engine` which will perform some per-
:class:`.Connection` task specific to these execution options.
        :param engine: The newly copied :class:`.Engine` object
:param opts: dictionary of options that were passed to the
         :meth:`.Engine.execution_options` method.
.. versionadded:: 0.9.0
.. seealso::
:meth:`.ConnectionEvents.set_connection_execution_options` - event
which is called when :meth:`.Connection.execution_options` is
called.
"""
def begin(self, conn):
"""Intercept begin() events.
:param conn: :class:`.Connection` object
"""
def rollback(self, conn):
"""Intercept rollback() events, as initiated by a
:class:`.Transaction`.
Note that the :class:`.Pool` also "auto-rolls back"
a DBAPI connection upon checkin, if the ``reset_on_return``
flag is set to its default value of ``'rollback'``.
To intercept this
rollback, use the :meth:`.PoolEvents.reset` hook.
:param conn: :class:`.Connection` object
.. seealso::
:meth:`.PoolEvents.reset`
"""
def commit(self, conn):
"""Intercept commit() events, as initiated by a
:class:`.Transaction`.
Note that the :class:`.Pool` may also "auto-commit"
a DBAPI connection upon checkin, if the ``reset_on_return``
flag is set to the value ``'commit'``. To intercept this
commit, use the :meth:`.PoolEvents.reset` hook.
:param conn: :class:`.Connection` object
"""
def savepoint(self, conn, name):
"""Intercept savepoint() events.
:param conn: :class:`.Connection` object
:param name: specified name used for the savepoint.
"""
def rollback_savepoint(self, conn, name, context):
"""Intercept rollback_savepoint() events.
:param conn: :class:`.Connection` object
:param name: specified name used for the savepoint.
:param context: :class:`.ExecutionContext` in use. May be ``None``.
"""
def release_savepoint(self, conn, name, context):
"""Intercept release_savepoint() events.
:param conn: :class:`.Connection` object
:param name: specified name used for the savepoint.
:param context: :class:`.ExecutionContext` in use. May be ``None``.
"""
def begin_twophase(self, conn, xid):
"""Intercept begin_twophase() events.
:param conn: :class:`.Connection` object
:param xid: two-phase XID identifier
"""
def prepare_twophase(self, conn, xid):
"""Intercept prepare_twophase() events.
:param conn: :class:`.Connection` object
:param xid: two-phase XID identifier
"""
def rollback_twophase(self, conn, xid, is_prepared):
"""Intercept rollback_twophase() events.
:param conn: :class:`.Connection` object
:param xid: two-phase XID identifier
:param is_prepared: boolean, indicates if
:meth:`.TwoPhaseTransaction.prepare` was called.
"""
def commit_twophase(self, conn, xid, is_prepared):
"""Intercept commit_twophase() events.
:param conn: :class:`.Connection` object
:param xid: two-phase XID identifier
:param is_prepared: boolean, indicates if
:meth:`.TwoPhaseTransaction.prepare` was called.
"""
class DialectEvents(event.Events):
"""event interface for execution-replacement functions.
These events allow direct instrumentation and replacement
of key dialect functions which interact with the DBAPI.
.. note::
:class:`.DialectEvents` hooks should be considered **semi-public**
and experimental.
These hooks are not for general use and are only for those situations
where intricate re-statement of DBAPI mechanics must be injected onto
an existing dialect. For general-use statement-interception events,
please use the :class:`.ConnectionEvents` interface.
.. seealso::
:meth:`.ConnectionEvents.before_cursor_execute`
:meth:`.ConnectionEvents.before_execute`
:meth:`.ConnectionEvents.after_cursor_execute`
:meth:`.ConnectionEvents.after_execute`
.. versionadded:: 0.9.4
"""
_target_class_doc = "SomeEngine"
_dispatch_target = Dialect
@classmethod
def _listen(cls, event_key, retval=False):
target, identifier, fn = \
event_key.dispatch_target, event_key.identifier, event_key.fn
target._has_events = True
event_key.base_listen()
@classmethod
def _accept_with(cls, target):
if isinstance(target, type):
if issubclass(target, Engine):
return Dialect
elif issubclass(target, Dialect):
return target
elif isinstance(target, Engine):
return target.dialect
else:
return target
def do_executemany(self, cursor, statement, parameters, context):
"""Receive a cursor to have executemany() called.
Return the value True to halt further events from invoking,
and to indicate that the cursor execution has already taken
place within the event handler.
<|fim▁hole|>
Return the value True to halt further events from invoking,
and to indicate that the cursor execution has already taken
place within the event handler.
"""
def do_execute(self, cursor, statement, parameters, context):
"""Receive a cursor to have execute() called.
Return the value True to halt further events from invoking,
and to indicate that the cursor execution has already taken
place within the event handler.
"""<|fim▁end|> | """
def do_execute_no_params(self, cursor, statement, context):
"""Receive a cursor to have execute() with no parameters called. |
<|file_name|>extrema_pickle.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Fri July 6 11:04:03 2015
@author: boland
"""
import os
import datetime
import numpy as np
import multiprocessing as mp
import matplotlib.pyplot as plt
from scipy import signal
from obspy import read
from scipy.signal import argrelextrema
from info_dataless import locs_from_dataless
from scipy import interpolate
from matplotlib.colors import LogNorm
import pickle
import fiona
from shapely import geometry
from shapely.geometry import asPolygon, Polygon
from math import sqrt, radians, cos, sin, asin
from descartes.patch import PolygonPatch
from scipy.spatial import ConvexHull
from scipy.cluster.vq import kmeans
from shapely.affinity import scale
from matplotlib.path import Path
from scipy.interpolate import griddata
#------------------------------------------------------------------------------
# CLASSES
#------------------------------------------------------------------------------
class InShape:
"""
Class defined in order to define a shapefile boundary AND quickly check
if a given set of coordinates is contained within it. This class uses
the shapely module.
"""
def __init__(self, input_shape, coords=0.):
#initialise boundary shapefile location string input
self.boundary = input_shape
#initialise coords shape input
self.dots = coords
#initialise boundary polygon
self.polygon = 0.
#initialise output coordinates that are contained within the polygon
self.output = 0.
def shape_poly(self):
with fiona.open(self.boundary) as fiona_collection:
            # In this case, we'll assume the shapefile only has one layer
            shapefile_record = next(fiona_collection)
# Use Shapely to create the polygon
self.polygon = geometry.asShape( shapefile_record['geometry'] )
return self.polygon
def point_check(self, coord):
"""
Function that takes a single (2,1) shape input, converts the points<|fim▁hole|> is contained within the shapefile.
"""
self.polygon = self.shape_poly()
point = geometry.Point(coord[0], coord[1])
if self.polygon.contains(point):
return coord
def shape_bounds(self):
"""
Function that returns the bounding box coordinates xmin,xmax,ymin,ymax
"""
self.polygon = self.shape_poly()
return self.polygon.bounds
def shape_buffer(self, shape=None, size=1., res=1):
"""
Function that returns a new polygon of the larger buffered points.
Can import polygon into function if desired. Default is
self.shape_poly()
"""
if shape is None:
self.polygon = self.shape_poly()
return asPolygon(self.polygon.buffer(size, resolution=res)\
.exterior)
def extract_poly_coords(self, poly):
if poly.type == 'Polygon':
exterior_coords = poly.exterior.coords[:]
elif poly.type == 'MultiPolygon':
exterior_coords = []
for part in poly:
epc = np.asarray(self.extract_poly_coords(part)) # Recursive call
exterior_coords.append(epc)
else:
raise ValueError('Unhandled geometry type: ' + repr(poly.type))
return np.vstack(exterior_coords)
def external_coords(self, shape=None, buff=None, size=1., res=1):
"""
Function that returns the external coords of a buffered shapely
polygon. Note that shape variable input
MUST be a shapely Polygon object.
"""
if shape is not None and buff is not None:
poly = self.shape_buffer(shape=shape, size=size, res=res)
elif shape is not None:
poly = shape
else:
poly = self.shape_poly()
exterior_coords = self.extract_poly_coords(poly)
return exterior_coords
#------------------------------------------------------------------------------
# IMPORT PATHS TO MSEED FILES
#------------------------------------------------------------------------------
def spectrum(tr):
wave = tr.data #this is how to extract a data array from a mseed file
fs = tr.stats.sampling_rate
#hour = str(hour).zfill(2) #create correct format for eqstring
f, Pxx_spec = signal.welch(wave, fs, 'flattop', nperseg=1024, scaling='spectrum')
#plt.semilogy(f, np.sqrt(Pxx_spec))
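    # keep only the first 255 frequency bins (presumably so all traces yield
    # arrays of the same shape); shorter spectra return 0. below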
if len(f) >= 256:
column = np.column_stack((f[:255], np.abs(np.sqrt(Pxx_spec)[:255])))
return column
else:
return 0.
# x = np.linspace(0, 10, 1000)
# f_interp = interp1d(np.sqrt(Pxx_spec),f, kind='cubic')
#x.reverse()
#y.reverse()
# print f_interp(x)
#f,np.sqrt(Pxx_spec),'o',
# plt.figure()
# plt.plot(x,f_interp(x),'-' )
# plt.show()
def paths_sort(path):
"""
Function defined for customised sorting of the abs_paths list
and will be used in conjunction with the sorted() built in python
function in order to produce file paths in chronological order.
"""
base_name = os.path.basename(path)
stat_name = base_name.split('.')[0]
date = base_name.split('.')[1]
try:
date = datetime.datetime.strptime(date, '%Y-%m-%d')
return date, stat_name
    except Exception:
        # file name does not match the expected <station>.<YYYY-MM-DD> pattern
        return None
def paths(folder_path, extension):
"""
Function that returns a list of desired absolute paths called abs_paths
of files that contains a given extension e.g. .txt should be entered as
folder_path, txt. This function will run recursively through and find
any and all files within this folder with that extension!
"""
abs_paths = []
for root, dirs, files in os.walk(folder_path):
for f in files:
fullpath = os.path.join(root, f)
if os.path.splitext(fullpath)[1] == '.{}'.format(extension):
abs_paths.append(fullpath)
abs_paths = sorted(abs_paths, key=paths_sort)
return abs_paths
# import background shapefile location
shape_path = "/home/boland/Dropbox/University/UniMelb\
/AGOS/PROGRAMS/ANT/Versions/26.04.2015/shapefiles/aus.shp"
# generate shape object
# Generate InShape class
SHAPE = InShape(shape_path)
# Create shapely polygon from imported shapefile
UNIQUE_SHAPE = SHAPE.shape_poly()
# set plotting limits for shapefile boundaries
lonmin, latmin, lonmax, latmax = SHAPE.shape_bounds()
print(lonmin, latmin, lonmax, latmax)
#lonmin, lonmax, latmin, latmax = SHAPE.plot_lims()
dataless_path = 'ALL_AUSTRALIA.870093.dataless'
stat_locs = locs_from_dataless(dataless_path)
#folder_path = '/storage/ANT/INPUT/DATA/AU-2014'
folder_path = '/storage/ANT/INPUT/DATA/AU-2014'
extension = 'mseed'
paths_list = paths(folder_path, extension)
t0_total = datetime.datetime.now()
figs_counter = 0
pickle_file = '/storage/ANT/spectral_density/station_pds_maxima/\
S Network 2014/noise_info0_SNetwork2014.pickle'
f = open(pickle_file, mode='rb')
noise_info0 = pickle.load(f)
f.close()
# dump noise_info1
fig = plt.figure(figsize=(15,10), dpi=1000)
plt.title('Average Seismic Noise First Peak Maximum PDS\n S Network | 2014')
plt.xlabel('Longitude (degrees)')
plt.ylabel('Latitude (degrees)')
patch = PolygonPatch(UNIQUE_SHAPE, facecolor='white',\
edgecolor='k', zorder=1)
ax = fig.add_subplot(111)
ax.add_patch(patch)
#create 5000 Random points distributed within the circle radius 100
x, y = noise_info0[:,0], noise_info0[:,1]
points = np.column_stack((x,y))
xmin, xmax = np.min(x), np.max(x)
ymin, ymax = np.min(y), np.max(y)
values = noise_info0[:,2]
#now we create a grid of values, interpolated from our random sample above
y = np.linspace(ymin, ymax, 100)
x = np.linspace(xmin, xmax, 100)
gridx, gridy = np.meshgrid(x, y)
heat_field = griddata(points, values, (gridx, gridy), method='cubic',fill_value=0)
print(heat_field)
heat_field = np.where(heat_field < 0, 1, heat_field)
heat_field = np.ma.masked_where(heat_field==0,heat_field)
plt.pcolor(gridx, gridy, heat_field,
cmap='rainbow',alpha=0.5, norm=LogNorm(vmin=100, vmax=3e4),
zorder=2)
plt.scatter(noise_info0[:,0], noise_info0[:,1], c=noise_info0[:,2],
norm=LogNorm(vmin=100, vmax=3e4), s=35, cmap='rainbow', zorder=3)
#cmin, cmax = np.min(noise_info0[:,2]), np.max(noise_info0[:,2])
#sc = plt.scatter(noise_info0[:,0], noise_info0[:,1], c=noise_info0[:,2],
# norm=LogNorm(vmin=100, vmax=3e4), s=50, cmap=cm, zorder=2)
col = plt.colorbar()
col.ax.set_ylabel('Maximum Power Density Spectrum (V RMS)')
ax.set_xlim(lonmin-0.05*abs(lonmax-lonmin), \
lonmax+0.05*abs(lonmax-lonmin))
ax.set_ylim(latmin-0.05*abs(latmax-latmin), \
latmax+0.05*abs(latmax-latmin))
fig.savefig('station_pds_maxima/check1.svg', format='SVG')<|fim▁end|> | into a shapely.geometry.Point object and then checks if the coord |
<|file_name|>pkg.develspace.context.pc.py<|end_file_name|><|fim▁begin|># generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []<|fim▁hole|>PROJECT_NAME = "hector_imu_tools"
PROJECT_SPACE_DIR = "/home/trevor/ROS/catkin_ws/devel"
PROJECT_VERSION = "0.3.3"<|fim▁end|> | |
<|file_name|>common.go<|end_file_name|><|fim▁begin|>// Copyright 2020 The LUCI Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package experiments
import (
"context"
"go.chromium.org/luci/common/errors"
bbpb "go.chromium.org/luci/buildbucket/proto"
swarmingpb "go.chromium.org/luci/swarming/proto/api"
)
var knownExperiments = map[string]Experiment{}
// Experiment mutates `task` based on `b`.
type Experiment func(ctx context.Context, b *bbpb.Build, task *swarmingpb.TaskRequest) error
// Register registers a known experiment given its key and implementation.
func Register(key string, exp Experiment) {
knownExperiments[key] = exp
}
<|fim▁hole|>func Apply(ctx context.Context, b *bbpb.Build, task *swarmingpb.TaskRequest) error {
for _, name := range b.GetInput().GetExperiments() {
if exp, ok := knownExperiments[name]; ok {
if err := exp(ctx, b, task); err != nil {
return errors.Annotate(err, "experiment %q", name).Err()
}
}
}
return nil
}<|fim▁end|> | // Apply mutates `task` based on known experiments in b.Input.Experiments. |
<|file_name|>clap_app.rs<|end_file_name|><|fim▁begin|>/*
Precached - A Linux process monitor and pre-caching daemon
Copyright (C) 2017-2020 the precached developers
This file is part of precached.
Precached is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Precached is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Precached. If not, see <http://www.gnu.org/licenses/>.
*/
use clap::{App, AppSettings, Arg, SubCommand};
use crate::constants;
use crate::i18n;
pub fn get_app<'a, 'b>() -> App<'a, 'b>
where
'a: 'b,
{
App::new("precachedtop")
.version(env!("CARGO_PKG_VERSION"))
.author("X3n0m0rph59 <[email protected]>")
// .versionless_subcommands(true)
// .subcommand_required_else_help(true)
.setting(AppSettings::GlobalVersion)<|fim▁hole|> .short("c")
.long("config")
.value_name("file")
.help(tr!("precachedtop-config-file"))
.default_value(constants::CONFIG_FILE)
.takes_value(true),
)
.arg(
Arg::with_name("v")
.short("v")
.multiple(true)
.help(tr!("precachedtop-output-verbosity")),
)
.subcommand(
SubCommand::with_name("help").setting(AppSettings::DeriveDisplayOrder), // .about(tr!("precachedtop-help")),
)
.subcommand(
SubCommand::with_name("completions")
.setting(AppSettings::Hidden)
// .about(tr!("precachedtop-completions"))
.arg(
Arg::with_name("SHELL")
.required(true)
.possible_values(&["bash", "fish", "zsh", "powershell"])
.help(tr!("precachedtop-completions-shell")),
),
)
}<|fim▁end|> | .setting(AppSettings::DeriveDisplayOrder)
.arg(
Arg::with_name("config") |
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap;
use std::fmt;
use std::fs::File;
use std::io::{BufReader, BufWriter, Write};
use std::path::{Path, PathBuf};
use log::{debug, warn};
use crate::errors::{Error, Result};
use crate::keywords::sanitize_keyword;
use crate::parser::file_descriptor;
fn sizeof_varint(v: u32) -> usize {
match v {
0x0..=0x7F => 1,
0x80..=0x3FFF => 2,
0x4000..=0x1F_FFFF => 3,
0x20_0000..=0xFFF_FFFF => 4,
_ => 5,
}
}
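// e.g. sizeof_varint(300) == 2: 300 (0x12C) needs two 7-bit groups on the wire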
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Syntax {
Proto2,
Proto3,
}
impl Default for Syntax {
fn default() -> Syntax {
Syntax::Proto2
}
}
#[derive(Debug, Clone, PartialEq)]
pub enum Frequency {
Optional,
Repeated,
Required,
}
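/// Index path leading from the file's top-level messages down to a (possibly nested) message.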
#[derive(Clone, PartialEq, Eq, Hash, Default)]
pub struct MessageIndex {
indexes: Vec<usize>,
}
impl fmt::Debug for MessageIndex {
fn fmt(&self, f: &mut fmt::Formatter) -> ::std::result::Result<(), fmt::Error> {
f.debug_set().entries(self.indexes.iter()).finish()
}
}
impl MessageIndex {
pub fn get_message<'a>(&self, desc: &'a FileDescriptor) -> &'a Message {
let first_message = self.indexes.first().and_then(|i| desc.messages.get(*i));
self.indexes
.iter()
.skip(1)
.fold(first_message, |cur, next| {
cur.and_then(|msg| msg.messages.get(*next))
})
.expect("Message index not found")
}
fn get_message_mut<'a>(&self, desc: &'a mut FileDescriptor) -> &'a mut Message {
let first_message = self
.indexes
.first()
.and_then(move |i| desc.messages.get_mut(*i));
self.indexes
.iter()
.skip(1)
.fold(first_message, |cur, next| {
cur.and_then(|msg| msg.messages.get_mut(*next))
})
.expect("Message index not found")
}
fn push(&mut self, i: usize) {
self.indexes.push(i);
}
fn pop(&mut self) {
self.indexes.pop();
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)]
pub struct EnumIndex {
msg_index: MessageIndex,
index: usize,
}
impl EnumIndex {
pub fn get_enum<'a>(&self, desc: &'a FileDescriptor) -> &'a Enumerator {
let enums = if self.msg_index.indexes.is_empty() {
&desc.enums
} else {
&self.msg_index.get_message(desc).enums
};
enums.get(self.index).expect("Enum index not found")
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum FieldType {
Int32,
Int64,
Uint32,
Uint64,
Sint32,
Sint64,
Bool,
Enum(EnumIndex),
Fixed64,
Sfixed64,
Double,
StringCow,
BytesCow,
String_,
Bytes_,
Message(MessageIndex),
MessageOrEnum(String),
Fixed32,
Sfixed32,
Float,
Map(Box<FieldType>, Box<FieldType>),
}
impl FieldType {
pub fn is_primitive(&self) -> bool {
match *self {
FieldType::Message(_)
| FieldType::Map(_, _)
| FieldType::StringCow
| FieldType::BytesCow
| FieldType::String_
| FieldType::Bytes_ => false,
_ => true,
}
}
fn has_cow(&self) -> bool {
match *self {
FieldType::BytesCow | FieldType::StringCow => true,
FieldType::Map(ref k, ref v) => k.has_cow() || v.has_cow(),
_ => false,
}
}
fn has_bytes_and_string(&self) -> bool {
match *self {
FieldType::Bytes_ | FieldType::String_ => true,
_ => false,
}
}
fn is_map(&self) -> bool {
match *self {
FieldType::Map(_, _) => true,
_ => false,
}
}
fn wire_type_num(&self, packed: bool) -> u32 {
if packed {
2
} else {
self.wire_type_num_non_packed()
}
}
fn wire_type_num_non_packed(&self) -> u32 {
/*
0 Varint int32, int64, uint32, uint64, sint32, sint64, bool, enum
1 64-bit fixed64, sfixed64, double
2 Length-delimited string, bytes, embedded messages, packed repeated fields
3 Start group groups (deprecated)
4 End group groups (deprecated)
5 32-bit fixed32, sfixed32, float
*/
match *self {
FieldType::Int32
| FieldType::Sint32
| FieldType::Int64
| FieldType::Sint64
| FieldType::Uint32
| FieldType::Uint64
| FieldType::Bool
| FieldType::Enum(_) => 0,
FieldType::Fixed64 | FieldType::Sfixed64 | FieldType::Double => 1,
FieldType::StringCow
| FieldType::BytesCow
| FieldType::String_
| FieldType::Bytes_
| FieldType::Message(_)
| FieldType::Map(_, _) => 2,
FieldType::Fixed32 | FieldType::Sfixed32 | FieldType::Float => 5,
FieldType::MessageOrEnum(_) => unreachable!("Message / Enum not resolved"),
}
}
fn proto_type(&self) -> &str {
match *self {
FieldType::Int32 => "int32",
FieldType::Sint32 => "sint32",
FieldType::Int64 => "int64",
FieldType::Sint64 => "sint64",
FieldType::Uint32 => "uint32",
FieldType::Uint64 => "uint64",
FieldType::Bool => "bool",
FieldType::Enum(_) => "enum",
FieldType::Fixed32 => "fixed32",
FieldType::Sfixed32 => "sfixed32",
FieldType::Float => "float",
FieldType::Fixed64 => "fixed64",
FieldType::Sfixed64 => "sfixed64",
FieldType::Double => "double",
FieldType::String_ => "string",
FieldType::Bytes_ => "bytes",
FieldType::StringCow => "string",
FieldType::BytesCow => "bytes",
FieldType::Message(_) => "message",
FieldType::Map(_, _) => "map",
FieldType::MessageOrEnum(_) => unreachable!("Message / Enum not resolved"),
}
}
fn is_fixed_size(&self) -> bool {
match self.wire_type_num_non_packed() {
1 | 5 => true,
_ => false,
}
}
fn regular_default<'a, 'b>(&'a self, desc: &'b FileDescriptor) -> Option<&'b str> {
match *self {
FieldType::Int32 => Some("0i32"),
FieldType::Sint32 => Some("0i32"),
FieldType::Int64 => Some("0i64"),
FieldType::Sint64 => Some("0i64"),
FieldType::Uint32 => Some("0u32"),
FieldType::Uint64 => Some("0u64"),
FieldType::Bool => Some("false"),
FieldType::Fixed32 => Some("0u32"),
FieldType::Sfixed32 => Some("0i32"),
FieldType::Float => Some("0f32"),
FieldType::Fixed64 => Some("0u64"),
FieldType::Sfixed64 => Some("0i64"),
FieldType::Double => Some("0f64"),
FieldType::StringCow => Some("\"\""),
FieldType::BytesCow => Some("Cow::Borrowed(b\"\")"),
FieldType::String_ => Some("String::default()"),
FieldType::Bytes_ => Some("vec![]"),
FieldType::Enum(ref e) => {
let e = e.get_enum(desc);
Some(&*e.fully_qualified_fields[0].0)
}
FieldType::Message(_) => None,
FieldType::Map(_, _) => None,
FieldType::MessageOrEnum(_) => unreachable!("Message / Enum not resolved"),
}
}
pub fn message(&self) -> Option<&MessageIndex> {
if let FieldType::Message(ref m) = self {
Some(m)
} else {
None
}
}
fn has_lifetime(
&self,
desc: &FileDescriptor,
packed: bool,
ignore: &mut Vec<MessageIndex>,
) -> bool {
match *self {
FieldType::StringCow | FieldType::BytesCow => true, // Cow<[u8]>
FieldType::Message(ref m) => m.get_message(desc).has_lifetime(desc, ignore),
FieldType::Fixed64
| FieldType::Sfixed64
| FieldType::Double
| FieldType::Fixed32
| FieldType::Sfixed32
| FieldType::String_
| FieldType::Bytes_
| FieldType::Float => packed, // Cow<[M]>
FieldType::Map(ref key, ref value) => {
key.has_lifetime(desc, false, ignore) || value.has_lifetime(desc, false, ignore)
}
_ => false,
}
}
fn rust_type(&self, desc: &FileDescriptor) -> Result<String> {
Ok(match *self {
FieldType::Int32 | FieldType::Sint32 | FieldType::Sfixed32 => "i32".to_string(),
FieldType::Int64 | FieldType::Sint64 | FieldType::Sfixed64 => "i64".to_string(),
FieldType::Uint32 | FieldType::Fixed32 => "u32".to_string(),
FieldType::Uint64 | FieldType::Fixed64 => "u64".to_string(),
FieldType::Double => "f64".to_string(),
FieldType::Float => "f32".to_string(),
FieldType::StringCow => "Cow<'a, str>".to_string(),
FieldType::BytesCow => "Cow<'a, [u8]>".to_string(),
FieldType::String_ => "String".to_string(),
FieldType::Bytes_ => "Vec<u8>".to_string(),
FieldType::Bool => "bool".to_string(),
FieldType::Enum(ref e) => {
let e = e.get_enum(desc);
format!("{}{}", e.get_modules(desc), e.name)
}
FieldType::Message(ref msg) => {
let m = msg.get_message(desc);
let lifetime = if m.has_lifetime(desc, &mut Vec::new()) {
"<'a>"
} else {
""
};
format!("{}{}{}", m.get_modules(desc), m.name, lifetime)
}
FieldType::Map(ref key, ref value) => format!(
"KVMap<{}, {}>",
key.rust_type(desc)?,
value.rust_type(desc)?
),
FieldType::MessageOrEnum(_) => unreachable!("Message / Enum not resolved"),
})
}
/// Returns the relevant function to read the data, both for regular and Cow wrapped
fn read_fn(&self, desc: &FileDescriptor) -> Result<(String, String)> {
Ok(match *self {
FieldType::Message(ref msg) => {
let m = msg.get_message(desc);
let m = format!(
"r.read_message::<{}{}>(bytes)?",
m.get_modules(desc),
m.name
);
(m.clone(), m)
}
FieldType::Map(_, _) => return Err(Error::ReadFnMap),
FieldType::StringCow | FieldType::BytesCow => {
let m = format!("r.read_{}(bytes)", self.proto_type());
let cow = format!("{}.map(Cow::Borrowed)?", m);
(m, cow)
}
FieldType::String_ => {
let m = format!("r.read_{}(bytes)", self.proto_type());
let vec = format!("{}?.to_owned()", m);
(m, vec)
}
FieldType::Bytes_ => {
let m = format!("r.read_{}(bytes)", self.proto_type());
let vec = format!("{}?.to_owned()", m);
(m, vec)
}
FieldType::MessageOrEnum(_) => unreachable!("Message / Enum not resolved"),
_ => {
let m = format!("r.read_{}(bytes)?", self.proto_type());
(m.clone(), m)
}
})
}
fn get_size(&self, s: &str) -> String {
match *self {
FieldType::Int32
| FieldType::Int64
| FieldType::Uint32
| FieldType::Uint64
| FieldType::Bool
| FieldType::Enum(_) => format!("sizeof_varint(*({}) as u64)", s),
FieldType::Sint32 => format!("sizeof_sint32(*({}))", s),
FieldType::Sint64 => format!("sizeof_sint64(*({}))", s),
FieldType::Fixed64 | FieldType::Sfixed64 | FieldType::Double => "8".to_string(),
FieldType::Fixed32 | FieldType::Sfixed32 | FieldType::Float => "4".to_string(),
FieldType::StringCow | FieldType::BytesCow => format!("sizeof_len(({}).len())", s),
FieldType::String_ | FieldType::Bytes_ => format!("sizeof_len(({}).len())", s),
FieldType::Message(_) => format!("sizeof_len(({}).get_size())", s),
FieldType::Map(ref k, ref v) => {
format!("2 + {} + {}", k.get_size("k"), v.get_size("v"))
}
FieldType::MessageOrEnum(_) => unreachable!("Message / Enum not resolved"),
}
}
fn get_write(&self, s: &str, boxed: bool) -> String {
match *self {
FieldType::Enum(_) => format!("write_enum(*{} as i32)", s),
FieldType::Int32
| FieldType::Sint32
| FieldType::Int64
| FieldType::Sint64
| FieldType::Uint32
| FieldType::Uint64
| FieldType::Bool
| FieldType::Fixed64
| FieldType::Sfixed64
| FieldType::Double
| FieldType::Fixed32
| FieldType::Sfixed32
| FieldType::Float => format!("write_{}(*{})", self.proto_type(), s),
FieldType::StringCow => format!("write_string(&**{})", s),
FieldType::BytesCow => format!("write_bytes(&**{})", s),
FieldType::String_ => format!("write_string(&**{})", s),
FieldType::Bytes_ => format!("write_bytes(&**{})", s),
FieldType::Message(_) if boxed => format!("write_message(&**{})", s),
FieldType::Message(_) => format!("write_message({})", s),
FieldType::Map(ref k, ref v) => format!(
"write_map({}, {}, |w| w.{}, {}, |w| w.{})",
self.get_size(""),
tag(1, k, false),
k.get_write("k", false),
tag(2, v, false),
v.get_write("v", false)
),
FieldType::MessageOrEnum(_) => unreachable!("Message / Enum not resolved"),
}
}
}
#[derive(Debug, Clone)]
pub struct Field {
pub name: String,
pub frequency: Frequency,
pub typ: FieldType,
pub number: i32,
pub default: Option<String>,
pub packed: Option<bool>,
pub boxed: bool,
pub deprecated: bool,
}
impl Field {
fn packed(&self) -> bool {
self.packed.unwrap_or(false)
}
fn sanitize_default(&mut self, desc: &FileDescriptor) -> Result<()> {
if let Some(ref mut d) = self.default {
*d = match &*self.typ.rust_type(desc)? {
"u32" => format!("{}u32", *d),
"u64" => format!("{}u64", *d),
"i32" => format!("{}i32", *d),
"i64" => format!("{}i64", *d),
"f32" => match &*d.to_lowercase() {
"inf" => "::core::f32::INFINITY".to_string(),
"-inf" => "::core::f32::NEG_INFINITY".to_string(),
"nan" => "::core::f32::NAN".to_string(),
_ => format!("{}f32", *d),
},
"f64" => match &*d.to_lowercase() {
"inf" => "::core::f64::INFINITY".to_string(),
"-inf" => "::core::f64::NEG_INFINITY".to_string(),
"nan" => "::core::f64::NAN".to_string(),
_ => format!("{}f64", *d),
},
"Cow<'a, str>" => format!("Cow::Borrowed({})", d),
"Cow<'a, [u8]>" => format!("Cow::Borrowed(b{})", d),
"String" => format!("String::from({})", d),
"Bytes" => format!(r#"b{}"#, d),
"Vec<u8>" => format!("b{}.to_vec()", d),
"bool" => format!("{}", d.parse::<bool>().unwrap()),
e => format!("{}::{}", e, d), // enum, as message and map do not have defaults
}
}
Ok(())
}
fn has_regular_default(&self, desc: &FileDescriptor) -> bool {
self.default.is_none()
|| self.default.as_ref().map(|d| &**d) == self.typ.regular_default(desc)
}
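    // Protobuf field key: (field_number << 3) | wire_type; packed repeated
    // fields always use the length-delimited wire type (2).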
fn tag(&self) -> u32 {
tag(self.number as u32, &self.typ, self.packed())
}
fn write_definition<W: Write>(
&self,
w: &mut W,
desc: &FileDescriptor,
config: &Config,
) -> Result<()> {
if self.deprecated {
if config.add_deprecated_fields {
writeln!(w, " #[deprecated]")?;
} else {
return Ok(());
}
}
write!(w, " pub {}: ", self.name)?;
let rust_type = self.typ.rust_type(desc)?;
match self.frequency {
_ if self.boxed => writeln!(w, "Option<Box<{}>>,", rust_type)?,
Frequency::Optional
if desc.syntax == Syntax::Proto2 && self.default.is_none()
|| self.typ.message().is_some() =>
{
writeln!(w, "Option<{}>,", rust_type)?
}
Frequency::Repeated
if self.packed() && self.typ.is_fixed_size() && !config.dont_use_cow =>
{
writeln!(w, "Cow<'a, [{}]>,", rust_type)?;
}
Frequency::Repeated => writeln!(w, "Vec<{}>,", rust_type)?,
Frequency::Required | Frequency::Optional => writeln!(w, "{},", rust_type)?,
}
Ok(())
}
fn write_match_tag<W: Write>(&self, w: &mut W, desc: &FileDescriptor, config: &Config) -> Result<()> {
if self.deprecated && !config.add_deprecated_fields {
return Ok(());
}
// special case for FieldType::Map: destructure tuple before inserting in HashMap
if let FieldType::Map(ref key, ref value) = self.typ {
writeln!(w, " Ok({}) => {{", self.tag())?;
writeln!(
w,
" let (key, value) = \
r.read_map(bytes, |r, bytes| Ok({}), |r, bytes| Ok({}))?;",
key.read_fn(desc)?.1,
value.read_fn(desc)?.1
)?;
writeln!(
w,
" msg.{}.insert(key, value);",
self.name
)?;
writeln!(w, " }}")?;
return Ok(());
}
let (val, val_cow) = self.typ.read_fn(desc)?;
let name = &self.name;
write!(w, " Ok({}) => ", self.tag())?;
match self.frequency {
_ if self.boxed => writeln!(w, "msg.{} = Some(Box::new({})),", name, val)?,
Frequency::Optional
if desc.syntax == Syntax::Proto2 && self.default.is_none()
|| self.typ.message().is_some() =>
{
writeln!(w, "msg.{} = Some({}),", name, val_cow)?
}
Frequency::Required | Frequency::Optional => {
writeln!(w, "msg.{} = {},", name, val_cow)?
}
Frequency::Repeated if self.packed() && self.typ.is_fixed_size() => {
writeln!(w, "msg.{} = r.read_packed_fixed(bytes)?.into(),", name)?;
}
Frequency::Repeated if self.packed() => {
writeln!(
w,
"msg.{} = r.read_packed(bytes, |r, bytes| Ok({}))?,",
name, val_cow
)?;
}
Frequency::Repeated => writeln!(w, "msg.{}.push({}),", name, val_cow)?,
}
Ok(())
}
fn write_get_size<W: Write>(&self, w: &mut W, desc: &FileDescriptor, config: &Config) -> Result<()> {
if self.deprecated && !config.add_deprecated_fields {
return Ok(());
}
write!(w, " + ")?;
let tag_size = sizeof_varint(self.tag());
match self.frequency {
Frequency::Optional
if desc.syntax == Syntax::Proto2 || self.typ.message().is_some() =>
{
// TODO this might be incorrect behavior for proto2
match self.default.as_ref() {
None => {
write!(w, "self.{}.as_ref().map_or(0, ", self.name)?;
if self.typ.is_fixed_size() {
writeln!(w, "|_| {} + {})", tag_size, self.typ.get_size(""))?;
} else {
writeln!(w, "|m| {} + {})", tag_size, self.typ.get_size("m"))?;
}
}
Some(d) => {
writeln!(
w,
"if self.{} == {} {{ 0 }} else {{ {} + {} }}",
self.name,
d,
tag_size,
self.typ.get_size(&format!("&self.{}", self.name))
)?;
}
}
}
Frequency::Required if self.typ.is_map() => {
writeln!(
w,
"self.{}.iter().map(|(k, v)| {} + sizeof_len({})).sum::<usize>()",
self.name,
tag_size,
self.typ.get_size("")
)?;
}
Frequency::Optional => match self.typ {
FieldType::Bytes_ => writeln!(
w,
"if self.{}.is_empty() {{ 0 }} else {{ {} + {} }}",
self.name,
tag_size,
self.typ.get_size(&format!("&self.{}", self.name))
)?,
_ => writeln!(
w,
"if self.{} == {} {{ 0 }} else {{ {} + {} }}",
self.name,
self.default.as_ref().map_or_else(
|| self.typ.regular_default(desc).unwrap_or("None"),
|s| s.as_str()
),
tag_size,
self.typ.get_size(&format!("&self.{}", self.name))
)?,
},
Frequency::Required => writeln!(
w,
"{} + {}",
tag_size,
self.typ.get_size(&format!("&self.{}", self.name))
)?,
Frequency::Repeated => {
if self.packed() {
write!(
w,
"if self.{}.is_empty() {{ 0 }} else {{ {} + ",
self.name, tag_size
)?;
match self.typ.wire_type_num_non_packed() {
1 => writeln!(w, "sizeof_len(self.{}.len() * 8) }}", self.name)?,
5 => writeln!(w, "sizeof_len(self.{}.len() * 4) }}", self.name)?,
_ => writeln!(
w,
"sizeof_len(self.{}.iter().map(|s| {}).sum::<usize>()) }}",
self.name,
self.typ.get_size("s")
)?,
}
} else {
match self.typ.wire_type_num_non_packed() {
1 => writeln!(w, "({} + 8) * self.{}.len()", tag_size, self.name)?,
5 => writeln!(w, "({} + 4) * self.{}.len()", tag_size, self.name)?,
_ => writeln!(
w,
"self.{}.iter().map(|s| {} + {}).sum::<usize>()",
self.name,
tag_size,
self.typ.get_size("s")
)?,
}
}
}
}
Ok(())
}
fn write_write<W: Write>(&self, w: &mut W, desc: &FileDescriptor, config: &Config) -> Result<()> {
if self.deprecated && !config.add_deprecated_fields {
return Ok(());
}
match self.frequency {
Frequency::Optional
if desc.syntax == Syntax::Proto2 || self.typ.message().is_some() =>
{
match self.default.as_ref() {
None => {
writeln!(
w,
" if let Some(ref s) = \
self.{} {{ w.write_with_tag({}, |w| w.{})?; }}",
self.name,
self.tag(),
self.typ.get_write("s", self.boxed)
)?;
}
Some(d) => {
writeln!(
w,
" if self.{} != {} {{ w.write_with_tag({}, |w| w.{})?; }}",
self.name,
d,
self.tag(),
self.typ
.get_write(&format!("&self.{}", self.name), self.boxed)
)?;
}
}
}
Frequency::Optional => match self.typ {
FieldType::Bytes_ => {
writeln!(
w,
" if !self.{}.is_empty() {{ w.write_with_tag({}, |w| w.{})?; }}",
self.name,
self.tag(),
self.typ
.get_write(&format!("&self.{}", self.name), self.boxed)
)?;
}
_ => {
writeln!(
w,
" if self.{} != {} {{ w.write_with_tag({}, |w| w.{})?; }}",
self.name,
self.default.as_ref().map_or_else(
|| self.typ.regular_default(desc).unwrap_or("None"),
|s| s.as_str()
),
self.tag(),
self.typ
.get_write(&format!("&self.{}", self.name), self.boxed)
)?;
}
},
Frequency::Required if self.typ.is_map() => {
writeln!(
w,
" for (k, v) in self.{}.iter() {{ w.write_with_tag({}, |w| w.{})?; }}",
self.name,
self.tag(),
self.typ.get_write("", false)
)?;
}
Frequency::Required => {
writeln!(
w,
" w.write_with_tag({}, |w| w.{})?;",
self.tag(),
self.typ
.get_write(&format!("&self.{}", self.name), self.boxed)
)?;
}
Frequency::Repeated if self.packed() && self.typ.is_fixed_size() => writeln!(
w,
" w.write_packed_fixed_with_tag({}, &self.{})?;",
self.tag(),
self.name
)?,
Frequency::Repeated if self.packed() => writeln!(
w,
" w.write_packed_with_tag({}, &self.{}, |w, m| w.{}, &|m| {})?;",
self.tag(),
self.name,
self.typ.get_write("m", self.boxed),
self.typ.get_size("m")
)?,
Frequency::Repeated => {
writeln!(
w,
" for s in &self.{} {{ w.write_with_tag({}, |w| w.{})?; }}",
self.name,
self.tag(),
self.typ.get_write("s", self.boxed)
)?;
}
}
Ok(())
}
}
fn get_modules(module: &str, imported: bool, desc: &FileDescriptor) -> String {
let skip = if desc.package.is_empty() && !imported {
1
} else {
0
};
module
.split('.')
.filter(|p| !p.is_empty())
.skip(skip)
.map(|p| format!("{}::", p))
.collect()
}
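// Converts a dotted module path (e.g. "foo.mod_Bar") into a Rust path prefix
// ("foo::mod_Bar::"), skipping the file-level component for local types that
// were declared without a package.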
#[derive(Debug, Clone, Default)]
pub struct Message {
pub name: String,
pub fields: Vec<Field>,
pub oneofs: Vec<OneOf>,
pub reserved_nums: Option<Vec<i32>>,
pub reserved_names: Option<Vec<String>>,
pub imported: bool,
pub package: String, // package from imports + nested items
pub messages: Vec<Message>, // nested messages
pub enums: Vec<Enumerator>, // nested enums
pub module: String, // 'package' corresponding to actual generated Rust module
pub path: PathBuf,
pub import: PathBuf,
pub index: MessageIndex,
}
impl Message {
fn convert_field_types(&mut self, from: &FieldType, to: &FieldType) {
for f in self.all_fields_mut().filter(|f| f.typ == *from) {
f.typ = to.clone();
}
        // If the field type appears inside a map (as its key and/or value type), convert it there as well.
for f in self.all_fields_mut() {
let new_type: FieldType = match f.typ {
FieldType::Map(ref mut key, ref mut value)
if **key == *from && **value == *from =>
{
FieldType::Map(Box::new(to.clone()), Box::new(to.clone()))
}
FieldType::Map(ref mut key, ref mut value) if **key == *from => {
FieldType::Map(Box::new(to.clone()), value.clone())
}
FieldType::Map(ref mut key, ref mut value) if **value == *from => {
FieldType::Map(key.clone(), Box::new(to.clone()))
}
ref other => other.clone(),
};
f.typ = new_type;
}
for message in &mut self.messages {
message.convert_field_types(from, to);
}
}
fn has_lifetime(&self, desc: &FileDescriptor, ignore: &mut Vec<MessageIndex>) -> bool {
if ignore.contains(&&self.index) {
return false;
}
ignore.push(self.index.clone());
let res = self
.all_fields()
.any(|f| f.typ.has_lifetime(desc, f.packed(), ignore));
ignore.pop();
res
}
fn set_imported(&mut self) {
self.imported = true;
for o in self.oneofs.iter_mut() {
o.imported = true;
}
for m in self.messages.iter_mut() {
m.set_imported();
}
for e in self.enums.iter_mut() {
e.imported = true;
}
}
fn get_modules(&self, desc: &FileDescriptor) -> String {
get_modules(&self.module, self.imported, desc)<|fim▁hole|> fn is_unit(&self) -> bool {
self.fields.is_empty() && self.oneofs.is_empty()
}
fn write_common_uses<W: Write>(
w: &mut W,
messages: &Vec<Message>,
desc: &FileDescriptor,
config: &Config,
) -> Result<()> {
if config.nostd {
writeln!(w, "use alloc::vec::Vec;")?;
}
if !config.dont_use_cow {
if messages.iter().any(|m| {
m.all_fields()
.any(|f| (f.typ.has_cow() || (f.packed() && f.typ.is_fixed_size())))
}) {
if config.nostd {
writeln!(w, "use alloc::borrow::Cow;")?;
} else {
writeln!(w, "use std::borrow::Cow;")?;
}
}
} else if config.nostd {
if messages
.iter()
.any(|m| m.all_fields().any(|f| (f.typ.has_bytes_and_string())))
{
writeln!(w, "use alloc::borrow::ToOwned;")?;
}
}
if config.nostd
&& messages.iter().any(|m| {
desc.owned && m.has_lifetime(desc, &mut Vec::new())
|| m.all_fields().any(|f| f.boxed)
})
{
writeln!(w)?;
writeln!(w, "use alloc::boxed::Box;")?;
}
if messages
.iter()
.filter(|m| !m.imported)
.any(|m| m.all_fields().any(|f| f.typ.is_map()))
{
if config.hashbrown {
writeln!(w, "use hashbrown::HashMap;")?;
writeln!(w, "type KVMap<K, V> = HashMap<K, V>;")?;
} else if config.nostd {
writeln!(w, "use alloc::collections::BTreeMap;")?;
writeln!(w, "type KVMap<K, V> = BTreeMap<K, V>;")?;
} else {
writeln!(w, "use std::collections::HashMap;")?;
writeln!(w, "type KVMap<K, V> = HashMap<K, V>;")?;
}
}
Ok(())
}
fn write<W: Write>(&self, w: &mut W, desc: &FileDescriptor, config: &Config) -> Result<()> {
println!("Writing message {}{}", self.get_modules(desc), self.name);
writeln!(w)?;
self.write_definition(w, desc, config)?;
writeln!(w)?;
self.write_impl_message_read(w, desc, config)?;
writeln!(w)?;
self.write_impl_message_write(w, desc, config)?;
if config.gen_info {
self.write_impl_message_info(w, desc, config)?;
writeln!(w)?;
}
if desc.owned && self.has_lifetime(desc, &mut Vec::new()) {
writeln!(w)?;
self.write_impl_owned(w, config)?;
}
if !(self.messages.is_empty() && self.enums.is_empty() && self.oneofs.is_empty()) {
writeln!(w)?;
writeln!(w, "pub mod mod_{} {{", self.name)?;
writeln!(w)?;
Self::write_common_uses(w, &self.messages, desc, config)?;
if !self.messages.is_empty() || !self.oneofs.is_empty() {
writeln!(w, "use super::*;")?;
}
for m in &self.messages {
m.write(w, desc, config)?;
}
for e in &self.enums {
e.write(w)?;
}
for o in &self.oneofs {
o.write(w, desc, config)?;
}
writeln!(w)?;
writeln!(w, "}}")?;
}
Ok(())
}
fn write_definition<W: Write>(
&self,
w: &mut W,
desc: &FileDescriptor,
config: &Config,
) -> Result<()> {
let mut custom_struct_derive = config.custom_struct_derive.join(", ");
if !custom_struct_derive.is_empty() {
custom_struct_derive += ", ";
}
writeln!(
w,
"#[derive({}Debug, Default, PartialEq, Clone)]",
custom_struct_derive
)?;
if let Some(repr) = &config.custom_repr {
writeln!(w, "#[repr({})]", repr)?;
}
if self.is_unit() {
writeln!(w, "pub struct {} {{ }}", self.name)?;
return Ok(());
}
let mut ignore = Vec::new();
if config.dont_use_cow {
ignore.push(self.index.clone());
}
if self.has_lifetime(desc, &mut ignore) {
writeln!(w, "pub struct {}<'a> {{", self.name)?;
} else {
writeln!(w, "pub struct {} {{", self.name)?;
}
for f in &self.fields {
f.write_definition(w, desc, config)?;
}
for o in &self.oneofs {
o.write_message_definition(w, desc)?;
}
writeln!(w, "}}")?;
Ok(())
}
fn write_impl_message_info<W: Write>(
&self,
w: &mut W,
desc: &FileDescriptor,
config: &Config,
) -> Result<()> {
let mut ignore = Vec::new();
if config.dont_use_cow {
ignore.push(self.index.clone());
}
if self.has_lifetime(desc, &mut ignore) {
writeln!(w, "impl<'a> MessageInfo for {}<'a> {{", self.name)?;
} else {
writeln!(w, "impl MessageInfo for {} {{", self.name)?;
}
writeln!(
w,
" const PATH : &'static str = \"{}.{}\";",
self.module, self.name
)?;
writeln!(w, "}}")?;
Ok(())
}
fn write_impl_message_read<W: Write>(
&self,
w: &mut W,
desc: &FileDescriptor,
config: &Config,
) -> Result<()> {
if self.is_unit() {
writeln!(w, "impl<'a> MessageRead<'a> for {} {{", self.name)?;
writeln!(
w,
" fn from_reader(r: &mut BytesReader, _: &[u8]) -> Result<Self> {{"
)?;
writeln!(w, " r.read_to_end();")?;
writeln!(w, " Ok(Self::default())")?;
writeln!(w, " }}")?;
writeln!(w, "}}")?;
return Ok(());
}
let mut ignore = Vec::new();
if config.dont_use_cow {
ignore.push(self.index.clone());
}
if self.has_lifetime(desc, &mut ignore) {
writeln!(w, "impl<'a> MessageRead<'a> for {}<'a> {{", self.name)?;
writeln!(
w,
" fn from_reader(r: &mut BytesReader, bytes: &'a [u8]) -> Result<Self> {{"
)?;
} else {
writeln!(w, "impl<'a> MessageRead<'a> for {} {{", self.name)?;
writeln!(
w,
" fn from_reader(r: &mut BytesReader, bytes: &'a [u8]) -> Result<Self> {{"
)?;
}
let unregular_defaults = self
.fields
.iter()
.filter(|f| !f.has_regular_default(desc))
.collect::<Vec<_>>();
if unregular_defaults.is_empty() {
writeln!(w, " let mut msg = Self::default();")?;
} else {
writeln!(w, " let mut msg = {} {{", self.name)?;
for f in unregular_defaults {
writeln!(
w,
" {}: {},",
f.name,
f.default.as_ref().unwrap()
)?;
}
writeln!(w, " ..Self::default()")?;
writeln!(w, " }};")?;
}
writeln!(w, " while !r.is_eof() {{")?;
writeln!(w, " match r.next_tag(bytes) {{")?;
for f in &self.fields {
f.write_match_tag(w, desc, config)?;
}
for o in &self.oneofs {
o.write_match_tag(w, desc, config)?;
}
writeln!(
w,
" Ok(t) => {{ r.read_unknown(bytes, t)?; }}"
)?;
writeln!(w, " Err(e) => return Err(e),")?;
writeln!(w, " }}")?;
writeln!(w, " }}")?;
writeln!(w, " Ok(msg)")?;
writeln!(w, " }}")?;
writeln!(w, "}}")?;
// TODO: write impl default when special default?
// alternatively set the default value directly when reading
Ok(())
}
fn write_impl_message_write<W: Write>(
&self,
w: &mut W,
desc: &FileDescriptor,
config: &Config,
) -> Result<()> {
if self.is_unit() {
writeln!(w, "impl MessageWrite for {} {{ }}", self.name)?;
return Ok(());
}
let mut ignore = Vec::new();
if config.dont_use_cow {
ignore.push(self.index.clone());
}
if self.has_lifetime(desc, &mut ignore) {
writeln!(w, "impl<'a> MessageWrite for {}<'a> {{", self.name)?;
} else {
writeln!(w, "impl MessageWrite for {} {{", self.name)?;
}
self.write_get_size(w, desc, config)?;
writeln!(w)?;
self.write_write_message(w, desc, config)?;
writeln!(w, "}}")?;
Ok(())
}
fn write_impl_owned<W: Write>(&self, w: &mut W, config: &Config) -> Result<()> {
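        // Emits the generated "<Name>Owned" wrapper: a pinned, self-referential pair of
        // the raw buffer and a proto borrowing from it (lifetime transmuted to 'static),
        // so the parsed message can travel together with its backing bytes.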
write!(
w,
r#"
#[derive(Debug)]
struct {name}OwnedInner {{
buf: Vec<u8>,
proto: {name}<'static>,
_pin: core::marker::PhantomPinned,
}}
impl {name}OwnedInner {{
fn new(buf: Vec<u8>) -> Result<core::pin::Pin<Box<Self>>> {{
let inner = Self {{
buf,
proto: unsafe {{ core::mem::MaybeUninit::zeroed().assume_init() }},
_pin: core::marker::PhantomPinned,
}};
let mut pinned = Box::pin(inner);
let mut reader = BytesReader::from_bytes(&pinned.buf);
let proto = {name}::from_reader(&mut reader, &pinned.buf)?;
unsafe {{
let proto = core::mem::transmute::<_, {name}<'static>>(proto);
pinned.as_mut().get_unchecked_mut().proto = proto;
}}
Ok(pinned)
}}
}}
pub struct {name}Owned {{
inner: core::pin::Pin<Box<{name}OwnedInner>>,
}}
#[allow(dead_code)]
impl {name}Owned {{
pub fn buf(&self) -> &[u8] {{
&self.inner.buf
}}
pub fn proto(&self) -> &{name} {{
&self.inner.proto
}}
}}
impl core::fmt::Debug for {name}Owned {{
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {{
self.inner.proto.fmt(f)
}}
}}
impl Deref for {name}Owned {{
type Target = {name}<'static>;
fn deref(&self) -> &Self::Target {{
&self.inner.proto
}}
}}
impl DerefMut for {name}Owned {{
fn deref_mut(&mut self) -> &mut Self::Target {{
unsafe {{ &mut self.inner.as_mut().get_unchecked_mut().proto }}
}}
}}
impl TryFrom<Vec<u8>> for {name}Owned {{
type Error=quick_protobuf::Error;
fn try_from(buf: Vec<u8>) -> Result<Self> {{
Ok(Self {{ inner: {name}OwnedInner::new(buf)? }})
}}
}}
impl TryInto<Vec<u8>> for {name}Owned {{
type Error=quick_protobuf::Error;
fn try_into(self) -> Result<Vec<u8>> {{
let mut buf = Vec::new();
let mut writer = Writer::new(&mut buf);
self.deref().write_message(&mut writer)?;
Ok(buf)
}}
}}
#[cfg(feature = "test_helpers")]
impl<'a> From<{name}<'a>> for {name}Owned {{
fn from(proto: {name}) -> Self {{
use quick_protobuf::{{MessageWrite, Writer}};
let mut buf = Vec::new();
let mut writer = Writer::new(&mut buf);
proto.write_message(&mut writer).expect("bad proto serialization");
Self {{ inner: {name}OwnedInner::new(buf).unwrap() }}
}}
}}
"#,
name = self.name
)?;
if config.gen_info {
write!(w, r#"
impl MessageInfo for {name}Owned {{
const PATH: &'static str = "{module}.{name}";
}}
"#, name = self.name, module = self.module)?;
}
Ok(())
}
fn write_get_size<W: Write>(&self, w: &mut W, desc: &FileDescriptor, config: &Config) -> Result<()> {
writeln!(w, " fn get_size(&self) -> usize {{")?;
writeln!(w, " 0")?;
for f in &self.fields {
f.write_get_size(w, desc, config)?;
}
for o in self.oneofs.iter() {
o.write_get_size(w, desc, config)?;
}
writeln!(w, " }}")?;
Ok(())
}
fn write_write_message<W: Write>(&self, w: &mut W, desc: &FileDescriptor, config: &Config) -> Result<()> {
writeln!(
w,
" fn write_message<W: WriterBackend>(&self, w: &mut Writer<W>) -> Result<()> {{"
)?;
for f in &self.fields {
f.write_write(w, desc, config)?;
}
for o in &self.oneofs {
o.write_write(w, desc, config)?;
}
writeln!(w, " Ok(())")?;
writeln!(w, " }}")?;
Ok(())
}
fn sanity_checks(&self, desc: &FileDescriptor) -> Result<()> {
for f in self.all_fields() {
// check reserved
if self
.reserved_names
.as_ref()
.map_or(false, |names| names.contains(&f.name))
|| self
.reserved_nums
.as_ref()
.map_or(false, |nums| nums.contains(&f.number))
{
return Err(Error::InvalidMessage(format!(
"Error in message {}\n\
Field {:?} conflict with reserved fields",
self.name, f
)));
}
// check default enums
if let Some(var) = f.default.as_ref() {
if let FieldType::Enum(ref e) = f.typ {
let e = e.get_enum(desc);
e.fields.iter().find(|&(ref name, _)| name == var)
.ok_or_else(|| Error::InvalidDefaultEnum(format!(
"Error in message {}\n\
Enum field {:?} has a default value '{}' which is not valid for enum index {:?}",
self.name, f, var, e)))?;
}
}
}
Ok(())
}
fn set_package(&mut self, package: &str, module: &str) {
// The complication here is that the _package_ (as declared in the proto file) does
// not directly map to the _module_. For example, the package 'a.A' where A is a
// message will be the module 'a.mod_A', since we can't reuse the message name A as
// the submodule containing nested items. Also, protos with empty packages always
// have a module corresponding to the file name.
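        // For example (illustrative): with `package a;` and `message A { message B {} }`,
        // the nested message `B` has proto path `a.A.B` but is emitted into the Rust
        // module `a::mod_A`, so generated code refers to it as `a::mod_A::B`.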
let (child_package, child_module) = if package.is_empty() && module.is_empty() {
(self.name.clone(), format!("mod_{}", self.name))
} else if package.is_empty() {
self.module = module.to_string();
(self.name.clone(), format!("{}.mod_{}", module, self.name))
} else {
self.package = package.to_string();
self.module = module.to_string();
(
format!("{}.{}", package, self.name),
format!("{}.mod_{}", module, self.name),
)
};
for m in &mut self.messages {
m.set_package(&child_package, &child_module);
}
for m in &mut self.enums {
m.set_package(&child_package, &child_module);
}
for m in &mut self.oneofs {
m.set_package(&child_package, &child_module);
}
}
fn set_map_required(&mut self) {
for f in self.all_fields_mut() {
if let FieldType::Map(_, _) = f.typ {
f.frequency = Frequency::Required;
}
}
for m in &mut self.messages {
m.set_map_required();
}
}
fn set_repeated_as_packed(&mut self) {
for f in self.all_fields_mut() {
if f.packed.is_none() {
if let Frequency::Repeated = f.frequency {
f.packed = Some(true);
}
}
}
}
fn unset_packed_non_primitives(&mut self) {
for f in self.all_fields_mut() {
if !f.typ.is_primitive() && f.packed.is_some() {
f.packed = None;
}
}
}
fn sanitize_defaults(&mut self, desc: &FileDescriptor) -> Result<()> {
for f in self.all_fields_mut() {
f.sanitize_default(desc)?;
}
for m in &mut self.messages {
m.sanitize_defaults(desc)?;
}
Ok(())
}
fn sanitize_names(&mut self) {
sanitize_keyword(&mut self.name);
sanitize_keyword(&mut self.package);
for f in self.fields.iter_mut() {
sanitize_keyword(&mut f.name);
}
for m in &mut self.messages {
m.sanitize_names();
}
for e in &mut self.enums {
e.sanitize_names();
}
for o in &mut self.oneofs {
o.sanitize_names();
}
}
/// Return an iterator producing references to all the `Field`s of `self`,
/// including both direct and `oneof` fields.
pub fn all_fields(&self) -> impl Iterator<Item = &Field> {
self.fields
.iter()
.chain(self.oneofs.iter().flat_map(|o| o.fields.iter()))
}
/// Return an iterator producing mutable references to all the `Field`s of
/// `self`, including both direct and `oneof` fields.
fn all_fields_mut(&mut self) -> impl Iterator<Item = &mut Field> {
self.fields
.iter_mut()
.chain(self.oneofs.iter_mut().flat_map(|o| o.fields.iter_mut()))
}
}
#[derive(Debug, Clone, Default)]
pub struct RpcFunctionDeclaration {
pub name: String,
pub arg: String,
pub ret: String,
}
#[derive(Debug, Clone, Default)]
pub struct RpcService {
pub service_name: String,
pub functions: Vec<RpcFunctionDeclaration>,
}
impl RpcService {
fn write_definition<W: Write>(&self, w: &mut W, config: &Config) -> Result<()> {
(config.custom_rpc_generator)(self, w)
}
}
pub type RpcGeneratorFunction = Box<dyn Fn(&RpcService, &mut dyn Write) -> Result<()>>;
#[derive(Debug, Clone, Default)]
pub struct Enumerator {
pub name: String,
pub fields: Vec<(String, i32)>,
pub fully_qualified_fields: Vec<(String, i32)>,
pub partially_qualified_fields: Vec<(String, i32)>,
pub imported: bool,
pub package: String,
pub module: String,
pub path: PathBuf,
pub import: PathBuf,
pub index: EnumIndex,
}
impl Enumerator {
fn set_package(&mut self, package: &str, module: &str) {
self.package = package.to_string();
self.module = module.to_string();
self.partially_qualified_fields = self
.fields
.iter()
.map(|f| (format!("{}::{}", &self.name, f.0), f.1))
.collect();
self.fully_qualified_fields = self
.partially_qualified_fields
.iter()
.map(|pqf| {
let fqf = if self.module.is_empty() {
pqf.0.clone()
} else {
format!("{}::{}", self.module.replace(".", "::"), pqf.0)
};
(fqf, pqf.1)
})
.collect();
}
fn sanitize_names(&mut self) {
sanitize_keyword(&mut self.name);
sanitize_keyword(&mut self.package);
for f in self.fields.iter_mut() {
sanitize_keyword(&mut f.0);
}
}
fn get_modules(&self, desc: &FileDescriptor) -> String {
get_modules(&self.module, self.imported, desc)
}
fn write<W: Write>(&self, w: &mut W) -> Result<()> {
println!("Writing enum {}", self.name);
writeln!(w)?;
self.write_definition(w)?;
writeln!(w)?;
if self.fields.is_empty() {
Ok(())
} else {
self.write_impl_default(w)?;
writeln!(w)?;
self.write_from_i32(w)?;
writeln!(w)?;
self.write_from_str(w)
}
}
fn write_definition<W: Write>(&self, w: &mut W) -> Result<()> {
writeln!(w, "#[derive(Debug, PartialEq, Eq, Clone, Copy)]")?;
writeln!(w, "pub enum {} {{", self.name)?;
for &(ref f, ref number) in &self.fields {
writeln!(w, " {} = {},", f, number)?;
}
writeln!(w, "}}")?;
Ok(())
}
fn write_impl_default<W: Write>(&self, w: &mut W) -> Result<()> {
writeln!(w, "impl Default for {} {{", self.name)?;
writeln!(w, " fn default() -> Self {{")?;
// TODO: check with default field and return error if there is no field
writeln!(w, " {}", self.partially_qualified_fields[0].0)?;
writeln!(w, " }}")?;
writeln!(w, "}}")?;
Ok(())
}
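    // Illustrative sketch of the output written below, for a hypothetical
    // `enum Mode { A = 0, B = 1 }`:
    //
    //     impl From<i32> for Mode {
    //         fn from(i: i32) -> Self {
    //             match i {
    //                 0 => Mode::A,
    //                 1 => Mode::B,
    //                 _ => Self::default(),
    //             }
    //         }
    //     }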
fn write_from_i32<W: Write>(&self, w: &mut W) -> Result<()> {
writeln!(w, "impl From<i32> for {} {{", self.name)?;
writeln!(w, " fn from(i: i32) -> Self {{")?;
writeln!(w, " match i {{")?;
for &(ref f, ref number) in &self.fields {
writeln!(w, " {} => {}::{},", number, self.name, f)?;
}
writeln!(w, " _ => Self::default(),")?;
writeln!(w, " }}")?;
writeln!(w, " }}")?;
writeln!(w, "}}")?;
Ok(())
}
fn write_from_str<W: Write>(&self, w: &mut W) -> Result<()> {
writeln!(w, "impl<'a> From<&'a str> for {} {{", self.name)?;
writeln!(w, " fn from(s: &'a str) -> Self {{")?;
writeln!(w, " match s {{")?;
for &(ref f, _) in &self.fields {
writeln!(w, " {:?} => {}::{},", f, self.name, f)?;
}
writeln!(w, " _ => Self::default(),")?;
writeln!(w, " }}")?;
writeln!(w, " }}")?;
writeln!(w, "}}")?;
Ok(())
}
}
#[derive(Debug, Clone, Default)]
pub struct OneOf {
pub name: String,
pub fields: Vec<Field>,
pub package: String,
pub module: String,
pub imported: bool,
}
impl OneOf {
fn has_lifetime(&self, desc: &FileDescriptor) -> bool {
self.fields
.iter()
.any(|f| !f.deprecated && f.typ.has_lifetime(desc, f.packed(), &mut Vec::new()))
}
fn set_package(&mut self, package: &str, module: &str) {
self.package = package.to_string();
self.module = module.to_string();
}
fn sanitize_names(&mut self) {
sanitize_keyword(&mut self.name);
sanitize_keyword(&mut self.package);
for f in self.fields.iter_mut() {
sanitize_keyword(&mut f.name);
}
}
fn get_modules(&self, desc: &FileDescriptor) -> String {
get_modules(&self.module, self.imported, desc)
}
fn write<W: Write>(&self, w: &mut W, desc: &FileDescriptor, config: &Config) -> Result<()> {
writeln!(w)?;
self.write_definition(w, desc, config)?;
writeln!(w)?;
self.write_impl_default(w, desc)?;
Ok(())
}
fn write_definition<W: Write>(&self, w: &mut W, desc: &FileDescriptor, config: &Config) -> Result<()> {
writeln!(w, "#[derive(Debug, PartialEq, Clone)]")?;
if self.has_lifetime(desc) {
writeln!(w, "pub enum OneOf{}<'a> {{", self.name)?;
} else {
writeln!(w, "pub enum OneOf{} {{", self.name)?;
}
for f in &self.fields {
if f.deprecated {
if config.add_deprecated_fields {
writeln!(w, " #[deprecated]")?;
} else {
continue;
}
}
let rust_type = f.typ.rust_type(desc)?;
if f.boxed {
writeln!(w, " {}(Box<{}>),", f.name, rust_type)?;
} else {
writeln!(w, " {}({}),", f.name, rust_type)?;
}
}
writeln!(w, " None,")?;
writeln!(w, "}}")?;
if cfg!(feature = "generateImplFromForEnums") {
self.generate_impl_from_for_enums(w, desc, config)
} else {
Ok(())
}
}
fn generate_impl_from_for_enums<W: Write>(
&self,
w: &mut W,
desc: &FileDescriptor,
config: &Config,
) -> Result<()> {
// For the first of each enumeration type, generate an impl From<> for it.
let mut handled_fields = Vec::new();
for f in self.fields.iter().filter(|f| !f.deprecated || config.add_deprecated_fields) {
let rust_type = f.typ.rust_type(desc)?;
if handled_fields.contains(&rust_type) {
continue;
}
writeln!(w, "impl From<{}> for OneOf{} {{", rust_type, self.name)?; // TODO: lifetime.
writeln!(w, " fn from(f: {}) -> OneOf{} {{", rust_type, self.name)?;
writeln!(w, " OneOf{}::{}(f)", self.name, f.name)?;
writeln!(w, " }}")?;
writeln!(w, "}}")?;
handled_fields.push(rust_type);
}
Ok(())
}
fn write_impl_default<W: Write>(&self, w: &mut W, desc: &FileDescriptor) -> Result<()> {
if self.has_lifetime(desc) {
writeln!(w, "impl<'a> Default for OneOf{}<'a> {{", self.name)?;
} else {
writeln!(w, "impl Default for OneOf{} {{", self.name)?;
}
writeln!(w, " fn default() -> Self {{")?;
writeln!(w, " OneOf{}::None", self.name)?;
writeln!(w, " }}")?;
writeln!(w, "}}")?;
Ok(())
}
fn write_message_definition<W: Write>(&self, w: &mut W, desc: &FileDescriptor) -> Result<()> {
if self.has_lifetime(desc) {
writeln!(
w,
" pub {}: {}OneOf{}<'a>,",
self.name,
self.get_modules(desc),
self.name
)?;
} else {
writeln!(
w,
" pub {}: {}OneOf{},",
self.name,
self.get_modules(desc),
self.name
)?;
}
Ok(())
}
fn write_match_tag<W: Write>(&self, w: &mut W, desc: &FileDescriptor, config: &Config) -> Result<()> {
for f in self.fields.iter().filter(|f| !f.deprecated || config.add_deprecated_fields) {
let (val, val_cow) = f.typ.read_fn(desc)?;
if f.boxed {
writeln!(
w,
" Ok({}) => msg.{} = {}OneOf{}::{}(Box::new({})),",
f.tag(),
self.name,
self.get_modules(desc),
self.name,
f.name,
val
)?;
} else {
writeln!(
w,
" Ok({}) => msg.{} = {}OneOf{}::{}({}),",
f.tag(),
self.name,
self.get_modules(desc),
self.name,
f.name,
val_cow
)?;
}
}
Ok(())
}
fn write_get_size<W: Write>(&self, w: &mut W, desc: &FileDescriptor, config: &Config) -> Result<()> {
writeln!(w, " + match self.{} {{", self.name)?;
for f in self.fields.iter().filter(|f| !f.deprecated || config.add_deprecated_fields) {
let tag_size = sizeof_varint(f.tag());
if f.typ.is_fixed_size() {
writeln!(
w,
" {}OneOf{}::{}(_) => {} + {},",
self.get_modules(desc),
self.name,
f.name,
tag_size,
f.typ.get_size("")
)?;
} else {
writeln!(
w,
" {}OneOf{}::{}(ref m) => {} + {},",
self.get_modules(desc),
self.name,
f.name,
tag_size,
f.typ.get_size("m")
)?;
}
}
writeln!(
w,
" {}OneOf{}::None => 0,",
self.get_modules(desc),
self.name
)?;
write!(w, " }}")?;
Ok(())
}
fn write_write<W: Write>(&self, w: &mut W, desc: &FileDescriptor, config: &Config) -> Result<()> {
write!(w, " match self.{} {{", self.name)?;
for f in self.fields.iter().filter(|f| !f.deprecated || config.add_deprecated_fields) {
writeln!(
w,
" {}OneOf{}::{}(ref m) => {{ w.write_with_tag({}, |w| w.{})? }},",
self.get_modules(desc),
self.name,
f.name,
f.tag(),
f.typ.get_write("m", f.boxed)
)?;
}
writeln!(
w,
" {}OneOf{}::None => {{}},",
self.get_modules(desc),
self.name
)?;
write!(w, " }}")?;
Ok(())
}
}
pub struct Config {
pub in_file: PathBuf,
pub out_file: PathBuf,
pub single_module: bool,
pub import_search_path: Vec<PathBuf>,
pub no_output: bool,
pub error_cycle: bool,
pub headers: bool,
pub dont_use_cow: bool,
pub custom_struct_derive: Vec<String>,
pub custom_repr: Option<String>,
pub custom_rpc_generator: RpcGeneratorFunction,
pub custom_includes: Vec<String>,
pub owned: bool,
pub nostd: bool,
pub hashbrown: bool,
pub gen_info: bool,
pub add_deprecated_fields: bool,
}
#[derive(Debug, Default, Clone)]
pub struct FileDescriptor {
pub import_paths: Vec<PathBuf>,
pub package: String,
pub syntax: Syntax,
pub messages: Vec<Message>,
pub enums: Vec<Enumerator>,
pub module: String,
pub rpc_services: Vec<RpcService>,
pub owned: bool,
}
impl FileDescriptor {
pub fn run(configs: &[Config]) -> Result<()> {
for config in configs {
Self::write_proto(&config)?
}
Ok(())
}
pub fn write_proto(config: &Config) -> Result<()> {
let mut desc = FileDescriptor::read_proto(&config.in_file, &config.import_search_path)?;
desc.owned = config.owned;
if desc.messages.is_empty() && desc.enums.is_empty() {
// There could had been unsupported structures, so bail early
return Err(Error::EmptyRead);
}
desc.resolve_types()?;
desc.break_cycles(config.error_cycle)?;
desc.sanity_checks()?;
if config.dont_use_cow {
desc.convert_field_types(&FieldType::StringCow, &FieldType::String_);
desc.convert_field_types(&FieldType::BytesCow, &FieldType::Bytes_);
}
desc.set_defaults()?;
desc.sanitize_names();
if config.single_module {
desc.package = "".to_string();
}
let (prefix, file_package) = split_package(&desc.package);
let mut file_stem = if file_package.is_empty() {
get_file_stem(&config.out_file)?
} else {
file_package.to_string()
};
if !file_package.is_empty() {
sanitize_keyword(&mut file_stem);
}
let mut out_file = config.out_file.with_file_name(format!("{}.rs", file_stem));
if !prefix.is_empty() {
use std::fs::create_dir_all;
// e.g. package is a.b; we need to create directory 'a' and insert it into the path
let file = PathBuf::from(out_file.file_name().unwrap());
out_file.pop();
for p in prefix.split('.') {
out_file.push(p);
if !out_file.exists() {
create_dir_all(&out_file)?;
update_mod_file(&out_file)?;
}
}
out_file.push(file);
}
if config.no_output {
let imported = |b| if b { " imported" } else { "" };
println!("source will be written to {}\n", out_file.display());
for m in &desc.messages {
println!(
"message {} module {}{}",
m.name,
m.module,
imported(m.imported)
);
}
for e in &desc.enums {
println!(
"enum {} module {}{}",
e.name,
e.module,
imported(e.imported)
);
}
return Ok(());
}
let name = config.in_file.file_name().and_then(|e| e.to_str()).unwrap();
let mut w = BufWriter::new(File::create(&out_file)?);
desc.write(&mut w, name, config)?;
update_mod_file(&out_file)
}
pub fn convert_field_types(&mut self, from: &FieldType, to: &FieldType) {
// Messages and enums are the only structures with types
for m in &mut self.messages {
m.convert_field_types(from, to);
}
}
/// Opens a proto file, reads it and returns raw parsed data
pub fn read_proto(in_file: &Path, import_search_path: &[PathBuf]) -> Result<FileDescriptor> {
let file = std::fs::read_to_string(in_file)?;
let (_, mut desc) = file_descriptor(&file).map_err(|e| Error::Nom(e))?;
for mut m in &mut desc.messages {
if m.path.as_os_str().is_empty() {
m.path = in_file.to_path_buf();
if !import_search_path.is_empty() {
if let Ok(p) = m.path.clone().strip_prefix(&import_search_path[0]) {
m.import = p.to_path_buf();
}
}
}
}
// proto files with no packages are given an implicit module,
// since every generated Rust source file represents a module
desc.module = if desc.package.is_empty() {
get_file_stem(in_file)?
} else {
desc.package.clone()
};
desc.fetch_imports(&in_file, import_search_path)?;
Ok(desc)
}
fn sanity_checks(&self) -> Result<()> {
for m in &self.messages {
m.sanity_checks(&self)?;
}
Ok(())
}
/// Get messages and enums from imports
fn fetch_imports(&mut self, in_file: &Path, import_search_path: &[PathBuf]) -> Result<()> {
for m in &mut self.messages {
m.set_package("", &self.module);
}
for m in &mut self.enums {
m.set_package("", &self.module);
}
for import in &self.import_paths {
// this is the same logic as the C preprocessor;
// if the include path item is absolute, then append the filename,
// otherwise it is always relative to the file.
let mut matching_file = None;
for path in import_search_path {
let candidate = if path.is_absolute() {
path.join(&import)
} else {
in_file
.parent()
.map_or_else(|| path.join(&import), |p| p.join(path).join(&import))
};
if candidate.exists() {
matching_file = Some(candidate);
break;
}
}
if matching_file.is_none() {
return Err(Error::InvalidImport(format!(
"file {} not found on import path",
import.display()
)));
}
let proto_file = matching_file.unwrap();
let mut f = FileDescriptor::read_proto(&proto_file, import_search_path)?;
            // if the proto has a package then the names will be prefixed
let package = f.package.clone();
let module = f.module.clone();
self.messages.extend(f.messages.drain(..).map(|mut m| {
if m.package.is_empty() {
m.set_package(&package, &module);
}
if m.path.as_os_str().is_empty() {
m.path = proto_file.clone();
}
if m.import.as_os_str().is_empty() {
m.import = import.clone();
}
m.set_imported();
m
}));
self.enums.extend(f.enums.drain(..).map(|mut e| {
if e.package.is_empty() {
e.set_package(&package, &module);
}
if e.path.as_os_str().is_empty() {
e.path = proto_file.clone();
}
if e.import.as_os_str().is_empty() {
e.import = import.clone();
}
e.imported = true;
e
}));
}
Ok(())
}
fn set_defaults(&mut self) -> Result<()> {
// set map fields as required (they are equivalent to repeated message)
for m in &mut self.messages {
m.set_map_required();
}
// if proto3, then changes several defaults
if let Syntax::Proto3 = self.syntax {
for m in &mut self.messages {
m.set_repeated_as_packed();
}
}
// this is very inefficient but we don't care ...
//let msgs = self.messages.clone();
let copy = self.clone();
for m in &mut self.messages {
            m.sanitize_defaults(&copy)?; //&msgs, &self.enums)?; ???
}
// force packed only on primitives
for m in &mut self.messages {
m.unset_packed_non_primitives();
}
Ok(())
}
fn sanitize_names(&mut self) {
for m in &mut self.messages {
m.sanitize_names();
}
for e in &mut self.enums {
e.sanitize_names();
}
}
/// Breaks cycles by adding boxes when necessary
fn break_cycles(&mut self, error_cycle: bool) -> Result<()> {
// get strongly connected components
let sccs = self.sccs();
fn is_cycle(scc: &[MessageIndex], desc: &FileDescriptor) -> bool {
scc.iter()
.map(|m| m.get_message(desc))
.flat_map(|m| m.all_fields())
.filter(|f| !f.boxed)
.filter_map(|f| f.typ.message())
.any(|m| scc.contains(m))
}
        // sccs are sub DFS trees so if there is an edge connecting a node to
// another node higher in the scc list, then this is a cycle. (Note that
// we may have several cycles per scc).
//
// Technically we only need to box one edge (optional field) per cycle to
// have Sized structs. Unfortunately, scc root depend on the order we
// traverse the graph so such a field is not guaranteed to always be the same.
//
// For now, we decide (see discussion in #121) to box all optional fields
// within a scc. We favor generated code stability over performance.
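        //
        // For example (illustrative): `message Node { optional Node next = 1; }`
        // forms a one-message cycle, so `next` is generated as `Option<Box<Node>>`,
        // keeping the struct `Sized`. A cycle made only of `required` fields is
        // reported as an error when `error_cycle` is set, and otherwise its fields
        // are boxed and downgraded to optional with a warning.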
for scc in &sccs {
debug!("scc: {:?}", scc);
for (i, v) in scc.iter().enumerate() {
// cycles with v as root
let cycles = v
.get_message(self)
.all_fields()
.filter_map(|f| f.typ.message())
.filter_map(|m| scc[i..].iter().position(|n| n == m))
.collect::<Vec<_>>();
for cycle in cycles {
let cycle = &scc[i..i + cycle + 1];
debug!("cycle: {:?}", &cycle);
for v in cycle {
for f in v
.get_message_mut(self)
.all_fields_mut()
.filter(|f| f.frequency == Frequency::Optional)
.filter(|f| f.typ.message().map_or(false, |m| cycle.contains(m)))
{
f.boxed = true;
}
}
if is_cycle(cycle, self) {
if error_cycle {
return Err(Error::Cycle(
cycle
.iter()
.map(|m| m.get_message(self).name.clone())
.collect(),
));
} else {
for v in cycle {
warn!(
"Unsound proto file would result in infinite size Messages.\n\
Cycle detected in messages {:?}.\n\
Modifying required fields into optional fields",
cycle
.iter()
.map(|m| &m.get_message(self).name)
.collect::<Vec<_>>()
);
for f in v
.get_message_mut(self)
.all_fields_mut()
.filter(|f| f.frequency == Frequency::Required)
.filter(|f| {
f.typ.message().map_or(false, |m| cycle.contains(m))
})
{
f.boxed = true;
f.frequency = Frequency::Optional;
}
}
}
}
}
}
}
Ok(())
}
fn get_full_names(&mut self) -> (HashMap<String, MessageIndex>, HashMap<String, EnumIndex>) {
fn rec_full_names(
m: &mut Message,
index: &mut MessageIndex,
full_msgs: &mut HashMap<String, MessageIndex>,
full_enums: &mut HashMap<String, EnumIndex>,
) {
m.index = index.clone();
if m.package.is_empty() {
full_msgs.insert(m.name.clone(), index.clone());
} else {
full_msgs.insert(format!("{}.{}", m.package, m.name), index.clone());
}
for (i, e) in m.enums.iter_mut().enumerate() {
let index = EnumIndex {
msg_index: index.clone(),
index: i,
};
e.index = index.clone();
full_enums.insert(format!("{}.{}", e.package, e.name), index);
}
for (i, m) in m.messages.iter_mut().enumerate() {
index.push(i);
rec_full_names(m, index, full_msgs, full_enums);
index.pop();
}
}
let mut full_msgs = HashMap::new();
let mut full_enums = HashMap::new();
let mut index = MessageIndex { indexes: vec![] };
for (i, m) in self.messages.iter_mut().enumerate() {
index.push(i);
rec_full_names(m, &mut index, &mut full_msgs, &mut full_enums);
index.pop();
}
for (i, e) in self.enums.iter_mut().enumerate() {
let index = EnumIndex {
msg_index: index.clone(),
index: i,
};
e.index = index.clone();
if e.package.is_empty() {
full_enums.insert(e.name.clone(), index.clone());
} else {
full_enums.insert(format!("{}.{}", e.package, e.name), index.clone());
}
}
(full_msgs, full_enums)
}
fn resolve_types(&mut self) -> Result<()> {
let (full_msgs, full_enums) = self.get_full_names();
fn rec_resolve_types(
m: &mut Message,
full_msgs: &HashMap<String, MessageIndex>,
full_enums: &HashMap<String, EnumIndex>,
) -> Result<()> {
// Interestingly, we can't call all_fields_mut to iterate over the
// fields here: writing out the field traversal as below lets Rust
// split m's mutable borrow, permitting the loop body to use fields
// of `m` other than `fields` and `oneofs`.
'types: for typ in m
.fields
.iter_mut()
.chain(m.oneofs.iter_mut().flat_map(|o| o.fields.iter_mut()))
.map(|f| &mut f.typ)
.flat_map(|typ| match *typ {
FieldType::Map(ref mut key, ref mut value) => {
vec![&mut **key, &mut **value].into_iter()
}
_ => vec![typ].into_iter(),
})
{
if let FieldType::MessageOrEnum(name) = typ.clone() {
let test_names: Vec<String> = if name.starts_with('.') {
vec![name.clone().split_off(1)]
} else if m.package.is_empty() {
vec![name.clone(), format!("{}.{}", m.name, name)]
} else {
vec![
name.clone(),
format!("{}.{}", m.package, name),
format!("{}.{}.{}", m.package, m.name, name),
]
};
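                    // For example (illustrative): a field of type `Bar` declared in
                    // message `Foo` of package `a` is resolved by trying `Bar`,
                    // then `a.Bar`, then `a.Foo.Bar`, while a leading dot
                    // (`.a.Bar`) forces a single fully-qualified lookup.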
for name in &test_names {
if let Some(msg) = full_msgs.get(&*name) {
*typ = FieldType::Message(msg.clone());
continue 'types;
} else if let Some(e) = full_enums.get(&*name) {
*typ = FieldType::Enum(e.clone());
continue 'types;
}
}
return Err(Error::MessageOrEnumNotFound(name));
}
}
for m in m.messages.iter_mut() {
rec_resolve_types(m, full_msgs, full_enums)?;
}
Ok(())
}
for m in self.messages.iter_mut() {
rec_resolve_types(m, &full_msgs, &full_enums)?;
}
Ok(())
}
fn write<W: Write>(&self, w: &mut W, filename: &str, config: &Config) -> Result<()> {
println!(
"Found {} messages, and {} enums",
self.messages.len(),
self.enums.len()
);
if config.headers {
self.write_headers(w, filename, config)?;
}
self.write_package_start(w)?;
self.write_uses(w, config)?;
self.write_imports(w)?;
self.write_enums(w)?;
self.write_messages(w, config)?;
self.write_rpc_services(w, config)?;
self.write_package_end(w)?;
Ok(())
}
fn write_headers<W: Write>(&self, w: &mut W, filename: &str, config: &Config) -> Result<()> {
writeln!(
w,
"// Automatically generated rust module for '{}' file",
filename
)?;
writeln!(w)?;
writeln!(w, "#![allow(non_snake_case)]")?;
writeln!(w, "#![allow(non_upper_case_globals)]")?;
writeln!(w, "#![allow(non_camel_case_types)]")?;
writeln!(w, "#![allow(unused_imports)]")?;
writeln!(w, "#![allow(unknown_lints)]")?;
writeln!(w, "#![allow(clippy::all)]")?;
if config.add_deprecated_fields {
writeln!(w, "#![allow(deprecated)]")?;
}
writeln!(w, "#![cfg_attr(rustfmt, rustfmt_skip)]")?;
writeln!(w)?;
Ok(())
}
fn write_package_start<W: Write>(&self, w: &mut W) -> Result<()> {
writeln!(w)?;
Ok(())
}
fn write_uses<W: Write>(&self, w: &mut W, config: &Config) -> Result<()> {
if self.messages.iter().all(|m| m.is_unit()) {
writeln!(
w,
"use quick_protobuf::{{BytesReader, Result, MessageInfo, MessageRead, MessageWrite}};"
)?;
return Ok(());
}
Message::write_common_uses(w, &self.messages, self, config)?;
writeln!(
w,
"use quick_protobuf::{{MessageInfo, MessageRead, MessageWrite, BytesReader, Writer, WriterBackend, Result}};"
)?;
if self.owned {
write!(
w,
"use core::{{convert::{{TryFrom, TryInto}}, ops::{{Deref, DerefMut}}}};"
)?;
}
writeln!(w, "use quick_protobuf::sizeofs::*;")?;
for include in &config.custom_includes {
writeln!(w, "{}", include)?;
}
Ok(())
}
fn write_imports<W: Write>(&self, w: &mut W) -> Result<()> {
// even if we don't have an explicit package, there is an implicit Rust module
// This `use` allows us to refer to the package root.
// NOTE! I'm suppressing not-needed 'use super::*' errors currently!
let mut depth = self.package.split('.').count();
if depth == 0 {
depth = 1;
}
write!(w, "use ")?;
for _ in 0..depth {
write!(w, "super::")?;
}
writeln!(w, "*;")?;
Ok(())
}
fn write_package_end<W: Write>(&self, w: &mut W) -> Result<()> {
writeln!(w)?;
Ok(())
}
fn write_enums<W: Write>(&self, w: &mut W) -> Result<()> {
for m in self.enums.iter().filter(|e| !e.imported) {
println!("Writing enum {}", m.name);
writeln!(w)?;
m.write_definition(w)?;
writeln!(w)?;
m.write_impl_default(w)?;
writeln!(w)?;
m.write_from_i32(w)?;
writeln!(w)?;
m.write_from_str(w)?;
}
Ok(())
}
fn write_rpc_services<W: Write>(&self, w: &mut W, config: &Config) -> Result<()> {
for m in self.rpc_services.iter() {
println!("Writing Rpc {}", m.service_name);
writeln!(w)?;
m.write_definition(w, config)?;
}
Ok(())
}
fn write_messages<W: Write>(&self, w: &mut W, config: &Config) -> Result<()> {
for m in self.messages.iter().filter(|m| !m.imported) {
m.write(w, &self, config)?;
}
Ok(())
}
}
/// Calculates the tag value
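/// as `(field_number << 3) | wire_type`. For example, field number 2 with the
/// length-delimited wire type (2) yields `2 << 3 | 2 = 18`.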
fn tag(number: u32, typ: &FieldType, packed: bool) -> u32 {
number << 3 | typ.wire_type_num(packed)
}
/// "" is ("",""), "a" is ("","a"), "a.b" is ("a"."b"), and so forth.
fn split_package(package: &str) -> (&str, &str) {
if package.is_empty() {
("", "")
} else if let Some(i) = package.rfind('.') {
(&package[0..i], &package[i + 1..])
} else {
("", package)
}
}
const MAGIC_HEADER: &str = "// Automatically generated mod.rs";
/// Given a file path, create or update the mod.rs file within its folder
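/// For example (illustrative): after `foo.rs` and `bar.rs` have been generated
/// into the same folder, the maintained `mod.rs` contains
///
///     // Automatically generated mod.rs
///     pub mod foo;
///     pub mod bar;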
fn update_mod_file(path: &Path) -> Result<()> {
let mut file = path.to_path_buf();
use std::fs::OpenOptions;
use std::io::prelude::*;
let name = file.file_stem().unwrap().to_string_lossy().to_string();
file.pop();
file.push("mod.rs");
let matches = "pub mod ";
let mut present = false;
let mut exists = false;
if let Ok(f) = File::open(&file) {
exists = true;
let mut first = true;
for line in BufReader::new(f).lines() {
let line = line?;
if first {
if line.find(MAGIC_HEADER).is_none() {
// it is NOT one of our generated mod.rs files, so don't modify it!
present = true;
break;
}
first = false;
}
if let Some(i) = line.find(matches) {
let rest = &line[i + matches.len()..line.len() - 1];
if rest == name {
// we already have a reference to this module...
present = true;
break;
}
}
}
}
if !present {
let mut f = if exists {
OpenOptions::new().append(true).open(&file)?
} else {
let mut f = File::create(&file)?;
writeln!(f, "{}", MAGIC_HEADER)?;
f
};
writeln!(f, "pub mod {};", name)?;
}
Ok(())
}
/// get the proper sanitized file stem from an input file path
fn get_file_stem(path: &Path) -> Result<String> {
let mut file_stem = path
.file_stem()
.and_then(|f| f.to_str())
.map(|s| s.to_string())
.ok_or_else(|| Error::OutputFile(format!("{}", path.display())))?;
file_stem = file_stem.replace(|c: char| !c.is_alphanumeric(), "_");
// will now be properly alphanumeric, but may be a keyword!
sanitize_keyword(&mut file_stem);
Ok(file_stem)
}<|fim▁end|> | }
|
<|file_name|>style_property_map.cc<|end_file_name|><|fim▁begin|>// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/core/css/cssom/style_property_map.h"
#include "third_party/blink/renderer/core/css/css_identifier_value.h"
#include "third_party/blink/renderer/core/css/css_property_name.h"
#include "third_party/blink/renderer/core/css/css_value_list.h"
#include "third_party/blink/renderer/core/css/css_value_pair.h"
#include "third_party/blink/renderer/core/css/cssom/css_style_value.h"
#include "third_party/blink/renderer/core/css/cssom/cssom_types.h"
#include "third_party/blink/renderer/core/css/cssom/style_value_factory.h"
#include "third_party/blink/renderer/core/css/parser/css_parser.h"
#include "third_party/blink/renderer/core/css/parser/css_parser_context.h"
#include "third_party/blink/renderer/core/css/parser/css_tokenizer.h"
#include "third_party/blink/renderer/core/css/properties/css_property.h"
#include "third_party/blink/renderer/core/style_property_shorthand.h"
#include "third_party/blink/renderer/platform/bindings/exception_state.h"
#include "third_party/blink/renderer/platform/heap/heap.h"
namespace blink {
namespace {
CSSValueList* CssValueListForPropertyID(CSSPropertyID property_id) {
DCHECK(CSSProperty::Get(property_id).IsRepeated());
char separator = CSSProperty::Get(property_id).RepetitionSeparator();
switch (separator) {
case ' ':
return CSSValueList::CreateSpaceSeparated();
case ',':
return CSSValueList::CreateCommaSeparated();
case '/':
return CSSValueList::CreateSlashSeparated();
default:
NOTREACHED();
return nullptr;
}
}
const CSSValue* StyleValueToCSSValue(
const CSSProperty& property,
const AtomicString& custom_property_name,
const CSSStyleValue& style_value,
const ExecutionContext& execution_context) {
DCHECK_EQ(property.IDEquals(CSSPropertyID::kVariable),
!custom_property_name.IsNull());
const CSSPropertyID property_id = property.PropertyID();
if (!CSSOMTypes::PropertyCanTake(property_id, custom_property_name,
style_value)) {
return nullptr;
}
if (style_value.GetType() == CSSStyleValue::kUnknownType) {
return CSSParser::ParseSingleValue(
property.PropertyID(), style_value.toString(),
MakeGarbageCollected<CSSParserContext>(execution_context));
}
// Handle properties that use ad-hoc structures for their CSSValues:
// TODO(https://crbug.com/545324): Move this into a method on
// CSSProperty when there are more of these cases.
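  // For example (illustrative): attributeStyleMap.set('text-indent', CSS.px(42))
  // reaches this function with a single CSSUnitValue, but text-indent is stored
  // internally as a list, so the kTextIndent case below wraps the value into a
  // one-element space-separated CSSValueList.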
switch (property_id) {
case CSSPropertyID::kBorderBottomLeftRadius:
case CSSPropertyID::kBorderBottomRightRadius:
case CSSPropertyID::kBorderTopLeftRadius:
case CSSPropertyID::kBorderTopRightRadius: {
      // level 1 only accepts single <length-percentages>, but border-radius-*
// expects pairs.
const auto* value = style_value.ToCSSValue();
if (value->IsPrimitiveValue()) {
return MakeGarbageCollected<CSSValuePair>(
value, value, CSSValuePair::kDropIdenticalValues);
}
break;
}
case CSSPropertyID::kContain: {
// level 1 only accepts single values, which are stored internally
// as a single element list.
const auto* value = style_value.ToCSSValue();
if ((value->IsIdentifierValue() && !value->IsCSSWideKeyword()) ||
value->IsPrimitiveValue()) {
CSSValueList* list = CSSValueList::CreateSpaceSeparated();
list->Append(*style_value.ToCSSValue());
return list;
}
break;
}
case CSSPropertyID::kFontVariantEastAsian:
case CSSPropertyID::kFontVariantLigatures:
case CSSPropertyID::kFontVariantNumeric: {
      // level 1 only accepts single keywords, but font-variant-* store
// them as a list
if (const auto* value =
DynamicTo<CSSIdentifierValue>(style_value.ToCSSValue())) {
// 'none' and 'normal' are stored as a single value
if (value->GetValueID() == CSSValueID::kNone ||
value->GetValueID() == CSSValueID::kNormal) {
break;
}
CSSValueList* list = CSSValueList::CreateSpaceSeparated();
list->Append(*style_value.ToCSSValue());
return list;
}
break;
}
case CSSPropertyID::kGridAutoFlow: {
// level 1 only accepts single keywords
const auto* value = style_value.ToCSSValue();
// single keywords are wrapped in a list.
if (value->IsIdentifierValue() && !value->IsCSSWideKeyword()) {
CSSValueList* list = CSSValueList::CreateSpaceSeparated();
list->Append(*style_value.ToCSSValue());
return list;
}
break;
}
case CSSPropertyID::kOffsetRotate: {
// level 1 only accepts single values, which are stored internally
// as a single element list.
const auto* value = style_value.ToCSSValue();
if ((value->IsIdentifierValue() && !value->IsCSSWideKeyword()) ||
value->IsPrimitiveValue()) {
CSSValueList* list = CSSValueList::CreateSpaceSeparated();
list->Append(*style_value.ToCSSValue());
return list;
}
break;
}
case CSSPropertyID::kPaintOrder: {
// level 1 only accepts single keywords
const auto* value = style_value.ToCSSValue();
// only 'normal' is stored as an identifier, the other keywords are
// wrapped in a list.
auto* identifier_value = DynamicTo<CSSIdentifierValue>(value);
if (identifier_value && !value->IsCSSWideKeyword() &&
identifier_value->GetValueID() != CSSValueID::kNormal) {
CSSValueList* list = CSSValueList::CreateSpaceSeparated();
list->Append(*style_value.ToCSSValue());
return list;
}
break;
}
case CSSPropertyID::kTextDecorationLine: {
// level 1 only accepts single keywords
const auto* value = style_value.ToCSSValue();
// only 'none' is stored as an identifier, the other keywords are
// wrapped in a list.
auto* identifier_value = DynamicTo<CSSIdentifierValue>(value);
if (identifier_value && !value->IsCSSWideKeyword() &&
identifier_value->GetValueID() != CSSValueID::kNone) {
CSSValueList* list = CSSValueList::CreateSpaceSeparated();
list->Append(*style_value.ToCSSValue());
return list;
}
break;
}
case CSSPropertyID::kTextIndent: {
// level 1 only accepts single values, which are stored internally
// as a single element list.
const auto* value = style_value.ToCSSValue();
if (value->IsPrimitiveValue()) {
CSSValueList* list = CSSValueList::CreateSpaceSeparated();
list->Append(*value);
return list;
}
break;
}
case CSSPropertyID::kTransitionProperty:
case CSSPropertyID::kTouchAction: {
// level 1 only accepts single keywords, which are stored internally
// as a single element list
const auto* value = style_value.ToCSSValue();
if (value->IsIdentifierValue() && !value->IsCSSWideKeyword()) {
CSSValueList* list = CSSValueList::CreateSpaceSeparated();
list->Append(*style_value.ToCSSValue());
return list;
}
break;
}<|fim▁hole|>
return style_value.ToCSSValueWithProperty(property_id);
}
const CSSValue* CoerceStyleValueOrString(
const CSSProperty& property,
const AtomicString& custom_property_name,
const CSSStyleValueOrString& value,
const ExecutionContext& execution_context) {
DCHECK(!property.IsRepeated());
DCHECK_EQ(property.IDEquals(CSSPropertyID::kVariable),
!custom_property_name.IsNull());
if (value.IsCSSStyleValue()) {
if (!value.GetAsCSSStyleValue())
return nullptr;
return StyleValueToCSSValue(property, custom_property_name,
*value.GetAsCSSStyleValue(), execution_context);
} else {
DCHECK(value.IsString());
const auto values = StyleValueFactory::FromString(
property.PropertyID(), custom_property_name, value.GetAsString(),
MakeGarbageCollected<CSSParserContext>(execution_context));
if (values.size() != 1U)
return nullptr;
return StyleValueToCSSValue(property, custom_property_name, *values[0],
execution_context);
}
}
const CSSValue* CoerceStyleValuesOrStrings(
const CSSProperty& property,
const AtomicString& custom_property_name,
const HeapVector<CSSStyleValueOrString>& values,
const ExecutionContext& execution_context) {
DCHECK(property.IsRepeated());
DCHECK_EQ(property.IDEquals(CSSPropertyID::kVariable),
!custom_property_name.IsNull());
if (values.IsEmpty())
return nullptr;
CSSStyleValueVector style_values =
StyleValueFactory::CoerceStyleValuesOrStrings(
property, custom_property_name, values, execution_context);
if (style_values.IsEmpty())
return nullptr;
CSSValueList* result = CssValueListForPropertyID(property.PropertyID());
for (const auto& style_value : style_values) {
const CSSValue* css_value = StyleValueToCSSValue(
property, custom_property_name, *style_value, execution_context);
if (!css_value)
return nullptr;
if (css_value->IsCSSWideKeyword() || css_value->IsVariableReferenceValue())
return style_values.size() == 1U ? css_value : nullptr;
result->Append(*css_value);
}
return result;
}
} // namespace
void StylePropertyMap::set(const ExecutionContext* execution_context,
const String& property_name,
const HeapVector<CSSStyleValueOrString>& values,
ExceptionState& exception_state) {
const CSSPropertyID property_id =
cssPropertyID(execution_context, property_name);
if (property_id == CSSPropertyID::kInvalid) {
exception_state.ThrowTypeError("Invalid propertyName: " + property_name);
return;
}
DCHECK(isValidCSSPropertyID(property_id));
const CSSProperty& property = CSSProperty::Get(property_id);
if (property.IsShorthand()) {
if (values.size() != 1) {
exception_state.ThrowTypeError("Invalid type for property");
return;
}
String css_text;
if (values[0].IsCSSStyleValue()) {
CSSStyleValue* style_value = values[0].GetAsCSSStyleValue();
if (style_value &&
CSSOMTypes::PropertyCanTake(property_id, g_null_atom, *style_value)) {
css_text = style_value->toString();
}
} else {
css_text = values[0].GetAsString();
}
if (css_text.IsEmpty() ||
!SetShorthandProperty(property.PropertyID(), css_text,
execution_context->GetSecureContextMode()))
exception_state.ThrowTypeError("Invalid type for property");
return;
}
AtomicString custom_property_name = (property_id == CSSPropertyID::kVariable)
? AtomicString(property_name)
: g_null_atom;
const CSSValue* result = nullptr;
if (property.IsRepeated()) {
result = CoerceStyleValuesOrStrings(property, custom_property_name, values,
*execution_context);
} else if (values.size() == 1U) {
result = CoerceStyleValueOrString(property, custom_property_name, values[0],
*execution_context);
}
if (!result) {
exception_state.ThrowTypeError("Invalid type for property");
return;
}
if (property_id == CSSPropertyID::kVariable)
SetCustomProperty(custom_property_name, *result);
else
SetProperty(property_id, *result);
}
void StylePropertyMap::append(const ExecutionContext* execution_context,
const String& property_name,
const HeapVector<CSSStyleValueOrString>& values,
ExceptionState& exception_state) {
if (values.IsEmpty())
return;
const CSSPropertyID property_id =
cssPropertyID(execution_context, property_name);
if (property_id == CSSPropertyID::kInvalid) {
exception_state.ThrowTypeError("Invalid propertyName: " + property_name);
return;
}
const CSSProperty& property = CSSProperty::Get(property_id);
if (property_id == CSSPropertyID::kVariable) {
exception_state.ThrowTypeError(
"Appending to custom properties is not supported");
return;
}
if (!property.IsRepeated()) {
exception_state.ThrowTypeError("Property does not support multiple values");
return;
}
CSSValueList* current_value = nullptr;
if (const CSSValue* css_value = GetProperty(property_id)) {
current_value = To<CSSValueList>(css_value)->Copy();
} else {
current_value = CssValueListForPropertyID(property_id);
}
const CSSValue* result = CoerceStyleValuesOrStrings(
property, g_null_atom, values, *execution_context);
const auto* result_value_list = DynamicTo<CSSValueList>(result);
if (!result_value_list) {
exception_state.ThrowTypeError("Invalid type for property");
return;
}
for (const auto& value : *result_value_list) {
current_value->Append(*value);
}
SetProperty(property_id, *current_value);
}
void StylePropertyMap::remove(const ExecutionContext* execution_context,
const String& property_name,
ExceptionState& exception_state) {
CSSPropertyID property_id = cssPropertyID(execution_context, property_name);
if (property_id == CSSPropertyID::kInvalid) {
exception_state.ThrowTypeError("Invalid property name: " + property_name);
return;
}
if (property_id == CSSPropertyID::kVariable) {
RemoveCustomProperty(AtomicString(property_name));
} else {
RemoveProperty(property_id);
}
}
void StylePropertyMap::clear() {
RemoveAllProperties();
}
} // namespace blink<|fim▁end|> | default:
break;
} |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>"""
This module contains some assorted functions used in tests
"""
from __future__ import absolute_import
import os
from importlib import import_module
from twisted.trial.unittest import SkipTest
from scrapy.exceptions import NotConfigured
from scrapy.utils.boto import is_botocore
def assert_aws_environ():
"""Asserts the current environment is suitable for running AWS testsi.
Raises SkipTest with the reason if it's not.
"""
skip_if_no_boto()
if 'AWS_ACCESS_KEY_ID' not in os.environ:
raise SkipTest("AWS keys not found")
def assert_gcs_environ():
if 'GCS_PROJECT_ID' not in os.environ:
raise SkipTest("GCS_PROJECT_ID not found")
def skip_if_no_boto():
try:
is_botocore()
except NotConfigured as e:
raise SkipTest(e)
def get_s3_content_and_delete(bucket, path, with_key=False):
""" Get content from s3 key, and delete key afterwards.
"""
if is_botocore():
import botocore.session
session = botocore.session.get_session()
client = session.create_client('s3')
key = client.get_object(Bucket=bucket, Key=path)
content = key['Body'].read()
client.delete_object(Bucket=bucket, Key=path)
else:
import boto
# assuming boto=2.2.2
bucket = boto.connect_s3().get_bucket(bucket, validate=False)
key = bucket.get_key(path)
content = key.get_contents_as_string()
bucket.delete_key(path)
return (content, key) if with_key else content
def get_gcs_content_and_delete(bucket, path):
from google.cloud import storage
client = storage.Client(project=os.environ.get('GCS_PROJECT_ID'))
bucket = client.get_bucket(bucket)
blob = bucket.get_blob(path)
content = blob.download_as_string()
bucket.delete_blob(path)
return content, blob<|fim▁hole|> """Return an unconfigured Crawler object. If settings_dict is given, it
will be used to populate the crawler settings with a project level
priority.
"""
from scrapy.crawler import CrawlerRunner
from scrapy.spiders import Spider
runner = CrawlerRunner(settings_dict)
return runner.create_crawler(spidercls or Spider)
def get_pythonpath():
"""Return a PYTHONPATH suitable to use in processes so that they find this
installation of Scrapy"""
scrapy_path = import_module('scrapy').__path__[0]
return os.path.dirname(scrapy_path) + os.pathsep + os.environ.get('PYTHONPATH', '')
def get_testenv():
"""Return a OS environment dict suitable to fork processes that need to import
this installation of Scrapy, instead of a system installed one.
"""
env = os.environ.copy()
env['PYTHONPATH'] = get_pythonpath()
return env
def assert_samelines(testcase, text1, text2, msg=None):
"""Asserts text1 and text2 have the same lines, ignoring differences in
line endings between platforms
"""
testcase.assertEqual(text1.splitlines(), text2.splitlines(), msg)<|fim▁end|> |
def get_crawler(spidercls=None, settings_dict=None): |
<|file_name|>matrix_alga.rs<|end_file_name|><|fim▁begin|>#[cfg(all(feature = "alloc", not(feature = "std")))]
use alloc::vec::Vec;
use num::{One, Zero};
use alga::general::{
AbstractGroup, AbstractGroupAbelian, AbstractLoop, AbstractMagma, AbstractModule,
AbstractMonoid, AbstractQuasigroup, AbstractSemigroup, Additive, ClosedAdd, ClosedMul,
ClosedNeg, ComplexField, Field, Identity, JoinSemilattice, Lattice, MeetSemilattice, Module,
Multiplicative, RingCommutative, TwoSidedInverse,
};
use alga::linear::{
FiniteDimInnerSpace, FiniteDimVectorSpace, InnerSpace, NormedSpace, VectorSpace,
};
use crate::base::allocator::Allocator;
use crate::base::dimension::{Dim, DimName};
use crate::base::storage::{Storage, StorageMut};
use crate::base::{DefaultAllocator, MatrixMN, MatrixN, Scalar};
/*
*
* Additive structures.
*
*/
impl<N, R: DimName, C: DimName> Identity<Additive> for MatrixMN<N, R, C>
where
N: Scalar + Zero,
DefaultAllocator: Allocator<N, R, C>,
{
#[inline]
fn identity() -> Self {
Self::from_element(N::zero())
}
}
impl<N, R: DimName, C: DimName> AbstractMagma<Additive> for MatrixMN<N, R, C>
where
N: Scalar + ClosedAdd,
DefaultAllocator: Allocator<N, R, C>,
{
#[inline]
fn operate(&self, other: &Self) -> Self {
self + other
}
}
impl<N, R: DimName, C: DimName> TwoSidedInverse<Additive> for MatrixMN<N, R, C>
where
N: Scalar + ClosedNeg,
DefaultAllocator: Allocator<N, R, C>,
{
#[inline]
#[must_use = "Did you mean to use two_sided_inverse_mut()?"]
fn two_sided_inverse(&self) -> Self {
-self
}
#[inline]
fn two_sided_inverse_mut(&mut self) {
*self = -self.clone()
}
}
macro_rules! inherit_additive_structure(
($($marker: ident<$operator: ident> $(+ $bounds: ident)*),* $(,)*) => {$(
impl<N, R: DimName, C: DimName> $marker<$operator> for MatrixMN<N, R, C>
where N: Scalar + $marker<$operator> $(+ $bounds)*,
DefaultAllocator: Allocator<N, R, C> { }
)*}
);
inherit_additive_structure!(
AbstractSemigroup<Additive> + ClosedAdd,
AbstractMonoid<Additive> + Zero + ClosedAdd,
AbstractQuasigroup<Additive> + ClosedAdd + ClosedNeg,
AbstractLoop<Additive> + Zero + ClosedAdd + ClosedNeg,
AbstractGroup<Additive> + Zero + ClosedAdd + ClosedNeg,
AbstractGroupAbelian<Additive> + Zero + ClosedAdd + ClosedNeg
);
impl<N, R: DimName, C: DimName> AbstractModule for MatrixMN<N, R, C>
where
N: Scalar + RingCommutative,
DefaultAllocator: Allocator<N, R, C>,
{
type AbstractRing = N;
#[inline]
fn multiply_by(&self, n: N) -> Self {
self * n
}
}
impl<N, R: DimName, C: DimName> Module for MatrixMN<N, R, C>
where
N: Scalar + RingCommutative,
DefaultAllocator: Allocator<N, R, C>,
{
type Ring = N;
}
impl<N, R: DimName, C: DimName> VectorSpace for MatrixMN<N, R, C>
where
N: Scalar + Field,
DefaultAllocator: Allocator<N, R, C>,
{
type Field = N;
}
impl<N, R: DimName, C: DimName> FiniteDimVectorSpace for MatrixMN<N, R, C>
where
N: Scalar + Field,
DefaultAllocator: Allocator<N, R, C>,
{
#[inline]
fn dimension() -> usize {
R::dim() * C::dim()
}
#[inline]
fn canonical_basis_element(i: usize) -> Self {
assert!(i < Self::dimension(), "Index out of bound.");
let mut res = Self::zero();
unsafe {
*res.data.get_unchecked_linear_mut(i) = N::one();
}
res
}
#[inline]
fn dot(&self, other: &Self) -> N {
self.dot(other)
}
#[inline]
unsafe fn component_unchecked(&self, i: usize) -> &N {
self.data.get_unchecked_linear(i)
}
#[inline]
unsafe fn component_unchecked_mut(&mut self, i: usize) -> &mut N {
self.data.get_unchecked_linear_mut(i)
}
}
impl<
N: ComplexField + simba::scalar::ComplexField<RealField = <N as ComplexField>::RealField>,
R: DimName,
C: DimName,
> NormedSpace for MatrixMN<N, R, C>
where
<N as ComplexField>::RealField: simba::scalar::RealField,
DefaultAllocator: Allocator<N, R, C>,
{
type RealField = <N as ComplexField>::RealField;
type ComplexField = N;
#[inline]
fn norm_squared(&self) -> <N as ComplexField>::RealField {
self.norm_squared()
}
#[inline]
fn norm(&self) -> <N as ComplexField>::RealField {
self.norm()
}
#[inline]
#[must_use = "Did you mean to use normalize_mut()?"]
fn normalize(&self) -> Self {
self.normalize()
}
#[inline]
fn normalize_mut(&mut self) -> <N as ComplexField>::RealField {
self.normalize_mut()
}
#[inline]
#[must_use = "Did you mean to use try_normalize_mut()?"]
fn try_normalize(&self, min_norm: <N as ComplexField>::RealField) -> Option<Self> {
self.try_normalize(min_norm)
}
#[inline]
fn try_normalize_mut(
&mut self,
min_norm: <N as ComplexField>::RealField,
) -> Option<<N as ComplexField>::RealField> {
self.try_normalize_mut(min_norm)
}
}
impl<
N: ComplexField + simba::scalar::ComplexField<RealField = <N as ComplexField>::RealField>,
R: DimName,
C: DimName,
> InnerSpace for MatrixMN<N, R, C>
where
<N as ComplexField>::RealField: simba::scalar::RealField,
DefaultAllocator: Allocator<N, R, C>,
{
#[inline]
fn angle(&self, other: &Self) -> <N as ComplexField>::RealField {
self.angle(other)
}
#[inline]
fn inner_product(&self, other: &Self) -> N {
self.dotc(other)
}
}
// FIXME: specialization will greatly simplify this implementation in the future.
// In particular:
// − use `x()` instead of `::canonical_basis_element`
// − use `::new(x, y, z)` instead of `::from_slice`
impl<
N: ComplexField + simba::scalar::ComplexField<RealField = <N as ComplexField>::RealField>,
R: DimName,
C: DimName,
> FiniteDimInnerSpace for MatrixMN<N, R, C>
where
<N as ComplexField>::RealField: simba::scalar::RealField,
DefaultAllocator: Allocator<N, R, C>,
{
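    // Illustrative usage sketch (exact paths and feature flags depend on the
    // nalgebra/alga versions in use):
    //
    //     use alga::linear::FiniteDimInnerSpace;
    //     use nalgebra::Vector3;
    //
    //     let mut vs = [Vector3::new(1.0, 0.0, 0.0), Vector3::new(1.0, 1.0, 0.0)];
    //     let n = FiniteDimInnerSpace::orthonormalize(&mut vs[..]);
    //     // n == 2 and vs now holds an orthonormal family (Gram-Schmidt).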
#[inline]
fn orthonormalize(vs: &mut [Self]) -> usize {
let mut nbasis_elements = 0;
for i in 0..vs.len() {
{
let (elt, basis) = vs[..i + 1].split_last_mut().unwrap();
for basis_element in &basis[..nbasis_elements] {
*elt -= &*basis_element * elt.dot(basis_element)
}
}
if vs[i]
.try_normalize_mut(<N as ComplexField>::RealField::zero())
.is_some()
{
// FIXME: this will be efficient on dynamically-allocated vectors but for
// statically-allocated ones, `.clone_from` would be better.
vs.swap(nbasis_elements, i);
nbasis_elements += 1;
// All the other vectors will be dependent.
if nbasis_elements == Self::dimension() {
break;
}
}
}
nbasis_elements
}
#[inline]
fn orthonormal_subspace_basis<F>(vs: &[Self], mut f: F)
where
F: FnMut(&Self) -> bool,
{
// FIXME: is this necessary?
assert!(
vs.len() <= Self::dimension(),
"The given set of vectors has no chance of being a free family."
);
match Self::dimension() {
1 => {
if vs.len() == 0 {
let _ = f(&Self::canonical_basis_element(0));
}
}
2 => {
if vs.len() == 0 {
let _ = f(&Self::canonical_basis_element(0))
&& f(&Self::canonical_basis_element(1));
} else if vs.len() == 1 {
let v = &vs[0];
let res = Self::from_column_slice(&[-v[1], v[0]]);
let _ = f(&res.normalize());
}
// Otherwise, nothing.
}
3 => {
if vs.len() == 0 {
let _ = f(&Self::canonical_basis_element(0))
&& f(&Self::canonical_basis_element(1))
&& f(&Self::canonical_basis_element(2));
} else if vs.len() == 1 {
let v = &vs[0];
let mut a;
if ComplexField::norm1(v[0]) > ComplexField::norm1(v[1]) {
a = Self::from_column_slice(&[v[2], N::zero(), -v[0]]);
} else {
a = Self::from_column_slice(&[N::zero(), -v[2], v[1]]);
};
let _ = a.normalize_mut();
if f(&a.cross(v)) {
let _ = f(&a);
}
} else if vs.len() == 2 {
let _ = f(&vs[0].cross(&vs[1]).normalize());
}
}
_ => {
#[cfg(any(feature = "std", feature = "alloc"))]
{
// XXX: use a GenericArray instead.
let mut known_basis = Vec::new();
for v in vs.iter() {
known_basis.push(v.normalize())
}
for i in 0..Self::dimension() - vs.len() {
let mut elt = Self::canonical_basis_element(i);
for v in &known_basis {
elt -= v * elt.dot(v)
}
if let Some(subsp_elt) =
elt.try_normalize(<N as ComplexField>::RealField::zero())
{
if !f(&subsp_elt) {
return;
};
known_basis.push(subsp_elt);
}
}
}
#[cfg(all(not(feature = "std"), not(feature = "alloc")))]
{
panic!("Cannot compute the orthogonal subspace basis of a vector with a dimension greater than 3 \
if #![no_std] is enabled and the 'alloc' feature is not enabled.")
}
}
}
}
}
/*
*
*
* Multiplicative structures.
*
*
*/
impl<N, D: DimName> Identity<Multiplicative> for MatrixN<N, D>
where
N: Scalar + Zero + One,
DefaultAllocator: Allocator<N, D, D>,
{
#[inline]
fn identity() -> Self {
Self::identity()
}
}
impl<N, D: DimName> AbstractMagma<Multiplicative> for MatrixN<N, D>
where
N: Scalar + Zero + One + ClosedAdd + ClosedMul,
DefaultAllocator: Allocator<N, D, D>,
{
#[inline]
fn operate(&self, other: &Self) -> Self {
self * other
}
}
macro_rules! impl_multiplicative_structure(
($($marker: ident<$operator: ident> $(+ $bounds: ident)*),* $(,)*) => {$(
impl<N, D: DimName> $marker<$operator> for MatrixN<N, D>
where N: Scalar + Zero + One + ClosedAdd + ClosedMul + $marker<$operator> $(+ $bounds)*,
DefaultAllocator: Allocator<N, D, D> { }<|fim▁hole|> AbstractSemigroup<Multiplicative>,
AbstractMonoid<Multiplicative> + One
);
/*
*
* Ordering
*
*/
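// These lattice operations are element-wise: for real scalars, `meet` is the
// component-wise minimum and `join` the component-wise maximum. For example
// (illustrative), meet([1, 4], [3, 2]) == [1, 2] and join([1, 4], [3, 2]) == [3, 4].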
impl<N, R: Dim, C: Dim> MeetSemilattice for MatrixMN<N, R, C>
where
N: Scalar + MeetSemilattice,
DefaultAllocator: Allocator<N, R, C>,
{
#[inline]
fn meet(&self, other: &Self) -> Self {
self.zip_map(other, |a, b| a.meet(&b))
}
}
impl<N, R: Dim, C: Dim> JoinSemilattice for MatrixMN<N, R, C>
where
N: Scalar + JoinSemilattice,
DefaultAllocator: Allocator<N, R, C>,
{
#[inline]
fn join(&self, other: &Self) -> Self {
self.zip_map(other, |a, b| a.join(&b))
}
}
impl<N, R: Dim, C: Dim> Lattice for MatrixMN<N, R, C>
where
N: Scalar + Lattice,
DefaultAllocator: Allocator<N, R, C>,
{
#[inline]
fn meet_join(&self, other: &Self) -> (Self, Self) {
let shape = self.data.shape();
assert!(
shape == other.data.shape(),
"Matrix meet/join error: mismatched dimensions."
);
let mut mres = unsafe { Self::new_uninitialized_generic(shape.0, shape.1) };
let mut jres = unsafe { Self::new_uninitialized_generic(shape.0, shape.1) };
for i in 0..shape.0.value() * shape.1.value() {
unsafe {
let mj = self
.data
.get_unchecked_linear(i)
.meet_join(other.data.get_unchecked_linear(i));
*mres.data.get_unchecked_linear_mut(i) = mj.0;
*jres.data.get_unchecked_linear_mut(i) = mj.1;
}
}
(mres, jres)
}
}<|fim▁end|> | )*}
);
impl_multiplicative_structure!( |