prompt | completion |
---|---|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from django.conf.urls.defaults import *
urlpatterns = patterns('',
(r'^(\d+)/$', 'onpsx.gallery.views.index'),
(r'^$', 'onpsx.gallery.views.index'),
)<|fim▁end|> | |
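Both patterns above dispatch to the same view; the first captures the leading digits as a positional argument, so the view can be sketched as taking an optional page parameter (names here are illustrative, not from the source):

# onpsx/gallery/views.py (hypothetical sketch matching the two routes)
def index(request, page=None):
    # "/7/" matches r'^(\d+)/$' and arrives as page="7" (a string)
    # "/"   matches r'^$' and arrives with page=None
    ...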
<|file_name|>cli.ts<|end_file_name|><|fim▁begin|>import * as yargs from "yargs";
<|fim▁hole|>import { IRawConversionSettings, parseSettings } from "./settingsParser";
import { StatusCode } from "./statusCode";
const rawConversionSettings: IRawConversionSettings = yargs
.usage("Usage: $0 --csproj <csproj> --target <target>")
.command("csproj", "File path to the source .csproj file.")
.command("reference", "References file to insert /// paths to all files (optional).")
.command("replacement", "key=value MSBuild pairs to replace in raw source file paths.")
.command("target", "File path to the target tsconfig.json file.")
.command("template", "File path to the template tsconfig.json file, if not <target>.")
.demandOption(["csproj"])
.argv as {} as IRawConversionSettings;
const main = async (): Promise<number> => {
const conversionSettings = parseSettings(rawConversionSettings);
const runner = new Runner({
onError: console.error.bind(console),
onLog: console.log.bind(console),
});
return await runner.run(conversionSettings);
};
main()
.then((exitCode: StatusCode) => {
process.exitCode = exitCode;
})
.catch((error) => {
// tslint:disable-next-line:no-console
console.error(error.stack || error.message);
process.exitCode = StatusCode.UnknownError;
});<|fim▁end|> | import { Runner } from "./runner"; |
<|file_name|>create_model.py<|end_file_name|><|fim▁begin|>import argparse
import pandas as pd
from sklearn.linear_model import LogisticRegression
from sklearn.feature_selection import RFECV
from sklearn.ensemble import RandomForestClassifier
from beveridge.models import ModelStorage
import pickle
parser = argparse.ArgumentParser(description="Create model from CSV stats data.")
parser.add_argument('file')
parser.add_argument('outfile')
args = parser.parse_args()
<|fim▁hole|>data = pd.read_csv(args.file)
#Drop team
del data['team']
#Cleanse to numeric data
data = data.apply(lambda x: pd.to_numeric(x, errors='coerce'))
#Delete any completely empty columns
data = data.dropna(axis=1, how='all')
#Delete any rows with empty values
data = data.dropna(axis=0, how='any')
#Set up some columns
data['home'] = data['home'].astype('bool')
data['win'] = data['win'].astype('bool')
#Build relative columns
data['relRebounds'] = data['rebounds'] / data['oppRebounds']
data['relDisposals'] = data['disposals'] / data['oppDisposals']
data['relKicks'] = data['kicks'] / data['oppKicks']
data['relHandballs'] = data['handballs'] / data['oppHandballs']
data['relClearances'] = data['clearances'] / data['oppClearances']
data['relHitouts'] = data['hitouts'] / data['oppHitouts']
data['relMarks'] = data['marks'] / data['oppMarks']
data['relInside50s'] = data['inside50s'] / data['oppInside50s']
data['relTackles'] = data['tackles'] / data['oppTackles']
data['relClangers'] = data['clangers'] / data['oppClangers']
data['relFrees'] = data['frees'] / data['oppFrees']
data['relContested'] = data['contested'] / data['oppContested']
data['relUncontested'] = data['uncontested'] / data['oppUncontested']
data['relContestedMarks'] = data['contestedMarks'] / data['oppContestedMarks']
data['relMarksIn50'] = data['marksIn50'] / data['oppMarksIn50']
data['relOnePercenters'] = data['onePercenters'] / data['oppOnePercenters']
data['relBounces'] = data['bounces'] / data['oppBounces']
#Try building a logistic regression model
print("Building initial logistic regression model.")
model = LogisticRegression()
#Only use the relative columns. I've tested with the absolute values and they are much less useful than relative.
trainColumns = pd.Series(['relRebounds', 'relDisposals', 'relKicks', 'relHandballs', 'relClearances', 'relHitouts', 'relMarks', 'relInside50s', 'relTackles', 'relClangers', 'relFrees', 'relContested', 'relUncontested', 'relContestedMarks', 'relMarksIn50', 'relOnePercenters', 'relBounces', 'home'])
model.fit(data[trainColumns], data['win'])
print("Training data accuracy: {:%}".format(model.score(data[trainColumns], data['win'])))
#Recursive feature selection with cross-validation
print("Running feature selection.")
fs = RFECV(model)
fs.fit(data[trainColumns], data['win'])
print("Accuracy after feature selection: {:%}".format(fs.score(data[trainColumns], data['win'])))
filteredColumns = trainColumns[fs.support_]
#Ignoring filtered columns for the random forest. Seems to produce better results
#Create a random forest model
print("Building random forest")
rf = RandomForestClassifier(n_estimators=100, min_samples_split=0.02, class_weight='balanced')
rf.fit(data[trainColumns], data['win'])
print("Random forest accuracy: {:%}".format(rf.score(data[trainColumns], data['win'])))
#Save random forest model to given filename
with open(args.outfile, 'wb') as file:
storage = ModelStorage(trainColumns, rf)
pickle.dump(storage, file)<|fim▁end|> | #Create DataFrame in Pandas |
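The pickle written above wraps the training columns and the fitted forest in one ModelStorage object, so a scorer can rebuild the exact feature frame later. A minimal consumer sketch — the attribute names storage.columns and storage.model are assumptions about ModelStorage, whose definition is not shown:

import pickle

import pandas as pd

with open("afl.model", "rb") as f:    # hypothetical path
    storage = pickle.load(f)          # the ModelStorage(trainColumns, rf) saved above

games = pd.read_csv("new_games.csv")  # hypothetical input; needs the same rel* columns and 'home' flag
win_probability = storage.model.predict_proba(games[storage.columns])[:, 1]  # attribute names assumed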
<|file_name|>indexed.rs<|end_file_name|><|fim▁begin|>use std::ops::{Index, IndexMut};
struct Image<P> {
width: usize,
pixels: Vec<P>,<|fim▁hole|>impl<P: Default + Copy> Image<P> {
fn new(width: usize, height: usize) -> Image<P> {
Image {
width,
pixels: vec![P::default(); width * height],
}
}
}
impl<P> Index<usize> for Image<P> {
type Output = [P];
fn index(&self, row: usize) -> &[P] {
let start = row * self.width;
&self.pixels[start..start + self.width]
}
}
impl<P> IndexMut<usize> for Image<P> {
fn index_mut(&mut self, row: usize) -> &mut [P] {
let start = row * self.width;
&mut self.pixels[start..start + self.width]
}
}<|fim▁end|> | }
|
<|file_name|>response.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The [Response](https://fetch.spec.whatwg.org/#responses) object
//! resulting from a [fetch operation](https://fetch.spec.whatwg.org/#concept-fetch)
use {FetchMetadata, FilteredMetadata, Metadata, NetworkError};
use hyper::header::{AccessControlExposeHeaders, ContentType, Headers};
use hyper::status::StatusCode;
use hyper_serde::Serde;
use servo_url::ServoUrl;
use std::ascii::AsciiExt;
use std::cell::{Cell, RefCell};
use std::sync::{Arc, Mutex};
/// [Response type](https://fetch.spec.whatwg.org/#concept-response-type)
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize, HeapSizeOf)]
pub enum ResponseType {
Basic,
Cors,
Default,
Error(NetworkError),
Opaque,
OpaqueRedirect
}
/// [Response termination reason](https://fetch.spec.whatwg.org/#concept-response-termination-reason)
#[derive(Debug, Clone, Copy, Deserialize, Serialize, HeapSizeOf)]
pub enum TerminationReason {
EndUserAbort,
Fatal,
Timeout
}
/// The response body can still be pushed to after fetch
/// This provides a way to store unfinished response bodies
#[derive(Clone, Debug, PartialEq, HeapSizeOf)]
pub enum ResponseBody {
Empty, // XXXManishearth is this necessary, or is Done(vec![]) enough?
Receiving(Vec<u8>),
Done(Vec<u8>),
}
impl ResponseBody {
pub fn is_done(&self) -> bool {
match *self {
ResponseBody::Done(..) => true,
ResponseBody::Empty | ResponseBody::Receiving(..) => false
}
}
}<|fim▁hole|>pub enum CacheState {
None,
Local,
Validated,
Partial
}
/// [Https state](https://fetch.spec.whatwg.org/#concept-response-https-state)
#[derive(Debug, Clone, Copy, HeapSizeOf, Deserialize, Serialize)]
pub enum HttpsState {
None,
Deprecated,
Modern
}
pub enum ResponseMsg {
Chunk(Vec<u8>),
Finished,
Errored
}
/// A [Response](https://fetch.spec.whatwg.org/#concept-response) as defined by the Fetch spec
#[derive(Debug, Clone, HeapSizeOf)]
pub struct Response {
pub response_type: ResponseType,
pub termination_reason: Option<TerminationReason>,
url: Option<ServoUrl>,
pub url_list: RefCell<Vec<ServoUrl>>,
/// `None` can be considered a StatusCode of `0`.
#[ignore_heap_size_of = "Defined in hyper"]
pub status: Option<StatusCode>,
pub raw_status: Option<(u16, Vec<u8>)>,
#[ignore_heap_size_of = "Defined in hyper"]
pub headers: Headers,
#[ignore_heap_size_of = "Mutex heap size undefined"]
pub body: Arc<Mutex<ResponseBody>>,
pub cache_state: CacheState,
pub https_state: HttpsState,
pub referrer: Option<ServoUrl>,
/// [Internal response](https://fetch.spec.whatwg.org/#concept-internal-response), only used if the Response
/// is a filtered response
pub internal_response: Option<Box<Response>>,
/// whether or not to try to return the internal_response when asked for actual_response
pub return_internal: Cell<bool>,
}
impl Response {
pub fn new(url: ServoUrl) -> Response {
Response {
response_type: ResponseType::Default,
termination_reason: None,
url: Some(url),
url_list: RefCell::new(Vec::new()),
status: Some(StatusCode::Ok),
raw_status: Some((200, b"OK".to_vec())),
headers: Headers::new(),
body: Arc::new(Mutex::new(ResponseBody::Empty)),
cache_state: CacheState::None,
https_state: HttpsState::None,
referrer: None,
internal_response: None,
return_internal: Cell::new(true)
}
}
pub fn network_error(e: NetworkError) -> Response {
Response {
response_type: ResponseType::Error(e),
termination_reason: None,
url: None,
url_list: RefCell::new(vec![]),
status: None,
raw_status: None,
headers: Headers::new(),
body: Arc::new(Mutex::new(ResponseBody::Empty)),
cache_state: CacheState::None,
https_state: HttpsState::None,
referrer: None,
internal_response: None,
return_internal: Cell::new(true)
}
}
pub fn url(&self) -> Option<&ServoUrl> {
self.url.as_ref()
}
pub fn is_network_error(&self) -> bool {
match self.response_type {
ResponseType::Error(..) => true,
_ => false
}
}
pub fn get_network_error(&self) -> Option<&NetworkError> {
match self.response_type {
ResponseType::Error(ref e) => Some(e),
_ => None,
}
}
pub fn actual_response(&self) -> &Response {
if self.return_internal.get() && self.internal_response.is_some() {
&**self.internal_response.as_ref().unwrap()
} else {
self
}
}
pub fn to_actual(self) -> Response {
if self.return_internal.get() && self.internal_response.is_some() {
*self.internal_response.unwrap()
} else {
self
}
}
/// Convert to a filtered response, of type `filter_type`.
/// Do not use with type Error or Default
pub fn to_filtered(self, filter_type: ResponseType) -> Response {
match filter_type {
ResponseType::Default | ResponseType::Error(..) => panic!(),
_ => (),
}
let old_response = self.to_actual();
if let ResponseType::Error(e) = old_response.response_type {
return Response::network_error(e);
}
let old_headers = old_response.headers.clone();
let mut response = old_response.clone();
response.internal_response = Some(Box::new(old_response));
response.response_type = filter_type;
match response.response_type {
ResponseType::Default | ResponseType::Error(..) => unreachable!(),
ResponseType::Basic => {
let headers = old_headers.iter().filter(|header| {
match &*header.name().to_ascii_lowercase() {
"set-cookie" | "set-cookie2" => false,
_ => true
}
}).collect();
response.headers = headers;
},
ResponseType::Cors => {
let access = old_headers.get::<AccessControlExposeHeaders>();
let allowed_headers = access.as_ref().map(|v| &v[..]).unwrap_or(&[]);
let headers = old_headers.iter().filter(|header| {
match &*header.name().to_ascii_lowercase() {
"cache-control" | "content-language" | "content-type" |
"expires" | "last-modified" | "pragma" => true,
"set-cookie" | "set-cookie2" => false,
header => {
let result =
allowed_headers.iter().find(|h| *header == *h.to_ascii_lowercase());
result.is_some()
}
}
}).collect();
response.headers = headers;
},
ResponseType::Opaque => {
response.url_list = RefCell::new(vec![]);
response.url = None;
response.headers = Headers::new();
response.status = None;
response.body = Arc::new(Mutex::new(ResponseBody::Empty));
response.cache_state = CacheState::None;
},
ResponseType::OpaqueRedirect => {
response.headers = Headers::new();
response.status = None;
response.body = Arc::new(Mutex::new(ResponseBody::Empty));
response.cache_state = CacheState::None;
}
}
response
}
pub fn metadata(&self) -> Result<FetchMetadata, NetworkError> {
fn init_metadata(response: &Response, url: &ServoUrl) -> Metadata {
let mut metadata = Metadata::default(url.clone());
metadata.set_content_type(match response.headers.get() {
Some(&ContentType(ref mime)) => Some(mime),
None => None
});
metadata.headers = Some(Serde(response.headers.clone()));
metadata.status = response.raw_status.clone();
metadata.https_state = response.https_state;
metadata.referrer = response.referrer.clone();
metadata
};
if let Some(error) = self.get_network_error() {
return Err(error.clone());
}
let metadata = self.url.as_ref().map(|url| init_metadata(self, url));
if let Some(ref response) = self.internal_response {
match response.url {
Some(ref url) => {
let unsafe_metadata = init_metadata(response, url);
Ok(FetchMetadata::Filtered {
filtered: match metadata {
Some(m) => FilteredMetadata::Transparent(m),
None => FilteredMetadata::Opaque
},
unsafe_: unsafe_metadata
})
}
None => Err(NetworkError::Internal("No url found in unsafe response".to_owned()))
}
} else {
Ok(FetchMetadata::Unfiltered(metadata.unwrap()))
}
}
}<|fim▁end|> |
/// [Cache state](https://fetch.spec.whatwg.org/#concept-response-cache-state)
#[derive(Clone, Debug, Deserialize, Serialize, HeapSizeOf)] |
<|file_name|>main.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import flask
from oslo_config import cfg
from oslo_log import log
import oslo_middleware.cors as cors_middleware
from oslo_middleware import request_id
from oslo_service import systemd
import six
import stevedore
from werkzeug import exceptions as werkzeug_exceptions
from sahara.api import acl
from sahara.api.middleware import auth_valid
from sahara.api.middleware import log_exchange
from sahara.api import v10 as api_v10
from sahara.api import v11 as api_v11
from sahara import config
from sahara import context
from sahara.i18n import _LI
from sahara.i18n import _LW
from sahara.plugins import base as plugins_base
from sahara.service import api as service_api
from sahara.service.edp import api as edp_api
from sahara.service import ops as service_ops
from sahara.service import periodic
from sahara.utils import api as api_utils
from sahara.utils.openstack import cinder
from sahara.utils import remote
from sahara.utils import rpc as messaging
from sahara.utils import wsgi
LOG = log.getLogger(__name__)
opts = [
cfg.StrOpt('os_region_name',
help='Region name used to get services endpoints.'),
cfg.StrOpt('infrastructure_engine',
default='heat',
help='An engine which will be used to provision '
'infrastructure for Hadoop cluster.'),
cfg.StrOpt('remote',
default='ssh',
help='A method for Sahara to execute commands '
'on VMs.'),
cfg.IntOpt('api_workers', default=0,
help="Number of workers for Sahara API service (0 means "
"all-in-one-thread configuration).")
]
CONF = cfg.CONF
CONF.register_opts(opts)
def setup_common(possible_topdir, service_name):
dev_conf = os.path.join(possible_topdir,
'etc',
'sahara',
'sahara.conf')
config_files = None
if os.path.exists(dev_conf):
config_files = [dev_conf]
config.parse_configs(config_files)
log.setup(CONF, "sahara")
# Validate other configurations (that may produce logs) here
cinder.validate_config()
if service_name != 'all-in-one' or cfg.CONF.enable_notifications:
messaging.setup()
plugins_base.setup_plugins()
LOG.info(_LI('Sahara {service} started').format(service=service_name))
def setup_sahara_api(mode):
ops = _get_ops_driver(mode)
service_api.setup_service_api(ops)
edp_api.setup_edp_api(ops)
def setup_sahara_engine():
periodic.setup()
engine = _get_infrastructure_engine()
service_ops.setup_ops(engine)
remote_driver = _get_remote_driver()
remote.setup_remote(remote_driver, engine)
def setup_auth_policy():
acl.setup_policy()
def make_app():
"""App builder (wsgi)
Entry point for Sahara REST API server
"""
app = flask.Flask('sahara.api')
@app.route('/', methods=['GET'])
def version_list():
context.set_ctx(None)
return api_utils.render({
"versions": [
{"id": "v1.0", "status": "SUPPORTED"},
{"id": "v1.1", "status": "CURRENT"}
]
})
@app.teardown_request
def teardown_request(_ex=None):
context.set_ctx(None)
app.register_blueprint(api_v10.rest, url_prefix='/v1.0')
app.register_blueprint(api_v10.rest, url_prefix='/v1.1')
app.register_blueprint(api_v11.rest, url_prefix='/v1.1')
def make_json_error(ex):
status_code = (ex.code
if isinstance(ex, werkzeug_exceptions.HTTPException)
else 500)
description = (ex.description
if isinstance(ex, werkzeug_exceptions.HTTPException)
else str(ex))
return api_utils.render({'error': status_code,
'error_message': description},
status=status_code)<|fim▁hole|>
for code in six.iterkeys(werkzeug_exceptions.default_exceptions):
app.error_handler_spec[None][code] = make_json_error
if CONF.debug and not CONF.log_exchange:
LOG.debug('Logging of request/response exchange could be enabled using'
' flag --log-exchange')
# Create a CORS wrapper, and attach sahara-specific defaults that must be
# included in all CORS responses.
app.wsgi_app = cors_middleware.CORS(app.wsgi_app, CONF)
app.wsgi_app.set_latent(
allow_headers=['X-Auth-Token', 'X-Server-Management-Url'],
allow_methods=['GET', 'PUT', 'POST', 'DELETE', 'PATCH'],
expose_headers=['X-Auth-Token', 'X-Server-Management-Url']
)
if CONF.log_exchange:
app.wsgi_app = log_exchange.LogExchange.factory(CONF)(app.wsgi_app)
app.wsgi_app = auth_valid.wrap(app.wsgi_app)
app.wsgi_app = acl.wrap(app.wsgi_app)
app.wsgi_app = request_id.RequestId(app.wsgi_app)
return app
def _load_driver(namespace, name):
extension_manager = stevedore.DriverManager(
namespace=namespace,
name=name,
invoke_on_load=True
)
LOG.info(_LI("Driver {name} successfully loaded").format(name=name))
return extension_manager.driver
def _get_infrastructure_engine():
"""Import and return one of sahara.service.*_engine.py modules."""
LOG.debug("Infrastructure engine {engine} is loading".format(
engine=CONF.infrastructure_engine))
if CONF.infrastructure_engine == "direct":
LOG.warning(_LW("Direct infrastructure engine is deprecated in Liberty"
" release and will be removed after that release."
" Use Heat infrastructure engine instead."))
return _load_driver('sahara.infrastructure.engine',
CONF.infrastructure_engine)
def _get_remote_driver():
LOG.debug("Remote {remote} is loading".format(remote=CONF.remote))
return _load_driver('sahara.remote', CONF.remote)
def _get_ops_driver(driver_name):
LOG.debug("Ops {driver} is loading".format(driver=driver_name))
return _load_driver('sahara.run.mode', driver_name)
def start_server(app):
server = wsgi.Server()
server.start(app)
systemd.notify_once()
server.wait()<|fim▁end|> | |
<|file_name|>test_gaux.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# HiPart is a program to analyze the electronic structure of molecules with
# fuzzy-atom partitioning methods.
# Copyright (C) 2007 - 2012 Toon Verstraelen <[email protected]>
#
# This file is part of HiPart.
#
# HiPart is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#<|fim▁hole|># but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
#--
from hipart.gint.gint_ext import gaux
import numpy
from scipy.special import erf
def test_gaux():
for t in 0.1, 0.5, 0.7, 1.0, 2.0, 4.0:
u1 = gaux(t, 0)
u2 = numpy.sqrt(numpy.pi/t)/2*erf(numpy.sqrt(t))<|fim▁end|> | # HiPart is distributed in the hope that it will be useful, |
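The closed form assigned to u2 is the zeroth-order Boys function: F0(t) = integral from 0 to 1 of exp(-t*u**2) du = sqrt(pi/t)/2 * erf(sqrt(t)). That means gaux(t, 0) can also be cross-checked by direct quadrature, independent of the gint extension; a standalone sketch of that check:

import numpy
from scipy.integrate import quad
from scipy.special import erf

for t in (0.1, 0.5, 0.7, 1.0, 2.0, 4.0):
    closed_form = numpy.sqrt(numpy.pi / t) / 2 * erf(numpy.sqrt(t))
    # Defining integral of F0(t), evaluated numerically
    reference, _ = quad(lambda u: numpy.exp(-t * u * u), 0.0, 1.0)
    assert abs(closed_form - reference) < 1e-10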
<|file_name|>config_perm_provider.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from trac.core import *
from trac.config import ConfigSection
from trac.perm import IPermissionRequestor
class ExtraPermissionsProvider(Component):
"""Extra permission provider."""
implements(IPermissionRequestor)
extra_permissions_section = ConfigSection('extra-permissions',
doc="""This section provides a way to add arbitrary permissions to a
Trac environment. This can be useful for adding new permissions to use
for workflow actions, for example.
To add new permissions, create a new section `[extra-permissions]` in
your `trac.ini`. Every entry in that section defines a meta-permission
and a comma-separated list of permissions. For example:
{{{
[extra-permissions]
extra_admin = extra_view, extra_modify, extra_delete
}}}
This entry will define three new permissions `EXTRA_VIEW`,
`EXTRA_MODIFY` and `EXTRA_DELETE`, as well as a meta-permissions
`EXTRA_ADMIN` that grants all three permissions.
If you don't want a meta-permission, start the meta-name with an
underscore (`_`):
{{{
[extra-permissions]
_perms = extra_view, extra_modify
}}}
""")
def get_permission_actions(self):<|fim▁hole|> permissions = {}
for meta, perms in self.extra_permissions_section.options():
perms = [each.strip().upper() for each in perms.split(',')]
for perm in perms:
permissions.setdefault(perm, [])
meta = meta.strip().upper()
if meta and not meta.startswith('_'):
permissions.setdefault(meta, []).extend(perms)
return [(k, v) if v else k for k, v in permissions.iteritems()]<|fim▁end|> | |
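Tracing get_permission_actions() against the docstring's first example makes the return shape concrete: leaf permissions stay mapped to empty lists, so the final (k, v) if v else k expression yields bare strings for them and a tuple only for the meta-permission:

# Given:
#   [extra-permissions]
#   extra_admin = extra_view, extra_modify, extra_delete
#
# get_permission_actions() returns (dict ordering aside):
[
    'EXTRA_VIEW',
    'EXTRA_MODIFY',
    'EXTRA_DELETE',
    ('EXTRA_ADMIN', ['EXTRA_VIEW', 'EXTRA_MODIFY', 'EXTRA_DELETE']),
]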
<|file_name|>api.py<|end_file_name|><|fim▁begin|>from django.core.urlresolvers import reverse
from tastypie import authorization
from tastypie.authentication import MultiAuthentication
from crits.events.event import Event
from crits.events.handlers import add_new_event
from crits.core.api import CRITsApiKeyAuthentication, CRITsSessionAuthentication
from crits.core.api import CRITsSerializer, CRITsAPIResource
from crits.vocabulary.events import EventTypes
class EventResource(CRITsAPIResource):
"""
Class to handle everything related to the Event API.
Currently supports GET and POST.
"""
class Meta:
object_class = Event
allowed_methods = ('get', 'post', 'patch')
resource_name = "events"
authentication = MultiAuthentication(CRITsApiKeyAuthentication(),
CRITsSessionAuthentication())
authorization = authorization.Authorization()
serializer = CRITsSerializer()
def get_object_list(self, request):
"""
Use the CRITsAPIResource to get our objects but provide the class to get
the objects from.
:param request: The incoming request.
:type request: :class:`django.http.HttpRequest`
:returns: Resulting objects in the specified format (JSON by default).
"""
return super(EventResource, self).get_object_list(request, Event)
def obj_create(self, bundle, **kwargs):
"""
Handles creating Events through the API.
:param bundle: Bundle containing the information to create the Event.
:type bundle: Tastypie Bundle object.
:returns: HttpResponse.
"""
analyst = bundle.request.user.username
title = bundle.data.get('title', None)
description = bundle.data.get('description', None)
event_type = bundle.data.get('event_type', None)
source = bundle.data.get('source', None)
method = bundle.data.get('method', None)
reference = bundle.data.get('reference', None)
date = bundle.data.get('date', None)
bucket_list = bundle.data.get('bucket_list', None)
ticket = bundle.data.get('ticket', None)
content = {'return_code': 0,
'type': 'Event'}
if not title or not event_type or not source or not description:
content['message'] = 'Must provide a title, event_type, source, and description.'
self.crits_response(content)
if event_type not in EventTypes.values():
content['message'] = 'Not a valid Event Type.'
self.crits_response(content)
result = add_new_event(title,
description,
event_type,
source,
method,
reference,
date,
analyst,
bucket_list,
ticket)
if result.get('message'):
content['message'] = result.get('message')<|fim▁hole|> url = reverse('api_dispatch_detail',
kwargs={'resource_name': 'events',
'api_name': 'v1',
'pk': result.get('id')})
content['url'] = url
if result['success']:
content['return_code'] = 0
self.crits_response(content)<|fim▁end|> | content['id'] = result.get('id', '')
if result.get('id'): |
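Since obj_create() reads its fields from bundle.data and rejects requests missing title, event_type, source, or description, creating an Event reduces to one JSON POST. A hypothetical client sketch — the host, endpoint path, and credential format are assumptions inferred from the resource_name/api_name values, not confirmed by the snippet:

import requests

payload = {
    "title": "Phishing campaign",            # required
    "event_type": "Phishing",                # must be one of EventTypes.values()
    "source": "analyst-team",                # required
    "description": "Credential phish wave",  # required
    "ticket": "INC-1234",                    # optional, as are method/reference/date/bucket_list
}
# Hypothetical host and API-key header, for illustration only.
resp = requests.post(
    "https://crits.example.com/api/v1/events/",
    json=payload,
    headers={"Authorization": "ApiKey analyst:0123abcd"},
)
print(resp.json())  # carries return_code, plus id/url on success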
<|file_name|>login.component.ts<|end_file_name|><|fim▁begin|>import { Component, Injectable } from '@angular/core';
import { Router } from '@angular/router';
import { FormUserLogin } from './forms/userLogin.component';
import { SessionService } from './services/sessionService';
@Component({
templateUrl: "login.html",
directives: [FormUserLogin],
providers: [[SessionService]]<|fim▁hole|>
@Injectable()
export class LoginComponent {
constructor (private sessionService: SessionService, private router: Router) {
if (this.sessionService.isAuthentificated() === true) {
this.router.navigate(['/']);
}
}
}<|fim▁end|> | }) |
<|file_name|>ja_dlg.js<|end_file_name|><|fim▁begin|>tinyMCE.addI18n('ja.emotions_dlg',{
title:"\u30B9\u30DE\u30A4\u30EA\u30FC\u306E\u633F\u5165",
desc:"\u30B9\u30DE\u30A4\u30EA\u30FC",
cool:"Cool",
cry:"Cry",
embarassed:"Embarassed",
foot_in_mouth:"Foot in mouth",
frown:"Frown",
innocent:"Innocent",
kiss:"Kiss",
laughing:"Laughing",
<|fim▁hole|>tongue_out:"Tongue out",
undecided:"Undecided",
wink:"Wink",
yell:"Yell"
});<|fim▁end|> | money_mouth:"Money mouth",
sealed:"Sealed",
smile:"Smile",
surprised:"Surprised",
|
<|file_name|>setupTests.ts<|end_file_name|><|fim▁begin|>import * as Enzyme from 'enzyme';
import * as Adapter from 'enzyme-adapter-react-16';
<|fim▁hole|>Enzyme.configure({ adapter: new Adapter() });<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Home Assistant command line scripts."""
from __future__ import annotations
import argparse
import asyncio
import importlib
import logging
import os
import sys
from typing import Sequence
from homeassistant import runner
from homeassistant.bootstrap import async_mount_local_lib_path
from homeassistant.config import get_default_config_dir
from homeassistant.requirements import pip_kwargs
from homeassistant.util.package import install_package, is_installed, is_virtual_env
# mypy: allow-untyped-defs, no-warn-return-any
def run(args: list) -> int:
"""Run a script."""
scripts = []
path = os.path.dirname(__file__)
for fil in os.listdir(path):
if fil == "__pycache__":
continue
if os.path.isdir(os.path.join(path, fil)):
scripts.append(fil)
elif fil != "__init__.py" and fil.endswith(".py"):
scripts.append(fil[:-3])
if not args:
print("Please specify a script to run.")
print("Available scripts:", ", ".join(scripts))
return 1
if args[0] not in scripts:
print("Invalid script specified.")
print("Available scripts:", ", ".join(scripts))
return 1
script = importlib.import_module(f"homeassistant.scripts.{args[0]}")
config_dir = extract_config_dir()
loop = asyncio.get_event_loop()
if not is_virtual_env():
loop.run_until_complete(async_mount_local_lib_path(config_dir))
_pip_kwargs = pip_kwargs(config_dir)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
for req in getattr(script, "REQUIREMENTS", []):
if is_installed(req):
continue
if not install_package(req, **_pip_kwargs):
print("Aborting script, could not install dependency", req)
return 1
asyncio.set_event_loop_policy(runner.HassEventLoopPolicy(False))
<|fim▁hole|> return script.run(args[1:]) # type: ignore
def extract_config_dir(args: Sequence[str] | None = None) -> str:
"""Extract the config dir from the arguments or get the default."""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument("-c", "--config", default=None)
parsed_args = parser.parse_known_args(args)[0]
return (
os.path.join(os.getcwd(), parsed_args.config)
if parsed_args.config
else get_default_config_dir()
)<|fim▁end|> | |
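Because extract_config_dir() parses with parse_known_args, it tolerates whatever script-specific arguments follow and only consumes -c/--config; both branches are easy to see:

# Unknown trailing args ("check_config") are left alone by parse_known_args:
extract_config_dir(["-c", "my_config", "check_config"])
# -> os.path.join(os.getcwd(), "my_config")

extract_config_dir([])
# -> get_default_config_dir()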
<|file_name|>itemrecv.cpp<|end_file_name|><|fim▁begin|>/*
* The ManaPlus Client
* Copyright (C) 2004-2009 The Mana World Development Team
* Copyright (C) 2009-2010 The Mana Developers
* Copyright (C) 2011-2018 The ManaPlus Developers<|fim▁hole|> *
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "net/eathena/itemrecv.h"
#include "actormanager.h"
#include "itemcolormanager.h"
#include "logger.h"
#include "const/resources/item/cards.h"
#include "net/messagein.h"
#include "debug.h"
extern int packetVersion;
extern bool packets_zero;
namespace EAthena
{
void ItemRecv::processItemDropped(Net::MessageIn &msg)
{
const BeingId id = msg.readBeingId("id");
const int itemId = msg.readItemId("item id");
ItemTypeT itemType = ItemType::Unknown;
if (msg.getVersion() >= 20130000)
itemType = static_cast<ItemTypeT>(msg.readInt16("type"));
const Identified identified = fromInt(
msg.readUInt8("identify"), Identified);
const int x = msg.readInt16("x");
const int y = msg.readInt16("y");
const int subX = CAST_S32(msg.readInt8("subx"));
const int subY = CAST_S32(msg.readInt8("suby"));
const int amount = msg.readInt16("count");
if (packets_zero || packetVersion >= 20180418)
{
msg.readUInt8("show drop effect");
msg.readInt16("show effect mode");
}
if (actorManager != nullptr)
{
actorManager->createItem(id,
itemId,
x, y,
itemType,
amount,
0,
ItemColor_one,
identified,
Damaged_false,
subX, subY,
nullptr);
}
}
void ItemRecv::processItemDropped2(Net::MessageIn &msg)
{
const BeingId id = msg.readBeingId("id");
const int itemId = msg.readInt16("item id"); // +++ need use int32
const ItemTypeT itemType = static_cast<ItemTypeT>(msg.readUInt8("type"));
const Identified identified = fromInt(
msg.readUInt8("identify"), Identified);
const Damaged damaged = fromBool(msg.readUInt8("attribute"), Damaged);
const uint8_t refine = msg.readUInt8("refine");
int cards[maxCards];
for (int f = 0; f < maxCards; f++)
cards[f] = msg.readUInt16("card"); // ++ need use int32
const int x = msg.readInt16("x");
const int y = msg.readInt16("y");
const int amount = msg.readInt16("amount");
const int subX = CAST_S32(msg.readInt8("subx"));
const int subY = CAST_S32(msg.readInt8("suby"));
// +++ probably need add drop effect fields?
if (actorManager != nullptr)
{
actorManager->createItem(id,
itemId,
x, y,
itemType,
amount,
refine,
ItemColorManager::getColorFromCards(&cards[0]),
identified,
damaged,
subX, subY,
&cards[0]);
}
}
void ItemRecv::processItemMvpDropped(Net::MessageIn &msg)
{
UNIMPLEMENTEDPACKET;
msg.readInt16("len");
msg.readUInt8("type");
msg.readItemId("item id");
msg.readUInt8("len");
msg.readString(24, "name");
msg.readUInt8("monster name len");
msg.readString(24, "monster name");
}
void ItemRecv::processItemVisible(Net::MessageIn &msg)
{
const BeingId id = msg.readBeingId("item object id");
const int itemId = msg.readItemId("item id");
const Identified identified = fromInt(
msg.readUInt8("identify"), Identified);
const int x = msg.readInt16("x");
const int y = msg.readInt16("y");
const int amount = msg.readInt16("amount");
const int subX = CAST_S32(msg.readInt8("sub x"));
const int subY = CAST_S32(msg.readInt8("sub y"));
if (actorManager != nullptr)
{
actorManager->createItem(id,
itemId,
x, y,
ItemType::Unknown,
amount,
0,
ItemColor_one,
identified,
Damaged_false,
subX, subY,
nullptr);
}
}
void ItemRecv::processItemVisible2(Net::MessageIn &msg)
{
const BeingId id = msg.readBeingId("item object id");
const int itemId = msg.readInt16("item id"); // +++ need use int32
const ItemTypeT itemType = static_cast<ItemTypeT>(
msg.readUInt8("type"));
const Identified identified = fromInt(
msg.readUInt8("identify"), Identified);
const Damaged damaged = fromBool(msg.readUInt8("attribute"), Damaged);
const uint8_t refine = msg.readUInt8("refine");
int cards[maxCards];
for (int f = 0; f < maxCards; f++)
cards[f] = msg.readUInt16("card"); // +++ need use int32
const int x = msg.readInt16("x");
const int y = msg.readInt16("y");
const int amount = msg.readInt16("amount");
const int subX = CAST_S32(msg.readInt8("sub x"));
const int subY = CAST_S32(msg.readInt8("sub y"));
if (actorManager != nullptr)
{
actorManager->createItem(id,
itemId,
x, y,
itemType,
amount,
refine,
ItemColorManager::getColorFromCards(&cards[0]),
identified,
damaged,
subX, subY,
&cards[0]);
}
}
} // namespace EAthena<|fim▁end|> | *
* This file is part of The ManaPlus Client. |
<|file_name|>resource_digitalocean_droplet.go<|end_file_name|><|fim▁begin|>package digitalocean
import (
"fmt"
"log"
"strings"
"time"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/helper/schema"
"github.com/pearkes/digitalocean"
)
func resourceDigitalOceanDroplet() *schema.Resource {
return &schema.Resource{
Create: resourceDigitalOceanDropletCreate,
Read: resourceDigitalOceanDropletRead,
Update: resourceDigitalOceanDropletUpdate,
Delete: resourceDigitalOceanDropletDelete,
Schema: map[string]*schema.Schema{
"image": &schema.Schema{
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"name": &schema.Schema{
Type: schema.TypeString,
Required: true,
},
"region": &schema.Schema{
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"size": &schema.Schema{
Type: schema.TypeString,
Required: true,
},
"status": &schema.Schema{
Type: schema.TypeString,
Computed: true,
},
"locked": &schema.Schema{
Type: schema.TypeString,
Computed: true,
},
"backups": &schema.Schema{
Type: schema.TypeBool,
Optional: true,
},
"ipv6": &schema.Schema{
Type: schema.TypeBool,
Optional: true,
},
"ipv6_address": &schema.Schema{
Type: schema.TypeString,
Computed: true,
},
"ipv6_address_private": &schema.Schema{
Type: schema.TypeString,
Computed: true,
},
"private_networking": &schema.Schema{
Type: schema.TypeBool,
Optional: true,
},
"ipv4_address": &schema.Schema{
Type: schema.TypeString,
Computed: true,
},
"ipv4_address_private": &schema.Schema{
Type: schema.TypeString,
Computed: true,
},
"ssh_keys": &schema.Schema{
Type: schema.TypeList,
Optional: true,
Elem: &schema.Schema{Type: schema.TypeString},
},
"user_data": &schema.Schema{
Type: schema.TypeString,
Optional: true,
},
},
}
}
func resourceDigitalOceanDropletCreate(d *schema.ResourceData, meta interface{}) error {
client := meta.(*digitalocean.Client)
// Build up our creation options
opts := &digitalocean.CreateDroplet{
Image: d.Get("image").(string),
Name: d.Get("name").(string),
Region: d.Get("region").(string),
Size: d.Get("size").(string),
}
if attr, ok := d.GetOk("backups"); ok {
opts.Backups = attr.(bool)
}
if attr, ok := d.GetOk("ipv6"); ok {
opts.IPV6 = attr.(bool)
}
if attr, ok := d.GetOk("private_networking"); ok {
opts.PrivateNetworking = attr.(bool)
}
if attr, ok := d.GetOk("user_data"); ok {
opts.UserData = attr.(string)
}
// Get configured ssh_keys
ssh_keys := d.Get("ssh_keys.#").(int)
if ssh_keys > 0 {
opts.SSHKeys = make([]string, 0, ssh_keys)
for i := 0; i < ssh_keys; i++ {
key := fmt.Sprintf("ssh_keys.%d", i)
opts.SSHKeys = append(opts.SSHKeys, d.Get(key).(string))
}
}
log.Printf("[DEBUG] Droplet create configuration: %#v", opts)
id, err := client.CreateDroplet(opts)
if err != nil {
return fmt.Errorf("Error creating droplet: %s", err)
}
// Assign the droplets id
d.SetId(id)
log.Printf("[INFO] Droplet ID: %s", d.Id())
_, err = WaitForDropletAttribute(d, "active", []string{"new"}, "status", meta)
if err != nil {
return fmt.Errorf(
"Error waiting for droplet (%s) to become ready: %s", d.Id(), err)
}
return resourceDigitalOceanDropletRead(d, meta)
}
func resourceDigitalOceanDropletRead(d *schema.ResourceData, meta interface{}) error {
client := meta.(*digitalocean.Client)
// Retrieve the droplet properties for updating the state
droplet, err := client.RetrieveDroplet(d.Id())
if err != nil {
// check if the droplet no longer exists.
if err.Error() == "Error retrieving droplet: API Error: 404 Not Found" {
d.SetId("")
return nil
}
return fmt.Errorf("Error retrieving droplet: %s", err)
}
if droplet.ImageSlug() != "" {
d.Set("image", droplet.ImageSlug())
} else {
d.Set("image", droplet.ImageId())
}
d.Set("name", droplet.Name)
d.Set("region", droplet.RegionSlug())
d.Set("size", droplet.SizeSlug)
d.Set("status", droplet.Status)
d.Set("locked", droplet.IsLocked())
if droplet.IPV6Address("public") != "" {
d.Set("ipv6", true)
d.Set("ipv6_address", droplet.IPV6Address("public"))
d.Set("ipv6_address_private", droplet.IPV6Address("private"))
}
d.Set("ipv4_address", droplet.IPV4Address("public"))
if droplet.NetworkingType() == "private" {
d.Set("private_networking", true)
d.Set("ipv4_address_private", droplet.IPV4Address("private"))
}
// Initialize the connection info
d.SetConnInfo(map[string]string{
"type": "ssh",
"host": droplet.IPV4Address("public"),
})
return nil
}
func resourceDigitalOceanDropletUpdate(d *schema.ResourceData, meta interface{}) error {
client := meta.(*digitalocean.Client)
if d.HasChange("size") {
oldSize, newSize := d.GetChange("size")
err := client.PowerOff(d.Id())
if err != nil && !strings.Contains(err.Error(), "Droplet is already powered off") {
return fmt.Errorf(
"Error powering off droplet (%s): %s", d.Id(), err)
}
// Wait for power off
_, err = WaitForDropletAttribute(d, "off", []string{"active"}, "status", client)
if err != nil {
return fmt.Errorf(
"Error waiting for droplet (%s) to become powered off: %s", d.Id(), err)
}
// Resize the droplet
err = client.Resize(d.Id(), newSize.(string))
if err != nil {
newErr := powerOnAndWait(d, meta)
if newErr != nil {
return fmt.Errorf(
"Error powering on droplet (%s) after failed resize: %s", d.Id(), err)
}
return fmt.Errorf(
"Error resizing droplet (%s): %s", d.Id(), err)
}
// Wait for the size to change
_, err = WaitForDropletAttribute(
d, newSize.(string), []string{"", oldSize.(string)}, "size", meta)<|fim▁hole|> newErr := powerOnAndWait(d, meta)
if newErr != nil {
return fmt.Errorf(
"Error powering on droplet (%s) after waiting for resize to finish: %s", d.Id(), err)
}
return fmt.Errorf(
"Error waiting for resize droplet (%s) to finish: %s", d.Id(), err)
}
err = client.PowerOn(d.Id())
if err != nil {
return fmt.Errorf(
"Error powering on droplet (%s) after resize: %s", d.Id(), err)
}
// Wait for power off
_, err = WaitForDropletAttribute(d, "active", []string{"off"}, "status", meta)
if err != nil {
return err
}
}
if d.HasChange("name") {
oldName, newName := d.GetChange("name")
// Rename the droplet
err := client.Rename(d.Id(), newName.(string))
if err != nil {
return fmt.Errorf(
"Error renaming droplet (%s): %s", d.Id(), err)
}
// Wait for the name to change
_, err = WaitForDropletAttribute(
d, newName.(string), []string{"", oldName.(string)}, "name", meta)
if err != nil {
return fmt.Errorf(
"Error waiting for rename droplet (%s) to finish: %s", d.Id(), err)
}
}
// As there is no way to disable private networking,
// we only check if it needs to be enabled
if d.HasChange("private_networking") && d.Get("private_networking").(bool) {
err := client.EnablePrivateNetworking(d.Id())
if err != nil {
return fmt.Errorf(
"Error enabling private networking for droplet (%s): %s", d.Id(), err)
}
// Wait for the private_networking to turn on
_, err = WaitForDropletAttribute(
d, "true", []string{"", "false"}, "private_networking", meta)
return fmt.Errorf(
"Error waiting for private networking to be enabled on for droplet (%s): %s", d.Id(), err)
}
// As there is no way to disable IPv6, we only check if it needs to be enabled
if d.HasChange("ipv6") && d.Get("ipv6").(bool) {
err := client.EnableIPV6s(d.Id())
if err != nil {
return fmt.Errorf(
"Error turning on ipv6 for droplet (%s): %s", d.Id(), err)
}
// Wait for ipv6 to turn on
_, err = WaitForDropletAttribute(
d, "true", []string{"", "false"}, "ipv6", meta)
if err != nil {
return fmt.Errorf(
"Error waiting for ipv6 to be turned on for droplet (%s): %s", d.Id(), err)
}
}
return resourceDigitalOceanDropletRead(d, meta)
}
func resourceDigitalOceanDropletDelete(d *schema.ResourceData, meta interface{}) error {
client := meta.(*digitalocean.Client)
_, err := WaitForDropletAttribute(
d, "false", []string{"", "true"}, "locked", meta)
if err != nil {
return fmt.Errorf(
"Error waiting for droplet to be unlocked for destroy (%s): %s", d.Id(), err)
}
log.Printf("[INFO] Deleting droplet: %s", d.Id())
// Destroy the droplet
err = client.DestroyDroplet(d.Id())
// Handle remotely destroyed droplets
if err != nil && strings.Contains(err.Error(), "404 Not Found") {
return nil
}
if err != nil {
return fmt.Errorf("Error deleting droplet: %s", err)
}
return nil
}
func WaitForDropletAttribute(
d *schema.ResourceData, target string, pending []string, attribute string, meta interface{}) (interface{}, error) {
// Wait for the droplet so we can get the networking attributes
// that show up after a while
log.Printf(
"[INFO] Waiting for droplet (%s) to have %s of %s",
d.Id(), attribute, target)
stateConf := &resource.StateChangeConf{
Pending: pending,
Target: target,
Refresh: newDropletStateRefreshFunc(d, attribute, meta),
Timeout: 60 * time.Minute,
Delay: 10 * time.Second,
MinTimeout: 3 * time.Second,
// This is a hack around DO API strangeness.
// https://github.com/hashicorp/terraform/issues/481
//
NotFoundChecks: 60,
}
return stateConf.WaitForState()
}
// TODO This function still needs a little more refactoring to make it
// cleaner and more efficient
func newDropletStateRefreshFunc(
d *schema.ResourceData, attribute string, meta interface{}) resource.StateRefreshFunc {
client := meta.(*digitalocean.Client)
return func() (interface{}, string, error) {
err := resourceDigitalOceanDropletRead(d, meta)
if err != nil {
return nil, "", err
}
// If the droplet is locked, continue waiting. We can
// only perform actions on unlocked droplets, so it's
// pointless to look at that status
if d.Get("locked").(string) == "true" {
log.Println("[DEBUG] Droplet is locked, skipping status check and retrying")
return nil, "", nil
}
// See if we can access our attribute
if attr, ok := d.GetOk(attribute); ok {
// Retrieve the droplet properties
droplet, err := client.RetrieveDroplet(d.Id())
if err != nil {
return nil, "", fmt.Errorf("Error retrieving droplet: %s", err)
}
return &droplet, attr.(string), nil
}
return nil, "", nil
}
}
// Powers on the droplet and waits for it to be active
func powerOnAndWait(d *schema.ResourceData, meta interface{}) error {
client := meta.(*digitalocean.Client)
err := client.PowerOn(d.Id())
if err != nil {
return err
}
// Wait for power on
_, err = WaitForDropletAttribute(d, "active", []string{"off"}, "status", client)
if err != nil {
return err
}
return nil
}<|fim▁end|> |
if err != nil { |
<|file_name|>resets.js<|end_file_name|><|fim▁begin|>import when from 'when';
import { post } from 'utils/http';
import baseUrl from 'utils/baseUrl';
export function requestPasswordReset( email ) {
return when(
post( baseUrl( 'auth/reset_password' ), {
params: {
email
}
} )
);
}
<|fim▁hole|> post( baseUrl( 'auth/verify_reset_password_token' ), {
params: {
token,
password
}
} )
);
}<|fim▁end|> | export function verifyResetPasswordToken( token, password ) {
return when( |
<|file_name|>result-info-native-replay-summary.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2016, Daniel Liew
# This file is covered by the license in LICENSE-SVCB.txt<|fim▁hole|>from load_klee_runner import add_KleeRunner_to_module_search_path
from load_klee_analysis import add_kleeanalysis_to_module_search_path
from load_native_analysis import add_nativeanalysis_to_module_search_path
add_KleeRunner_to_module_search_path()
add_kleeanalysis_to_module_search_path()
add_nativeanalysis_to_module_search_path()
from KleeRunner import ResultInfo
import KleeRunner.DriverUtil as DriverUtil
import KleeRunner.InvocationInfo
import KleeRunner.util
import nativeanalysis.analyse
import argparse
import logging
import os
import pprint
import subprocess
import sys
import yaml
_logger = logging.getLogger(__name__)
def main(args):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('result_info_file',
help='Result info file',
type=argparse.FileType('r'))
parser.add_argument('--dump-unknowns',
dest='dump_unknowns',
action='store_true')
parser.add_argument('--dump-timeouts',
dest='dump_timeouts',
action='store_true')
DriverUtil.parserAddLoggerArg(parser)
pargs = parser.parse_args()
DriverUtil.handleLoggerArgs(pargs, parser)
_logger.info('Loading "{}"...'.format(pargs.result_info_file.name))
resultInfos, resultInfoMisc = ResultInfo.loadResultInfos(pargs.result_info_file)
_logger.info('Loading complete')
# Check the misc data
if resultInfoMisc is None:
_logger.error('Expected result info to have misc data')
return 1
if resultInfoMisc['runner'] != 'NativeReplay':
_logger.error('Expected runner to have been NativeReplay but was "{}"'.format(
resultInfoMisc['runner']))
return 1
errorTypeToErrorListMap = dict()
multipleOutcomeList = []
for result_index, r in enumerate(resultInfos):
_logger.info('Processing {}/{}'.format(result_index + 1, len(resultInfos)))
raw_result = r.GetInternalRepr()
program_path = r.RawInvocationInfo['program']
outcome = nativeanalysis.analyse.get_test_case_run_outcome(raw_result)
error_list = None
try:
error_list = errorTypeToErrorListMap[type(outcome)]
except KeyError:
error_list = []
errorTypeToErrorListMap[type(outcome)] = error_list
error_list.append(outcome)
# Print report
print('#'*70)
print("# of test cases with multiple outcomes: {}".format(len(multipeOutcomeList)))
for ty, error_list in errorTypeToErrorListMap.items():
print("# of {}: {}".format(ty, len(error_list)))
if ty == nativeanalysis.analyse.UnknownError and pargs.dump_unknowns:
for error in error_list:
print(error)
if ty == nativeanalysis.analyse.TimeoutError and pargs.dump_timeouts:
for error in error_list:
print(error)
# Now emit as YAML
#as_yaml = yaml.dump(program_to_coverage_info, default_flow_style=False)
#pargs.output_yaml.write(as_yaml)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))<|fim▁end|> | # vim: set sw=4 ts=4 softtabstop=4 expandtab:
"""
Read a result info describing a set of KLEE test case replays.
""" |
<|file_name|>app.py<|end_file_name|><|fim▁begin|>import sys
sys.path.append("helper")
import web
from helper import session
web.config.debug = False
urls = (
"/", "controller.start.index", <|fim▁hole|>
app = web.application(urls, globals())
sessions = session.Sessions()
if __name__ == "__main__":
app.run()<|fim▁end|> | "/1", "controller.start.one",
"/2", "controller.start.two",
)
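web.py resolves each dotted string in urls to a class and serves requests through its GET/POST methods, so the three routes above imply a controller module shaped roughly like this (hypothetical; the real controller/start.py is not shown):

# controller/start.py (hypothetical sketch)
import web

class index:
    def GET(self):
        return "start page"

class one:
    def GET(self):
        return "page 1"

class two:
    def GET(self):
        return "page 2"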
|
<|file_name|>security_saml_sp_modify_parameters.go<|end_file_name|><|fim▁begin|>// Code generated by go-swagger; DO NOT EDIT.
package security
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
<|fim▁hole|> "context"
"net/http"
"time"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
"github.com/go-openapi/strfmt"
"github.com/netapp/trident/storage_drivers/ontap/api/rest/models"
)
// NewSecuritySamlSpModifyParams creates a new SecuritySamlSpModifyParams object,
// with the default timeout for this client.
//
// Default values are not hydrated, since defaults are normally applied by the API server side.
//
// To enforce default values in parameter, use SetDefaults or WithDefaults.
func NewSecuritySamlSpModifyParams() *SecuritySamlSpModifyParams {
return &SecuritySamlSpModifyParams{
timeout: cr.DefaultTimeout,
}
}
// NewSecuritySamlSpModifyParamsWithTimeout creates a new SecuritySamlSpModifyParams object
// with the ability to set a timeout on a request.
func NewSecuritySamlSpModifyParamsWithTimeout(timeout time.Duration) *SecuritySamlSpModifyParams {
return &SecuritySamlSpModifyParams{
timeout: timeout,
}
}
// NewSecuritySamlSpModifyParamsWithContext creates a new SecuritySamlSpModifyParams object
// with the ability to set a context for a request.
func NewSecuritySamlSpModifyParamsWithContext(ctx context.Context) *SecuritySamlSpModifyParams {
return &SecuritySamlSpModifyParams{
Context: ctx,
}
}
// NewSecuritySamlSpModifyParamsWithHTTPClient creates a new SecuritySamlSpModifyParams object
// with the ability to set a custom HTTPClient for a request.
func NewSecuritySamlSpModifyParamsWithHTTPClient(client *http.Client) *SecuritySamlSpModifyParams {
return &SecuritySamlSpModifyParams{
HTTPClient: client,
}
}
/* SecuritySamlSpModifyParams contains all the parameters to send to the API endpoint
for the security saml sp modify operation.
Typically these are written to a http.Request.
*/
type SecuritySamlSpModifyParams struct {
/* Info.
Information specification
*/
Info *models.SecuritySamlSp
timeout time.Duration
Context context.Context
HTTPClient *http.Client
}
// WithDefaults hydrates default values in the security saml sp modify params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *SecuritySamlSpModifyParams) WithDefaults() *SecuritySamlSpModifyParams {
o.SetDefaults()
return o
}
// SetDefaults hydrates default values in the security saml sp modify params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *SecuritySamlSpModifyParams) SetDefaults() {
// no default values defined for this parameter
}
// WithTimeout adds the timeout to the security saml sp modify params
func (o *SecuritySamlSpModifyParams) WithTimeout(timeout time.Duration) *SecuritySamlSpModifyParams {
o.SetTimeout(timeout)
return o
}
// SetTimeout adds the timeout to the security saml sp modify params
func (o *SecuritySamlSpModifyParams) SetTimeout(timeout time.Duration) {
o.timeout = timeout
}
// WithContext adds the context to the security saml sp modify params
func (o *SecuritySamlSpModifyParams) WithContext(ctx context.Context) *SecuritySamlSpModifyParams {
o.SetContext(ctx)
return o
}
// SetContext adds the context to the security saml sp modify params
func (o *SecuritySamlSpModifyParams) SetContext(ctx context.Context) {
o.Context = ctx
}
// WithHTTPClient adds the HTTPClient to the security saml sp modify params
func (o *SecuritySamlSpModifyParams) WithHTTPClient(client *http.Client) *SecuritySamlSpModifyParams {
o.SetHTTPClient(client)
return o
}
// SetHTTPClient adds the HTTPClient to the security saml sp modify params
func (o *SecuritySamlSpModifyParams) SetHTTPClient(client *http.Client) {
o.HTTPClient = client
}
// WithInfo adds the info to the security saml sp modify params
func (o *SecuritySamlSpModifyParams) WithInfo(info *models.SecuritySamlSp) *SecuritySamlSpModifyParams {
o.SetInfo(info)
return o
}
// SetInfo adds the info to the security saml sp modify params
func (o *SecuritySamlSpModifyParams) SetInfo(info *models.SecuritySamlSp) {
o.Info = info
}
// WriteToRequest writes these params to a swagger request
func (o *SecuritySamlSpModifyParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
if err := r.SetTimeout(o.timeout); err != nil {
return err
}
var res []error
if o.Info != nil {
if err := r.SetBodyParam(o.Info); err != nil {
return err
}
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}<|fim▁end|> | import ( |
<|file_name|>OperationFilterHelper.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2010-2013 Evolveum and contributors
*
* This work is dual-licensed under the Apache License 2.0
* and European Union Public License. See LICENSE file for details.
*/
package com.evolveum.midpoint.notifications.impl.helpers;
import com.evolveum.midpoint.notifications.api.events.Event;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.common.common_3.EventHandlerType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.EventOperationType;
import org.springframework.stereotype.Component;
/**
* @author mederly
*/
@Component
public class OperationFilterHelper extends BaseHelper {
private static final Trace LOGGER = TraceManager.getTrace(OperationFilterHelper.class);
public boolean processEvent(Event event, EventHandlerType eventHandlerType) {
if (eventHandlerType.getOperation().isEmpty()) {
return true;
}
logStart(LOGGER, event, eventHandlerType, eventHandlerType.getOperation());
boolean retval = false;<|fim▁hole|> if (eventOperationType == null) {
LOGGER.warn("Filtering on null eventOperationType; filter = " + eventHandlerType);
} else if (event.isOperationType(eventOperationType)) {
retval = true;
break;
}
}
logEnd(LOGGER, event, eventHandlerType, retval);
return retval;
}
}<|fim▁end|> |
for (EventOperationType eventOperationType : eventHandlerType.getOperation()) { |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># created based on
# https://python-packaging.readthedocs.io/en/latest/minimal.html
# But instead of python setup.py register sdist upload,
# use https://pypi.org/p/twine/
#
from setuptools import setup
import sys
import os
import re
sys.path.append("src")
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return "File '%s' not found.\n" % fname
def readVersion():
txt = read("src/moddy/version.py")
ver = re.findall(r"([0-9]+)", txt)
print("ver=%s" % ver)
return ver[0] + "." + ver[1] + "." + ver[2]
setup(
name="moddy",
install_requires=["svgwrite"],
version=readVersion(),
description="A discrete event simulator generating sequence diagrams",
long_description=read("README.rst"),
url="https://github.com/KlausPopp/Moddy",
project_urls={
"Documentation": "https://klauspopp.github.io/Moddy/",
"Source Code": "https://github.com/KlausPopp/Moddy/",
},
keywords="simulation modelling",
author="Klaus Popp",
author_email="[email protected]",
license="LGPL-3.0",
platforms="OS Independent",
package_dir={"": "src"},
packages=[
"moddy",
"moddy.seq_diag_interactive_viewer",
"moddy.lib",
"moddy.lib.net",<|fim▁hole|> package_data={"moddy.seq_diag_interactive_viewer": ["*.css", "*.js"]},
)<|fim▁end|> | ], |
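readVersion() above only pulls the first three integer groups out of src/moddy/version.py, so any module exposing three numbers in order satisfies it. A hypothetical version.py that it would read as "1.10.3":

# src/moddy/version.py (hypothetical contents)
VERSION_MAJOR = 1
VERSION_MINOR = 10
VERSION_PATCH = 3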
<|file_name|>generation.rs<|end_file_name|><|fim▁begin|>use std::sync::Arc;
use cge::{Network, Activation};
use rand::thread_rng;
use rand::distributions::{IndependentSample, Range};
use crate::utils::Individual;
use crate::cge_utils::Mutation;
use crate::NNFitnessFunction;
// Creates a generation of random, minimal neural networks
pub fn initialize_generation<T>(population_size: usize,
offspring_count: usize,
inputs: usize,
outputs: usize,
activation: Activation,
object: Arc<T>) -> Vec<Individual<T>>
where T: NNFitnessFunction + Clone
{
let mut rng = thread_rng();
let mut generation = Vec::new();
for _ in 0..population_size * (offspring_count + 1) {
let mut network = Network {
size: 0,<|fim▁hole|> function: activation.clone(),
};
for i in (0..outputs).rev() {
network.add_subnetwork(i, 0, inputs)
}
network.size = network.genome.len() - 1;
generation.push(Individual::new(inputs, outputs, network, object.clone()));
}
// Make sure all inputs are connected
for individual in &mut generation {
let range = Range::new(0, outputs);
for i in 0..inputs {
if individual.get_input_copies(i) == 0 {
let id = range.ind_sample(&mut rng);
let index = individual.network.get_neuron_index(id).unwrap() + 1;
individual.add_input(i, index);
}
}
}
generation
}<|fim▁end|> | genome: Vec::new(), |
<|file_name|>test-nodes.cc<|end_file_name|><|fim▁begin|>#define BOOST_TEST_MODULE TEST_NODES
#include <dynet/functors.h>
#include <dynet/dynet.h>
#include <dynet/expr.h>
#include <dynet/grad-check.h>
#include <boost/test/unit_test.hpp>
#include "test.h"
#include <stdexcept>
using namespace dynet;
using namespace std;
struct NodeTest {
NodeTest() {
// initialize if necessary
if (default_device == nullptr) {
for (auto x : {"NodeTest", "--dynet-mem", "100"}) {
av.push_back(strdup(x));
}
ADD_EXTRA_ARGUMENTS(av)
char **argv = &av[0];
int argc = av.size();
dynet::initialize(argc, argv);
}
ones3_vals = {1.f, 1.f, 1.f};
first_one_vals = {1.f, 0.f, 0.f};
ones2_vals = {1.f, 1.f};
batch_vals = {1.1f, 2.6f, 3.3f, 4.0f, 5.1f, 6.6f};
// Create parameters
std::vector<float> param1_vals = {1.1f, -2.2f, 3.3f};
std::vector<float> param2_vals = {2.2f, 3.4f, -1.2f};
std::vector<float> param3_vals = {1.1f, 2.2f, 3.3f};
std::vector<float> param4_vals = {1.1f, 2.2f, 3.3f, -1.2f, 2.1f, 3.4f};
std::vector<float> param5_vals = {-0.2f, 0.0f, 0.1f};
std::vector<float> param_scalar1_vals = {2.2f};
std::vector<float> param_scalar2_vals = {1.1f};
std::vector<float> param_kernel1_vals = {1.1f, 2.2f, -1.0f, 1.2f, -3.4f, -0.2f};
std::vector<float> param_filter1_vals = {1.1f, 2.2f, -1.0f, 1.2f, -3.4f, -0.2f,
11.1f, 12.2f, 13.3f, 11.2f, 12.2f, 13.2f
};
std::vector<float> param_square1_vals = {1.1f, 2.2f, 3.4f, 1.2f, 2.5f, 3.2f, 5.3f, 2.3f, 3.3f};
std::vector<float> param_cube1_vals = {.051f, .062f, .073f, .052f, .062f, .072f, .053f, .063f, .073f,
.111f, -.122f, -.033f, -.112f, -.022f, -.132f, -.113f, -.123f, -.133f,
.211f, .222f, .233f, .212f, .222f, .232f, .213f, .223f, .233f
};
std::vector<float> param_cube2_vals = {
.011f, 1.011f, .022f, 1.022f, .033f, 1.033f, .012f, 1.012f, .022f, 1.022f, .032f, 1.032f, .013f, 1.013f, .023f, 1.023f, .033f, 1.033f, // 18
.111f, 1.111f, -.122f, -1.122f, -.033f, -1.033f, -.112f, -1.112f, -.022f, -1.022f, -.132f, -1.132f, -.113f, -1.113f, -.123f, -1.123f, -.133f, -1.133f, // 18
.211f, 1.211f, .222f, 1.222f, .233f, 1.233f, .212f, 1.212f, .222f, 1.222f, .232f, 1.232f, .213f, 1.213f, .223f, 1.223f, .233f, 1.233f
};
param1 = mod.add_parameters({3});
TensorTools::set_elements(param1.get_storage().values, param1_vals);
param2 = mod.add_parameters({3});
TensorTools::set_elements(param2.get_storage().values, param2_vals);
param3 = mod.add_parameters({3});
TensorTools::set_elements(param3.get_storage().values, param3_vals);
param4 = mod.add_parameters({6});
TensorTools::set_elements(param4.get_storage().values, param4_vals);
param5 = mod.add_parameters({3});
TensorTools::set_elements(param5.get_storage().values, param5_vals);
param_scalar1 = mod.add_parameters({1});
TensorTools::set_elements(param_scalar1.get_storage().values, param_scalar1_vals);
param_scalar2 = mod.add_parameters({1});
TensorTools::set_elements(param_scalar2.get_storage().values, param_scalar2_vals);
param_kernel1 = mod.add_parameters({3, 2});
TensorTools::set_elements(param_kernel1.get_storage().values, param_kernel1_vals);
param_filter1 = mod.add_parameters({3, 2, 2});
TensorTools::set_elements(param_filter1.get_storage().values, param_filter1_vals);
param_square1 = mod.add_parameters({3, 3});
TensorTools::set_elements(param_square1.get_storage().values, param_square1_vals);
param_cube1 = mod.add_parameters({3, 3, 3});
TensorTools::set_elements(param_cube1.get_storage().values, param_cube1_vals);
param_cube2 = mod.add_parameters({3, 3, 6});
TensorTools::set_elements(param_cube2.get_storage().values, param_cube2_vals);
lookup1 = mod.add_lookup_parameters(3, {3});
TensorTools::set_elements(lookup1.get_storage().all_values, param_square1_vals);
lookup2 = mod.add_lookup_parameters(10, {3});
lookup3 = mod2.add_lookup_parameters(10, {3});
lookup4 = mod.add_lookup_parameters(10, {2,3,4,5});
}
~NodeTest() {
// for (auto x : av) free(x);
}
template <class T>
std::string print_vec(const std::vector<T> vec) {
ostringstream oss;
if (vec.size()) oss << vec[0];
for (size_t i = 1; i < vec.size(); i++)
oss << ' ' << vec[i];
return oss.str();
}
// When testing a function that produces a non-scalar result, we need to
// convert the tensor to a scalar so that we can backprop. However, if you
// aren't careful, you can end up with partial derivatives that have
// symmetries that are likely to mask certain bugs. This function provides
// "asymmetric" gradients onto the tensor valued function.
static Expression to_scalar(const Expression& e) {
// square = guarantee element's gradients are not all 1
// sqrt = if e has multiple batches, guarantees the gradients onto the
// elements of the batch will be not all 1.
return sqrt(sum_elems(square(e)));
}
std::vector<float> ones3_vals, ones2_vals, first_one_vals, batch_vals;
std::vector<char*> av;
dynet::ParameterCollection mod, mod2;
dynet::Parameter param1, param2, param3, param4, param5, param_scalar1, param_scalar2, param_kernel1, param_filter1, param_square1, param_cube1, param_cube2;
dynet::LookupParameter lookup1, lookup2, lookup3, lookup4;
};
// define the test suite
BOOST_FIXTURE_TEST_SUITE(node_test, NodeTest);
// Expression constant(const Dim d, float val);
BOOST_AUTO_TEST_CASE( constant_value ) {
dynet::ComputationGraph cg;
float mystery_constant = 3.14159f;
Expression x = constant(cg, Dim({3}), mystery_constant);
vector<float> z = as_vector(x.value());
for (unsigned i = 0; i < 3; i++)
BOOST_CHECK_EQUAL(z[i], mystery_constant);
}
// Expression zeros(const Dim& d);
BOOST_AUTO_TEST_CASE( zeros_value ) {
dynet::ComputationGraph cg;
Expression x = zeros(cg, Dim({3}));
vector<float> z = as_vector(x.value());
for (unsigned i = 0; i < 3; i++)
BOOST_CHECK_EQUAL(z[i], 0.f);
}
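// The *_gradient cases below all follow one pattern: reduce to a scalar z, then
// let check_grad compare backprop against finite differences (the trailing 0 is
// the verbosity argument from grad-check.h, assumed to mean "quiet").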
// Expression operator-(const Expression& x);
BOOST_AUTO_TEST_CASE( negate_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = -x1;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator+(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( add_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression y = x1 + x2;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator+(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cadd_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression y = x1 + x2;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator+(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cadd_scalar_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param_scalar2);
Expression y = (x1 + x2) + (x2 + x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator+(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cadd_broadcast2_gradient ) {
Dim dim_permutations[] = {Dim({3,1},2), Dim({3,2},1)};
dynet::ComputationGraph cg;
for(int i=0; i<2; i++){
Dim dim = dim_permutations[i];
Expression x1 = reshape(parameter(cg, param1), Dim({3,1},1));
Expression x2 = reshape(parameter(cg, param4), dim);
Expression y = (x1 + x2) + (x2 + x1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
}
// Expression operator+(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cadd_broadcast3_gradient ) {
Dim dim_permutations[] = {Dim({3,3,3},1), Dim({3,3,1},3), Dim({1,3,3},3), Dim({9,3,1},1), Dim({1,3,9},1), Dim({1,3,1},9), Dim({3,3},3), Dim({9,3},1), Dim({1,3},9)};
dynet::ComputationGraph cg;
for(int i=0; i<9; i++){
Dim dim = dim_permutations[i];
Expression x1 = reshape(parameter(cg, param1), Dim({1,3,1},1));
Expression x2 = reshape(parameter(cg, param_cube1), dim);
Expression y = (x1 + x2) + (x2 - x1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
}
// Expression operator+(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cadd_broadcast2_neg_val ) {
dynet::ComputationGraph cg;
Expression x1 = reshape(parameter(cg, param1), Dim({3,1},1));
Expression x2 = reshape(parameter(cg, param4), Dim({3,1},2));
Expression y = x1 - x2;
Expression z = sum_batches(sum_elems(y));
BOOST_CHECK_CLOSE(as_scalar(z.value()), -6.5, 0.001);
}
// Expression operator+(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalar_expr_add_1_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param_scalar2);
Expression y = x1 + x2;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator+(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalar_expr_add_2_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param_scalar2);
Expression x2 = parameter(cg, param1);
Expression y = x1 + x2;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator+(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalar_expr_add_batch1_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = reshape(parameter(cg, param_square1), Dim({1, 3}, 3));
Expression x2 = parameter(cg, param_scalar2);
Expression y = x1 + x2;
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator+(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalar_expr_add_batch2_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = reshape(parameter(cg, param_square1), Dim({1}, 9));
Expression y = x1 + x2;
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator-(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalar_expr_sub_1_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param_scalar2);
Expression y = x1 - x2;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator-(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalar_expr_sub_2_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param_scalar2);
Expression x2 = parameter(cg, param1);
Expression y = x1 - x2;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator-(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalar_expr_sub_batch1_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = reshape(parameter(cg, param_square1), Dim({1, 3}, 3));
Expression x2 = parameter(cg, param_scalar2);
Expression y = x1 - x2;
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator-(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalar_expr_sub_batch2_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = reshape(parameter(cg, param_square1), Dim({1}, 9));
Expression y = x1 - x2;
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression sum(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( sum_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression y = sum({x2, x1, x2});
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression sum(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( sum_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression x3 = input(cg, Dim({3}, 2), batch_vals);
Expression y = sum({x3, x1, cmult(x2, x3)});
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression sum(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( empty_sum ) {
dynet::ComputationGraph cg;
vector<Expression> y;
BOOST_CHECK_THROW(as_vector(sum(y).value()), std::invalid_argument);
}
// Expression cumsum(const Expression& x, unsigned d);
BOOST_AUTO_TEST_CASE( cumsum_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param_cube1);
vector<Expression> y;
for (unsigned d=0;d<3;d++){
y.push_back(squared_norm(cumsum(x, d)));
}
Expression z = sum(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression logsumexp(const std::initializer_list<Expression>& xs);
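// logsumexp({a, b}) = log(exp(a) + exp(b)) elementwise -- a smooth max, so the
// gradient splits between the inputs according to their softmax weights.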
BOOST_AUTO_TEST_CASE( logsumexp_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param_scalar1);
Expression x2 = parameter(cg, param_scalar2);
Expression z = logsumexp({x1, x2});
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression logsumexp(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( logsumexp_vector_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression z = to_scalar(logsumexp({x1, x2}));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression logsumexp(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( logsumexp_singleelem_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param1);
Expression y = reshape(x, Dim({1}, 3));
Expression z = sum_batches(logsumexp({y}));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression logsumexp(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( logsumexp_inequal_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression x3 = x1 + x2;
Expression z = sum_batches(to_scalar(logsumexp({x1, x3})));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression logsumexp_dim(const Expression& x, unsigned d);
BOOST_AUTO_TEST_CASE( logsumexp_dim_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param_square1);
vector<Expression> exps;
for (int d = 1; d >= 0; d--)
exps.push_back(logsumexp_dim(x, d));
Expression z = to_scalar(sum(exps));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator+(const Expression& x, real y);
BOOST_AUTO_TEST_CASE( addscalar_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = x1 + 2.0;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator+(real x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalaradd_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = 2.0 + x1;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator-(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( subtract_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression y = x1 - x2;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator-(real x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalarsubtract_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = 2.0 - x1;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator-(const Expression& x, real y);
BOOST_AUTO_TEST_CASE( subtractscalar_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = x1 - 2.0;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator*(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( multiply_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression y = x1 * transpose(x2);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator*(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( multiply_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression y = x1 * transpose(x2);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression affine_transform(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( affine_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression scalar = parameter(cg, param_scalar1);
Expression x2 = parameter(cg, param2);
Expression y = sqrt(affine_transform({x1, x2, scalar}));
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
BOOST_CHECK(y.dim() == x1.dim());
}
// Expression affine_transform(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( affine_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression scalar = parameter(cg, param_scalar1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression y = sqrt(affine_transform({x1, x2, scalar}));
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression affine_transform(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( affine_batch_col_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression scalar = parameter(cg, param_scalar1);
Expression x2 = input(cg, Dim({1, 3}, 2), batch_vals);
Expression y = sqrt(affine_transform({transpose(x1), scalar, x2}));
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression affine_transform(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( affine_batch2_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = input(cg, Dim({1, 3}, 2), batch_vals);
Expression scalar = parameter(cg, param_scalar1);
Expression x2 = parameter(cg, param2);
Expression y = sqrt( affine_transform({x1, scalar, transpose(x2) }) );
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression affine_transform(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( affine_batch3_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param_square1);
Expression inp = input(cg, Dim({3}, 2), batch_vals);
Expression y = sqrt( affine_transform({x1, x2, inp }) );
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator*(const Expression& x, float y);
BOOST_AUTO_TEST_CASE( multiplyscalar_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = x1 * 2.0;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// inline Expression operator*(float y, const Expression& x) { return x * y; }
BOOST_AUTO_TEST_CASE( scalarmultiply_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = 2.0 * x1;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// inline Expression operator/(const Expression& x, float y) { return x * (1.f / y); }
BOOST_AUTO_TEST_CASE( dividescalar_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = x1 / 2.0;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cdiv(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cdiv_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression y = cdiv(x1, x2);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cdiv(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cdiv_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression y = cdiv(x2, x1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cdiv(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalar_cdiv_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param_scalar2);
Expression y = cdiv(x1, x2);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cdiv(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalar_cdiv_batch1_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = reshape(parameter(cg, param_square1), Dim({1, 3}, 3));
Expression x2 = parameter(cg, param_scalar2);
Expression y = cdiv(x1, x2);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cdiv(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalar_cdiv_batch2_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression y = cdiv(x2, x1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cdiv(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cdiv_broadcast2_gradient ) {
Dim dim_permutations[] = {Dim({3,1},2), Dim({3,2},1)};
dynet::ComputationGraph cg;
for(int i=0; i<2; i++){
Dim dim = dim_permutations[i];
Expression x1 = reshape(parameter(cg, param1), Dim({3,1},1));
Expression x2 = reshape(parameter(cg, param4), dim);
Expression y = cdiv(x2, x1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
}
// Expression cdiv(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cdiv_broadcast3_gradient ) {
Dim dim_permutations[] = {Dim({3,3,3},1), Dim({3,3,1},3), Dim({1,3,3},3), Dim({9,3,1},1), Dim({1,3,9},1), Dim({1,3,1},9)};
dynet::ComputationGraph cg;
for(int i=0; i<6; i++){
Dim dim = dim_permutations[i];
Expression x1 = reshape(parameter(cg, param1), Dim({1,3,1},1));
Expression x2 = reshape(parameter(cg, param_cube1), dim);
Expression y = cdiv(x2, x1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
}
// Expression colwise_add(const Expression& x, const Expression& bias);
BOOST_AUTO_TEST_CASE( colwise_add_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression y = colwise_add(x1 * transpose(x2), x2);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression colwise_add(const Expression& x, const Expression& bias);
BOOST_AUTO_TEST_CASE( colwise_add_batch1_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression x3 = input(cg, Dim({1, 3}, 2), batch_vals);
Expression y = colwise_add(x1 * x3, x2);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression colwise_add(const Expression& x, const Expression& bias);
BOOST_AUTO_TEST_CASE( colwise_add_batch2_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression x3 = input(cg, Dim({3}, 2), batch_vals);
Expression y = colwise_add(x1 * transpose(x2), cmult(x2, x3));
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression concatenate_cols(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( concatenate_cols_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression y = concatenate_cols({x1, x2, x1});
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression concatenate_cols(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( concatenate_cols_vecmatrix_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param_square1);
Expression y = concatenate_cols({x1, x2, x1});
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression concatenate_to_batch(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( concatenate_to_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression xsquare = parameter(cg, param_square1);
Expression y = concatenate_to_batch({x1, x2});
Expression z = sum_batches(to_scalar(xsquare * y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression concatenate(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( concatenate_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = transpose(parameter(cg, param1));
Expression x2 = transpose(parameter(cg, param2));
Expression y = concatenate({x1, x2, x1});
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression concatenate(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( concatenate_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = transpose(parameter(cg, param1));
Expression x2 = transpose(parameter(cg, param2));
Expression x3 = input(cg, Dim({1, 3}, 2), batch_vals);
Expression y = concatenate({x1, x2, cmult(x2, x3)});
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression contract3d_1d(const Expression& x, const Expression& y, const Expression& b);
BOOST_AUTO_TEST_CASE( contract3d_1d_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression square1 = parameter(cg, param_square1);
Expression cube1 = parameter(cg, param_cube1);
Expression y = contract3d_1d(cube1, x1, square1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression contract3d_1d(const Expression& x, const Expression& y, const Expression& b);
BOOST_AUTO_TEST_CASE( contract3d_batch_1d_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression square1 = parameter(cg, param_square1);
Expression cube1 = parameter(cg, param_cube1);
Expression batched_cube1 = concatenate_to_batch({cube1, cube1, cube1});
Expression y = contract3d_1d(batched_cube1, x1, square1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression contract3d_1d(const Expression& x, const Expression& y, const Expression& b);
BOOST_AUTO_TEST_CASE( contract3d_1d_batch_gradient ) {
dynet::ComputationGraph cg;
Expression batched_x1 = reshape(parameter(cg, param_square1), Dim({3}, 3));
Expression square1 = parameter(cg, param_square1);
Expression cube1 = parameter(cg, param_cube1);
Expression y = contract3d_1d(cube1, batched_x1, square1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression contract3d_1d(const Expression& x, const Expression& y, const Expression& b);
BOOST_AUTO_TEST_CASE( contract3d_batch_1d_batch_gradient ) {
dynet::ComputationGraph cg;
Expression batched_x1 = reshape(parameter(cg, param_square1), Dim({3}, 3));
Expression square1 = parameter(cg, param_square1);
Expression cube1 = parameter(cg, param_cube1);
Expression batched_cube1 = concatenate_to_batch({cube1, cube1, cube1});
Expression y = contract3d_1d(batched_cube1, batched_x1, square1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression contract3d_1d_1d(const Expression& x, const Expression& y, const Expression& z, const Expression& b);
BOOST_AUTO_TEST_CASE( contract3d_1d_1d_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression x3 = parameter(cg, param3);
Expression cube1 = parameter(cg, param_cube1);
Expression y = contract3d_1d_1d(cube1, x1, x2, x3);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression sqrt(const Expression& x);
BOOST_AUTO_TEST_CASE( sqrt_gradient ) {
dynet::ComputationGraph cg;
Expression x3 = parameter(cg, param3);
Expression y = sqrt(x3);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression abs(const Expression& x);
BOOST_AUTO_TEST_CASE( abs_gradient ) {
dynet::ComputationGraph cg;
Expression x3 = parameter(cg, param3);
Expression y = abs(x3);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression erf(const Expression& x);
BOOST_AUTO_TEST_CASE( erf_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = erf(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression sin(const Expression& x);
BOOST_AUTO_TEST_CASE( sin_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = sin(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cos(const Expression& x);
BOOST_AUTO_TEST_CASE( cos_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = cos(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression tan(const Expression& x);
BOOST_AUTO_TEST_CASE( tan_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = tan(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression asin(const Expression& x);
BOOST_AUTO_TEST_CASE( asin_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param5);
Expression y = asin(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression acos(const Expression& x);
BOOST_AUTO_TEST_CASE( acos_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param5);
Expression y = acos(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression atan(const Expression& x);
BOOST_AUTO_TEST_CASE( atan_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param5);
Expression y = atan(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression sinh(const Expression& x);
BOOST_AUTO_TEST_CASE( sinh_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = sinh(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cosh(const Expression& x);
BOOST_AUTO_TEST_CASE( cosh_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = cosh(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression tanh(const Expression& x);
BOOST_AUTO_TEST_CASE( tanh_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = tanh(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression asinh(const Expression& x);
BOOST_AUTO_TEST_CASE( asinh_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param3);
Expression y = asinh(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression acosh(const Expression& x);
BOOST_AUTO_TEST_CASE( acosh_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param3);
Expression y = acosh(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression atanh(const Expression& x);
BOOST_AUTO_TEST_CASE( atanh_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param5);
Expression y = atanh(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression exp(const Expression& x);
BOOST_AUTO_TEST_CASE( exp_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = exp(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression square(const Expression& x);
BOOST_AUTO_TEST_CASE( square_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = square(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cube(const Expression& x);
BOOST_AUTO_TEST_CASE( cube_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = cube(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression log_sigmoid(const Expression& x);
BOOST_AUTO_TEST_CASE( log_sigmoid_gradient ) {
dynet::ComputationGraph cg;
Expression x2 = parameter(cg, param2);
Expression y = log_sigmoid(x2);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression lgamma(const Expression& x);
BOOST_AUTO_TEST_CASE( lgamma_gradient ) {
dynet::ComputationGraph cg;
Expression x2 = parameter(cg, param2);
Expression y = lgamma(x2);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression log(const Expression& x);
BOOST_AUTO_TEST_CASE( log_gradient ) {
dynet::ComputationGraph cg;
Expression x3 = parameter(cg, param3);
Expression y = log(x3);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression logistic(const Expression& x);
BOOST_AUTO_TEST_CASE( logistic_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = logistic(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression rectify(const Expression& x);
BOOST_AUTO_TEST_CASE( rectify_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = rectify(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression elu(const Expression& x);
BOOST_AUTO_TEST_CASE( elu_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = elu(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression selu(const Expression& x);
BOOST_AUTO_TEST_CASE( selu_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = selu(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression silu(const Expression& x);
BOOST_AUTO_TEST_CASE( silu_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = silu(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression round(const Expression& x, GradientMode gradient_mode);
BOOST_AUTO_TEST_CASE( round_forward ) {
// batch_vals = {1.1f, 2.6f, 3.3f, 4.0f, 5.1f, 6.6f};
dynet::ComputationGraph cg;
Expression x = input(cg, Dim({3}, 2), batch_vals);
Expression y = round(x, zero_gradient);
std::vector<float> v = as_vector(y.value());
BOOST_CHECK_EQUAL(v[0], 1.0);
BOOST_CHECK_EQUAL(v[1], 3.0);
BOOST_CHECK_EQUAL(v[2], 3.0);
BOOST_CHECK_EQUAL(v[3], 4.0);
BOOST_CHECK_EQUAL(v[4], 5.0);
BOOST_CHECK_EQUAL(v[5], 7.0);
}
// Expression ceil(const Expression& x, GradientMode gradient_mode);
BOOST_AUTO_TEST_CASE( ceil_forward ) {
// batch_vals = {1.1f, 2.6f, 3.3f, 4.0f, 5.1f, 6.6f};
dynet::ComputationGraph cg;
Expression x = input(cg, Dim({3}, 2), batch_vals);
Expression y = ceil(x, zero_gradient);
std::vector<float> v = as_vector(y.value());
BOOST_CHECK_EQUAL(v[0], 2.0);
BOOST_CHECK_EQUAL(v[1], 3.0);
BOOST_CHECK_EQUAL(v[2], 4.0);
BOOST_CHECK_EQUAL(v[3], 4.0);
BOOST_CHECK_EQUAL(v[4], 6.0);
BOOST_CHECK_EQUAL(v[5], 7.0);
}
// Expression floor(const Expression& x, GradientMode gradient_mode);
BOOST_AUTO_TEST_CASE( floor_forward ) {
// batch_vals = {1.1f, 2.6f, 3.3f, 4.0f, 5.1f, 6.6f};
dynet::ComputationGraph cg;
Expression x = input(cg, Dim({3}, 2), batch_vals);
Expression y = floor(x, zero_gradient);
std::vector<float> v = as_vector(y.value());
BOOST_CHECK_EQUAL(v[0], 1.0);
BOOST_CHECK_EQUAL(v[1], 2.0);
BOOST_CHECK_EQUAL(v[2], 3.0);
BOOST_CHECK_EQUAL(v[3], 4.0);
BOOST_CHECK_EQUAL(v[4], 5.0);
BOOST_CHECK_EQUAL(v[5], 6.0);
}
// Expression hinge(const Expression& x, unsigned index, float m = 1.0);
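// hinge(x, i, m) sums max(0, m + x_j - x_i) over the non-target entries j != i.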
BOOST_AUTO_TEST_CASE( hinge_gradient ) {
unsigned index = 0;
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression z = hinge(x1, index, 0.5);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression hinge(const Expression& x, unsigned index, float m = 1.0);
BOOST_AUTO_TEST_CASE( hinge_multiple_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
vector<Expression> exp;
for (unsigned index = 3; index > 0; --index)
exp.push_back(hinge(x1, index - 1, 0.5));
Expression z = sum(exp);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression hinge(const Expression& x, const std::vector<unsigned>& indices, float m = 1.0);
BOOST_AUTO_TEST_CASE( hinge_batch_gradient ) {
std::vector<unsigned> idx = {1, 2};
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression z = sum_batches(hinge(x1 + x2, idx, 2.f));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression hinge(const Expression& x, const unsigned* pindex, float m = 1.0);
BOOST_AUTO_TEST_CASE( hingeptr_gradient ) {
unsigned index = 0;
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression z = hinge(x1, &index, 0.5);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression hinge_dim(const Expression& x, const std::vector<unsigned>& indices, unsigned d = 0, float m = 1.0);
BOOST_AUTO_TEST_CASE( hinge_dim_gradient ) {
std::vector<unsigned> index = {0, 1, 2};
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param_square1);
Expression z = to_scalar(hinge_dim(x1, index, 0, 0.5) + hinge_dim(x1, index, 1, 0.5));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression log_softmax(const Expression& x);
BOOST_AUTO_TEST_CASE( log_softmax_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = log_softmax(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression log_softmax(const Expression& x);
BOOST_AUTO_TEST_CASE( log_softmax_autobatch_gradient ) {
auto autobatch_cache = dynet::autobatch_flag;
dynet::autobatch_flag = 1;
dynet::ComputationGraph cg;
vector<Expression> vals;
{
Expression x1 = parameter(cg, param1);
vals.push_back(log_softmax(x1));
}
{
Expression x2 = parameter(cg, param2);
vals.push_back(log_softmax(x2));
}
Expression y = sum(vals);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
dynet::autobatch_flag = autobatch_cache;
}
// Expression log_softmax(const Expression& x, unsigned v);
BOOST_AUTO_TEST_CASE( log_softmax_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression y = log_softmax(x1 + x2);
Expression z = sum_batches(input(cg, {1, 3}, first_one_vals) * y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression log_softmax(const Expression& x, unsigned v);
BOOST_AUTO_TEST_CASE( log_softmax_colbatch_gradient ) {
dynet::ComputationGraph cg;
Expression x = reshape(parameter(cg, param_cube1), Dim({3, 3}, 3));
Expression y = log_softmax(x);
Expression z = sum_batches(input(cg, {1, 3}, first_one_vals) * y * input(cg, {3}, first_one_vals));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression log_softmax(const Expression& x, const std::vector<unsigned>& restriction);
BOOST_AUTO_TEST_CASE( restricted_log_softmax_gradient ) {
vector<unsigned> restriction = {0, 1};
dynet::ComputationGraph cg;
Expression x3 = parameter(cg, param3);
Expression y = exp( log_softmax(x3, restriction) );
Expression z = input(cg, {1, 3}, first_one_vals) * y;
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression softmax(const Expression& x);
BOOST_AUTO_TEST_CASE( softmax_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = log(softmax(x1));
Expression z = input(cg, {1, 3}, first_one_vals) * y;
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression softmax(const Expression& x, unsigned v);
BOOST_AUTO_TEST_CASE( softmax_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression y = log(softmax(x1 + x2));
Expression z = sum_batches(input(cg, {1, 3}, first_one_vals) * y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression softmax(const Expression& x, unsigned v);
BOOST_AUTO_TEST_CASE( softmax_colbatch_gradient ) {
dynet::ComputationGraph cg;
Expression x = reshape(parameter(cg, param_cube1), Dim({3, 3}, 3));
Expression y = softmax(x);
Expression z = sum_batches(input(cg, {1, 3}, first_one_vals) * y * input(cg, {3}, first_one_vals));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression softmax(const Expression& x, unsigned v);
BOOST_AUTO_TEST_CASE( softmax_cols_colbatch_gradient ) {
dynet::ComputationGraph cg;
Expression x = reshape(parameter(cg, param_cube1), Dim({3, 3}, 3));
Expression y = softmax(x, 1);
Expression z = sum_batches(input(cg, {1, 3}, first_one_vals) * y * input(cg, {3}, first_one_vals));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression sparsemax(const Expression& x);
BOOST_AUTO_TEST_CASE( sparsemax_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = sparsemax(x1);
Expression z = input(cg, {1, 3}, first_one_vals) * y;
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression sparsemax_loss(const Expression& x);
BOOST_AUTO_TEST_CASE( sparsemax_loss_gradient ) {
std::vector<unsigned> idxs(2); idxs[0] = 1; idxs[1] = 2;
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression z = sparsemax_loss(x1, idxs);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression softsign(const Expression& x);
BOOST_AUTO_TEST_CASE( softsign_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = softsign(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression pow(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( pow_gradient ) {
dynet::ComputationGraph cg;
Expression x3 = parameter(cg, param3);
Expression x_scalar1 = parameter(cg, param_scalar1);
Expression y = pow(x3, x_scalar1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression min(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( min_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression y = min(x1, x2);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression max(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( max_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression y = max(x1, x2);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// TODO: Noise is random, so its output cannot be checked directly; only the forward pass is exercised.
// Expression noise(const Expression& x, real stddev);
BOOST_AUTO_TEST_CASE( noise_forward ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = noise(x1, 0.5);
Expression z = to_scalar(y);
cg.forward(z);
}
// TODO: Dropout scales the gradients at training time, so they don't match.
// Expression dropout(const Expression& x, real p);
BOOST_AUTO_TEST_CASE( dropout_forward ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = dropout(x1, 0.5);
Expression z = to_scalar(y);
cg.forward(z);
}
BOOST_AUTO_TEST_CASE( dropout_batch_forward ) {
dynet::ComputationGraph cg;
Expression x = input(cg, Dim({3}, 2), batch_vals);
Expression y = dropout_batch(x, 0.5);
Expression z = to_scalar(y);
cg.forward(z);
}
BOOST_AUTO_TEST_CASE( dropout_dim_forward ) {
for (unsigned d = 0; d < 3; d++) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param_cube1);
Expression y = dropout_dim(x, d, 0.5);
Expression z = to_scalar(y);
cg.forward(z);
}
}
// TODO: Dropout scales the gradients at training time, so they don't match.
// Expression block_dropout(const Expression& x, real p);
// Expression argmax(const Expression& x, GradientMode gradient_mode);
BOOST_AUTO_TEST_CASE( argmax_forward ) {
dynet::ComputationGraph cg;
Expression x = input(cg, Dim({3}, 2), batch_vals);
Expression y = argmax(x, zero_gradient);
std::vector<float> v = as_vector(y.value());
BOOST_CHECK_EQUAL(v[0], 0.0);
BOOST_CHECK_EQUAL(v[1], 0.0);
BOOST_CHECK_EQUAL(v[2], 1.0);
BOOST_CHECK_EQUAL(v[3], 0.0);
BOOST_CHECK_EQUAL(v[4], 0.0);
BOOST_CHECK_EQUAL(v[5], 1.0);
}
// Expression argmax(const Expression& x, GradientMode gradient_mode);
BOOST_AUTO_TEST_CASE( argmax_backward ) {
dynet::ComputationGraph cg;
Expression x = input(cg, Dim({3}, 2), batch_vals);
Expression y = argmax(x, zero_gradient);
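// With zero_gradient the argmax node backpropagates nothing, so every entry of
// dz/dx below must be exactly 0 even though z depends on y.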
Expression z = sum_batches(squared_norm(y));
cg.backward(z, true);
std::vector<float> g_x = as_vector(x.gradient());
BOOST_CHECK_EQUAL(g_x[0], 0.0);
BOOST_CHECK_EQUAL(g_x[1], 0.0);
BOOST_CHECK_EQUAL(g_x[2], 0.0);
BOOST_CHECK_EQUAL(g_x[3], 0.0);
BOOST_CHECK_EQUAL(g_x[4], 0.0);
BOOST_CHECK_EQUAL(g_x[5], 0.0);
}
// Expression argmax(const Expression& x, GradientMode gradient_mode);
BOOST_AUTO_TEST_CASE( straight_through_backward ) {
dynet::ComputationGraph cg;
Expression x = input(cg, Dim({3}, 2), batch_vals);
Expression x_ = input(cg, Dim({3}, 2), batch_vals);
Expression y = argmax(x, straight_through_gradient);
Expression z = sum_batches(dot_product(y, x_));
cg.backward(z, true);
std::vector<float> g_x = as_vector(x.gradient());
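// Straight-through: the backward pass treats argmax as the identity, so with
// z = dot(y, x_) the gradient dz/dx equals x_, i.e. batch_vals itself.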
BOOST_CHECK_EQUAL(g_x[0], batch_vals[0]);
BOOST_CHECK_EQUAL(g_x[1], batch_vals[1]);
BOOST_CHECK_EQUAL(g_x[2], batch_vals[2]);
BOOST_CHECK_EQUAL(g_x[3], batch_vals[3]);
BOOST_CHECK_EQUAL(g_x[4], batch_vals[4]);
BOOST_CHECK_EQUAL(g_x[5], batch_vals[5]);
}
// Expression reshape(const Expression& x, const Dim& d);
BOOST_AUTO_TEST_CASE( reshape_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = reshape(x1, {1, 3});
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression reshape(const Expression& x, const Dim& d);
BOOST_AUTO_TEST_CASE( reshape_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression y1 = x1 * transpose(x2);
Expression y2 = reshape(y1, Dim({3, 3}, 2));
Expression z = sum_batches(to_scalar(y2));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression transpose(const Expression& x);
BOOST_AUTO_TEST_CASE( transpose_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param_square1);
Expression y = x1 * transpose(x1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression transpose(const Expression& x);
BOOST_AUTO_TEST_CASE( transpose_higherorder_gradient ) {
dynet::ComputationGraph cg;
Expression cube1 = parameter(cg, param_cube1);
Expression x1 = reshape(transpose(cube1, {2, 0, 1}), Dim({9, 3}));
Expression x2 = reshape(transpose(cube1, {1, 2, 0}), Dim({3, 9}));
Expression z = to_scalar(x1 * x2);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression logdet(const Expression& x);
BOOST_AUTO_TEST_CASE( logdet_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param_square1);
Expression y = logdet(-x);
BOOST_CHECK(check_grad(mod, y, 0));
}
// Expression inverse(const Expression& x);
BOOST_AUTO_TEST_CASE( inverse_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param_square1);
Expression y = inverse(x);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression trace_of_product(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( trace_of_product_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression z = trace_of_product(x1, x2);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression operator+(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cadd_broadcast_gradient_scalar ) {
dynet::ComputationGraph cg;
Expression x1 = reshape(parameter(cg, param_scalar1), Dim({1},1));
Expression x2 = reshape(parameter(cg, param4), Dim({3,1,1},2));
Expression y = (x1 + x2) + (x2 + x1) + (x1 - x2) + (x2 - x1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cdiv(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cdiv_broadcast_gradient_scalar ) {
dynet::ComputationGraph cg;
Expression x1 = reshape(parameter(cg, param_scalar1), Dim({1},1));
Expression x2 = reshape(parameter(cg, param4), Dim({3,1,1},2));
Expression y = cdiv(x2, x1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cmult(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cmult_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression y = cmult(x1, x2);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cmult(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cmult_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression y = cmult(x1, x2) + cmult(x2, x1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cmult(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalar_cmult_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param_scalar1);
Expression x2 = parameter(cg, param2);
Expression y = cmult(x1, x2);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cmult(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( scalar_cmult_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param_scalar1);
Expression x2 = reshape(parameter(cg, param_square1), Dim({1, 3}, 3));
Expression y = cmult(x1, x2) + cmult(x2, x1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cmult(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cmult_broadcast_gradient_scalar ) {
dynet::ComputationGraph cg;
Expression x1 = reshape(parameter(cg, param_scalar1), Dim({1},1));
Expression x2 = reshape(parameter(cg, param4), Dim({3,1,1},2));
Expression y = cmult(x1, x2) + cmult(x2, x1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression cmult(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( cmult_broadcast2_gradient ) {
Dim dim_permutations[] = {Dim({3,1},2), Dim({3,2},1)};
dynet::ComputationGraph cg;
for(int i=0; i<2; i++){
Dim dim = dim_permutations[i];
Expression x1 = reshape(parameter(cg, param1), Dim({3,1},1));
Expression x2 = reshape(parameter(cg, param4), dim);
Expression y = cmult(x1, x2) + cmult(x2, x1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
}
// Expression cmult(const Expression& x, const Expression& y);<|fim▁hole|> dynet::ComputationGraph cg;
for(int i=0; i<6; i++){
Dim dim = dim_permutations[i];
Expression x1 = reshape(parameter(cg, param1), Dim({1,3,1},1));
Expression x2 = reshape(parameter(cg, param_cube1), dim);
Expression y = cmult(x1, x2) + cmult(x2, x1);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
}
// Expression dot_product(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( dot_product_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression z = dot_product(x1, x2);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression dot_product(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( dot_product_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression z = sum_batches(dot_product(x1, x2) + dot_product(x2, x1) * 2);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression dot_product(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( dot_product_matrix_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param_square1);
Expression z = dot_product(x1, x1);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression squared_distance(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( squared_distance_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression z = squared_distance(x1, x2);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression squared_distance(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( squared_distance_batchright_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression z = sum_batches(squared_distance(x1, x1 + x2));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression squared_distance(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( squared_distance_batchleft_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression z = sum_batches(squared_distance(x1 + x2, x1));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression squared_distance(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( squared_distance_batchboth_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression z = sum_batches(squared_distance(x1 + x2, cmult(x1, x2)));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression squared_norm(const Expression& x);
BOOST_AUTO_TEST_CASE( squared_norm_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression z = squared_norm(x1);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression squared_norm(const Expression& x);
BOOST_AUTO_TEST_CASE( squared_norm_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression z = sum_batches(squared_norm(x1 + x2));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression l2_norm(const Expression&);
BOOST_AUTO_TEST_CASE( l2_norm_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression z = l2_norm(x1);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression l2_norm(const Expression& x);
BOOST_AUTO_TEST_CASE( l2_norm_batch_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression z = sum_batches(l2_norm(x1 + x2));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression huber_distance(const Expression& x, const Expression& y, float c = 1.345f);
BOOST_AUTO_TEST_CASE( huber_distance_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression z = huber_distance(x1, x2);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression l1_distance(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( l1_distance_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression z = l1_distance(x1, x2);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression binary_log_loss(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( binary_log_loss_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = logistic( parameter(cg, param1) );
Expression x2 = input(cg, {3}, ones3_vals);
Expression z = binary_log_loss(x1, x2);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression binary_log_loss(const Expression& x, const Expression& y);
BOOST_AUTO_TEST_CASE( binary_log_loss_edgecases ) {
dynet::ComputationGraph cg;
float val, infinity = - log(DYNET_DEVICE_MIN);
Expression x, y, z;
vector<float> values = {0.0, 0.5, 1.0};
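// binary_log_loss(x, y) = -(y*log(x) + (1-y)*log(1-x)): matching endpoints give 0,
// x == 0.5 gives log(2), and opposite endpoints saturate to the clamped "infinity".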
for (float vx : values) {
for (float vy : values) {
x = input(cg, vx);
y = input(cg, vy);
z = binary_log_loss(x, y);
val = as_scalar(z.value());
if (vx == 0.5)
BOOST_CHECK_CLOSE(val, log(2), 0.1);
else if (vx == vy)
BOOST_CHECK_CLOSE(val, 0, 0.1);
else
BOOST_CHECK_CLOSE(val, infinity, 0.1);
}
}
}
// Expression pairwise_rank_loss(const Expression& x, const Expression& y, real m=1.0);
BOOST_AUTO_TEST_CASE( pairwise_rank_loss_gradient ) {
dynet::ComputationGraph cg;
Expression x_scalar1 = parameter(cg, param_scalar1);
Expression x_scalar2 = parameter(cg, param_scalar2);
Expression z = pairwise_rank_loss(x_scalar1, x_scalar2);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression poisson_loss(const Expression& x, unsigned y);
BOOST_AUTO_TEST_CASE( poisson_loss_gradient ) {
dynet::ComputationGraph cg;
Expression scalar = parameter(cg, param_scalar1);
Expression z = poisson_loss(scalar, 3);
BOOST_CHECK(check_grad(mod, z, 0));
}
/*
// Expression conv1d_narrow(const Expression& x, const Expression& f);
BOOST_AUTO_TEST_CASE( conv1d_narrow_gradient ) {
dynet::ComputationGraph cg;
Expression xsquare = parameter(cg, param_square1);
Expression xkernel = parameter(cg, param_kernel1);
Expression y = conv1d_narrow(xsquare, xkernel);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression conv1d_wide(const Expression& x, const Expression& f);
BOOST_AUTO_TEST_CASE( conv1d_wide_gradient ) {
dynet::ComputationGraph cg;
Expression xkernel = parameter(cg, param_kernel1);
Expression y = conv1d_wide(xkernel, xkernel);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
*/
// Expression filter1d_narrow(const Expression& x, const Expression& f);
BOOST_AUTO_TEST_CASE( filter1d_narrow_gradient ) {
dynet::ComputationGraph cg;
Expression xsquare = parameter(cg, param_square1);
Expression xfilter = parameter(cg, param_filter1);
Expression y = filter1d_narrow(xsquare, xfilter);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression kmax_pooling(const Expression& x, unsigned k);
BOOST_AUTO_TEST_CASE( kmax_pooling_keq1_gradient ) {
dynet::ComputationGraph cg;
Expression xsquare = parameter(cg, param_square1);
Expression y = tanh(kmax_pooling(xsquare, 1));
Expression z = pickneglogsoftmax(y, 1);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression kmax_pooling(const Expression& x, unsigned k);
BOOST_AUTO_TEST_CASE( kmax_pooling_keq2_gradient ) {
dynet::ComputationGraph cg;
Expression xsquare = parameter(cg, param_square1);
Expression y = tanh(kmax_pooling(xsquare, 2));
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression fold_rows(const Expression& x, unsigned nrows=2);
BOOST_AUTO_TEST_CASE( fold_rows_gradient ) {
dynet::ComputationGraph cg;
Expression x4 = parameter(cg, param4);
Expression y = fold_rows(x4, 2);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression circ_corr(const Expression& u, const Expression& v);
BOOST_AUTO_TEST_CASE( circ_corr_gradient ) {
dynet::ComputationGraph cg;
Expression u = parameter(cg, param5);
Expression v = parameter(cg, param2);
Expression y = circ_corr(u, v);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression circ_conv(const Expression& u, const Expression& v);
BOOST_AUTO_TEST_CASE( circ_conv_gradient ) {
dynet::ComputationGraph cg;
Expression u = parameter(cg, param5);
Expression v = parameter(cg, param2);
Expression y = circ_conv(u, v);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression average(const std::initializer_list<Expression>& xs);
BOOST_AUTO_TEST_CASE( average_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression x3 = parameter(cg, param3);
Expression y = average({x1, x2, x3});
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression average_cols(const Expression& x);
BOOST_AUTO_TEST_CASE( average_cols_gradient ) {
dynet::ComputationGraph cg;
Expression xsquare = parameter(cg, param_square1);
Expression y = tanh(average_cols(xsquare));
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression sum_cols(const Expression& x);
BOOST_AUTO_TEST_CASE( sum_cols_gradient ) {
dynet::ComputationGraph cg;
Expression xsquare = parameter(cg, param_square1);
Expression y = tanh(sum_cols(xsquare));
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression conv2d(const Expression& x, const Expression& f, const std::vector<unsigned>& stride, bool is_valid);
BOOST_AUTO_TEST_CASE( conv2d_valid_gradient ) {
dynet::ComputationGraph cg;
Parameter param_kernel = mod.add_parameters({2, 2, 2, 3});
std::vector<float> param_kernel_vals = {.011f, .022f, .033f, .012f, .022f, .032f, .013f, .023f, .033f,
.111f, -.122f, -.033f, -.112f, -.022f, -.132f, -.113f, -.123f, -.133f,
.211f, .222f, .233f, .212f, .222f, .232f
};
TensorTools::set_elements(param_kernel.get_storage().values, param_kernel_vals);
std::vector<float> conv2d_batch_vals(50 * 50 * 2 * 2);
for (unsigned i = 0; i < conv2d_batch_vals.size(); ++i) {
conv2d_batch_vals[i] = i * 0.011f + (i + 1) * 0.001f;
}
Expression x = input(cg, Dim({50, 50, 2}, 2), conv2d_batch_vals);
Expression kernel = parameter(cg, param_kernel);
vector<unsigned> stride = {3, 3}; bool is_valid = true;
Expression y = conv2d(x, kernel, stride, is_valid);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression conv2d(const Expression& x, const Expression& f, const std::vector<unsigned>& stride, bool is_valid);
BOOST_AUTO_TEST_CASE( conv2d_autobatch_gradient ) {
auto autobatch_cache = dynet::autobatch_flag;
dynet::autobatch_flag = 1;
dynet::ComputationGraph cg;
Parameter param_kernel = mod.add_parameters({2, 2, 2, 3});
std::vector<float> param_kernel_vals = {.011f, .022f, .033f, .012f, .022f, .032f, .013f, .023f, .033f,
.111f, -.122f, -.033f, -.112f, -.022f, -.132f, -.113f, -.123f, -.133f,
.211f, .222f, .233f, .212f, .222f, .232f
};
TensorTools::set_elements(param_kernel.get_storage().values, param_kernel_vals);
Expression kernel = parameter(cg, param_kernel);
vector<unsigned> stride = {3, 3}; bool is_valid = true;
std::vector<float> conv2d_vals1(50 * 50 * 2), conv2d_vals2(50 * 50 * 2);
for (unsigned i = 0; i < conv2d_vals1.size(); ++i) {
conv2d_vals1[i] = i * 0.011f + (i + 1) * 0.001f;
conv2d_vals2[i] = i * 0.015f + (i + 1) * -0.001f;
}
vector<Expression> zs;
{
Expression x = input(cg, Dim({50, 50, 2}), conv2d_vals1);
Expression y = conv2d(x, kernel, stride, is_valid);
zs.push_back(to_scalar(y));
}
{
Expression x = input(cg, Dim({50, 50, 2}), conv2d_vals2);
Expression y = conv2d(x, kernel, stride, is_valid);
zs.push_back(to_scalar(y));
}
Expression z = sum(zs);
BOOST_CHECK(check_grad(mod, z, 0));
dynet::autobatch_flag = autobatch_cache;
}
// Expression conv2d(const Expression& x, const Expression& f, const std::vector<unsigned>& stride, bool is_valid);
BOOST_AUTO_TEST_CASE( conv2d_valid_singlefilter_gradient ) {
dynet::ComputationGraph cg;
Parameter param_kernel = mod.add_parameters({2, 4, 1, 3});
std::vector<float> param_kernel_vals = {.011f, .022f, .033f, .012f, .022f, .032f, .013f, .023f, .033f,
.111f, -.122f, -.033f, -.112f, -.022f, -.132f, -.113f, -.123f, -.133f,
.211f, .222f, .233f, .212f, .222f, .232f
};
TensorTools::set_elements(param_kernel.get_storage().values, param_kernel_vals);
std::vector<float> conv2d_batch_vals(50 * 100 * 1 * 2);
for (unsigned i = 0; i < conv2d_batch_vals.size(); ++i) {
conv2d_batch_vals[i] = i * 0.011f + (i + 1) * 0.001f;
}
Expression x = input(cg, Dim({50, 100}, 2), conv2d_batch_vals);
Expression kernel = parameter(cg, param_kernel);
vector<unsigned> stride = {3, 3}; bool is_valid = true;
Expression y = conv2d(x, kernel, stride, is_valid);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
BOOST_AUTO_TEST_CASE( conv2d_same_gradient ) {
dynet::ComputationGraph cg;
Parameter param_kernel = mod.add_parameters({2, 2, 2, 3});
std::vector<float> param_kernel_vals = {.011f, .022f, .033f, .012f, .022f, .032f, .013f, .023f, .033f,
.111f, -.122f, -.033f, -.112f, -.022f, -.132f, -.113f, -.123f, -.133f,
.211f, .222f, .233f, .212f, .222f, .232f
};
TensorTools::set_elements(param_kernel.get_storage().values, param_kernel_vals);
Parameter param_kernel2 = mod.add_parameters({2, 2, 3, 2});
TensorTools::set_elements(param_kernel2.get_storage().values, param_kernel_vals);
std::vector<float> conv2d_batch_vals(2 * 50 * 50 * 2);
for (unsigned i = 0; i < conv2d_batch_vals.size(); ++i) {
conv2d_batch_vals[i] = i * 0.011f + (i + 1) * 0.001f;
}
Expression x = input(cg, Dim({50, 50, 2}, 2), conv2d_batch_vals);
Expression kernel = parameter(cg, param_kernel);
vector<unsigned> stride = {4, 4}; bool is_valid = false;
Expression y = conv2d(x, kernel, stride, is_valid);
Expression kernel2 = parameter(cg, param_kernel2);
Expression y2 = conv2d(y, kernel2, stride, is_valid);
Expression z = sum_batches(to_scalar(y2));
BOOST_CHECK(check_grad(mod, z, 0));
}
BOOST_AUTO_TEST_CASE( maxpooling2d_same_gradient ) {
dynet::ComputationGraph cg;
Parameter param_kernel = mod.add_parameters({2, 2, 1, 1});
std::vector<float> param_kernel_vals = {.011f, .022f, .012f, .022f};
TensorTools::set_elements(param_kernel.get_storage().values, param_kernel_vals);
std::vector<float> maxpooling2d_batch_vals(1 * 11 * 11 * 2);
for (unsigned i = 0; i < maxpooling2d_batch_vals.size(); ++i) {
maxpooling2d_batch_vals[i] = i * 0.011f + (i + 1) * 0.001f;
}
Expression x = input(cg, Dim({11, 11, 1}, 2), maxpooling2d_batch_vals);
Expression kernel = parameter(cg, param_kernel);
std::vector<unsigned> ksize = {2, 2};
std::vector<unsigned> stride = {2, 5};
bool is_valid = false;
Expression w = conv2d(x, kernel, stride, is_valid);
//Expression z = sum_batches(to_scalar(w));
//BOOST_CHECK(check_grad(mod, z, 0));
is_valid = false;
Expression y = maxpooling2d(w, ksize, stride, is_valid);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
BOOST_AUTO_TEST_CASE( maxpooling2d_valid_gradient ) {
dynet::ComputationGraph cg;
Parameter param_kernel = mod.add_parameters({2, 2, 1, 1});
std::vector<float> param_kernel_vals = {.011f, .022f, .012f, .022f};
TensorTools::set_elements(param_kernel.get_storage().values, param_kernel_vals);
std::vector<float> maxpooling2d_batch_vals(1 * 21 * 21 * 2);
for (unsigned i = 0; i < maxpooling2d_batch_vals.size(); ++i) {
maxpooling2d_batch_vals[i] = i * 0.011f + (i + 1) * 0.001f;
}
Expression x = input(cg, Dim({21, 21, 1}, 2), maxpooling2d_batch_vals);
Expression kernel = parameter(cg, param_kernel);
std::vector<unsigned> ksize = {2, 2};
std::vector<unsigned> stride = {2, 5};
bool is_valid = false;
Expression w = conv2d(x, kernel, stride, is_valid);
is_valid = true;
Expression y = maxpooling2d(w, ksize, stride, is_valid);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
BOOST_AUTO_TEST_CASE( maxpooling2d_same_gradient_two ) {
dynet::ComputationGraph cg;
Parameter param_kernel = mod.add_parameters({2, 2, 1, 1});
std::vector<float> param_kernel_vals = {.011f, .022f, .012f, .022f};
TensorTools::set_elements(param_kernel.get_storage().values, param_kernel_vals);
std::vector<float> maxpooling2d_batch_vals(1 * 31 * 16 * 2);
for (unsigned i = 0; i < maxpooling2d_batch_vals.size(); ++i) {
maxpooling2d_batch_vals[i] = i * 0.011f + (i + 1) * 0.001f;
}
Expression x = input(cg, Dim({31, 16, 1}, 2), maxpooling2d_batch_vals);
Expression kernel = parameter(cg, param_kernel);
std::vector<unsigned> ksize = {3, 2};
std::vector<unsigned> stride = {3, 3};
bool is_valid = false;
Expression w = conv2d(x, kernel, stride, is_valid);
is_valid = true;
Expression y = maxpooling2d(w, ksize, stride, is_valid);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
// TODO: These are all unimplemented
// Expression kmh_ngram(const Expression& x, unsigned n);
// Expression pick(const Expression& x, unsigned v);
BOOST_AUTO_TEST_CASE( pick_gradient ) {
unsigned idx = 1;
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression z = pick(x1, idx);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression pick(const Expression& x, unsigned* pv);
BOOST_AUTO_TEST_CASE( pickptr_gradient ) {
unsigned idx = 1;
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression z = pick(x1, &idx);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression pick(const Expression& x, const std::vector<unsigned>& v, unsigned d = 0);
BOOST_AUTO_TEST_CASE( pick_batch_gradient ) {
std::vector<unsigned> idx = {1, 2};
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression z = sum_batches(pick(x1 + x2, idx));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression pick(const Expression& x, const std::vector<unsigned>& v, unsigned d = 0);
BOOST_AUTO_TEST_CASE( pick_batch_broadcast_gradient ) {
std::vector<unsigned> idx = {1, 2};
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param_square1);
Expression z = sum_batches(squared_norm(pick(x1, idx, 0)));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression pick_batch_elem(const Expression& x, unsigned v);
BOOST_AUTO_TEST_CASE( pick_batch_elem_gradient ) {
unsigned idx = 0;
dynet::ComputationGraph cg;
Expression x1 = input(cg, Dim({ 3 }, 2), batch_vals);
Expression z = sum_rows(pick_batch_elem(x1, idx));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression pick_batch_elems(const Expression& x, const std::vector<unsigned> & v);
BOOST_AUTO_TEST_CASE( pick_batch_elems_gradient ) {
dynet::ComputationGraph cg;
std::vector<unsigned> indices = { 0, 1 };
Expression x1 = input(cg, Dim({ 3 }, 2), batch_vals);
Expression picked_x1 = pick_batch_elems(x1, indices);
Expression z = sum({
sum_rows(pick_batch_elem(picked_x1, (unsigned) 0)),
sum_rows(pick_batch_elem(picked_x1, (unsigned) 1))
});
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression pick_range(const Expression& x, unsigned v, unsigned u);
BOOST_AUTO_TEST_CASE( pick_range_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression y = pick_range(x1, 0, 2);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression pick_range(const Expression& x, unsigned v, unsigned u);
BOOST_AUTO_TEST_CASE( pick_range_dim_gradient ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param_square1);
Expression y = pick_range(x1, 0, 2, 1);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression select_rows(const Expression& x, vector<unsigned>& rows);
BOOST_AUTO_TEST_CASE( select_rows_gradient ) {
dynet::ComputationGraph cg;
vector<unsigned> rows = {1};
Expression x1 = parameter(cg, param_square1);
Expression y = select_rows(x1, rows);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression select_rows(const Expression& x, vector<unsigned>& rows);
BOOST_AUTO_TEST_CASE( select_rows_multiple_gradient ) {
dynet::ComputationGraph cg;
vector<unsigned> rows = {0, 2};
Expression x1 = parameter(cg, param_square1);
Expression y = select_rows(x1, rows) * x1;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression select_rows(const Expression& x, vector<unsigned>& rows);
BOOST_AUTO_TEST_CASE( select_rows_oob ) {
dynet::ComputationGraph cg;
vector<unsigned> rows = {3};
Expression x1 = parameter(cg, param_square1);
Expression y = select_rows(x1, rows);
BOOST_CHECK_THROW(y.value(), std::invalid_argument);
}
// Expression select_rows(const Expression& x, vector<unsigned>& rows);
BOOST_AUTO_TEST_CASE( select_rows_autobatch_gradient ) {
auto autobatch_cache = dynet::autobatch_flag;
dynet::autobatch_flag = 1;
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param_square1);
vector<Expression> vals;
{
vector<unsigned> rows = {0, 2};
Expression y = select_rows(x1, rows) * x1;
vals.push_back(to_scalar(y));
}
{
vector<unsigned> rows = {2, 1};
Expression y = select_rows(x1, rows) * x1;
vals.push_back(to_scalar(y));
}
{
vector<unsigned> rows = {0};
Expression y = select_rows(x1, rows) * x1;
vals.push_back(to_scalar(y));
}
Expression z = sum(vals);
BOOST_CHECK(check_grad(mod, z, 0));
dynet::autobatch_flag = autobatch_cache;
}
// Expression select_cols(const Expression& x, vector<unsigned>& cols);
BOOST_AUTO_TEST_CASE( select_cols_gradient ) {
dynet::ComputationGraph cg;
vector<unsigned> cols = {1};
Expression x1 = parameter(cg, param_square1);
Expression y = select_cols(x1, cols);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression select_cols(const Expression& x, vector<unsigned>& cols);
BOOST_AUTO_TEST_CASE( select_cols_multiple_gradient ) {
dynet::ComputationGraph cg;
vector<unsigned> cols = {0, 2};
Expression x1 = parameter(cg, param_square1);
Expression y = x1 * select_cols(x1, cols);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression select_cols(const Expression& x, vector<unsigned>& cols);
BOOST_AUTO_TEST_CASE( select_cols_oob ) {
dynet::ComputationGraph cg;
vector<unsigned> cols = {3};
Expression x1 = parameter(cg, param_square1);
Expression y = select_cols(x1, cols);
BOOST_CHECK_THROW(y.value(), std::invalid_argument);
}
// Expression pickneglogsoftmax(const Expression& x, unsigned v);
BOOST_AUTO_TEST_CASE( pickneglogsoftmax_gradient ) {
unsigned idx = 1;
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression z = pickneglogsoftmax(x1, idx);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression pickneglogsoftmax(const Expression& x, const std::vector<unsigned>& v);
BOOST_AUTO_TEST_CASE( pickneglogsoftmax_batch_gradient ) {
std::vector<unsigned> idx = {1, 2};
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = input(cg, Dim({3}, 2), batch_vals);
Expression z = sum_batches(pickneglogsoftmax(x1 + x2, idx));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression strided_select(const Expression& x, const std::vector<int>& strides, const std::vector<int>& from = {}, const std::vector<int>& to = {});
BOOST_AUTO_TEST_CASE( strided_select_gradient_noop ) {
dynet::ComputationGraph cg;
const vector<int> strides = {};
Expression x1 = parameter(cg, param_square1);
Expression y = strided_select(x1, strides);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
BOOST_CHECK(x1.dim().size() == y.dim().size());
}
// Expression strided_select(const Expression& x, const std::vector<int>& strides, const std::vector<int>& from = {}, const std::vector<int>& to = {});
BOOST_AUTO_TEST_CASE( strided_select_gradient ) {
dynet::ComputationGraph cg;
for(int stride=1;stride<4;stride++){
const vector<int> strides = {stride,stride,stride};
Expression x1 = parameter(cg, param_cube1);
Expression y3 = strided_select(x1, strides);
Expression z = to_scalar(y3);
BOOST_CHECK(check_grad(mod, z, 0));
}
}
// Expression strided_select(const Expression& x, const std::vector<int>& strides, const std::vector<int>& from = {}, const std::vector<int>& to = {});
BOOST_AUTO_TEST_CASE( strided_select_gradient2 ) {
dynet::ComputationGraph cg;
for(int from=0;from<2;from++){
for(int to=from+1;to<4;to++){
for(int stride=1;stride<4;stride++){
const vector<int> strides = {stride,stride,stride};
const vector<int> to_range = {to,to,to};
const vector<int> from_range = {from,from,from};
Expression x1 = parameter(cg, param_cube1);
Expression y = strided_select(x1, strides, from_range, to_range);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
}
}
}
// Expression strided_select(const Expression& x, const std::vector<int>& strides, const std::vector<int>& from = {}, const std::vector<int>& to = {});
BOOST_AUTO_TEST_CASE( strided_select_gradient3 ) {
dynet::ComputationGraph cg;
for(int from=0;from<2;from++){
for(int stride=1;stride<4;stride++){
const vector<int> strides = {stride,stride,stride};
const vector<int> from_range = {from,from,from};
Expression x1 = parameter(cg, param_cube1);
Expression y2 = strided_select(x1, strides, from_range);
Expression z = to_scalar(y2);
BOOST_CHECK(check_grad(mod, z, 0));
}
}
}
// Expression strided_select(const Expression& x, const std::vector<int>& strides, const std::vector<int>& from = {}, const std::vector<int>& to = {});
BOOST_AUTO_TEST_CASE( strided_select_gradient4 ) {
dynet::ComputationGraph cg;
for(int from=0;from<2;from++){
for(int to=from+1;to<4;to++){
for(int stride=1;stride<4;stride++){
const vector<int> strides = {stride,1,stride,stride};
const vector<int> from_range = {from,0,from,from};
const vector<int> to_range = {to,1,to,to};
Expression x1 = reshape(parameter(cg, param_cube1), Dim({3,1,3},3));
Expression y = strided_select(x1, strides, from_range, to_range);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
}
}
}
// Expression strided_select(const Expression& x, const std::vector<int>& strides, const std::vector<int>& from = {}, const std::vector<int>& to = {});
BOOST_AUTO_TEST_CASE( strided_select_gradient5 ) {
dynet::ComputationGraph cg;
for(int from=0;from<2;from++){
for(int to=from+1;to<4;to++){
for(int stride=1;stride<4;stride++){
const vector<int> strides = {stride,stride};
const vector<int> from_range = {from,from};
const vector<int> to_range = {to,to};
Expression x1 = reshape(parameter(cg, param_cube1), Dim({3,3,3,1},1));
Expression y = strided_select(x1, strides, from_range, to_range);
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
}
}
}
// Expression sum_elems(x);
BOOST_AUTO_TEST_CASE( sum_elems_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param4);
Expression z = sum_elems(x);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression mean_elems(x);
BOOST_AUTO_TEST_CASE( mean_elems_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param4);
Expression z = mean_elems(x);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression moment_elems(x, r);
BOOST_AUTO_TEST_CASE( moment_elems_gradient ) {
for (unsigned r = 2; r < 5; r++) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param4);
Expression z = moment_elems(x, r);
BOOST_CHECK(check_grad(mod, z, 0));
}
}
// Expression std_elems(x);
BOOST_AUTO_TEST_CASE( std_elems_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param4);
Expression z = std_elems(x);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression sum_batches(x);
BOOST_AUTO_TEST_CASE( sum_batches_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param4);
Expression y = reshape(x, Dim({1}, 6));
Expression z = sum_batches(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression mean_batches(x);
BOOST_AUTO_TEST_CASE( mean_batches_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param4);
Expression y = reshape(x, Dim({1}, 6));
Expression z = mean_batches(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression mean_batches(x);
BOOST_AUTO_TEST_CASE( mean_batches_gradient_multidim ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param4);
Expression y = reshape(x, Dim({1, 2}, 3));
Expression z = mean_batches(y);
z = mean_dim(z, {1});
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression moment_batches(x, r);
BOOST_AUTO_TEST_CASE( moment_batches_gradient ) {
for (unsigned r = 2; r < 5; r++) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param4);
Expression y = reshape(x, Dim({1}, 6));
Expression z = moment_batches(y, r);
BOOST_CHECK(check_grad(mod, z, 0));
}
}
// Expression sum_dim(x, r);
BOOST_AUTO_TEST_CASE( sum_dim_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param_cube1);
Expression z = x;
for (unsigned d = 3; d > 0; d--)
z = sum_dim(z, vector<unsigned>({d - 1}), false);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression std_batches(x);
BOOST_AUTO_TEST_CASE( std_batches_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param4);
Expression y = reshape(x, Dim({1}, 6));
Expression z = std_batches(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression mean_dim(x);
BOOST_AUTO_TEST_CASE( mean_dim_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param_cube1);
Expression z = x;
for (unsigned d = 3; d > 0; d--)
z = mean_dim(z, {d - 1});
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression moment_dim(x, r);
BOOST_AUTO_TEST_CASE( moment_dim_gradient ) {
for (unsigned r = 2; r < 5; r++) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param_cube1);
Expression z = x;
for (unsigned d = 3; d > 0; d--)
z = moment_dim(z, vector<unsigned>({d - 1}), r, false);
BOOST_CHECK(check_grad(mod, z, 0));
}
}
// Expression moment_dim(x, r);
BOOST_AUTO_TEST_CASE( moment_dim_gradient2 ) {
for (unsigned r = 2; r < 5; r++){
dynet::ComputationGraph cg;
Expression z = dynet::reshape(parameter(cg, param_cube2), Dim({3,3,3}, 2)) / 10;
for (unsigned d = 3; d > 0; d--)
z = moment_dim(z, vector<unsigned>({d - 1}), r, false);
z = moment_dim(z, vector<unsigned>({}), r, true);
BOOST_CHECK(check_grad(mod, z, 0));
}
}
// Expression moment_dim(x, r);
BOOST_AUTO_TEST_CASE( moment_dim_gradient3 ) {
for (unsigned r=1;r<5;r++){
dynet::ComputationGraph cg;
Expression x = dynet::reshape(parameter(cg, param_cube2), Dim({27}, 2))/10;
Expression y = moment_dim(x, vector<unsigned>({0}), r, true);
Expression z = moment_dim(x, vector<unsigned>({0}), r, false);
z = moment_dim(z, vector<unsigned>({}), r, true);
BOOST_CHECK(check_grad(mod, y, 0));
BOOST_CHECK(check_grad(mod, z, 0));
if(r==1) BOOST_CHECK_CLOSE(as_scalar(y.value()), as_scalar(z.value()), 0.001);
}
}
// Expression moment_dim(x, r);
BOOST_AUTO_TEST_CASE( moment_dim_gradient4 ) {
for (unsigned r=1;r<5;r++){
dynet::ComputationGraph cg;
Expression x = dynet::reshape(parameter(cg, param_cube2), Dim({3,9}, 2)) / 10;
Expression y = moment_dim(x, vector<unsigned>({0,1}), r, true);
Expression z = moment_dim(x, vector<unsigned>({0,1}), r, false);
z = moment_dim(z, vector<unsigned>({}), r, true);
BOOST_CHECK(check_grad(mod, y, 0));
BOOST_CHECK(check_grad(mod, z, 0));
if(r==1) BOOST_CHECK_CLOSE(as_scalar(y.value()), as_scalar(z.value()), 0.001);
}
}
// Expression std_dim(x);
BOOST_AUTO_TEST_CASE( std_dim_gradient3 ) {
dynet::ComputationGraph cg;
Expression x = dynet::reshape(parameter(cg, param_cube2), Dim({27}, 2))/10;
Expression y = std_dim(x, vector<unsigned>({0}), true);
Expression z = std_dim(x, vector<unsigned>({0}), false);
z = std_dim(z, vector<unsigned>({}), true);
BOOST_CHECK(check_grad(mod, y, 0));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression std_dim(x);
BOOST_AUTO_TEST_CASE( std_dim_gradient4 ) {
dynet::ComputationGraph cg;
Expression x = dynet::reshape(parameter(cg, param_cube2), Dim({3,9}, 2))/10;
Expression y = std_dim(x, vector<unsigned>({0,1}), true);
Expression z = std_dim(x, vector<unsigned>({0,1}), false);
z = std_dim(z, vector<unsigned>({}), true);
BOOST_CHECK(check_grad(mod, y, 0));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression std_dim(x);
BOOST_AUTO_TEST_CASE( std_dim_value ) {
dynet::ComputationGraph cg;
Expression x = dynet::reshape(parameter(cg, param_cube1), Dim({3,3}, 3));
Expression y = std_dim(x, vector<unsigned>({0}), true);
Expression z = mean_dim(y, vector<unsigned>({0}), false);
BOOST_CHECK_CLOSE(as_scalar(z.value()), 0.128319368, 0.1);
}
// Expression std_dim(x);
BOOST_AUTO_TEST_CASE( std_dim_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param_cube1);
Expression z = x;
for (unsigned d = 3; d > 0; d--)
z = std_dim(z, {d - 1});
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression layer_norm(x,g,b);
BOOST_AUTO_TEST_CASE( layer_norm_backward_gradient ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param1);
Expression g = parameter(cg, param2);
Expression b = parameter(cg, param3);
Expression y = layer_norm(x, g, b);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression layer_norm(x,g,b);
BOOST_AUTO_TEST_CASE( layer_norm_forward ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, param1);
Expression g = input(cg, Dim({3}), ones3_vals);
Expression b = zeroes(cg, Dim({3}));
Expression y = layer_norm(x, g, b);
float mu = abs(as_scalar((sum_elems(y) / 3.0).value()));
float std = as_scalar(sqrt(sum_elems(square(y)) / 3.0).value());
BOOST_CHECK_LT(mu, 1e-6);
BOOST_CHECK_CLOSE(std, 1, 0.01);
}
// Expression weight_norm(x,g);
BOOST_AUTO_TEST_CASE( weight_norm_forward ) {
dynet::ComputationGraph cg;
Expression w = parameter(cg, param_square1);
Expression g = parameter(cg, param_scalar1);
Expression y = weight_norm(w, g);
float norm = as_scalar(sqrt(sum_elems(square(y))).value());
BOOST_CHECK_CLOSE(norm, 2.2, 0.01);
}
// Expression weight_norm(x,g);
BOOST_AUTO_TEST_CASE( weight_norm_backward_gradient ) {
dynet::ComputationGraph cg;
Expression w = parameter(cg, param_square1);
Expression g = parameter(cg, param_scalar1);
Expression y = weight_norm(w, g);
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// sparse input overload: Expression input(ComputationGraph& g, const Dim& d, const std::vector<unsigned int>& ids, const std::vector<float>& data, float defdata = 0.f);
BOOST_AUTO_TEST_CASE( sparse_input_test ) {
dynet::ComputationGraph cg;
std::vector<unsigned int> ids = {0, 4};
Expression z = input(cg, Dim({3}, 2), ids, ones2_vals, 0.5);
std::vector<float> exp = {1.0f, 0.5f, 0.5f, 0.5f, 1.0f, 0.5f};
std::vector<float> act = as_vector(cg.forward(z));
assert(exp.size() == act.size());
for (size_t i = 0; i < exp.size(); ++i)
BOOST_CHECK_CLOSE(exp[i], act[i], 0.001);
}
// Expression one_hot(ComputationGraph& g, unsigned int d, unsigned int idx, Device *device = dynet::default_device);
BOOST_AUTO_TEST_CASE( one_hot_test ) {
dynet::ComputationGraph cg;
unsigned int idx = 5;
unsigned int d = 10;
Expression z = one_hot(cg, d, idx);
std::vector<float> values = as_vector(cg.forward(z));
BOOST_CHECK_EQUAL(d, values.size());
for (size_t i = 0; i < d; ++i)
BOOST_CHECK_EQUAL(values[i], i == idx ? 1.0 : 0.0);
}
// Expression one_hot(ComputationGraph& g, unsigned int d, const std::vector<unsigned int>& ids, Device *device = dynet::default_device);
BOOST_AUTO_TEST_CASE( batched_one_hot_test ) {
dynet::ComputationGraph cg;
vector<unsigned int> idxs = {1, 6};
unsigned int d = 10;
unsigned int batch_size = idxs.size();
Expression z = one_hot(cg, d, idxs);
std::vector<float> values = as_vector(cg.forward(z));
BOOST_CHECK_EQUAL(d * batch_size, values.size());
for (size_t b = 0; b < batch_size; ++b)
for (size_t i = 0; i < d; ++i)
BOOST_CHECK_EQUAL(values[b * d + i], (b * d + i == 1 || b * d + i == 16 ? 1.0 : 0.0));
}
// Expression lookup();
BOOST_AUTO_TEST_CASE( lookup_test ) {
dynet::ComputationGraph cg;
Expression x1 = lookup(cg, lookup1, (unsigned)0);
Expression x2 = lookup(cg, lookup1, (unsigned)2);
Expression y = x1 + x2;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression lookup();
BOOST_AUTO_TEST_CASE( lookup_highdim_batched_test ) {
dynet::ComputationGraph cg;
Expression x = lookup(cg, lookup4, {0, 2});
Expression z = sum_batches(to_scalar(x));
BOOST_CHECK(check_grad(mod, z, 0));
}
// Expression lookup();
BOOST_AUTO_TEST_CASE( lookup_autobatch_dim_test ) {
auto autobatch_cache = dynet::autobatch_flag;
dynet::autobatch_flag = 1;
dynet::ComputationGraph cg;
Expression x1 = lookup(cg, lookup1, (unsigned)0);
Expression x2 = lookup(cg, lookup2, (unsigned)5);
Expression y = x1 + x2;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
dynet::autobatch_flag = autobatch_cache;
}
// Expression lookup();
BOOST_AUTO_TEST_CASE( lookup_autobatch_diffmodel_test ) {
auto autobatch_cache = dynet::autobatch_flag;
dynet::autobatch_flag = 1;
dynet::ComputationGraph cg;
Expression x1 = lookup(cg, lookup1, (unsigned)0);
Expression x2 = lookup(cg, lookup3, (unsigned)5);
Expression y = x1 + x2;
Expression z = to_scalar(y);
BOOST_CHECK(check_grad(mod, z, 0));
dynet::autobatch_flag = autobatch_cache;
}
// Expression lookup();
BOOST_AUTO_TEST_CASE( lookup_autobatch_and_manbatch_test ) {
auto autobatch_cache = dynet::autobatch_flag;
for (dynet::autobatch_flag = 0; dynet::autobatch_flag < 2; ++dynet::autobatch_flag) {
dynet::ComputationGraph cg;
Expression x1 = lookup(cg, lookup1, {0, 1});
Expression x2 = lookup(cg, lookup1, {2, 0});
Expression y = x1 + x2;
Expression z = sum_batches(to_scalar(y));
BOOST_CHECK(check_grad(mod, z, 0));
}
dynet::autobatch_flag = autobatch_cache;
}
// Expression parameter() with lookup parameter input;
BOOST_AUTO_TEST_CASE( lookup_matrix_test ) {
dynet::ComputationGraph cg;
Expression x = parameter(cg, lookup1);
Expression z = to_scalar(x);
BOOST_CHECK(check_grad(mod, z, 0));
}
BOOST_AUTO_TEST_CASE( backward_test ) {
dynet::ComputationGraph cg;
Expression x1 = lookup(cg, lookup1, (unsigned)0);
Expression x2 = lookup(cg, lookup1, (unsigned)2);
Expression y = x1 + x2;
Expression z = to_scalar(y);
cg.backward(z);
}
BOOST_AUTO_TEST_CASE( gradient_value_test ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression l = dot_product(x1, x2);
cg.backward(l);
vector<float> x1_g1 = as_vector(x1.gradient());
vector<float> x1_g2 = as_vector(param1.get_storage().g);
for (unsigned i = 0; i < 3; i++) {
BOOST_CHECK_CLOSE(x1_g1[i], x1_g2[i], 0.001);
}
}
BOOST_AUTO_TEST_CASE( gradient_sanity_test ) {
dynet::ComputationGraph cg;
Expression x1 = parameter(cg, param1);
Expression x2 = parameter(cg, param2);
Expression l = dot_product(x1, x2);
cg.forward(l);
BOOST_CHECK_THROW(x1.gradient() , std::runtime_error);
}
// This just makes sure that nothing crashes
BOOST_AUTO_TEST_CASE( random_gumbel_test ) {
dynet::ComputationGraph cg;
Expression x1 = random_gumbel(cg, {20});
x1.value();
}
BOOST_AUTO_TEST_CASE( sanity_test ) {
Expression x;
{
dynet::ComputationGraph cg;
x = input(cg, {3}, ones3_vals);
}
BOOST_CHECK_THROW(x.value() , std::runtime_error);
}
BOOST_AUTO_TEST_SUITE_END()<|fim▁end|> | BOOST_AUTO_TEST_CASE( cmult_broadcast3_gradient ) {
Dim dim_permutations[] = {Dim({3,3,3},1), Dim({3,3,1},3), Dim({1,3,3},3), Dim({9,3,1},1), Dim({1,3,9},1), Dim({1,3,1},9)}; |
<|file_name|>sanitise_text.py<|end_file_name|><|fim▁begin|>import unicodedata
class SanitiseText:
ALLOWED_CHARACTERS = set()
REPLACEMENT_CHARACTERS = {
'–': '-', # EN DASH (U+2013)
'—': '-', # EM DASH (U+2014)
'…': '...', # HORIZONTAL ELLIPSIS (U+2026)
'‘': '\'', # LEFT SINGLE QUOTATION MARK (U+2018)
'’': '\'', # RIGHT SINGLE QUOTATION MARK (U+2019)
'“': '"', # LEFT DOUBLE QUOTATION MARK (U+201C)
'”': '"', # RIGHT DOUBLE QUOTATION MARK (U+201D)
'\u180E': '', # Mongolian vowel separator
'\u200B': '', # zero width space
'\u200C': '', # zero width non-joiner
'\u200D': '', # zero width joiner
'\u2060': '', # word joiner
'\uFEFF': '', # zero width non-breaking space
        '\u00A0': ' ', # NO-BREAK SPACE (U+00A0)
'\t': ' ', # TAB
}
@classmethod
def encode(cls, content):
return ''.join(cls.encode_char(char) for char in content)
@classmethod
def get_non_compatible_characters(cls, content):
"""
Given an input string, return a set of non compatible characters.
This follows the same rules as `cls.encode`, but returns just the characters that encode would replace with `?`
"""
return set(c for c in content if c not in cls.ALLOWED_CHARACTERS and cls.downgrade_character(c) is None)
@staticmethod
def get_unicode_char_from_codepoint(codepoint):
"""
Given a unicode codepoint (eg 002E for '.', 0061 for 'a', etc), return that actual unicode character.
unicodedata.decomposition returns strings containing codepoints, so we need to eval them ourselves
"""
        # let's just make sure we aren't evaling anything weird
if not set(codepoint) <= set('0123456789ABCDEF') or not len(codepoint) == 4:
raise ValueError('{} is not a valid unicode codepoint'.format(codepoint))
return eval('"\\u{}"'.format(codepoint))
@classmethod
def downgrade_character(cls, c):
"""
Attempt to downgrade a non-compatible character to the allowed character set. May downgrade to multiple
characters, eg `… -> ...`
Will return None if character is either already valid or has no known downgrade
"""
decomposed = unicodedata.decomposition(c)
if decomposed != '' and '<' not in decomposed:
# decomposition lists the unicode code points a character is made up of, if it's made up of multiple
# points. For example the á character returns '0061 0301', as in, the character a, followed by a combining<|fim▁hole|> # acute accent. The decomposition might, however, also contain a decomposition mapping in angle brackets.
# For a full list of the types, see here: https://www.compart.com/en/unicode/decomposition.
# If it's got a mapping, we're not sure how best to downgrade it, so just see if it's in the
# REPLACEMENT_CHARACTERS map. If not, then it's probably a letter with a modifier, eg á
# ASSUMPTION: The first character of a combined unicode character (eg 'á' == '0061 0301')
# will be the ascii char
return cls.get_unicode_char_from_codepoint(decomposed.split()[0])
else:
# try and find a mapping (eg en dash -> hyphen ('–': '-')), else return None
return cls.REPLACEMENT_CHARACTERS.get(c)
@classmethod
def encode_char(cls, c):
"""
Given a single unicode character, return a compatible character from the allowed set.
"""
# char is a good character already - return that native character.
if c in cls.ALLOWED_CHARACTERS:
return c
else:
c = cls.downgrade_character(c)
return c if c is not None else '?'
class SanitiseSMS(SanitiseText):
"""
Given an input string, makes it GSM and Welsh character compatible. This involves removing all non-gsm characters by
applying the following rules
* characters within the GSM character set (https://en.wikipedia.org/wiki/GSM_03.38)
and extension character set are kept
* Welsh characters not included in the default GSM character set are kept
* characters with sensible downgrades are replaced in place
* characters with diacritics (accents, umlauts, cedillas etc) are replaced with their base character, eg é -> e
* en dash and em dash (– and —) are replaced with hyphen (-)
* left/right quotation marks (‘, ’, “, ”) are replaced with ' and "
* zero width spaces (sometimes used to stop eg "gov.uk" linkifying) are removed
* tabs are replaced with a single space
* any remaining unicode characters (eg chinese/cyrillic/glyphs/emoji) are replaced with ?
"""
WELSH_DIACRITICS = set(
'àèìòùẁỳ' 'ÀÈÌÒÙẀỲ' # grave
'áéíóúẃý' 'ÁÉÍÓÚẂÝ' # acute
'äëïöüẅÿ' 'ÄËÏÖÜẄŸ' # diaeresis
'âêîôûŵŷ' 'ÂÊÎÔÛŴŶ' # carets
)
EXTENDED_GSM_CHARACTERS = set('^{}\\[~]|€')
GSM_CHARACTERS = set(
'@£$¥èéùìòÇ\nØø\rÅåΔ_ΦΓΛΩΠΨΣΘΞ\x1bÆæßÉ !"#¤%&\'()*+,-./0123456789:;<=>?' +
'¡ABCDEFGHIJKLMNOPQRSTUVWXYZÄÖÑܧ¿abcdefghijklmnopqrstuvwxyzäöñüà'
) | EXTENDED_GSM_CHARACTERS
ALLOWED_CHARACTERS = GSM_CHARACTERS | WELSH_DIACRITICS
# some welsh characters are in GSM and some aren't - we need to distinguish between these for counting fragments
WELSH_NON_GSM_CHARACTERS = WELSH_DIACRITICS - GSM_CHARACTERS
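    # e.g. SanitiseSMS.encode('“curly quotes” — and 😀') == '"curly quotes" - and ?'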
class SanitiseASCII(SanitiseText):
"""
As SMS above, but the allowed characters are printable ascii, from character range 32 to 126 inclusive.
[chr(x) for x in range(32, 127)]
"""
ALLOWED_CHARACTERS = set(
' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ' +
'[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~'
)<|fim▁end|> | |
<|file_name|>compiler.py<|end_file_name|><|fim▁begin|>""" Notices indicate how a regulation has changed since the last version. This
module contains code to compile a regulation from a notice's changes. """
from bisect import bisect
from collections import defaultdict
import copy
import itertools
import logging
from regparser.grammar.tokens import Verb
from regparser.tree.struct import Node, find
from regparser.tree.xml_parser import interpretations
from regparser.tree.xml_parser import tree_utils
from regparser.utils import roman_nums
def get_parent_label(node):
""" Given a node, get the label of it's parent. """
if node.node_type == Node.SUBPART:
return node.label[0]
elif node.node_type == Node.INTERP:
marker_position = node.label.index(Node.INTERP_MARK)
interpreting = node.label[:marker_position]
comment_pars = node.label[marker_position + 1:]
if comment_pars: # 111-3-a-Interp-4-i
return '-'.join(node.label[:-1])
elif len(interpreting) > 1: # 111-3-a-Interp
return '-'.join(interpreting[:-1] + [Node.INTERP_MARK])
else: # 111-Interp
return node.label[0]
else:
parent_label = node.label[:-1]
return '-'.join(parent_label)
def make_label_sortable(label, roman=False):
""" Make labels sortable, but converting them as appropriate.
Also, appendices have labels that look like 30(a), we make those
appropriately sortable. """
if label.isdigit():
return (int(label),)
if roman:
romans = list(itertools.islice(roman_nums(), 0, 50))
return (1 + romans.index(label),)
# segment the label piece into component parts
# e.g. 45Ai33b becomes (45, 'A', 'i', 33, 'b')
INT, UPPER, LOWER = 1, 2, 3
segments, segment, seg_type = [], "", None
for ch in label:
if ch.isdigit():
ch_type = INT
elif ch.isalpha() and ch == ch.upper():
ch_type = UPPER
elif ch.isalpha() and ch == ch.lower():
ch_type = LOWER
else:
# other character, e.g. parens, guarantee segmentation
ch_type = None
if ch_type != seg_type and segment: # new type of character
segments.append(segment)
segment = ""
seg_type = ch_type
if ch_type:
segment += ch
if segment: # ended with something other than a paren
segments.append(segment)
segments = [int(seg) if seg.isdigit() else seg for seg in segments]
return tuple(segments)
def make_root_sortable(label, node_type):
""" Child nodes of the root contain nodes of various types, these
need to be sorted correctly. This returns a tuple to help
sort these first level nodes. """
if node_type == Node.SUBPART or node_type == Node.EMPTYPART:
return (0, label[-1])
elif node_type == Node.APPENDIX:
return (1, label[-1])
elif node_type == Node.INTERP:
return (2,)
def replace_first_sentence(text, replacement):
""" Replace the first sentence in text with replacement. This makes
some incredibly simplifying assumptions - so buyer beware. """
no_periods_replacement = replacement.replace('.', '')
sentences = text.split('.', 1)
if len(sentences) > 1:
sentences[0] = no_periods_replacement
return '.'.join(sentences)
else:
return replacement
def overwrite_marker(origin, new_label):
""" The node passed in has a label, but we're going to give it a
new one (new_label). This is necessary during node moves. """
if origin.node_type == Node.REGTEXT:
marker_list = tree_utils.get_paragraph_markers(origin.text)
if len(marker_list) > 0:
marker = '(%s)' % marker_list[0]
new_marker = '(%s)' % new_label
origin.text = origin.text.replace(marker, new_marker, 1)
elif origin.node_type == Node.INTERP:
marker = interpretations.get_first_interp_marker(origin.text)
marker = marker + '.'
new_marker = new_label + '.'
origin.text = origin.text.replace(marker, new_marker, 1)
return origin
def is_reserved_node(node):
""" Return true if the node is reserved. """
reserved_title = node.title and '[Reserved]' in node.title
reserved_text = node.text and '[Reserved]' in node.text
return (reserved_title or reserved_text)
def is_interp_placeholder(node):
"""Interpretations may have nodes that exist purely to enforce
structure. Knowing if a node is such a placeholder makes it easier to
know if a POST should really just modify the existing placeholder."""
return (Node.INTERP_MARK in node.label
and not node.text and not node.title)
class RegulationTree(object):
""" This encapsulates a regulation tree, and methods to change that tree.
"""
def __init__(self, previous_tree):
self.tree = copy.deepcopy(previous_tree)
self._kept__by_parent = defaultdict(list)
def keep(self, labels):
"""The 'KEEP' verb tells us that a node should not be removed
(generally because it would had we dropped the children of its
parent). "Keeping" those nodes makes sure they do not disappear when
editing their parent"""
for label in labels:
node = self.find_node(label)
parent_label = get_parent_label(node)
self._kept__by_parent[parent_label].append(node)
def get_parent(self, node):
""" Get the parent of a node. Returns None if parent not found. """
parent_label_id = get_parent_label(node)
return find(self.tree, parent_label_id)
def add_to_root(self, node):
""" Add a child to the root of the tree. """
self.tree.children.append(node)
for c in self.tree.children:
c.sortable = make_root_sortable(c.label, c.node_type)
self.tree.children.sort(key=lambda x: x.sortable)
for c in self.tree.children:
del c.sortable
def add_child(self, children, node, order=None):
""" Add a child to the children, and sort appropriately. This is used
for non-root nodes. """
children = children + [node] # non-destructive
if order and set(order) == set(c.label_id() for c in children):
lookup = {}
for c in children:
lookup[c.label_id()] = c
return [lookup[label_id] for label_id in order]
else:
sort_order = []
for c in children:
if c.label[-1] == Node.INTERP_MARK:
sort_order.append((2,) + make_label_sortable(
c.label[-2], roman=(len(c.label) == 6)))
elif Node.INTERP_MARK in c.label:
marker_idx = c.label.index(Node.INTERP_MARK)
comment_pars = c.label[marker_idx + 1:]
sort_order.append((1,) + make_label_sortable(
comment_pars[-1], roman=(len(comment_pars) == 2)))
elif c.node_type == Node.APPENDIX:
sort_order.append(make_label_sortable(c.label[-1], False))
else:
sort_order.append(make_label_sortable(
c.label[-1], roman=(len(c.label) == 5)))
new_el_sort = sort_order[-1]
sort_order = sort_order[:-1]
# Use bisect so the whole list isn't resorted (the original list
# may not be strictly sorted)
insert_idx = bisect(sort_order, new_el_sort)
return children[:insert_idx] + [node] + children[insert_idx:-1]
def delete_from_parent(self, node):
""" Delete node from it's parent, effectively removing it from the
tree. """
parent = self.get_parent(node)
other_children = [c for c in parent.children if c.label != node.label]
parent.children = other_children
def delete(self, label_id):
""" Delete the node with label_id from the tree. """
node = find(self.tree, label_id)
if node is None:
logging.warning("Attempting to delete %s failed", label_id)
else:
self.delete_from_parent(node)
def reserve(self, label_id, node):
""" Reserve either an existing node (by replacing it) or
reserve by adding a new node. When a node is reserved, it's
represented in the FR XML. We simply use that representation here
instead of doing something else. """
existing_node = find(self.tree, label_id)
if existing_node is None:
self.add_node(node)
else:
self.replace_node_and_subtree(node)
def move(self, origin, destination):
""" Move a node from one part in the tree to another. """
origin = find(self.tree, origin)
self.delete_from_parent(origin)
origin = overwrite_marker(origin, destination[-1])
origin.label = destination
self.add_node(origin)
def get_section_parent(self, node):
""" If we're trying to get the parent of an existing section, it
might be part of a subpart. So, let's find the correct subpart. """
subpart = self.get_subpart_for_node(node.label_id())
if subpart is not None:
return subpart
else:
return self.get_parent(node)
def replace_node_and_subtree(self, node):
""" Replace an existing node in the tree with node. """
if len(node.label) == 2 and node.node_type == Node.REGTEXT:
parent = self.get_section_parent(node)
else:
parent = self.get_parent(node)
prev_idx = [idx for idx, c in enumerate(parent.children)
if c.label == node.label]
if prev_idx:
# replace existing element in place
prev_idx = prev_idx[0]
parent.children = (parent.children[:prev_idx] + [node] +
parent.children[prev_idx + 1:])
else:
# actually adding a new element
parent.children = self.add_child(parent.children, node,
getattr(parent, 'child_labels',
[]))
# Finally, we see if this node is the parent of any 'kept' children.
# If so, add them back
label_id = node.label_id()
if label_id in self._kept__by_parent:
for kept in self._kept__by_parent[label_id]:
node.children = self.add_child(node.children, kept,
getattr(node, 'child_labels',
[]))
def create_empty_node(self, node_label):
""" In rare cases, we need to flush out the tree by adding
an empty node. Returns the created node"""
node_label = node_label.split('-')
if Node.INTERP_MARK in node_label:
node_type = Node.INTERP
elif len(node_label) > 1 and not node_label[1].isdigit():
node_type = Node.APPENDIX
else:
node_type = Node.REGTEXT
node = Node(label=node_label, node_type=node_type)
parent = self.get_parent(node)
if not parent:
parent = self.create_empty_node(get_parent_label(node))
parent.children = self.add_child(parent.children, node,
getattr(parent, 'child_labels', []))
return node
def contains(self, label):
"""Is this label already in the tree? label can be a list or a
string"""
return bool(self.find_node(label))
def find_node(self, label):
if isinstance(label, list):
label = '-'.join(label)
return find(self.tree, label)
def add_node(self, node):
""" Add an entirely new node to the regulation tree. """
existing = find(self.tree, node.label_id())
if existing and is_reserved_node(existing):
logging.warning('Replacing reserved node: %s' % node.label_id())
return self.replace_node_and_subtree(node)
elif existing and is_interp_placeholder(existing):
existing.title = node.title
existing.text = node.text
if hasattr(node, 'tagged_text'):
existing.tagged_text = node.tagged_text
# Unfortunately, the same nodes (particularly headers) might be
# added by multiple notices...
elif (existing and existing.text == node.text
and existing.title == node.title
and getattr(existing, 'tagged_text', '') == getattr(
node, 'tagged_text', '')):
pass
else:
if existing:
logging.warning(
'Adding a node that already exists: %s' % node.label_id())
print '%s %s' % (existing.text, node.label)
print '----'
if ((node.node_type == Node.APPENDIX and len(node.label) == 2)
or node.node_type == Node.SUBPART):
return self.add_to_root(node)
else:
parent = self.get_parent(node)
if parent is None:
# This is a corner case, where we're trying to add a child
# to a parent that should exist.
logging.warning('No existing parent for: %s' %
node.label_id())
parent = self.create_empty_node(get_parent_label(node))
# Fix the case where the node with label "<PART>-Subpart" is
# the correct parent.
if (parent.children
and parent.children[0].node_type == Node.EMPTYPART):
parent = parent.children[0]
parent.children = self.add_child(
parent.children, node, getattr(parent, 'child_labels',
[]))
def add_section(self, node, subpart_label):
""" Add a new section to a subpart. """
subpart = find(self.tree, '-'.join(subpart_label))
subpart.children = self.add_child(subpart.children, node)
def replace_node_text(self, label, change):
""" Replace just a node's text. """
node = find(self.tree, label)
node.text = change['node']['text']
def replace_node_title(self, label, change):
""" Replace just a node's title. """
node = find(self.tree, label)
node.title = change['node']['title']
def replace_node_heading(self, label, change):
""" A node's heading is it's keyterm. We handle this here, but not
well, I think. """
node = find(self.tree, label)
node.text = replace_first_sentence(node.text, change['node']['text'])
if hasattr(node, 'tagged_text') and 'tagged_text' in change['node']:
node.tagged_text = replace_first_sentence(
node.tagged_text, change['node']['tagged_text'])
def get_subparts(self):
""" Get all the subparts and empty parts in the tree. """
def subpart_type(c):
""" Return True if a subpart or an empty part. """
return c.node_type in (Node.EMPTYPART, Node.SUBPART)
return [c for c in self.tree.children if subpart_type(c)]
def create_new_subpart(self, subpart_label):
""" Create a whole new subpart. """
# XXX Subparts need titles. We'll need to pull this up from parsing.
subpart_node = Node('', [], subpart_label, None, Node.SUBPART)
self.add_to_root(subpart_node)
return subpart_node
def get_subpart_for_node(self, label_id):
""" Return the subpart a node resides in. Note that this can't be
determined by simply looking at a node's label. """
subparts = self.get_subparts()
subparts_with_label = [s for s in subparts
if find(s, label_id) is not None]
if len(subparts_with_label) > 0:
return subparts_with_label[0]
def move_to_subpart(self, label, subpart_label):
""" Move an existing node to another subpart. If the new subpart
doesn't exist, create it. """
destination = find(self.tree, '-'.join(subpart_label))
if destination is None:
destination = self.create_new_subpart(subpart_label)
subpart_with_node = self.get_subpart_for_node(label)
if destination and subpart_with_node:
node = find(subpart_with_node, label)
other_children = [c for c in subpart_with_node.children
if c.label_id() != label]
subpart_with_node.children = other_children
destination.children = self.add_child(destination.children, node)
if not subpart_with_node.children:
self.delete('-'.join(subpart_with_node.label))
def dict_to_node(node_dict):
""" Convert a dictionary representation of a node into a Node object if
it contains the minimum required fields. Otherwise, pass it through
unchanged. """
minimum_fields = set(('text', 'label', 'node_type'))
if minimum_fields.issubset(node_dict.keys()):
node = Node(
node_dict['text'], [], node_dict['label'],
node_dict.get('title', None), node_dict['node_type'])
if 'tagged_text' in node_dict:
node.tagged_text = node_dict['tagged_text']
if 'child_labels' in node_dict:
node.child_labels = node_dict['child_labels']
return node
else:
return node_dict
def sort_labels(labels):
""" Deal with higher up elements first. """
sorted_labels = sorted(labels, key=lambda x: len(x))
    # The length of a Subpart label doesn't indicate its level in the tree
subparts = [l for l in sorted_labels if 'Subpart' in l]
non_subparts = [l for l in sorted_labels if 'Subpart' not in l]
return subparts + non_subparts
def replace_node_field(reg, label, change):
""" Call one of the field appropriate methods if we're changing just
a field on a node. """
if change['action'] == 'PUT' and change['field'] == '[text]':
reg.replace_node_text(label, change)
elif change['action'] == 'PUT' and change['field'] == '[title]':
reg.replace_node_title(label, change)
elif change['action'] == 'PUT' and change['field'] == '[heading]':
reg.replace_node_heading(label, change)
def one_change(reg, label, change):
"""Notices are generally composed of many changes; this method handles a
single change to the tree."""
field_list = ['[text]', '[title]', '[heading]']
replace_subtree = 'field' not in change
if change['action'] == 'PUT' and replace_subtree:
node = dict_to_node(change['node'])
reg.replace_node_and_subtree(node)
elif change['action'] == 'PUT' and change['field'] in field_list:
replace_node_field(reg, label, change)
elif change['action'] == 'POST':
node = dict_to_node(change['node'])
if 'subpart' in change and len(node.label) == 2:
reg.add_section(node, change['subpart'])
else:
reg.add_node(node)
elif change['action'] == 'DESIGNATE':
if 'Subpart' in change['destination']:
reg.move_to_subpart(label, change['destination'])
elif change['action'] == 'MOVE':
reg.move(label, change['destination'])
elif change['action'] == 'DELETE':
reg.delete(label)<|fim▁hole|> else:
print "%s: %s" % (change['action'], label)
def _needs_delay(reg, change):
"""Determine whether we should delay processing this change. This will
be used in a second pass when compiling the reg"""
action = change['action']
if action == 'MOVE':
return reg.contains(change['destination'])
if action == 'POST':
existing = reg.find_node(change['node']['label'])
return existing and not is_reserved_node(existing)
return False
def compile_regulation(previous_tree, notice_changes):
""" Given a last full regulation tree, and the set of changes from the
next final notice, construct the next full regulation tree. """
reg = RegulationTree(previous_tree)
labels = sort_labels(notice_changes.keys())
reg_part = previous_tree.label[0]
labels = filter(lambda l: l.split('-')[0] == reg_part, labels)
next_pass = [(label, change)
for label in labels
for change in notice_changes[label]]
pass_len = len(next_pass) + 1
reg.keep(l for l, change in next_pass if change['action'] == Verb.KEEP)
next_pass = [pair for pair in next_pass if pair[1]['action'] != Verb.KEEP]
# Monotonically decreasing length - guarantees we'll end
while pass_len > len(next_pass):
pass_len = len(next_pass)
current_pass, next_pass = next_pass, []
for label, change in current_pass:
if _needs_delay(reg, change):
next_pass.append((label, change))
else:
one_change(reg, label, change)
# Force any remaining changes -- generally means something went wrong
for label, change in next_pass:
logging.warning('Conflicting Change: %s:%s', label, change['action'])
one_change(reg, label, change)
return reg.tree<|fim▁end|> | elif change['action'] == 'RESERVE':
node = dict_to_node(change['node'])
reg.reserve(label, node) |
<|file_name|>removed-syntax-extern-const.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -Z parse-only<|fim▁hole|> const i: isize;
//~^ ERROR expected one of `fn`, `pub`, `static`, `unsafe`, or `}`, found `const`
}<|fim▁end|> |
extern { |
<|file_name|>no_0064_minimum_path_sum.rs<|end_file_name|><|fim▁begin|>struct Solution;
impl Solution {
pub fn min_path_sum(grid: Vec<Vec<i32>>) -> i32 {
if grid.is_empty() {<|fim▁hole|> return 0;
}
let (m, n) = (grid.len(), grid[0].len());
let mut arr = vec![std::i32::MAX; n];
arr[0] = 0;
for i in 0..m {
for j in 0..n {
// 选择上面(arr[j])和左面(arr[j-1])最小的那个
if j == 0 {
// 左边没有,只有上面的元素
arr[j] = arr[j] + grid[i][j];
} else {
// 有左边和上边
arr[j] = arr[j].min(arr[j - 1]) + grid[i][j];
}
}
}
arr[n - 1]
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_min_path_sum() {
let grid = vec![vec![1, 3, 1], vec![1, 5, 1], vec![4, 2, 1]];
assert_eq!(Solution::min_path_sum(grid), 7);
}
}<|fim▁end|> | |
<|file_name|>arc065c.cpp<|end_file_name|><|fim▁begin|>#include <bits/stdc++.h>
using namespace std;
void solve() {
string s; cin >> s;
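    // Work on the reversed string: reversed, none of the four words is a
    // prefix of another, so a greedy left-to-right scan is unambiguous.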
reverse(s.begin(), s.end());
vector<string> pre = {"dream", "dreamer", "erase", "eraser"};
for (auto& x: pre) {
reverse(x.begin(), x.end());
}
int i = 0, n = s.size();
while (i < n) {
if (s.substr(i, 5) == pre[0] || s.substr(i, 5) == pre[2])
i += 5;
else if (s.substr(i, 6) == pre[3])
i += 6;
else if (s.substr(i, 7) == pre[1])
i += 7;
else {
cout << "NO"; return;
}
}
cout << "YES";
}<|fim▁hole|> ios_base::sync_with_stdio(false);
cin.tie(NULL);
solve();
cout << endl;
}<|fim▁end|> |
int main() { |
<|file_name|>overlay_nested2.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import warnings as _warnings
_warnings.resetwarnings()
_warnings.filterwarnings('error')
from tdi import html
template = html.from_string("""
<node tdi="item">
<znode tdi="nested" tdi:overlay="foo">
<ynode tdi="subnested"></ynode>
</znode>
<xnode tdi="a"></xnode>
</node>
""".lstrip()).overlay(html.from_string("""
<anode tdi="grumpf" tdi:overlay="foo">
<bnode tdi:overlay="bar"></bnode>
</anode>
<anode tdi="zonk" tdi:overlay="bar">
<bnode tdi="schnick"></bnode>
</anode>
""".lstrip())).overlay(html.from_string("""
<anode tdi="zonk" tdi:overlay="bar">
<bnode tdi="schnick"></bnode>
</anode>
""".lstrip()))
class Model(object):<|fim▁hole|> def render_schnick(self, node):
node.content = u"something"
model = Model()
template.render(model)<|fim▁end|> | def render_nested(self, node):
node['been'] = u'here'
|
<|file_name|>db_update.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import argparse
import sys
import re
import settings
import psycopg2
import imp
########################################################################################
class UpdSource():
def __init__(self,in_source,last_update):
self.upd_dict=dict()
self.source=in_source
self.src_handler=None
self.last_update=last_update
if self.source=='file':
self._parse_file()
def _parse_file(self):
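        # Import the repo file as a module and collect every attribute named
        # upd_<n> into self.upd_dict, keyed by its update number.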
try:
src=open(settings.repo_file_name,'r')
except IOError as e:
print "ERROR! Cannot open file"
sys.exit(1)
try:
        repo=imp.load_source('repo',settings.repo_file_name,src)
except Exception as e:
print "Error happened: {0}".format(e)
sys.exit(1)
for var in dir(repo):
            if re.match('^upd_\d',var) and int(var[4:]) > self.last_update:
                self.upd_dict[int(var[4:])]=getattr(repo,var)
# lines=[]
# for line in src:
# if re.match('^upd_\d',line):
# lines.append(line.strip())
# elif len(line.strip()) == 0:
# continue
# else:
# lines[-1] += ' '+line.strip()
#
# for upd in lines:
# a=upd.split('=',1)
# if re.match('^upd_\d$',a[0]) and int(a[0][4:]) > self.last_update:
# self.upd_dict[int(a[0][4:])]=a[1][1:-1]
def get_upd_dict(self):
return self.upd_dict
def get_total_updates(self):
return len(self.upd_dict)
class DbState():
def __init__(self,in_db_conn):
self.db_conn=in_db_conn
self.last_applied=None
self.installed=-1
# self.check_installed()
def __del__(self):
if not self.db_conn.closed:
self.db_conn.close()
def get_last_applied(self):
if not self.last_applied:
cur=self.db_conn.cursor()
try:
cur.execute(settings.get_last_applied_stmt)
except Exception as e:
print "Error! Cannot get last applied update! {0}".format(e.pgerror)
return -1
self.last_applied=cur.fetchone()[0]
cur.close()
return self.last_applied
def _check_installed(self):
cur=self.db_conn.cursor()
try:
cur.execute(settings.get_install_check_stmt)
except Exception as e:
print "ERROR! Cannot determine installed! {0}".format(e.pgerror)
return False
self.installed=cur.fetchone()[0]
return True
def install(self):
cur=self.db_conn.cursor()
try:
cur.execute(settings.install_stmt)
except Exception as e:
print "ERROR! Cannot create db_update table!{0}".format(e.pgerror)
return False
else:
self.db_conn.commit()
print "Application successfully installed"
return True
def get_installed(self):
if self.installed == -1:
self._check_installed()
return self.installed
class Apply():
def __init__(self,in_db_conn):
self.db_conn=in_db_conn
self.num_applied=0
def __del__(self):
if not self.db_conn.closed:
self.db_conn.close()
def _apply_one_update(self,number,stmt,dry_run):
cur=self.db_conn.cursor()
try:
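            # mogrify renders the statement client-side without executing it,
            # which serves as a cheap sanity check before the real run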
cur.mogrify(stmt)
except Exception as e:
print "ERROR! Mistake in update {0}{1}".format(number,e.pgerror)
return False
if dry_run:
print "upd_{0} => {1}".format(number,stmt)<|fim▁hole|> except Exception as e:
print "ERROR! Cannot run update {0}\n{1}".format(number,e.pgerror)
# print "\n"+stmt+"\n"
return False
return True
def iterate_over(self,in_upd_dict,dry_run):
        for num,stmt in sorted(in_upd_dict.iteritems()):
res=self._apply_one_update(num,stmt,dry_run)
if res and not dry_run:
self.db_conn.commit()
self.num_applied += 1
if args.verbose:
print "Update number {0} applied successfully"
elif not res:
break
def get_num_applied(self):
return self.num_applied
#########################################################################
parser = argparse.ArgumentParser(description='Database changes and updates tracking system')
parser.add_argument('-r',action='store_true',dest='dry_run',default=False,help="Show updates and exit")
parser.add_argument('-i','--install',action='store_true',default=False,help="Install application and go to dry_run mode")
parser.add_argument('-s',choices=['file','SQLite'],default='file',dest='source',
help="Source for updates. SQLite is not supported currently")
parser.add_argument('-v','--verbose',action='store_true',default=False,help="Show additional info on terminal")
parser.add_argument('-l','--last',dest='last_applied',action='store_true',default=False,
help="Show last applied update number and exit")
args=parser.parse_args()
if args.install:
args.dry_run=True
try:
conn=psycopg2.connect(settings.custom_dsn('db_handler_1'))
except Exception as e:
print "ERROR! Cannot connect to database {0}".format(e)
sys.exit(1)
db_st=DbState(conn)
installed=db_st.get_installed()
#last_applied=db_st.get_last_applied()
#if last_applied == -1:
# conn.close()
# sys.exit()
if installed == 0:
install=db_st.install()
if not install:
conn.close()
sys.exit(1)
elif installed == 1:
if args.install:
print "Application already installed"
elif installed == -1:
conn.close()
sys.exit(1)
if args.install:
conn.close()
sys.exit(1)
last_applied=db_st.get_last_applied()
if args.last_applied:
if last_applied == 0:
print "No updates applied"
else:
print "Last applied update: upd_{0}".format(last_applied)
conn.close()
sys.exit()
us=UpdSource(args.source,last_applied)
upd_dict=us.get_upd_dict()
ap=Apply(conn)
ap.iterate_over(upd_dict,args.dry_run)
if not args.dry_run:
print "Applied {0} updates out of {1}".format(ap.get_num_applied(),us.get_total_updates())<|fim▁end|> | else:
try:
cur.execute(stmt)
cur.execute(settings.confirm_stmt,(number,stmt)) |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>"""course_discovery URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
import os
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import logout
from django.core.urlresolvers import reverse_lazy
from django.views.generic import RedirectView
from course_discovery.apps.core import views as core_views
admin.autodiscover()
# pylint: disable=invalid-name
# Always login via edX OpenID Connect
login = RedirectView.as_view(url=reverse_lazy('social:begin', args=['edx-oidc']), permanent=False, query_string=True)
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include('course_discovery.apps.api.urls', namespace='api')),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^auto_auth/$', core_views.AutoAuth.as_view(), name='auto_auth'),
url(r'^health/$', core_views.health, name='health'),
url(r'^login/$', login, name='login'),
url(r'^logout/$', logout, name='logout'),
url('', include('social.apps.django_app.urls', namespace='social')),
]
<|fim▁hole|><|fim▁end|> | if settings.DEBUG and os.environ.get('ENABLE_DJANGO_TOOLBAR', False): # pragma: no cover
import debug_toolbar # pylint: disable=import-error
urlpatterns.append(url(r'^__debug__/', include(debug_toolbar.urls))) |
<|file_name|>FilterSetUnit.test.tsx<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import React from 'react';
import { render, screen } from 'spec/helpers/testing-library';
import { mockStore } from 'spec/fixtures/mockStore';
import { Provider } from 'react-redux';
import userEvent from '@testing-library/user-event';
import FilterSetUnit, { FilterSetUnitProps } from './FilterSetUnit';
const createProps = () => ({
editMode: true,
setFilterSetName: jest.fn(),<|fim▁hole|> onRebuild: jest.fn(),
});
function openDropdown() {
const dropdownIcon = screen.getByRole('img', { name: 'ellipsis' });
userEvent.click(dropdownIcon);
}
const setup = (props: FilterSetUnitProps) => (
<Provider store={mockStore}>
<FilterSetUnit {...props} />
</Provider>
);
test('should render', () => {
const mockedProps = createProps();
const { container } = render(setup(mockedProps));
expect(container).toBeInTheDocument();
});
test('should render the edit button', () => {
const mockedProps = createProps();
const editModeOffProps = {
...mockedProps,
editMode: false,
};
render(setup(editModeOffProps));
expect(screen.getByRole('button', { name: 'Edit' })).toBeInTheDocument();
});
test('should render the menu', () => {
const mockedProps = createProps();
render(setup(mockedProps));
openDropdown();
expect(screen.getByRole('menu')).toBeInTheDocument();
expect(screen.getAllByRole('menuitem')).toHaveLength(3);
expect(screen.getByText('Edit')).toBeInTheDocument();
expect(screen.getByText('Rebuild')).toBeInTheDocument();
expect(screen.getByText('Delete')).toBeInTheDocument();
});
test('should edit', () => {
const mockedProps = createProps();
render(setup(mockedProps));
openDropdown();
const editBtn = screen.getByText('Edit');
expect(mockedProps.onEdit).not.toHaveBeenCalled();
userEvent.click(editBtn);
expect(mockedProps.onEdit).toHaveBeenCalled();
});
test('should delete', () => {
const mockedProps = createProps();
render(setup(mockedProps));
openDropdown();
const deleteBtn = screen.getByText('Delete');
expect(mockedProps.onDelete).not.toHaveBeenCalled();
userEvent.click(deleteBtn);
expect(mockedProps.onDelete).toHaveBeenCalled();
});
test('should rebuild', () => {
const mockedProps = createProps();
render(setup(mockedProps));
openDropdown();
const rebuildBtn = screen.getByText('Rebuild');
expect(mockedProps.onRebuild).not.toHaveBeenCalled();
userEvent.click(rebuildBtn);
expect(mockedProps.onRebuild).toHaveBeenCalled();
});<|fim▁end|> | onDelete: jest.fn(),
onEdit: jest.fn(), |
<|file_name|>proxy.go<|end_file_name|><|fim▁begin|>package proxy
import (
"context"
"net"
"net/http"
"strings"
"time"
gmux "github.com/gorilla/mux"
v3 "github.com/rancher/rancher/pkg/generated/controllers/management.cattle.io/v3"
managementv3 "github.com/rancher/rancher/pkg/generated/norman/management.cattle.io/v3"
"github.com/rancher/rancher/pkg/settings"
"github.com/rancher/remotedialer"
"github.com/rancher/steve/pkg/auth"
"github.com/rancher/steve/pkg/proxy"
authzv1 "k8s.io/api/authorization/v1"
"k8s.io/apiserver/pkg/authentication/user"
"k8s.io/apiserver/pkg/authorization/authorizer"
"k8s.io/apiserver/pkg/authorization/authorizerfactory"
"k8s.io/apiserver/pkg/endpoints/request"
v1 "k8s.io/client-go/kubernetes/typed/authorization/v1"
"k8s.io/client-go/rest"
)
type Handler struct {
authorizer authorizer.Authorizer
dialerFactory ClusterDialerFactory
clusters v3.ClusterCache
}
type ClusterDialerFactory func(clusterID string) remotedialer.Dialer
func RewriteLocalCluster(next http.Handler) http.Handler {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
if strings.HasPrefix(req.URL.Path, "/k8s/clusters/local") {
req.URL.Path = strings.TrimPrefix(req.URL.Path, "/k8s/clusters/local")
if req.URL.Path == "" {
req.URL.Path = "/"
}
}
next.ServeHTTP(rw, req)
})
}
func NewProxyMiddleware(sar v1.SubjectAccessReviewInterface,
dialerFactory ClusterDialerFactory,
clusters v3.ClusterCache,
localSupport bool,
localCluster http.Handler) (func(http.Handler) http.Handler, error) {
cfg := authorizerfactory.DelegatingAuthorizerConfig{
SubjectAccessReviewClient: sar,
AllowCacheTTL: time.Second * time.Duration(settings.AuthorizationCacheTTLSeconds.GetInt()),
DenyCacheTTL: time.Second * time.Duration(settings.AuthorizationDenyCacheTTLSeconds.GetInt()),
WebhookRetryBackoff: &auth.WebhookBackoff,
}
authorizer, err := cfg.New()
if err != nil {
return nil, err
}
proxyHandler := NewProxyHandler(authorizer, dialerFactory, clusters)
mux := gmux.NewRouter()
mux.UseEncodedPath()
mux.Path("/v1/management.cattle.io.clusters/{clusterID}").Queries("link", "shell").HandlerFunc(routeToShellProxy("link", "shell", localSupport, localCluster, mux, proxyHandler))
mux.Path("/v1/management.cattle.io.clusters/{clusterID}").Queries("action", "apply").HandlerFunc(routeToShellProxy("action", "apply", localSupport, localCluster, mux, proxyHandler))
mux.Path("/v3/clusters/{clusterID}").Queries("shell", "true").HandlerFunc(routeToShellProxy("link", "shell", localSupport, localCluster, mux, proxyHandler))
mux.Path("/{prefix:k8s/clusters/[^/]+}{suffix:/v1.*}").MatcherFunc(proxyHandler.MatchNonLegacy("/k8s/clusters/")).Handler(proxyHandler)
return func(handler http.Handler) http.Handler {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
mux.NotFoundHandler = handler
mux.ServeHTTP(rw, req)
})
}, nil
}
func routeToShellProxy(key, value string, localSupport bool, localCluster http.Handler, mux *gmux.Router, proxyHandler *Handler) func(rw http.ResponseWriter, r *http.Request) {
return func(rw http.ResponseWriter, r *http.Request) {
vars := gmux.Vars(r)
cluster := vars["clusterID"]
if cluster == "local" {
if localSupport {
q := r.URL.Query()
q.Set(key, value)
r.URL.RawQuery = q.Encode()
r.URL.Path = "/v1/management.cattle.io.clusters/local"
localCluster.ServeHTTP(rw, r)
} else {
mux.NotFoundHandler.ServeHTTP(rw, r)
}
return
}
vars["prefix"] = "k8s/clusters/" + cluster
vars["suffix"] = "/v1/management.cattle.io.clusters/local"
q := r.URL.Query()
q.Set(key, value)
r.URL.RawQuery = q.Encode()
r.URL.Path = "/k8s/clusters/" + cluster + "/v1/management.cattle.io.clusters/local"
proxyHandler.ServeHTTP(rw, r)
}
}
func NewProxyHandler(authorizer authorizer.Authorizer,
dialerFactory ClusterDialerFactory,
clusters v3.ClusterCache) *Handler {
return &Handler{
authorizer: authorizer,
dialerFactory: dialerFactory,
clusters: clusters,
}
}
func (h *Handler) MatchNonLegacy(prefix string) gmux.MatcherFunc {
return func(req *http.Request, match *gmux.RouteMatch) bool {
clusterID := strings.TrimPrefix(req.URL.Path, prefix)
clusterID = strings.SplitN(clusterID, "/", 2)[0]
if match.Vars == nil {
match.Vars = map[string]string{}
}
match.Vars["clusterID"] = clusterID
return true
}
}
func (h *Handler) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
user, ok := request.UserFrom(req.Context())
if !ok {
rw.WriteHeader(http.StatusUnauthorized)
return
}
prefix := "/" + gmux.Vars(req)["prefix"]
clusterID := gmux.Vars(req)["clusterID"]
if !h.canAccess(req.Context(), user, clusterID) {
rw.WriteHeader(http.StatusUnauthorized)
return
}
handler, err := h.next(clusterID, prefix)
if err != nil {
rw.WriteHeader(http.StatusInternalServerError)
rw.Write([]byte(err.Error()))
return
}
handler.ServeHTTP(rw, req)
}
func (h *Handler) dialer(ctx context.Context, network, address string) (net.Conn, error) {
host, _, err := net.SplitHostPort(address)
if err != nil {
return nil, err
}
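	// The host portion of the address carries the cluster ID (see next());
	// route through that cluster's tunnel to the agent on 127.0.0.1:6080.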
dialer := h.dialerFactory("steve-cluster-" + host)
return dialer(ctx, network, "127.0.0.1:6080")
}
func (h *Handler) next(clusterID, prefix string) (http.Handler, error) {
cfg := &rest.Config{
// this is bogus, the dialer will change it to 127.0.0.1:6080, but the clusterID is used to lookup the tunnel
// connect
Host: "http://" + clusterID,
UserAgent: rest.DefaultKubernetesUserAgent() + " cluster " + clusterID,
Transport: &http.Transport{
DialContext: h.dialer,
},
}
next := proxy.ImpersonatingHandler(prefix, cfg)
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
req.Header.Set("X-API-URL-Prefix", prefix)
next.ServeHTTP(rw, req)
}), nil<|fim▁hole|>}
func (h *Handler) canAccess(ctx context.Context, user user.Info, clusterID string) bool {
extra := map[string]authzv1.ExtraValue{}
for k, v := range user.GetExtra() {
extra[k] = v
}
resp, _, err := h.authorizer.Authorize(ctx, authorizer.AttributesRecord{
ResourceRequest: true,
User: user,
Verb: "get",
APIGroup: managementv3.GroupName,
APIVersion: managementv3.Version,
Resource: "clusters",
Name: clusterID,
})
return err == nil && resp == authorizer.DecisionAllow
}<|fim▁end|> | |
<|file_name|>restore.py<|end_file_name|><|fim▁begin|>import os<|fim▁hole|>from archie import helpers
def find_backup(cfg):
files = []
rcfiles = cfg.options('rcfiles')
for rc in rcfiles:
backup = helpers.get_backupfile(cfg, rc)
rcfile = helpers.get_rcfile(cfg, rc)
if os.path.lexists(backup) and tarfile.is_tarfile(backup):
files.append((backup, rcfile))
return files
def gunzip_and_restore(cfg, backupfiles):
for backup, rc in backupfiles:
if os.path.islink(rc):
os.unlink(rc)
with closing(tarfile.open(backup, 'r:gz')) as tar:
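            # extract relative to /, so entries archived with absolute paths are restored in place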
tar.extractall('/')
return backupfiles
def Restore(cfg):
backupfiles = find_backup(cfg)
return gunzip_and_restore(cfg, backupfiles)<|fim▁end|> | import tarfile
from contextlib import closing |
<|file_name|>dynamic.js<|end_file_name|><|fim▁begin|>import React from 'react';
import Sortable from '../../src/';
import DemoItem from '../components/DemoItem';
export default class Dynamic extends React.Component {
constructor() {
super();
this.state = {
arr: [998, 225, 13]
};<|fim▁hole|> arr: sortedArray
});
}
handleAddElement() {
this.setState({
arr: this.state.arr.concat(Math.round(Math.random() * 1000))
});
}
handleRemoveElement(index) {
const newArr = this.state.arr.slice();
newArr.splice(index, 1);
this.setState({
arr: newArr
});
}
render() {
function renderItem(num, index) {
return (
<DemoItem key={num} className="dynamic-item" sortData={num}>
{num}
<span className="delete"
onClick={this.handleRemoveElement.bind(this, index)}
>×</span>
</DemoItem>
);
}
return (
<div className="demo-container">
<h4 className="demo-title">
Dynamically adding/removing children
<a href="https://github.com/jasonslyvia/react-anything-sortable/tree/master/demo/pages/dynamic.js" target="_blank">source</a>
</h4>
<div className="dynamic-demo">
<button onClick={::this.handleAddElement}>Add 1 element</button>
<Sortable onSort={::this.handleSort} dynamic>
{this.state.arr.map(renderItem, this)}
</Sortable>
</div>
</div>
);
}
}<|fim▁end|> | }
handleSort(sortedArray) {
this.setState({ |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3<|fim▁hole|># for more info, see github.com/qguv/loadaverage
from load.loadaverage import main<|fim▁end|> | |
<|file_name|>person-editor.component.ts<|end_file_name|><|fim▁begin|>import {Component, OnInit, Input, OnChanges} from '@angular/core';
import {Validators, FormControl, FormGroup, FormBuilder} from '@angular/forms';
import {Location} from '@angular/common';
import {Message} from 'primeng/components/common/api';
import {ActivatedRoute, Router, ParamMap, UrlSegment} from '@angular/router';
import {PersonsService} from './persons.service';
import {Person} from '../model';
import 'rxjs/add/operator/switchMap';
import {Observable} from 'rxjs/Observable';
const enum Mode {
Edit,
Create
}
@Component({
selector: 'app-person-editor',
templateUrl: './person-editor.component.html'
})
export class PersonEditorComponent implements OnInit, OnChanges {
mode: Mode;
title: string;
@Input() person: Person;
msgs: Message[] = [];
fbgroup: FormGroup;
submitted: boolean;
constructor(private fb: FormBuilder, private route: ActivatedRoute,
private router: Router, private location: Location,
private ps: PersonsService) {}
ngOnInit() {
this.fbgroup = this.fb.group({
'name': new FormControl('', [Validators.required, Validators.minLength(3)]),
'mail': new FormControl('', [Validators.required]),
'age': new FormControl('', [Validators.required, Validators.min(18), Validators.max(99)])
});
this.route.url.subscribe(u => {
console.log(u[u.length - 1].path);
if (u[u.length - 1].path === 'new') {
this.mode = Mode.Create;
this.title = 'Create new person';
} else {
this.mode = Mode.Edit;
this.title = 'Edit person';
this.route.paramMap
.switchMap((params: ParamMap) =>
this.ps.findById(+params.get('id')))
.subscribe((person: Person) => {
this.person = person;
// why doesn't that happen automatically?
this.ngOnChanges();
});
}
});
}
ngOnChanges() {
this.fbgroup.reset({
name: this.person.name,
mail: this.person.email,
age: this.person.age
});
}
onSubmit() {
this.person = this.prepareSave();
let req: Observable<any>;
switch (this.mode) {
case Mode.Edit: {
req = this.ps.update(this.person.id, this.person);
break;
}
case Mode.Create: {
req = this.ps.submit(this.person);
break;
}
}
req.subscribe(p => {<|fim▁hole|> });
// onError
// this.msgs = [];
// this.msgs.push({severity: 'info', summary: 'Success', detail: 'Person saved: ' + p.name});
// this.ngOnChanges();
}
onCancel() {
this.location.back();
}
prepareSave(): Person {
const formModel = this.fbgroup.value;
// return new `Person` object containing a combination of original person value(s)
// and copies of changed form model values
const savePerson: Person = {
id: null,
name: formModel.name as string,
email: formModel.mail as string,
age: formModel.age as number
};
if (this.mode === Mode.Edit) {
savePerson.id = this.person.id;
}
return savePerson;
}
get diagnostic() {return JSON.stringify(this.fbgroup.value); }
}<|fim▁end|> | this.location.back(); |
<|file_name|>test_wizard_param_service.js<|end_file_name|><|fim▁begin|>'use strict';
concertoPanel.service('TestWizardParam', ["$filter",
function ($filter) {
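        // Maps the numeric wizard parameter type codes (0-13) to their translated display names.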
this.getTypeName = function (type) {
switch (parseInt(type)) {
case 0:
return Trans.TEST_WIZARD_PARAM_TYPE_SINGLE_LINE_TEXT;
case 1:
return Trans.TEST_WIZARD_PARAM_TYPE_MULTI_LINE_TEXT;
case 2:
return Trans.TEST_WIZARD_PARAM_TYPE_HTML;
case 3:
return Trans.TEST_WIZARD_PARAM_TYPE_SELECT;
case 4:
return Trans.TEST_WIZARD_PARAM_TYPE_CHECKBOX;
case 5:
return Trans.TEST_WIZARD_PARAM_TYPE_VIEW;
case 6:
return Trans.TEST_WIZARD_PARAM_TYPE_TABLE;
case 7:
return Trans.TEST_WIZARD_PARAM_TYPE_COLUMN;
case 8:
return Trans.TEST_WIZARD_PARAM_TYPE_TEST;
case 9:
return Trans.TEST_WIZARD_PARAM_TYPE_GROUP;
case 10:
return Trans.TEST_WIZARD_PARAM_TYPE_LIST;
case 11:
return Trans.TEST_WIZARD_PARAM_TYPE_R;
case 12:
return Trans.TEST_WIZARD_PARAM_TYPE_COLUMN_MAP;
case 13:
return Trans.TEST_WIZARD_PARAM_TYPE_WIZARD;
}
return type;
};
this.getDefinerTitle = function (param) {
if (!param)
return "";
var info = param.label ? param.label : this.getTypeName(param.type);
switch (parseInt(param.type)) {
case 0:
return Trans.TEST_WIZARD_PARAM_DEFINER_TITLES_SINGLE_LINE.pf(info);
case 1:
return Trans.TEST_WIZARD_PARAM_DEFINER_TITLES_MULTI_LINE.pf(info);
case 2:
return Trans.TEST_WIZARD_PARAM_DEFINER_TITLES_HTML.pf(info);
case 3:
return Trans.TEST_WIZARD_PARAM_DEFINER_TITLES_SELECT.pf(info);
case 4:
return Trans.TEST_WIZARD_PARAM_DEFINER_TITLES_CHECKBOX.pf(info);
case 5:
return Trans.TEST_WIZARD_PARAM_DEFINER_TITLES_TEMPLATE.pf(info);
case 6:
return Trans.TEST_WIZARD_PARAM_DEFINER_TITLES_TABLE.pf(info);
case 8:
return Trans.TEST_WIZARD_PARAM_DEFINER_TITLES_TEST.pf(info);
case 9:
return Trans.TEST_WIZARD_PARAM_DEFINER_TITLES_GROUP.pf(info);
case 10:
return Trans.TEST_WIZARD_PARAM_DEFINER_TITLES_LIST.pf(info);
case 11:
return Trans.TEST_WIZARD_PARAM_DEFINER_TITLES_R_CODE.pf(info);
case 12:
return Trans.TEST_WIZARD_PARAM_DEFINER_TITLES_COLUMN_MAP.pf(info);
case 13:
return Trans.TEST_WIZARD_PARAM_DEFINER_TITLES_WIZARD.pf(info);
}
return "";
};
this.getSetterTitle = function (param) {
if (!param)<|fim▁hole|> case 1:
return Trans.TEST_WIZARD_PARAM_SETTER_TITLES_TEXTAREA.pf(param.label);
case 2:
return Trans.TEST_WIZARD_PARAM_SETTER_TITLES_HTML.pf(param.label);
case 7:
return Trans.TEST_WIZARD_PARAM_SETTER_TITLES_COLUMN.pf(param.label);
case 9:
return Trans.TEST_WIZARD_PARAM_SETTER_TITLES_GROUP.pf(param.label);
case 10:
return Trans.TEST_WIZARD_PARAM_SETTER_TITLES_LIST.pf(param.label);
case 11:
return Trans.TEST_WIZARD_PARAM_SETTER_TITLES_R.pf(param.label);
case 12:
return Trans.TEST_WIZARD_PARAM_SETTER_TITLES_COLUMN_MAP.pf(param.label);
case 13:
return Trans.TEST_WIZARD_PARAM_SETTER_TITLES_WIZARD.pf(param.label);
}
return "";
};
this.getDefinerSummary = function (param) {
if (!param)
return "";
switch (parseInt(param.type)) {
case 3:
if (param.definition == undefined || !param.definition.options)
return "";
var info = param.definition.options.length + " - [";
for (var i = 0; i < param.definition.options.length; i++) {
if (i > 0)
info += ",";
info += param.definition.options[i].label;
}
info += "]";
return Trans.TEST_WIZARD_PARAM_DEFINER_SUMMARIES_SELECT.pf(info);
case 9:
if (param.definition == undefined || !param.definition.fields)
return "";
var info = param.definition.fields.length + " - [";
for (var i = 0; i < param.definition.fields.length; i++) {
if (i > 0)
info += ",";
info += param.definition.fields[i].name;
}
info += "]";
return Trans.TEST_WIZARD_PARAM_DEFINER_SUMMARIES_GROUP.pf(info);
case 10:
if (param.definition == undefined || param.definition.element == undefined)
return "";
var info = this.getTypeName(param.definition.element.type);
return Trans.TEST_WIZARD_PARAM_DEFINER_SUMMARIES_LIST.pf(info);
case 12:
if (param.definition == undefined || !param.definition.cols)
return "";
var info = param.definition.cols.length + " - [";
for (var i = 0; i < param.definition.cols.length; i++) {
if (i > 0)
info += ",";
info += param.definition.cols[i].name;
}
info += "]";
return Trans.TEST_WIZARD_PARAM_DEFINER_SUMMARIES_COLUMN_MAP.pf(info);
}
return "";
};
this.getSetterSummary = function (param, output) {
if (!param || !output)
return "";
switch (parseInt(param.type)) {
case 1:
var summary = output;
if (summary.length > 100) {
summary = summary.substring(0, 97) + "...";
}
return Trans.TEST_WIZARD_PARAM_SETTER_SUMMARIES_TEXTAREA.pf(summary);
case 2:
var summary = output;
if (summary.length > 100) {
summary = summary.substring(0, 97) + "...";
}
return Trans.TEST_WIZARD_PARAM_SETTER_SUMMARIES_HTML.pf(summary);
case 7:
return Trans.TEST_WIZARD_PARAM_SETTER_SUMMARIES_COLUMN.pf(output.table, output.column);
case 9:
return Trans.TEST_WIZARD_PARAM_SETTER_SUMMARIES_GROUP.pf(this.getDefinerSummary(param));
case 10:
return Trans.TEST_WIZARD_PARAM_SETTER_SUMMARIES_LIST.pf(output.length);
case 11:
var summary = output;
if (summary.length > 100) {
summary = summary.substring(0, 97) + "...";
}
return Trans.TEST_WIZARD_PARAM_SETTER_SUMMARIES_R.pf(summary);
case 12:
if (!param.definition.cols)
return "";
var info = param.definition.cols.length + " - [";
for (var i = 0; i < param.definition.cols.length; i++) {
if (i > 0)
info += ",";
var dst = "?";
if (output.columns != null && output.columns != undefined) {
var map = output.columns[param.definition.cols[i].name];
if (map !== null && map != undefined)
dst = map;
}
info += param.definition.cols[i].name + "->" + dst;
}
info += "]";
return Trans.TEST_WIZARD_PARAM_SETTER_SUMMARIES_COLUMN_MAP.pf(info);
}
return "";
};
this.wizardParamsToTestVariables = function (test, steps, vars) {
for (var j = 0; j < steps.length; j++) {
for (var k = 0; k < steps[j].params.length; k++) {
var param = steps[j].params[k];
this.serializeParamValue(param);
var found = false;
for (var i = 0; i < vars.length; i++) {
var variable = vars[i];
if (param.name === variable.name) {
variable.value = param.value;
found = true;
break;
}
}
if (!found) {
vars.push({
id: 0,
name: param.name,
test: test.id,
type: 0,
description: param.description,
value: param.value,
passableThroughUrl: param.passableThroughUrl,
parentVariable: param.testVariable
});
}
}
}
};
this.testVariablesToWizardParams = function (vars, steps) {
for (var i = 0; i < vars.length; i++) {
var variable = vars[i];
for (var j = 0; j < steps.length; j++) {
for (var k = 0; k < steps[j].params.length; k++) {
var param = steps[j].params[k];
if (variable.name === param.name && variable.type == 0) {
param.value = variable.value;
param.exposed = variable.exposed;
this.unserializeParamValue(param);
break;
}
}
}
}
};
this.serializeParamValue = function (param) {
try {
if (param.type == 7 || param.type == 9 || param.type == 10 || param.type == 12 || param.type == 13) {
this.deobjectifyListElements(param, param.output);
param.value = angular.toJson(param.output);
} else
param.value = param.output;
if (param.value === null) {
throw "null param value (" + param.label + ")";
}
} catch (err) {
switch (parseInt(param.type)) {
case 4:
param.value = "0";
break;
case 7:
case 9:
case 12:
case 13:
param.value = "{}";
break;
case 10:
param.value = "[]";
break;
default:
param.value = "";
break;
}
}
};
this.unserializeParamValue = function (param) {
var setDefault = false;
if (param.value === null) {
setDefault = true;
} else {
try {
if (!this.isSimpleType(param.type)) {
param.output = angular.fromJson(param.value);
this.objectifyListElements(param, param.output);
} else {
param.output = param.value;
}
if (!this.validateUnserializedParamOutput(param.output, param.type)) {
throw "invalid unserialized param value (" + param.label + ")";
}
} catch (err) {
setDefault = true;
}
}
if (setDefault) param.output = this.getParamOutputDefault(param);
};
this.getParamOutputDefault = function (param) {
let result = null;
switch (parseInt(param.type)) {
case 0: //single line
case 1: //multi line
case 2: //HTML
case 3: //select
case 5: //view template
case 6: //data table
case 8: //test
case 11: //R
result = param.definition.defvalue;
if (!this.validateUnserializedParamOutput(result, param.type)) result = "";
break;
case 4: //checkbox
result = param.definition.defvalue;
if (!this.validateUnserializedParamOutput(result, param.type)) result = "0";
break;
case 7: //data table column
case 12: //column map
case 13: //test wizard
result = {};
break;
case 9: //group
result = {};
for (let i = 0; i < param.definition.fields.length; i++) {
let field = param.definition.fields[i];
result[field.name] = this.getParamOutputDefault(field);
}
break;
case 10: //list
result = [];
break;
}
return result;
};
this.validateUnserializedParamOutput = function (output, type) {
if (output === null || output === undefined) return false;
switch (parseInt(type)) {
case 0: //single line
case 1: //multi line
case 2: //HTML
case 3: //select
case 4: //checkbox
case 5: //view template
case 6: //data table
case 8: //test
case 11: //R
if (typeof output === 'object') return false;
break;
case 7: //data table column
case 9: //group
case 12: //column map
case 13: //test wizard
if (typeof output !== 'object' || output.constructor === Array) return false;
break;
case 10: //list
if (typeof output !== 'object' || output.constructor !== Array) return false;
break;
}
return true;
};
this.isParamVisible = function (param, parent, grandParent, values) {
try {
if (!param.hideCondition || param.hideCondition === undefined) {
return true;
}
var res = eval(param.hideCondition);
if (res === true) {
return false;
}
} catch (err) {
}
return true;
};
this.objectifyListElements = function (param, output) {
switch (parseInt(param.type)) {
case 9: {
for (let i = 0; i < param.definition.fields.length; i++) {
let field = param.definition.fields[i];
this.objectifyListElements(field, output[field.name]);
}
break;
}
case 10: {
if (this.isSimpleType(param.definition.element.type)) {
for (let i = 0; i < output.length; i++) {
if (typeof output[i] !== 'object') output[i] = {value: output[i]};
}
}
break;
}
}
};
this.deobjectifyListElements = function (param, output) {
switch (parseInt(param.type)) {
case 9: {
for (let i = 0; i < param.definition.fields.length; i++) {
let field = param.definition.fields[i];
this.deobjectifyListElements(field, output[field.name]);
}
break;
}
case 10: {
if (this.isSimpleType(param.definition.element.type)) {
for (let i = 0; i < output.length; i++) {
output[i] = output[i].value;
}
}
break;
}
}
};
this.isSimpleType = function (type) {
let validSimpleTypes = [0, 1, 2, 3, 4, 5, 6, 8, 11];
return validSimpleTypes.indexOf(parseInt(type)) !== -1;
}
}
]);<|fim▁end|> | return "";
switch (parseInt(param.type)) { |
<|file_name|>MenuMapper.java<|end_file_name|><|fim▁begin|>package com.jflyfox.dudu.module.system.dao;
<|fim▁hole|>import com.jflyfox.util.StrUtils;
import org.apache.ibatis.annotations.SelectProvider;
import org.apache.ibatis.jdbc.SQL;
import java.util.List;
/**
* 菜单 数据层
*
* @author flyfox [email protected] on 2017-06-20.
*/
public interface MenuMapper extends BaseMapper<SysMenu> {
@SelectProvider(type = SqlBuilder.class, method = "selectMenuPage")
List<SysMenu> selectMenuPage(Query query);
class SqlBuilder {
public String selectMenuPage(Query query) {
String sqlColumns = "t.id,t.parentid,t.name,t.urlkey,t.url,t.status,t.type,t.sort,t.level,t.enable,t.update_time as updateTime,t.update_id as updateId,t.create_time as createTime,t.create_id as createId";
return new SQL() {{
SELECT(sqlColumns +
" ,p.name as parentName,uu.username as updateName,uc.username as createName");
FROM(" sys_menu t ");
LEFT_OUTER_JOIN(" sys_user uu on t.update_id = uu.id ");
LEFT_OUTER_JOIN(" sys_user uc on t.create_id = uc.id ");
LEFT_OUTER_JOIN(" sys_menu p on t.parentid = p.id ");
if (StrUtils.isNotEmpty(query.getStr("name"))) {
WHERE(" t.name like concat('%',#{name},'%')");
}
if (StrUtils.isNotEmpty(query.getOrderBy())) {
ORDER_BY(query.getOrderBy());
} else {
ORDER_BY(" t.id desc");
}
}}.toString();
}
}
}<|fim▁end|> | import com.baomidou.mybatisplus.mapper.BaseMapper;
import com.jflyfox.dudu.component.model.Query;
import com.jflyfox.dudu.module.system.model.SysMenu; |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from flask import render_template, flash, request, redirect, url_for
from flask_login import login_required
from kernel import agileCalendar
from kernel.DataBoard import Data
from kernel.NM_Aggregates import WorkBacklog, DevBacklog, RiskBacklog
from kconfig import coordinationBookByName
from . import coordination
__author__ = 'Manuel Escriche'
@coordination.route("/")
@coordination.route("/overview")
@login_required
def overview():
return redirect(url_for('coordination.delivery'))
@coordination.route("/success-stories")
@login_required
def success_stories():
cmp = coordinationBookByName['SuccessStories']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/success_stories.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/friendliness")
@login_required
def friendliness():
cmp = coordinationBookByName['Friendliness']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/friendliness.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/qualityassurance")
@login_required
def qualityassurance():
cmp = coordinationBookByName['QualityAssurance']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/quality_assurance.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/issues")
@login_required
def issues():
cmp = coordinationBookByName['Issues']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'<|fim▁hole|> comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/risks")
@login_required
def risks():
cmp = coordinationBookByName['Risks']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/risks.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/delivery")
@login_required
def delivery():
cmp = coordinationBookByName['Deliverables']
backlog = WorkBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/delivery.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/docs")
@login_required
def docs():
cmp = coordinationBookByName['Documentation']
backlog = WorkBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/docs.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/agile")
@login_required
def agile():
cmp = coordinationBookByName['Agile']
backlog = WorkBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/agile.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/scrum-master")
@login_required
def scrumtools():
cmp = coordinationBookByName['SMTools']
backlog = DevBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/scrum_tools.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)<|fim▁end|> |
return render_template('coordination/issues.html', |
<|file_name|>cover.py<|end_file_name|><|fim▁begin|>"""Support for ISY994 covers."""<|fim▁hole|>from pyisy.constants import ISY_VALUE_UNKNOWN
from homeassistant.components.cover import (
ATTR_POSITION,
DOMAIN as COVER,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
CoverEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import (
_LOGGER,
DOMAIN as ISY994_DOMAIN,
ISY994_NODES,
ISY994_PROGRAMS,
UOM_8_BIT_RANGE,
UOM_BARRIER,
)
from .entity import ISYNodeEntity, ISYProgramEntity
from .helpers import migrate_old_unique_ids
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up the ISY994 cover platform."""
hass_isy_data = hass.data[ISY994_DOMAIN][entry.entry_id]
entities: list[ISYCoverEntity | ISYCoverProgramEntity] = []
for node in hass_isy_data[ISY994_NODES][COVER]:
entities.append(ISYCoverEntity(node))
for name, status, actions in hass_isy_data[ISY994_PROGRAMS][COVER]:
entities.append(ISYCoverProgramEntity(name, status, actions))
await migrate_old_unique_ids(hass, COVER, entities)
async_add_entities(entities)
class ISYCoverEntity(ISYNodeEntity, CoverEntity):
"""Representation of an ISY994 cover device."""
@property
def current_cover_position(self) -> int | None:
"""Return the current cover position."""
if self._node.status == ISY_VALUE_UNKNOWN:
return None
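        # nodes using the 8-bit UOM report 0-255; scale to the 0-100 range Home Assistant expects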
if self._node.uom == UOM_8_BIT_RANGE:
return round(self._node.status * 100.0 / 255.0)
return int(sorted((0, self._node.status, 100))[1])
@property
def is_closed(self) -> bool | None:
"""Get whether the ISY994 cover device is closed."""
if self._node.status == ISY_VALUE_UNKNOWN:
return None
return bool(self._node.status == 0)
@property
def supported_features(self) -> int:
"""Flag supported features."""
return SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION
async def async_open_cover(self, **kwargs: Any) -> None:
"""Send the open cover command to the ISY994 cover device."""
val = 100 if self._node.uom == UOM_BARRIER else None
if not await self._node.turn_on(val=val):
_LOGGER.error("Unable to open the cover")
async def async_close_cover(self, **kwargs: Any) -> None:
"""Send the close cover command to the ISY994 cover device."""
if not await self._node.turn_off():
_LOGGER.error("Unable to close the cover")
async def async_set_cover_position(self, **kwargs: Any) -> None:
"""Move the cover to a specific position."""
position = kwargs[ATTR_POSITION]
if self._node.uom == UOM_8_BIT_RANGE:
position = round(position * 255.0 / 100.0)
if not await self._node.turn_on(val=position):
_LOGGER.error("Unable to set cover position")
class ISYCoverProgramEntity(ISYProgramEntity, CoverEntity):
"""Representation of an ISY994 cover program."""
@property
def is_closed(self) -> bool:
"""Get whether the ISY994 cover program is closed."""
return bool(self._node.status)
async def async_open_cover(self, **kwargs: Any) -> None:
"""Send the open cover command to the ISY994 cover program."""
if not await self._actions.run_then():
_LOGGER.error("Unable to open the cover")
async def async_close_cover(self, **kwargs: Any) -> None:
"""Send the close cover command to the ISY994 cover program."""
if not await self._actions.run_else():
_LOGGER.error("Unable to close the cover")<|fim▁end|> | from __future__ import annotations
from typing import Any
|
<|file_name|>rpcwallet.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include <boost/assign/list_of.hpp>
#include "wallet.h"
#include "walletdb.h"
#include "bitcoinrpc.h"
#include "init.h"
#include "base58.h"
using namespace std;
using namespace boost;
using namespace boost::assign;
using namespace json_spirit;
int64 nWalletUnlockTime;
static CCriticalSection cs_nWalletUnlockTime;
std::string HelpRequiringPassphrase()
{
return pwalletMain->IsCrypted()
? "\nrequires wallet passphrase to be set with walletpassphrase first"
: "";
}
void EnsureWalletIsUnlocked()
{
if (pwalletMain->IsLocked())
throw JSONRPCError(RPC_WALLET_UNLOCK_NEEDED, "Error: Please enter the wallet passphrase with walletpassphrase first.");
}
void WalletTxToJSON(const CWalletTx& wtx, Object& entry)
{
int confirms = wtx.GetDepthInMainChain();
entry.push_back(Pair("confirmations", confirms));
if (wtx.IsCoinBase())
entry.push_back(Pair("generated", true));
if (confirms)
{
entry.push_back(Pair("blockhash", wtx.hashBlock.GetHex()));
entry.push_back(Pair("blockindex", wtx.nIndex));
entry.push_back(Pair("blocktime", (boost::int64_t)(mapBlockIndex[wtx.hashBlock]->nTime)));
}
entry.push_back(Pair("txid", wtx.GetHash().GetHex()));
entry.push_back(Pair("time", (boost::int64_t)wtx.GetTxTime()));
entry.push_back(Pair("timereceived", (boost::int64_t)wtx.nTimeReceived));
BOOST_FOREACH(const PAIRTYPE(string,string)& item, wtx.mapValue)
entry.push_back(Pair(item.first, item.second));
}
string AccountFromValue(const Value& value)
{
string strAccount = value.get_str();
if (strAccount == "*")
throw JSONRPCError(RPC_WALLET_INVALID_ACCOUNT_NAME, "Invalid account name");
return strAccount;
}
Value getinfo(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 0)
throw runtime_error(
"getinfo\n"
"Returns an object containing various state info.");
proxyType proxy;
GetProxy(NET_IPV4, proxy);
Object obj;
obj.push_back(Pair("version", (int)CLIENT_VERSION));
obj.push_back(Pair("protocolversion",(int)PROTOCOL_VERSION));
obj.push_back(Pair("walletversion", pwalletMain->GetVersion()));
obj.push_back(Pair("balance", ValueFromAmount(pwalletMain->GetBalance())));
obj.push_back(Pair("blocks", (int)nBestHeight));
obj.push_back(Pair("timeoffset", (boost::int64_t)GetTimeOffset()));
obj.push_back(Pair("connections", (int)vNodes.size()));
obj.push_back(Pair("proxy", (proxy.first.IsValid() ? proxy.first.ToStringIPPort() : string())));
obj.push_back(Pair("difficulty", (double)GetDifficulty()));
obj.push_back(Pair("testnet", TestNet()));
obj.push_back(Pair("keypoololdest", (boost::int64_t)pwalletMain->GetOldestKeyPoolTime()));
obj.push_back(Pair("keypoolsize", (int)pwalletMain->GetKeyPoolSize()));
obj.push_back(Pair("paytxfee", ValueFromAmount(nTransactionFee)));
if (pwalletMain->IsCrypted())
obj.push_back(Pair("unlocked_until", (boost::int64_t)nWalletUnlockTime));
obj.push_back(Pair("errors", GetWarnings("statusbar")));
return obj;
}
Value getnewaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"getnewaddress [account]\n"
"Returns a new Bitcoin address for receiving payments. "
"If [account] is specified (recommended), it is added to the address book "
"so payments received with the address will be credited to [account].");
// Parse the account first so we don't generate a key if there's an error
string strAccount;
if (params.size() > 0)
strAccount = AccountFromValue(params[0]);
if (!pwalletMain->IsLocked())
pwalletMain->TopUpKeyPool();
// Generate a new key that is added to wallet
CPubKey newKey;
if (!pwalletMain->GetKeyFromPool(newKey))
throw JSONRPCError(RPC_WALLET_KEYPOOL_RAN_OUT, "Error: Keypool ran out, please call keypoolrefill first");
CKeyID keyID = newKey.GetID();
pwalletMain->SetAddressBook(keyID, strAccount, "receive");
return CBitcoinAddress(keyID).ToString();
}
CBitcoinAddress GetAccountAddress(string strAccount, bool bForceNew=false)
{
CWalletDB walletdb(pwalletMain->strWalletFile);
CAccount account;
walletdb.ReadAccount(strAccount, account);
bool bKeyUsed = false;
// Check if the current key has been used
if (account.vchPubKey.IsValid())
{
CScript scriptPubKey;
scriptPubKey.SetDestination(account.vchPubKey.GetID());
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin();
it != pwalletMain->mapWallet.end() && account.vchPubKey.IsValid();
++it)
{
const CWalletTx& wtx = (*it).second;
BOOST_FOREACH(const CTxOut& txout, wtx.vout)
if (txout.scriptPubKey == scriptPubKey)
bKeyUsed = true;
}
}
// Generate a new key
if (!account.vchPubKey.IsValid() || bForceNew || bKeyUsed)
{
if (!pwalletMain->GetKeyFromPool(account.vchPubKey))
throw JSONRPCError(RPC_WALLET_KEYPOOL_RAN_OUT, "Error: Keypool ran out, please call keypoolrefill first");
pwalletMain->SetAddressBook(account.vchPubKey.GetID(), strAccount, "receive");
walletdb.WriteAccount(strAccount, account);
}
return CBitcoinAddress(account.vchPubKey.GetID());
}
Value getaccountaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"getaccountaddress <account>\n"
"Returns the current Bitcoin address for receiving payments to this account.");
// Parse the account first so we don't generate a key if there's an error
string strAccount = AccountFromValue(params[0]);
Value ret;
ret = GetAccountAddress(strAccount).ToString();
return ret;
}
Value getrawchangeaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"getrawchangeaddress\n"
"Returns a new Bitcoin address, for receiving change. "
"This is for use with raw transactions, NOT normal use.");
if (!pwalletMain->IsLocked())
pwalletMain->TopUpKeyPool();
CReserveKey reservekey(pwalletMain);
CPubKey vchPubKey;
if (!reservekey.GetReservedKey(vchPubKey))
throw JSONRPCError(RPC_WALLET_ERROR, "Error: Unable to obtain key for change");
reservekey.KeepKey();
CKeyID keyID = vchPubKey.GetID();
return CBitcoinAddress(keyID).ToString();
}
Value setaccount(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"setaccount <pangubiaddress> <account>\n"
"Sets the account associated with the given address.");
CBitcoinAddress address(params[0].get_str());
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Bitcoin address");
string strAccount;
if (params.size() > 1)
strAccount = AccountFromValue(params[1]);
// Detect when changing the account of an address that is the 'unused current key' of another account:
if (pwalletMain->mapAddressBook.count(address.Get()))
{
string strOldAccount = pwalletMain->mapAddressBook[address.Get()].name;
if (address == GetAccountAddress(strOldAccount))
GetAccountAddress(strOldAccount, true);
}
pwalletMain->SetAddressBook(address.Get(), strAccount, "receive");
return Value::null;
}
Value getaccount(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"getaccount <pangubiaddress>\n"
"Returns the account associated with the given address.");
CBitcoinAddress address(params[0].get_str());
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Bitcoin address");
string strAccount;
map<CTxDestination, CAddressBookData>::iterator mi = pwalletMain->mapAddressBook.find(address.Get());
if (mi != pwalletMain->mapAddressBook.end() && !(*mi).second.name.empty())
strAccount = (*mi).second.name;
return strAccount;
}
Value getaddressesbyaccount(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"getaddressesbyaccount <account>\n"
"Returns the list of addresses for the given account.");
string strAccount = AccountFromValue(params[0]);
// Find all addresses that have the given account
Array ret;
BOOST_FOREACH(const PAIRTYPE(CBitcoinAddress, CAddressBookData)& item, pwalletMain->mapAddressBook)
{
const CBitcoinAddress& address = item.first;
const string& strName = item.second.name;
if (strName == strAccount)
ret.push_back(address.ToString());
}
return ret;
}
Value sendtoaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2 || params.size() > 4)
throw runtime_error(
"sendtoaddress <pangubiaddress> <amount> [comment] [comment-to]\n"
"<amount> is a real and is rounded to the nearest 0.00000001"
+ HelpRequiringPassphrase());
CBitcoinAddress address(params[0].get_str());
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Bitcoin address");
// Amount
int64 nAmount = AmountFromValue(params[1]);
// Wallet comments
CWalletTx wtx;
if (params.size() > 2 && params[2].type() != null_type && !params[2].get_str().empty())
wtx.mapValue["comment"] = params[2].get_str();
if (params.size() > 3 && params[3].type() != null_type && !params[3].get_str().empty())
wtx.mapValue["to"] = params[3].get_str();
if (pwalletMain->IsLocked())
throw JSONRPCError(RPC_WALLET_UNLOCK_NEEDED, "Error: Please enter the wallet passphrase with walletpassphrase first.");
string strError = pwalletMain->SendMoneyToDestination(address.Get(), nAmount, wtx);
if (strError != "")
throw JSONRPCError(RPC_WALLET_ERROR, strError);
return wtx.GetHash().GetHex();
}
Value listaddressgroupings(const Array& params, bool fHelp)
{
if (fHelp)
throw runtime_error(
"listaddressgroupings\n"
"Lists groups of addresses which have had their common ownership\n"
"made public by common use as inputs or as the resulting change\n"
"in past transactions");
Array jsonGroupings;
map<CTxDestination, int64> balances = pwalletMain->GetAddressBalances();
BOOST_FOREACH(set<CTxDestination> grouping, pwalletMain->GetAddressGroupings())
{
Array jsonGrouping;
BOOST_FOREACH(CTxDestination address, grouping)
{
Array addressInfo;
addressInfo.push_back(CBitcoinAddress(address).ToString());
addressInfo.push_back(ValueFromAmount(balances[address]));
{
LOCK(pwalletMain->cs_wallet);
if (pwalletMain->mapAddressBook.find(CBitcoinAddress(address).Get()) != pwalletMain->mapAddressBook.end())
addressInfo.push_back(pwalletMain->mapAddressBook.find(CBitcoinAddress(address).Get())->second.name);
}
jsonGrouping.push_back(addressInfo);
}
jsonGroupings.push_back(jsonGrouping);
}
return jsonGroupings;
}
Value signmessage(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 2)
throw runtime_error(
"signmessage <pangubiaddress> <message>\n"
"Sign a message with the private key of an address");
EnsureWalletIsUnlocked();
string strAddress = params[0].get_str();
string strMessage = params[1].get_str();
CBitcoinAddress addr(strAddress);
if (!addr.IsValid())
throw JSONRPCError(RPC_TYPE_ERROR, "Invalid address");
CKeyID keyID;
if (!addr.GetKeyID(keyID))
throw JSONRPCError(RPC_TYPE_ERROR, "Address does not refer to key");
CKey key;
if (!pwalletMain->GetKey(keyID, key))
throw JSONRPCError(RPC_WALLET_ERROR, "Private key not available");
CHashWriter ss(SER_GETHASH, 0);
ss << strMessageMagic;
ss << strMessage;
vector<unsigned char> vchSig;
if (!key.SignCompact(ss.GetHash(), vchSig))
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Sign failed");
return EncodeBase64(&vchSig[0], vchSig.size());
}
Value verifymessage(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 3)
throw runtime_error(
"verifymessage <pangubiaddress> <signature> <message>\n"
"Verify a signed message");
string strAddress = params[0].get_str();
string strSign = params[1].get_str();
string strMessage = params[2].get_str();
CBitcoinAddress addr(strAddress);
if (!addr.IsValid())
throw JSONRPCError(RPC_TYPE_ERROR, "Invalid address");
CKeyID keyID;
if (!addr.GetKeyID(keyID))
throw JSONRPCError(RPC_TYPE_ERROR, "Address does not refer to key");
bool fInvalid = false;
vector<unsigned char> vchSig = DecodeBase64(strSign.c_str(), &fInvalid);
if (fInvalid)
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Malformed base64 encoding");
CHashWriter ss(SER_GETHASH, 0);
ss << strMessageMagic;
ss << strMessage;
CPubKey pubkey;
if (!pubkey.RecoverCompact(ss.GetHash(), vchSig))
return false;
return (pubkey.GetID() == keyID);
}
Value getreceivedbyaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"getreceivedbyaddress <pangubiaddress> [minconf=1]\n"
"Returns the total amount received by <pangubiaddress> in transactions with at least [minconf] confirmations.");
// Bitcoin address
CBitcoinAddress address = CBitcoinAddress(params[0].get_str());
CScript scriptPubKey;
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Bitcoin address");
scriptPubKey.SetDestination(address.Get());
if (!IsMine(*pwalletMain,scriptPubKey))
return (double)0.0;
// Minimum confirmations
int nMinDepth = 1;
if (params.size() > 1)
nMinDepth = params[1].get_int();
// Tally
int64 nAmount = 0;
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
if (wtx.IsCoinBase() || !IsFinalTx(wtx))
continue;
BOOST_FOREACH(const CTxOut& txout, wtx.vout)
if (txout.scriptPubKey == scriptPubKey)
if (wtx.GetDepthInMainChain() >= nMinDepth)
nAmount += txout.nValue;
}
return ValueFromAmount(nAmount);
}
Value getreceivedbyaccount(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"getreceivedbyaccount <account> [minconf=1]\n"
"Returns the total amount received by addresses with <account> in transactions with at least [minconf] confirmations.");
// Minimum confirmations
int nMinDepth = 1;
if (params.size() > 1)
nMinDepth = params[1].get_int();
// Get the set of pub keys assigned to account
string strAccount = AccountFromValue(params[0]);
set<CTxDestination> setAddress = pwalletMain->GetAccountAddresses(strAccount);
// Tally
int64 nAmount = 0;
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
if (wtx.IsCoinBase() || !IsFinalTx(wtx))
continue;
BOOST_FOREACH(const CTxOut& txout, wtx.vout)
{
CTxDestination address;
if (ExtractDestination(txout.scriptPubKey, address) && IsMine(*pwalletMain, address) && setAddress.count(address))
if (wtx.GetDepthInMainChain() >= nMinDepth)
nAmount += txout.nValue;
}
}
    return ValueFromAmount(nAmount);
}
int64 GetAccountBalance(CWalletDB& walletdb, const string& strAccount, int nMinDepth)
{
int64 nBalance = 0;
// Tally wallet transactions
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
if (!IsFinalTx(wtx))
continue;
int64 nReceived, nSent, nFee;
wtx.GetAccountAmounts(strAccount, nReceived, nSent, nFee);
if (nReceived != 0 && wtx.GetDepthInMainChain() >= nMinDepth)
nBalance += nReceived;
nBalance -= nSent + nFee;
}
// Tally internal accounting entries
nBalance += walletdb.GetAccountCreditDebit(strAccount);
return nBalance;
}
int64 GetAccountBalance(const string& strAccount, int nMinDepth)
{
CWalletDB walletdb(pwalletMain->strWalletFile);
return GetAccountBalance(walletdb, strAccount, nMinDepth);
}
Value getbalance(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw runtime_error(
"getbalance [account] [minconf=1]\n"
"If [account] is not specified, returns the server's total available balance.\n"
"If [account] is specified, returns the balance in the account.");
if (params.size() == 0)
return ValueFromAmount(pwalletMain->GetBalance());
int nMinDepth = 1;
if (params.size() > 1)
nMinDepth = params[1].get_int();
if (params[0].get_str() == "*") {
// Calculate total balance a different way from GetBalance()
// (GetBalance() sums up all unspent TxOuts)
// getbalance and getbalance '*' 0 should return the same number
int64 nBalance = 0;
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
if (!wtx.IsConfirmed())
continue;
int64 allFee;
string strSentAccount;
list<pair<CTxDestination, int64> > listReceived;
list<pair<CTxDestination, int64> > listSent;
wtx.GetAmounts(listReceived, listSent, allFee, strSentAccount);
if (wtx.GetDepthInMainChain() >= nMinDepth)
{
BOOST_FOREACH(const PAIRTYPE(CTxDestination,int64)& r, listReceived)
nBalance += r.second;
}
BOOST_FOREACH(const PAIRTYPE(CTxDestination,int64)& r, listSent)
nBalance -= r.second;
nBalance -= allFee;
}
return ValueFromAmount(nBalance);
}
string strAccount = AccountFromValue(params[0]);
int64 nBalance = GetAccountBalance(strAccount, nMinDepth);
return ValueFromAmount(nBalance);
}
Value movecmd(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 3 || params.size() > 5)
throw runtime_error(
"move <fromaccount> <toaccount> <amount> [minconf=1] [comment]\n"
"Move from one account in your wallet to another.");
string strFrom = AccountFromValue(params[0]);
string strTo = AccountFromValue(params[1]);
int64 nAmount = AmountFromValue(params[2]);
if (params.size() > 3)
// unused parameter, used to be nMinDepth, keep type-checking it though
(void)params[3].get_int();
string strComment;
if (params.size() > 4)
strComment = params[4].get_str();
CWalletDB walletdb(pwalletMain->strWalletFile);
if (!walletdb.TxnBegin())
throw JSONRPCError(RPC_DATABASE_ERROR, "database error");
int64 nNow = GetAdjustedTime();
// Debit
CAccountingEntry debit;
debit.nOrderPos = pwalletMain->IncOrderPosNext(&walletdb);
debit.strAccount = strFrom;
debit.nCreditDebit = -nAmount;
debit.nTime = nNow;
debit.strOtherAccount = strTo;
debit.strComment = strComment;
walletdb.WriteAccountingEntry(debit);
// Credit
CAccountingEntry credit;
credit.nOrderPos = pwalletMain->IncOrderPosNext(&walletdb);
credit.strAccount = strTo;
credit.nCreditDebit = nAmount;
credit.nTime = nNow;
credit.strOtherAccount = strFrom;
credit.strComment = strComment;
walletdb.WriteAccountingEntry(credit);
if (!walletdb.TxnCommit())
throw JSONRPCError(RPC_DATABASE_ERROR, "database error");
return true;
}
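// Illustrative usage (account names and amount are made up):
//   move "savings" "spending" 0.10 1 "weekly allowance"
// writes the two offsetting CAccountingEntry records above inside one
// database transaction, so the moved amounts always net to zero.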
Value sendfrom(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 3 || params.size() > 6)
throw runtime_error(
"sendfrom <fromaccount> <topangubiaddress> <amount> [minconf=1] [comment] [comment-to]\n"
"<amount> is a real and is rounded to the nearest 0.00000001"
+ HelpRequiringPassphrase());
string strAccount = AccountFromValue(params[0]);
CBitcoinAddress address(params[1].get_str());
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Bitcoin address");
int64 nAmount = AmountFromValue(params[2]);
int nMinDepth = 1;
if (params.size() > 3)
nMinDepth = params[3].get_int();
CWalletTx wtx;
wtx.strFromAccount = strAccount;
if (params.size() > 4 && params[4].type() != null_type && !params[4].get_str().empty())
wtx.mapValue["comment"] = params[4].get_str();
if (params.size() > 5 && params[5].type() != null_type && !params[5].get_str().empty())
wtx.mapValue["to"] = params[5].get_str();
EnsureWalletIsUnlocked();
// Check funds
int64 nBalance = GetAccountBalance(strAccount, nMinDepth);
if (nAmount > nBalance)
throw JSONRPCError(RPC_WALLET_INSUFFICIENT_FUNDS, "Account has insufficient funds");
// Send
string strError = pwalletMain->SendMoneyToDestination(address.Get(), nAmount, wtx);
if (strError != "")
throw JSONRPCError(RPC_WALLET_ERROR, strError);
return wtx.GetHash().GetHex();
}
Value sendmany(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2 || params.size() > 4)
throw runtime_error(
"sendmany <fromaccount> {address:amount,...} [minconf=1] [comment]\n"
"amounts are double-precision floating point numbers"
+ HelpRequiringPassphrase());
string strAccount = AccountFromValue(params[0]);
Object sendTo = params[1].get_obj();
int nMinDepth = 1;
if (params.size() > 2)
nMinDepth = params[2].get_int();
CWalletTx wtx;
wtx.strFromAccount = strAccount;
if (params.size() > 3 && params[3].type() != null_type && !params[3].get_str().empty())
wtx.mapValue["comment"] = params[3].get_str();
set<CBitcoinAddress> setAddress;
vector<pair<CScript, int64> > vecSend;
int64 totalAmount = 0;
BOOST_FOREACH(const Pair& s, sendTo)
{
CBitcoinAddress address(s.name_);
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("Invalid Bitcoin address: ")+s.name_);
if (setAddress.count(address))
throw JSONRPCError(RPC_INVALID_PARAMETER, string("Invalid parameter, duplicated address: ")+s.name_);
setAddress.insert(address);
CScript scriptPubKey;
scriptPubKey.SetDestination(address.Get());
int64 nAmount = AmountFromValue(s.value_);
totalAmount += nAmount;
vecSend.push_back(make_pair(scriptPubKey, nAmount));
}
EnsureWalletIsUnlocked();
// Check funds
int64 nBalance = GetAccountBalance(strAccount, nMinDepth);
if (totalAmount > nBalance)
throw JSONRPCError(RPC_WALLET_INSUFFICIENT_FUNDS, "Account has insufficient funds");
// Send
CReserveKey keyChange(pwalletMain);
int64 nFeeRequired = 0;
string strFailReason;
bool fCreated = pwalletMain->CreateTransaction(vecSend, wtx, keyChange, nFeeRequired, strFailReason);
if (!fCreated)
throw JSONRPCError(RPC_WALLET_INSUFFICIENT_FUNDS, strFailReason);
if (!pwalletMain->CommitTransaction(wtx, keyChange))
throw JSONRPCError(RPC_WALLET_ERROR, "Transaction commit failed");
return wtx.GetHash().GetHex();
}
//
// Used by addmultisigaddress / createmultisig:
//
static CScript _createmultisig(const Array& params)
{
int nRequired = params[0].get_int();
const Array& keys = params[1].get_array();
// Gather public keys
if (nRequired < 1)
throw runtime_error("a multisignature address must require at least one key to redeem");
if ((int)keys.size() < nRequired)
throw runtime_error(
strprintf("not enough keys supplied "
"(got %"PRIszu" keys, but need at least %d to redeem)", keys.size(), nRequired));
std::vector<CPubKey> pubkeys;
pubkeys.resize(keys.size());
for (unsigned int i = 0; i < keys.size(); i++)
{
const std::string& ks = keys[i].get_str();
// Case 1: Bitcoin address and we have full public key:
CBitcoinAddress address(ks);
if (address.IsValid())
{
CKeyID keyID;
if (!address.GetKeyID(keyID))
throw runtime_error(
strprintf("%s does not refer to a key",ks.c_str()));
CPubKey vchPubKey;
if (!pwalletMain->GetPubKey(keyID, vchPubKey))
throw runtime_error(
strprintf("no full public key for address %s",ks.c_str()));
if (!vchPubKey.IsFullyValid())
throw runtime_error(" Invalid public key: "+ks);
pubkeys[i] = vchPubKey;
}
// Case 2: hex public key
else if (IsHex(ks))
{
CPubKey vchPubKey(ParseHex(ks));
if (!vchPubKey.IsFullyValid())
throw runtime_error(" Invalid public key: "+ks);
pubkeys[i] = vchPubKey;
}
else
{
throw runtime_error(" Invalid public key: "+ks);
}
}
CScript result;
result.SetMultisig(nRequired, pubkeys);
return result;
}
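// Illustrative input (keys are placeholders): nRequired = 2 with
// ["key1","key2","key3"] yields a standard 2-of-3 redeem script:
//   OP_2 <pubkey1> <pubkey2> <pubkey3> OP_3 OP_CHECKMULTISIG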
Value addmultisigaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2 || params.size() > 3)
{
string msg = "addmultisigaddress <nrequired> <'[\"key\",\"key\"]'> [account]\n"
"Add a nrequired-to-sign multisignature address to the wallet\"\n"
"each key is a Bitcoin address or hex-encoded public key\n"
"If [account] is specified, assign address to [account].";
throw runtime_error(msg);
}
string strAccount;
if (params.size() > 2)
strAccount = AccountFromValue(params[2]);
// Construct using pay-to-script-hash:
CScript inner = _createmultisig(params);
CScriptID innerID = inner.GetID();
pwalletMain->AddCScript(inner);
pwalletMain->SetAddressBook(innerID, strAccount, "send");
return CBitcoinAddress(innerID).ToString();
}
Value createmultisig(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2 || params.size() > 2)
{
string msg = "createmultisig <nrequired> <'[\"key\",\"key\"]'>\n"
"Creates a multi-signature address and returns a json object\n"
"with keys:\n"
"address : bitcoin address\n"
"redeemScript : hex-encoded redemption script";
throw runtime_error(msg);
}
// Construct using pay-to-script-hash:
CScript inner = _createmultisig(params);
CScriptID innerID = inner.GetID();
CBitcoinAddress address(innerID);
Object result;
result.push_back(Pair("address", address.ToString()));
result.push_back(Pair("redeemScript", HexStr(inner.begin(), inner.end())));
return result;
}
struct tallyitem
{
int64 nAmount;
int nConf;
vector<uint256> txids;
tallyitem()
{
nAmount = 0;
nConf = std::numeric_limits<int>::max();
}
};
Value ListReceived(const Array& params, bool fByAccounts)
{
// Minimum confirmations
int nMinDepth = 1;
if (params.size() > 0)
nMinDepth = params[0].get_int();
// Whether to include empty accounts
bool fIncludeEmpty = false;
if (params.size() > 1)
fIncludeEmpty = params[1].get_bool();
// Tally
map<CBitcoinAddress, tallyitem> mapTally;
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
if (wtx.IsCoinBase() || !IsFinalTx(wtx))
continue;
int nDepth = wtx.GetDepthInMainChain();
if (nDepth < nMinDepth)
continue;
BOOST_FOREACH(const CTxOut& txout, wtx.vout)
{
CTxDestination address;
if (!ExtractDestination(txout.scriptPubKey, address) || !IsMine(*pwalletMain, address))
continue;
tallyitem& item = mapTally[address];
item.nAmount += txout.nValue;
item.nConf = min(item.nConf, nDepth);
item.txids.push_back(wtx.GetHash());
}
}
// Reply
Array ret;
map<string, tallyitem> mapAccountTally;
BOOST_FOREACH(const PAIRTYPE(CBitcoinAddress, CAddressBookData)& item, pwalletMain->mapAddressBook)
{
const CBitcoinAddress& address = item.first;
const string& strAccount = item.second.name;
map<CBitcoinAddress, tallyitem>::iterator it = mapTally.find(address);
if (it == mapTally.end() && !fIncludeEmpty)
continue;
int64 nAmount = 0;
int nConf = std::numeric_limits<int>::max();
if (it != mapTally.end())
{
nAmount = (*it).second.nAmount;
nConf = (*it).second.nConf;
}
if (fByAccounts)
{
tallyitem& item = mapAccountTally[strAccount];
item.nAmount += nAmount;
item.nConf = min(item.nConf, nConf);
}
else
{
Object obj;
obj.push_back(Pair("address", address.ToString()));
obj.push_back(Pair("account", strAccount));
obj.push_back(Pair("amount", ValueFromAmount(nAmount)));
obj.push_back(Pair("confirmations", (nConf == std::numeric_limits<int>::max() ? 0 : nConf)));
Array transactions;
if (it != mapTally.end())
{
BOOST_FOREACH(const uint256& item, (*it).second.txids)
{
transactions.push_back(item.GetHex());
}
}
obj.push_back(Pair("txids", transactions));
ret.push_back(obj);
}
}
if (fByAccounts)
{
for (map<string, tallyitem>::iterator it = mapAccountTally.begin(); it != mapAccountTally.end(); ++it)
{
int64 nAmount = (*it).second.nAmount;
int nConf = (*it).second.nConf;
Object obj;
obj.push_back(Pair("account", (*it).first));
obj.push_back(Pair("amount", ValueFromAmount(nAmount)));
obj.push_back(Pair("confirmations", (nConf == std::numeric_limits<int>::max() ? 0 : nConf)));
ret.push_back(obj);
}
}
return ret;
}
Value listreceivedbyaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw runtime_error(
"listreceivedbyaddress [minconf=1] [includeempty=false]\n"
"[minconf] is the minimum number of confirmations before payments are included.\n"
"[includeempty] whether to include addresses that haven't received any payments.\n"
"Returns an array of objects containing:\n"
" \"address\" : receiving address\n"
" \"account\" : the account of the receiving address\n"
" \"amount\" : total amount received by the address\n"
" \"confirmations\" : number of confirmations of the most recent transaction included\n"
" \"txids\" : list of transactions with outputs to the address\n");
return ListReceived(params, false);
}
Value listreceivedbyaccount(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw runtime_error(
"listreceivedbyaccount [minconf=1] [includeempty=false]\n"
"[minconf] is the minimum number of confirmations before payments are included.\n"
"[includeempty] whether to include accounts that haven't received any payments.\n"
"Returns an array of objects containing:\n"
" \"account\" : the account of the receiving addresses\n"
" \"amount\" : total amount received by addresses with this account\n"
" \"confirmations\" : number of confirmations of the most recent transaction included");
return ListReceived(params, true);
}
void ListTransactions(const CWalletTx& wtx, const string& strAccount, int nMinDepth, bool fLong, Array& ret)
{
int64 nFee;
string strSentAccount;
list<pair<CTxDestination, int64> > listReceived;
list<pair<CTxDestination, int64> > listSent;
wtx.GetAmounts(listReceived, listSent, nFee, strSentAccount);
bool fAllAccounts = (strAccount == string("*"));
// Sent
if ((!listSent.empty() || nFee != 0) && (fAllAccounts || strAccount == strSentAccount))
{
BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64)& s, listSent)
{
Object entry;
entry.push_back(Pair("account", strSentAccount));
entry.push_back(Pair("address", CBitcoinAddress(s.first).ToString()));
entry.push_back(Pair("category", "send"));
entry.push_back(Pair("amount", ValueFromAmount(-s.second)));
entry.push_back(Pair("fee", ValueFromAmount(-nFee)));
if (fLong)
WalletTxToJSON(wtx, entry);
ret.push_back(entry);
}
}
// Received
if (listReceived.size() > 0 && wtx.GetDepthInMainChain() >= nMinDepth)
{
BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64)& r, listReceived)
{
string account;
if (pwalletMain->mapAddressBook.count(r.first))
account = pwalletMain->mapAddressBook[r.first].name;
if (fAllAccounts || (account == strAccount))
{
Object entry;
entry.push_back(Pair("account", account));
entry.push_back(Pair("address", CBitcoinAddress(r.first).ToString()));
if (wtx.IsCoinBase())
{
if (wtx.GetDepthInMainChain() < 1)
entry.push_back(Pair("category", "orphan"));
else if (wtx.GetBlocksToMaturity() > 0)
entry.push_back(Pair("category", "immature"));
else
entry.push_back(Pair("category", "generate"));
}
else
entry.push_back(Pair("category", "receive"));
entry.push_back(Pair("amount", ValueFromAmount(r.second)));
if (fLong)
WalletTxToJSON(wtx, entry);
ret.push_back(entry);
}
}
}
}
void AcentryToJSON(const CAccountingEntry& acentry, const string& strAccount, Array& ret)
{
bool fAllAccounts = (strAccount == string("*"));
if (fAllAccounts || acentry.strAccount == strAccount)
{
Object entry;
entry.push_back(Pair("account", acentry.strAccount));
entry.push_back(Pair("category", "move"));
entry.push_back(Pair("time", (boost::int64_t)acentry.nTime));
entry.push_back(Pair("amount", ValueFromAmount(acentry.nCreditDebit)));
entry.push_back(Pair("otheraccount", acentry.strOtherAccount));
entry.push_back(Pair("comment", acentry.strComment));
ret.push_back(entry);
}
}
Value listtransactions(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 3)
throw runtime_error(
"listtransactions [account] [count=10] [from=0]\n"
"Returns up to [count] most recent transactions skipping the first [from] transactions for account [account].");
string strAccount = "*";
if (params.size() > 0)
strAccount = params[0].get_str();
int nCount = 10;
if (params.size() > 1)
nCount = params[1].get_int();
int nFrom = 0;
if (params.size() > 2)
nFrom = params[2].get_int();
if (nCount < 0)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Negative count");
if (nFrom < 0)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Negative from");
Array ret;
std::list<CAccountingEntry> acentries;
CWallet::TxItems txOrdered = pwalletMain->OrderedTxItems(acentries, strAccount);
// iterate backwards until we have nCount items to return:
for (CWallet::TxItems::reverse_iterator it = txOrdered.rbegin(); it != txOrdered.rend(); ++it)
{
CWalletTx *const pwtx = (*it).second.first;
if (pwtx != 0)
ListTransactions(*pwtx, strAccount, 0, true, ret);
CAccountingEntry *const pacentry = (*it).second.second;
if (pacentry != 0)
AcentryToJSON(*pacentry, strAccount, ret);
if ((int)ret.size() >= (nCount+nFrom)) break;
}
// ret is newest to oldest
if (nFrom > (int)ret.size())
nFrom = ret.size();
if ((nFrom + nCount) > (int)ret.size())
nCount = ret.size() - nFrom;
Array::iterator first = ret.begin();
std::advance(first, nFrom);
Array::iterator last = ret.begin();
std::advance(last, nFrom+nCount);
if (last != ret.end()) ret.erase(last, ret.end());
if (first != ret.begin()) ret.erase(ret.begin(), first);
std::reverse(ret.begin(), ret.end()); // Return oldest to newest
return ret;
}
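// Illustrative paging (values are made up): listtransactions "*" 5 10 skips
// the 10 most recent entries, returns the next 5, and the final reverse()
// above presents them oldest-first.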
Value listaccounts(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"listaccounts [minconf=1]\n"
"Returns Object that has account names as keys, account balances as values.");
int nMinDepth = 1;
if (params.size() > 0)
nMinDepth = params[0].get_int();
map<string, int64> mapAccountBalances;
BOOST_FOREACH(const PAIRTYPE(CTxDestination, CAddressBookData)& entry, pwalletMain->mapAddressBook) {
if (IsMine(*pwalletMain, entry.first)) // This address belongs to me
mapAccountBalances[entry.second.name] = 0;
}
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
int64 nFee;
string strSentAccount;
list<pair<CTxDestination, int64> > listReceived;
list<pair<CTxDestination, int64> > listSent;
wtx.GetAmounts(listReceived, listSent, nFee, strSentAccount);
mapAccountBalances[strSentAccount] -= nFee;
BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64)& s, listSent)
mapAccountBalances[strSentAccount] -= s.second;
if (wtx.GetDepthInMainChain() >= nMinDepth)
{
BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64)& r, listReceived)
if (pwalletMain->mapAddressBook.count(r.first))
mapAccountBalances[pwalletMain->mapAddressBook[r.first].name] += r.second;
else
mapAccountBalances[""] += r.second;
}
}
list<CAccountingEntry> acentries;
CWalletDB(pwalletMain->strWalletFile).ListAccountCreditDebit("*", acentries);
BOOST_FOREACH(const CAccountingEntry& entry, acentries)
mapAccountBalances[entry.strAccount] += entry.nCreditDebit;
Object ret;
BOOST_FOREACH(const PAIRTYPE(string, int64)& accountBalance, mapAccountBalances) {
ret.push_back(Pair(accountBalance.first, ValueFromAmount(accountBalance.second)));
}
return ret;
}
<|fim▁hole|>Value listsinceblock(const Array& params, bool fHelp)
{
if (fHelp)
throw runtime_error(
"listsinceblock [blockhash] [target-confirmations]\n"
"Get all wallet transactions in blocks since block [blockhash], or all wallet transactions if omitted");
CBlockIndex *pindex = NULL;
int target_confirms = 1;
if (params.size() > 0)
{
uint256 blockId = 0;
blockId.SetHex(params[0].get_str());
pindex = CBlockLocator(blockId).GetBlockIndex();
}
if (params.size() > 1)
{
target_confirms = params[1].get_int();
if (target_confirms < 1)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter");
}
int depth = pindex ? (1 + nBestHeight - pindex->nHeight) : -1;
Array transactions;
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); it++)
{
CWalletTx tx = (*it).second;
if (depth == -1 || tx.GetDepthInMainChain() < depth)
ListTransactions(tx, "*", 0, true, transactions);
}
uint256 lastblock;
if (target_confirms == 1)
{
lastblock = hashBestChain;
}
else
{
int target_height = pindexBest->nHeight + 1 - target_confirms;
CBlockIndex *block;
for (block = pindexBest;
block && block->nHeight > target_height;
block = block->pprev) { }
lastblock = block ? block->GetBlockHash() : 0;
}
Object ret;
ret.push_back(Pair("transactions", transactions));
ret.push_back(Pair("lastblock", lastblock.GetHex()));
return ret;
}
Value gettransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"gettransaction <txid>\n"
"Get detailed information about in-wallet transaction <txid>");
uint256 hash;
hash.SetHex(params[0].get_str());
Object entry;
if (!pwalletMain->mapWallet.count(hash))
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid or non-wallet transaction id");
const CWalletTx& wtx = pwalletMain->mapWallet[hash];
int64 nCredit = wtx.GetCredit();
int64 nDebit = wtx.GetDebit();
int64 nNet = nCredit - nDebit;
int64 nFee = (wtx.IsFromMe() ? GetValueOut(wtx) - nDebit : 0);
entry.push_back(Pair("amount", ValueFromAmount(nNet - nFee)));
if (wtx.IsFromMe())
entry.push_back(Pair("fee", ValueFromAmount(nFee)));
WalletTxToJSON(wtx, entry);
Array details;
ListTransactions(wtx, "*", 0, false, details);
entry.push_back(Pair("details", details));
return entry;
}
Value backupwallet(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"backupwallet <destination>\n"
"Safely copies wallet.dat to destination, which can be a directory or a path with filename.");
string strDest = params[0].get_str();
if (!BackupWallet(*pwalletMain, strDest))
throw JSONRPCError(RPC_WALLET_ERROR, "Error: Wallet backup failed!");
return Value::null;
}
Value keypoolrefill(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"keypoolrefill [new-size]\n"
"Fills the keypool."
+ HelpRequiringPassphrase());
unsigned int kpSize = max(GetArg("-keypool", 100), 0LL);
if (params.size() > 0) {
if (params[0].get_int() < 0)
            throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, expected valid size");
kpSize = (unsigned int) params[0].get_int();
}
EnsureWalletIsUnlocked();
pwalletMain->TopUpKeyPool(kpSize);
if (pwalletMain->GetKeyPoolSize() < kpSize)
throw JSONRPCError(RPC_WALLET_ERROR, "Error refreshing keypool.");
return Value::null;
}
static void LockWallet(CWallet* pWallet)
{
LOCK(cs_nWalletUnlockTime);
nWalletUnlockTime = 0;
pWallet->Lock();
}
Value walletpassphrase(const Array& params, bool fHelp)
{
if (pwalletMain->IsCrypted() && (fHelp || params.size() != 2))
throw runtime_error(
"walletpassphrase <passphrase> <timeout>\n"
"Stores the wallet decryption key in memory for <timeout> seconds.");
if (fHelp)
return true;
if (!pwalletMain->IsCrypted())
throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an unencrypted wallet, but walletpassphrase was called.");
// Note that the walletpassphrase is stored in params[0] which is not mlock()ed
SecureString strWalletPass;
strWalletPass.reserve(100);
// TODO: get rid of this .c_str() by implementing SecureString::operator=(std::string)
// Alternately, find a way to make params[0] mlock()'d to begin with.
strWalletPass = params[0].get_str().c_str();
if (strWalletPass.length() > 0)
{
if (!pwalletMain->Unlock(strWalletPass))
throw JSONRPCError(RPC_WALLET_PASSPHRASE_INCORRECT, "Error: The wallet passphrase entered was incorrect.");
}
else
throw runtime_error(
"walletpassphrase <passphrase> <timeout>\n"
"Stores the wallet decryption key in memory for <timeout> seconds.");
pwalletMain->TopUpKeyPool();
int64 nSleepTime = params[1].get_int64();
LOCK(cs_nWalletUnlockTime);
nWalletUnlockTime = GetTime() + nSleepTime;
RPCRunLater("lockwallet", boost::bind(LockWallet, pwalletMain), nSleepTime);
return Value::null;
}
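// Illustrative usage (passphrase and timeout are made up):
//   walletpassphrase "my pass phrase" 60
// unlocks the wallet and schedules the LockWallet callback above, via
// RPCRunLater, to re-lock it after 60 seconds.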
Value walletpassphrasechange(const Array& params, bool fHelp)
{
if (pwalletMain->IsCrypted() && (fHelp || params.size() != 2))
throw runtime_error(
"walletpassphrasechange <oldpassphrase> <newpassphrase>\n"
"Changes the wallet passphrase from <oldpassphrase> to <newpassphrase>.");
if (fHelp)
return true;
if (!pwalletMain->IsCrypted())
throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an unencrypted wallet, but walletpassphrasechange was called.");
// TODO: get rid of these .c_str() calls by implementing SecureString::operator=(std::string)
// Alternately, find a way to make params[0] mlock()'d to begin with.
SecureString strOldWalletPass;
strOldWalletPass.reserve(100);
strOldWalletPass = params[0].get_str().c_str();
SecureString strNewWalletPass;
strNewWalletPass.reserve(100);
strNewWalletPass = params[1].get_str().c_str();
if (strOldWalletPass.length() < 1 || strNewWalletPass.length() < 1)
throw runtime_error(
"walletpassphrasechange <oldpassphrase> <newpassphrase>\n"
"Changes the wallet passphrase from <oldpassphrase> to <newpassphrase>.");
if (!pwalletMain->ChangeWalletPassphrase(strOldWalletPass, strNewWalletPass))
throw JSONRPCError(RPC_WALLET_PASSPHRASE_INCORRECT, "Error: The wallet passphrase entered was incorrect.");
return Value::null;
}
Value walletlock(const Array& params, bool fHelp)
{
if (pwalletMain->IsCrypted() && (fHelp || params.size() != 0))
throw runtime_error(
"walletlock\n"
"Removes the wallet encryption key from memory, locking the wallet.\n"
"After calling this method, you will need to call walletpassphrase again\n"
"before being able to call any methods which require the wallet to be unlocked.");
if (fHelp)
return true;
if (!pwalletMain->IsCrypted())
throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an unencrypted wallet, but walletlock was called.");
{
LOCK(cs_nWalletUnlockTime);
pwalletMain->Lock();
nWalletUnlockTime = 0;
}
return Value::null;
}
Value encryptwallet(const Array& params, bool fHelp)
{
if (!pwalletMain->IsCrypted() && (fHelp || params.size() != 1))
throw runtime_error(
"encryptwallet <passphrase>\n"
"Encrypts the wallet with <passphrase>.");
if (fHelp)
return true;
if (pwalletMain->IsCrypted())
throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an encrypted wallet, but encryptwallet was called.");
// TODO: get rid of this .c_str() by implementing SecureString::operator=(std::string)
// Alternately, find a way to make params[0] mlock()'d to begin with.
SecureString strWalletPass;
strWalletPass.reserve(100);
strWalletPass = params[0].get_str().c_str();
if (strWalletPass.length() < 1)
throw runtime_error(
"encryptwallet <passphrase>\n"
"Encrypts the wallet with <passphrase>.");
if (!pwalletMain->EncryptWallet(strWalletPass))
throw JSONRPCError(RPC_WALLET_ENCRYPTION_FAILED, "Error: Failed to encrypt the wallet.");
// BDB seems to have a bad habit of writing old data into
// slack space in .dat files; that is bad if the old data is
// unencrypted private keys. So:
StartShutdown();
return "wallet encrypted; Bitcoin server stopping, restart to run with encrypted wallet. The keypool has been flushed, you need to make a new backup.";
}
class DescribeAddressVisitor : public boost::static_visitor<Object>
{
public:
Object operator()(const CNoDestination &dest) const { return Object(); }
Object operator()(const CKeyID &keyID) const {
Object obj;
CPubKey vchPubKey;
pwalletMain->GetPubKey(keyID, vchPubKey);
obj.push_back(Pair("isscript", false));
obj.push_back(Pair("pubkey", HexStr(vchPubKey)));
obj.push_back(Pair("iscompressed", vchPubKey.IsCompressed()));
return obj;
}
Object operator()(const CScriptID &scriptID) const {
Object obj;
obj.push_back(Pair("isscript", true));
CScript subscript;
pwalletMain->GetCScript(scriptID, subscript);
std::vector<CTxDestination> addresses;
txnouttype whichType;
int nRequired;
ExtractDestinations(subscript, whichType, addresses, nRequired);
obj.push_back(Pair("script", GetTxnOutputType(whichType)));
Array a;
BOOST_FOREACH(const CTxDestination& addr, addresses)
a.push_back(CBitcoinAddress(addr).ToString());
obj.push_back(Pair("addresses", a));
if (whichType == TX_MULTISIG)
obj.push_back(Pair("sigsrequired", nRequired));
return obj;
}
};
Value validateaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"validateaddress <pangubiaddress>\n"
"Return information about <pangubiaddress>.");
CBitcoinAddress address(params[0].get_str());
bool isValid = address.IsValid();
Object ret;
ret.push_back(Pair("isvalid", isValid));
if (isValid)
{
CTxDestination dest = address.Get();
string currentAddress = address.ToString();
ret.push_back(Pair("address", currentAddress));
bool fMine = IsMine(*pwalletMain, dest);
ret.push_back(Pair("ismine", fMine));
if (fMine) {
Object detail = boost::apply_visitor(DescribeAddressVisitor(), dest);
ret.insert(ret.end(), detail.begin(), detail.end());
}
if (pwalletMain->mapAddressBook.count(dest))
ret.push_back(Pair("account", pwalletMain->mapAddressBook[dest].name));
}
return ret;
}
Value lockunspent(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"lockunspent unlock? [array-of-Objects]\n"
"Updates list of temporarily unspendable outputs.");
if (params.size() == 1)
RPCTypeCheck(params, list_of(bool_type));
else
RPCTypeCheck(params, list_of(bool_type)(array_type));
bool fUnlock = params[0].get_bool();
if (params.size() == 1) {
if (fUnlock)
pwalletMain->UnlockAllCoins();
return true;
}
Array outputs = params[1].get_array();
BOOST_FOREACH(Value& output, outputs)
{
if (output.type() != obj_type)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, expected object");
const Object& o = output.get_obj();
RPCTypeCheck(o, map_list_of("txid", str_type)("vout", int_type));
string txid = find_value(o, "txid").get_str();
if (!IsHex(txid))
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, expected hex txid");
int nOutput = find_value(o, "vout").get_int();
if (nOutput < 0)
            throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, vout must be non-negative");
COutPoint outpt(uint256(txid), nOutput);
if (fUnlock)
pwalletMain->UnlockCoin(outpt);
else
pwalletMain->LockCoin(outpt);
}
return true;
}
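// Illustrative requests (txid is a 64-hex-char placeholder):
//   lockunspent false '[{"txid":"<txid>","vout":0}]'  -- locks one output
//   lockunspent true                                   -- unlocks everything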
Value listlockunspent(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 0)
throw runtime_error(
"listlockunspent\n"
"Returns list of temporarily unspendable outputs.");
vector<COutPoint> vOutpts;
pwalletMain->ListLockedCoins(vOutpts);
Array ret;
BOOST_FOREACH(COutPoint &outpt, vOutpts) {
Object o;
o.push_back(Pair("txid", outpt.hash.GetHex()));
o.push_back(Pair("vout", (int)outpt.n));
ret.push_back(o);
}
return ret;
}<|fim▁end|> | |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>import operator
from turbion.bits.antispam import Filter
urlpatterns = reduce(
operator.add,
[filter.urlpatterns for name, filter in Filter.manager.all() if hasattr(filter, 'urlpatterns')],
[]<|fim▁hole|><|fim▁end|> | ) |
<|file_name|>appfwlearningsettings.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class appfwlearningsettings(base_resource) :
""" Configuration for learning settings resource. """
def __init__(self) :
self._profilename = ""
self._starturlminthreshold = 0
self._starturlpercentthreshold = 0
self._cookieconsistencyminthreshold = 0
self._cookieconsistencypercentthreshold = 0
self._csrftagminthreshold = 0
self._csrftagpercentthreshold = 0
self._fieldconsistencyminthreshold = 0
self._fieldconsistencypercentthreshold = 0
self._crosssitescriptingminthreshold = 0
self._crosssitescriptingpercentthreshold = 0
self._sqlinjectionminthreshold = 0
self._sqlinjectionpercentthreshold = 0
self._fieldformatminthreshold = 0
self._fieldformatpercentthreshold = 0
self._xmlwsiminthreshold = 0
self._xmlwsipercentthreshold = 0
self._xmlattachmentminthreshold = 0
self._xmlattachmentpercentthreshold = 0
self.___count = 0
@property
def profilename(self) :
ur"""Name of the profile.<br/>Minimum length = 1.
"""
try :
return self._profilename
except Exception as e:
raise e
@profilename.setter
def profilename(self, profilename) :
ur"""Name of the profile.<br/>Minimum length = 1
"""
try :
self._profilename = profilename
except Exception as e:
raise e
@property
def starturlminthreshold(self) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn start URLs.<br/>Default value: 1<br/>Minimum length = 1.
"""
try :
return self._starturlminthreshold
except Exception as e:
raise e
@starturlminthreshold.setter
def starturlminthreshold(self, starturlminthreshold) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn start URLs.<br/>Default value: 1<br/>Minimum length = 1
"""
try :
self._starturlminthreshold = starturlminthreshold
except Exception as e:
raise e
@property
def starturlpercentthreshold(self) :
ur"""Minimum percentage of application firewall sessions that must contain a particular start URL pattern for the learning engine to learn that start URL.<br/>Default value: 0<br/>Maximum length = 100.<|fim▁hole|> except Exception as e:
raise e
@starturlpercentthreshold.setter
def starturlpercentthreshold(self, starturlpercentthreshold) :
ur"""Minimum percentage of application firewall sessions that must contain a particular start URL pattern for the learning engine to learn that start URL.<br/>Default value: 0<br/>Maximum length = 100
"""
try :
self._starturlpercentthreshold = starturlpercentthreshold
except Exception as e:
raise e
@property
def cookieconsistencyminthreshold(self) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn cookies.<br/>Default value: 1<br/>Minimum length = 1.
"""
try :
return self._cookieconsistencyminthreshold
except Exception as e:
raise e
@cookieconsistencyminthreshold.setter
def cookieconsistencyminthreshold(self, cookieconsistencyminthreshold) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn cookies.<br/>Default value: 1<br/>Minimum length = 1
"""
try :
self._cookieconsistencyminthreshold = cookieconsistencyminthreshold
except Exception as e:
raise e
@property
def cookieconsistencypercentthreshold(self) :
ur"""Minimum percentage of application firewall sessions that must contain a particular cookie pattern for the learning engine to learn that cookie.<br/>Default value: 0<br/>Maximum length = 100.
"""
try :
return self._cookieconsistencypercentthreshold
except Exception as e:
raise e
@cookieconsistencypercentthreshold.setter
def cookieconsistencypercentthreshold(self, cookieconsistencypercentthreshold) :
ur"""Minimum percentage of application firewall sessions that must contain a particular cookie pattern for the learning engine to learn that cookie.<br/>Default value: 0<br/>Maximum length = 100
"""
try :
self._cookieconsistencypercentthreshold = cookieconsistencypercentthreshold
except Exception as e:
raise e
@property
def csrftagminthreshold(self) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn cross-site request forgery (CSRF) tags.<br/>Default value: 1<br/>Minimum length = 1.
"""
try :
return self._csrftagminthreshold
except Exception as e:
raise e
@csrftagminthreshold.setter
def csrftagminthreshold(self, csrftagminthreshold) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn cross-site request forgery (CSRF) tags.<br/>Default value: 1<br/>Minimum length = 1
"""
try :
self._csrftagminthreshold = csrftagminthreshold
except Exception as e:
raise e
@property
def csrftagpercentthreshold(self) :
ur"""Minimum percentage of application firewall sessions that must contain a particular CSRF tag for the learning engine to learn that CSRF tag.<br/>Default value: 0<br/>Maximum length = 100.
"""
try :
return self._csrftagpercentthreshold
except Exception as e:
raise e
@csrftagpercentthreshold.setter
def csrftagpercentthreshold(self, csrftagpercentthreshold) :
ur"""Minimum percentage of application firewall sessions that must contain a particular CSRF tag for the learning engine to learn that CSRF tag.<br/>Default value: 0<br/>Maximum length = 100
"""
try :
self._csrftagpercentthreshold = csrftagpercentthreshold
except Exception as e:
raise e
@property
def fieldconsistencyminthreshold(self) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn field consistency information.<br/>Default value: 1<br/>Minimum length = 1.
"""
try :
return self._fieldconsistencyminthreshold
except Exception as e:
raise e
@fieldconsistencyminthreshold.setter
def fieldconsistencyminthreshold(self, fieldconsistencyminthreshold) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn field consistency information.<br/>Default value: 1<br/>Minimum length = 1
"""
try :
self._fieldconsistencyminthreshold = fieldconsistencyminthreshold
except Exception as e:
raise e
@property
def fieldconsistencypercentthreshold(self) :
ur"""Minimum percentage of application firewall sessions that must contain a particular field consistency pattern for the learning engine to learn that field consistency pattern.<br/>Default value: 0<br/>Maximum length = 100.
"""
try :
return self._fieldconsistencypercentthreshold
except Exception as e:
raise e
@fieldconsistencypercentthreshold.setter
def fieldconsistencypercentthreshold(self, fieldconsistencypercentthreshold) :
ur"""Minimum percentage of application firewall sessions that must contain a particular field consistency pattern for the learning engine to learn that field consistency pattern.<br/>Default value: 0<br/>Maximum length = 100
"""
try :
self._fieldconsistencypercentthreshold = fieldconsistencypercentthreshold
except Exception as e:
raise e
@property
def crosssitescriptingminthreshold(self) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn HTML cross-site scripting patterns.<br/>Default value: 1<br/>Minimum length = 1.
"""
try :
return self._crosssitescriptingminthreshold
except Exception as e:
raise e
@crosssitescriptingminthreshold.setter
def crosssitescriptingminthreshold(self, crosssitescriptingminthreshold) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn HTML cross-site scripting patterns.<br/>Default value: 1<br/>Minimum length = 1
"""
try :
self._crosssitescriptingminthreshold = crosssitescriptingminthreshold
except Exception as e:
raise e
@property
def crosssitescriptingpercentthreshold(self) :
ur"""Minimum percentage of application firewall sessions that must contain a particular cross-site scripting pattern for the learning engine to learn that cross-site scripting pattern.<br/>Default value: 0<br/>Maximum length = 100.
"""
try :
return self._crosssitescriptingpercentthreshold
except Exception as e:
raise e
@crosssitescriptingpercentthreshold.setter
def crosssitescriptingpercentthreshold(self, crosssitescriptingpercentthreshold) :
ur"""Minimum percentage of application firewall sessions that must contain a particular cross-site scripting pattern for the learning engine to learn that cross-site scripting pattern.<br/>Default value: 0<br/>Maximum length = 100
"""
try :
self._crosssitescriptingpercentthreshold = crosssitescriptingpercentthreshold
except Exception as e:
raise e
@property
def sqlinjectionminthreshold(self) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn HTML SQL injection patterns.<br/>Default value: 1<br/>Minimum length = 1.
"""
try :
return self._sqlinjectionminthreshold
except Exception as e:
raise e
@sqlinjectionminthreshold.setter
def sqlinjectionminthreshold(self, sqlinjectionminthreshold) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn HTML SQL injection patterns.<br/>Default value: 1<br/>Minimum length = 1
"""
try :
self._sqlinjectionminthreshold = sqlinjectionminthreshold
except Exception as e:
raise e
@property
def sqlinjectionpercentthreshold(self) :
ur"""Minimum percentage of application firewall sessions that must contain a particular HTML SQL injection pattern for the learning engine to learn that HTML SQL injection pattern.<br/>Default value: 0<br/>Maximum length = 100.
"""
try :
return self._sqlinjectionpercentthreshold
except Exception as e:
raise e
@sqlinjectionpercentthreshold.setter
def sqlinjectionpercentthreshold(self, sqlinjectionpercentthreshold) :
ur"""Minimum percentage of application firewall sessions that must contain a particular HTML SQL injection pattern for the learning engine to learn that HTML SQL injection pattern.<br/>Default value: 0<br/>Maximum length = 100
"""
try :
self._sqlinjectionpercentthreshold = sqlinjectionpercentthreshold
except Exception as e:
raise e
@property
def fieldformatminthreshold(self) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn field formats.<br/>Default value: 1<br/>Minimum length = 1.
"""
try :
return self._fieldformatminthreshold
except Exception as e:
raise e
@fieldformatminthreshold.setter
def fieldformatminthreshold(self, fieldformatminthreshold) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn field formats.<br/>Default value: 1<br/>Minimum length = 1
"""
try :
self._fieldformatminthreshold = fieldformatminthreshold
except Exception as e:
raise e
@property
def fieldformatpercentthreshold(self) :
ur"""Minimum percentage of application firewall sessions that must contain a particular web form field pattern for the learning engine to recommend a field format for that form field.<br/>Default value: 0<br/>Maximum length = 100.
"""
try :
return self._fieldformatpercentthreshold
except Exception as e:
raise e
@fieldformatpercentthreshold.setter
def fieldformatpercentthreshold(self, fieldformatpercentthreshold) :
ur"""Minimum percentage of application firewall sessions that must contain a particular web form field pattern for the learning engine to recommend a field format for that form field.<br/>Default value: 0<br/>Maximum length = 100
"""
try :
self._fieldformatpercentthreshold = fieldformatpercentthreshold
except Exception as e:
raise e
@property
def xmlwsiminthreshold(self) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn web services interoperability (WSI) information.<br/>Default value: 1<br/>Minimum length = 1.
"""
try :
return self._xmlwsiminthreshold
except Exception as e:
raise e
@xmlwsiminthreshold.setter
def xmlwsiminthreshold(self, xmlwsiminthreshold) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn web services interoperability (WSI) information.<br/>Default value: 1<br/>Minimum length = 1
"""
try :
self._xmlwsiminthreshold = xmlwsiminthreshold
except Exception as e:
raise e
@property
def xmlwsipercentthreshold(self) :
ur"""Minimum percentage of application firewall sessions that must contain a particular pattern for the learning engine to learn a web services interoperability (WSI) pattern.<br/>Default value: 0<br/>Maximum length = 100.
"""
try :
return self._xmlwsipercentthreshold
except Exception as e:
raise e
@xmlwsipercentthreshold.setter
def xmlwsipercentthreshold(self, xmlwsipercentthreshold) :
ur"""Minimum percentage of application firewall sessions that must contain a particular pattern for the learning engine to learn a web services interoperability (WSI) pattern.<br/>Default value: 0<br/>Maximum length = 100
"""
try :
self._xmlwsipercentthreshold = xmlwsipercentthreshold
except Exception as e:
raise e
@property
def xmlattachmentminthreshold(self) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn XML attachment patterns.<br/>Default value: 1<br/>Minimum length = 1.
"""
try :
return self._xmlattachmentminthreshold
except Exception as e:
raise e
@xmlattachmentminthreshold.setter
def xmlattachmentminthreshold(self, xmlattachmentminthreshold) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn XML attachment patterns.<br/>Default value: 1<br/>Minimum length = 1
"""
try :
self._xmlattachmentminthreshold = xmlattachmentminthreshold
except Exception as e:
raise e
@property
def xmlattachmentpercentthreshold(self) :
ur"""Minimum percentage of application firewall sessions that must contain a particular XML attachment pattern for the learning engine to learn that XML attachment pattern.<br/>Default value: 0<br/>Maximum length = 100.
"""
try :
return self._xmlattachmentpercentthreshold
except Exception as e:
raise e
@xmlattachmentpercentthreshold.setter
def xmlattachmentpercentthreshold(self, xmlattachmentpercentthreshold) :
ur"""Minimum percentage of application firewall sessions that must contain a particular XML attachment pattern for the learning engine to learn that XML attachment pattern.<br/>Default value: 0<br/>Maximum length = 100
"""
try :
self._xmlattachmentpercentthreshold = xmlattachmentpercentthreshold
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(appfwlearningsettings_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.appfwlearningsettings
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.profilename is not None :
return str(self.profilename)
return None
except Exception as e :
raise e
@classmethod
def update(cls, client, resource) :
ur""" Use this API to update appfwlearningsettings.
"""
try :
if type(resource) is not list :
updateresource = appfwlearningsettings()
updateresource.profilename = resource.profilename
updateresource.starturlminthreshold = resource.starturlminthreshold
updateresource.starturlpercentthreshold = resource.starturlpercentthreshold
updateresource.cookieconsistencyminthreshold = resource.cookieconsistencyminthreshold
updateresource.cookieconsistencypercentthreshold = resource.cookieconsistencypercentthreshold
updateresource.csrftagminthreshold = resource.csrftagminthreshold
updateresource.csrftagpercentthreshold = resource.csrftagpercentthreshold
updateresource.fieldconsistencyminthreshold = resource.fieldconsistencyminthreshold
updateresource.fieldconsistencypercentthreshold = resource.fieldconsistencypercentthreshold
updateresource.crosssitescriptingminthreshold = resource.crosssitescriptingminthreshold
updateresource.crosssitescriptingpercentthreshold = resource.crosssitescriptingpercentthreshold
updateresource.sqlinjectionminthreshold = resource.sqlinjectionminthreshold
updateresource.sqlinjectionpercentthreshold = resource.sqlinjectionpercentthreshold
updateresource.fieldformatminthreshold = resource.fieldformatminthreshold
updateresource.fieldformatpercentthreshold = resource.fieldformatpercentthreshold
updateresource.xmlwsiminthreshold = resource.xmlwsiminthreshold
updateresource.xmlwsipercentthreshold = resource.xmlwsipercentthreshold
updateresource.xmlattachmentminthreshold = resource.xmlattachmentminthreshold
updateresource.xmlattachmentpercentthreshold = resource.xmlattachmentpercentthreshold
return updateresource.update_resource(client)
else :
if (resource and len(resource) > 0) :
updateresources = [ appfwlearningsettings() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].profilename = resource[i].profilename
updateresources[i].starturlminthreshold = resource[i].starturlminthreshold
updateresources[i].starturlpercentthreshold = resource[i].starturlpercentthreshold
updateresources[i].cookieconsistencyminthreshold = resource[i].cookieconsistencyminthreshold
updateresources[i].cookieconsistencypercentthreshold = resource[i].cookieconsistencypercentthreshold
updateresources[i].csrftagminthreshold = resource[i].csrftagminthreshold
updateresources[i].csrftagpercentthreshold = resource[i].csrftagpercentthreshold
updateresources[i].fieldconsistencyminthreshold = resource[i].fieldconsistencyminthreshold
updateresources[i].fieldconsistencypercentthreshold = resource[i].fieldconsistencypercentthreshold
updateresources[i].crosssitescriptingminthreshold = resource[i].crosssitescriptingminthreshold
updateresources[i].crosssitescriptingpercentthreshold = resource[i].crosssitescriptingpercentthreshold
updateresources[i].sqlinjectionminthreshold = resource[i].sqlinjectionminthreshold
updateresources[i].sqlinjectionpercentthreshold = resource[i].sqlinjectionpercentthreshold
updateresources[i].fieldformatminthreshold = resource[i].fieldformatminthreshold
updateresources[i].fieldformatpercentthreshold = resource[i].fieldformatpercentthreshold
updateresources[i].xmlwsiminthreshold = resource[i].xmlwsiminthreshold
updateresources[i].xmlwsipercentthreshold = resource[i].xmlwsipercentthreshold
updateresources[i].xmlattachmentminthreshold = resource[i].xmlattachmentminthreshold
updateresources[i].xmlattachmentpercentthreshold = resource[i].xmlattachmentpercentthreshold
result = cls.update_bulk_request(client, updateresources)
return result
except Exception as e :
raise e
@classmethod
def unset(cls, client, resource, args) :
ur""" Use this API to unset the properties of appfwlearningsettings resource.
Properties that need to be unset are specified in args array.
"""
try :
if type(resource) is not list :
unsetresource = appfwlearningsettings()
if type(resource) != type(unsetresource):
unsetresource.profilename = resource
else :
unsetresource.profilename = resource.profilename
return unsetresource.unset_resource(client, args)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
unsetresources = [ appfwlearningsettings() for _ in range(len(resource))]
for i in range(len(resource)) :
unsetresources[i].profilename = resource[i]
else :
if (resource and len(resource) > 0) :
unsetresources = [ appfwlearningsettings() for _ in range(len(resource))]
for i in range(len(resource)) :
unsetresources[i].profilename = resource[i].profilename
result = cls.unset_bulk_request(client, unsetresources, args)
return result
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
ur""" Use this API to fetch all the appfwlearningsettings resources that are configured on netscaler.
"""
try :
if not name :
obj = appfwlearningsettings()
response = obj.get_resources(client, option_)
else :
if type(name) != cls :
if type(name) is not list :
obj = appfwlearningsettings()
obj.profilename = name
response = obj.get_resource(client, option_)
else :
if name and len(name) > 0 :
response = [appfwlearningsettings() for _ in range(len(name))]
obj = [appfwlearningsettings() for _ in range(len(name))]
for i in range(len(name)) :
obj[i] = appfwlearningsettings()
obj[i].profilename = name[i]
response[i] = obj[i].get_resource(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
ur""" Use this API to fetch filtered set of appfwlearningsettings resources.
Filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
"""
try :
obj = appfwlearningsettings()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
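# Illustrative call (assumes an authenticated nitro_service instance named
# "client"; the filter value is made up):
#   settings = appfwlearningsettings.get_filtered(client, "starturlminthreshold:1")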
@classmethod
def count(cls, client) :
ur""" Use this API to count the appfwlearningsettings resources configured on NetScaler.
"""
try :
obj = appfwlearningsettings()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_) :
ur""" Use this API to count filtered the set of appfwlearningsettings resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = appfwlearningsettings()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
class appfwlearningsettings_response(base_response) :
def __init__(self, length=1) :
self.appfwlearningsettings = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.appfwlearningsettings = [appfwlearningsettings() for _ in range(length)]<|fim▁end|> | """
try :
return self._starturlpercentthreshold |
<|file_name|>dont_promote_unstable_const_fn.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//<|fim▁hole|>// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![unstable(feature = "humans",
reason = "who ever let humans program computers,
we're apparently really bad at it",
issue = "0")]
#![feature(rustc_const_unstable, const_fn)]
#![feature(staged_api)]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature="foo")]
const fn foo() -> u32 { 42 }
fn meh() -> u32 { 42 }
const fn bar() -> u32 { foo() } //~ ERROR `foo` is not yet stable as a const fn
fn a() {
let _: &'static u32 = &foo(); //~ ERROR does not live long enough
}
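// Note (illustrative reading of the expected errors): `&foo()` cannot be
// promoted to a `'static` value because `foo` is a const fn that is still
// unstable, so the borrow only lives for the enclosing scope.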
fn main() {
let _: &'static u32 = &meh(); //~ ERROR does not live long enough
let x: &'static _ = &std::time::Duration::from_millis(42).subsec_millis();
//~^ ERROR does not live long enough
}<|fim▁end|> | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
# Register your models here.
from .models import Environment,EnvironmentAdmin,Component,ComponentAdmin,Environment_property,Environment_propertyAdmin,Component_attribute,Component_attributeAdmin
admin.site.register(Environment,EnvironmentAdmin)<|fim▁hole|>admin.site.register(Environment_property,Environment_propertyAdmin)
admin.site.register(Component_attribute,Component_attributeAdmin)<|fim▁end|> | admin.site.register(Component,ComponentAdmin) |
<|file_name|>profit_and_loss_statement.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _<|fim▁hole|>
def execute(filters=None):
period_list = get_period_list(filters.fiscal_year, filters.periodicity)
income = get_data(filters.company, "Income", "Credit", period_list, ignore_closing_entries=True)
expense = get_data(filters.company, "Expense", "Debit", period_list, ignore_closing_entries=True)
net_profit_loss = get_net_profit_loss(income, expense, period_list, filters.company)
data = []
data.extend(income or [])
data.extend(expense or [])
if net_profit_loss:
data.append(net_profit_loss)
columns = get_columns(period_list, filters.company)
return columns, data
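# Illustrative result shape (period keys depend on get_period_list and the
# chosen periodicity; the names below are made up):
#   columns -> [{"fieldname": "account", ...}, {"fieldname": "mar_2015", ...}]
#   data    -> [{"account_name": "Income", "mar_2015": 1000.0, ...}, ...]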
def get_net_profit_loss(income, expense, period_list, company):
if income and expense:
net_profit_loss = {
"account_name": "'" + _("Net Profit / Loss") + "'",
"account": None,
"warn_if_negative": True,
"currency": frappe.db.get_value("Company", company, "default_currency")
}
for period in period_list:
net_profit_loss[period.key] = flt(income[-2][period.key] - expense[-2][period.key], 3)
return net_profit_loss<|fim▁end|> | from frappe.utils import flt
from erpnext.accounts.report.financial_statements import (get_period_list, get_columns, get_data) |
<|file_name|>series-combo.js<|end_file_name|><|fim▁begin|>YUI.add('series-combo', function (Y, NAME) {
/**
* Provides functionality for creating a combo series.
*
* @module charts
* @submodule series-combo
*/
/**
* The ComboSeries class renders a combination of lines, plots and area fills in a single series.
* Each series type has a corresponding boolean attribute indicating if it is rendered. By default,
* lines and plots are rendered and area is not.
*
* @class ComboSeries
* @extends CartesianSeries
* @uses Fills
* @uses Lines
* @uses Plots
* @constructor
* @param {Object} config (optional) Configuration parameters.
* @submodule series-combo
*/
Y.ComboSeries = Y.Base.create("comboSeries", Y.CartesianSeries, [Y.Fills, Y.Lines, Y.Plots], {
/**
* @protected
*
* Draws the series.
*
* @method drawSeries
*/<|fim▁hole|> {
this.drawFill.apply(this, this._getClosingPoints());
}
if(this.get("showLines"))
{
this.drawLines();
}
if(this.get("showMarkers"))
{
this.drawPlots();
}
},
/**
* Toggles visibility
*
* @method _toggleVisible
     * @param {Boolean} visible indicates visibility
* @private
*/
_toggleVisible: function(visible)
{
var markers,
marker,
len,
i;
if(this.get("showAreaFill") && this._path)
{
this._path.set("visible", visible);
}
if(this.get("showLines") && this._lineGraphic)
{
this._lineGraphic.set("visible", visible);
}
if(this.get("showMarkers"))
{
markers = this.get("markers");
if(markers)
{
i = 0;
len = markers.length;
for(; i < len; ++i)
{
marker = markers[i];
if(marker)
{
marker.set("visible", visible);
}
}
}
}
},
/**
* @protected
*
* Returns the default hash for the `styles` attribute.
*
* @method _getDefaultStyles
* @return Object
*/
_getDefaultStyles: function()
{
var styles = Y.ComboSeries.superclass._getDefaultStyles();
styles.line = this._getLineDefaults();
styles.marker = this._getPlotDefaults();
styles.area = this._getAreaDefaults();
return styles;
}
},
{
ATTRS: {
/**
* Read-only attribute indicating the type of series.
*
* @attribute type
* @type String
* @default combo
*/
type: {
value:"combo"
},
/**
* Indicates whether a fill is displayed.
*
* @attribute showAreaFill
* @type Boolean
* @default false
*/
showAreaFill: {
value: false
},
/**
* Indicates whether lines are displayed.
*
* @attribute showLines
* @type Boolean
* @default true
*/
showLines: {
value: true
},
/**
* Indicates whether markers are displayed.
*
* @attribute showMarkers
* @type Boolean
* @default true
*/
showMarkers: {
value: true
},
/**
* Reference to the styles of the markers. These styles can also
* be accessed through the `styles` attribute. Below are default
* values:
* <dl>
* <dt>fill</dt><dd>A hash containing the following values:
* <dl>
* <dt>color</dt><dd>Color of the fill. The default value is determined by the order of the series on the
* graph. The color will be retrieved from the below array:<br/>
* `["#6084d0", "#eeb647", "#6c6b5f", "#d6484f", "#ce9ed1", "#ff9f3b", "#93b7ff", "#e0ddd0", "#94ecba", "#309687"]`
* </dd>
* <dt>alpha</dt><dd>Number from 0 to 1 indicating the opacity of the marker fill. The default value is 1.</dd>
* </dl>
* </dd>
* <dt>border</dt><dd>A hash containing the following values:
* <dl>
* <dt>color</dt><dd>Color of the border. The default value is determined by the order of the series on the graph.
* The color will be retrieved from the below array:<br/>
* `["#205096", "#b38206", "#000000", "#94001e", "#9d6fa0", "#e55b00", "#5e85c9", "#adab9e", "#6ac291", "#006457"]`
* <dt>alpha</dt><dd>Number from 0 to 1 indicating the opacity of the marker border. The default value is 1.</dd>
* <dt>weight</dt><dd>Number indicating the width of the border. The default value is 1.</dd>
* </dl>
* </dd>
* <dt>width</dt><dd>indicates the width of the marker. The default value is 10.</dd>
* <dt>height</dt><dd>indicates the height of the marker The default value is 10.</dd>
* <dt>over</dt><dd>hash containing styles for markers when highlighted by a `mouseover` event. The default
* values for each style is null. When an over style is not set, the non-over value will be used. For example,
* the default value for `marker.over.fill.color` is equivalent to `marker.fill.color`.</dd>
* </dl>
*
* @attribute marker
* @type Object
*/
marker: {
lazyAdd: false,
getter: function()
{
return this.get("styles").marker;
},
setter: function(val)
{
this.set("styles", {marker:val});
}
},
/**
* Reference to the styles of the lines. These styles can also be accessed through the `styles` attribute.
* Below are the default values:
* <dl>
* <dt>color</dt><dd>The color of the line. The default value is determined by the order of the series on the graph. The color
* will be retrieved from the following array:
* `["#426ab3", "#d09b2c", "#000000", "#b82837", "#b384b5", "#ff7200", "#779de3", "#cbc8ba", "#7ed7a6", "#007a6c"]`
* <dt>weight</dt><dd>Number that indicates the width of the line. The default value is 6.</dd>
* <dt>alpha</dt><dd>Number between 0 and 1 that indicates the opacity of the line. The default value is 1.</dd>
* <dt>lineType</dt><dd>Indicates whether the line is solid or dashed. The default value is solid.</dd>
* <dt>dashLength</dt><dd>When the `lineType` is dashed, indicates the length of the dash. The default value is 10.</dd>
* <dt>gapSpace</dt><dd>When the `lineType` is dashed, indicates the distance between dashes. The default value is 10.</dd>
* <dt>connectDiscontinuousPoints</dt><dd>Indicates whether or not to connect lines when there is a missing or null value
* between points. The default value is true.</dd>
* <dt>discontinuousType</dt><dd>Indicates whether the line between discontinuous points is solid or dashed. The default
* value is solid.</dd>
* <dt>discontinuousDashLength</dt><dd>When the `discontinuousType` is dashed, indicates the length of the dash. The default
* value is 10.</dd>
* <dt>discontinuousGapSpace</dt><dd>When the `discontinuousType` is dashed, indicates the distance between dashes. The default
* value is 10.</dd>
* </dl>
*
* @attribute line
* @type Object
*/
line: {
lazyAdd: false,
getter: function()
{
return this.get("styles").line;
},
setter: function(val)
{
this.set("styles", {line:val});
}
},
/**
* Reference to the styles of the area fills. These styles can also be accessed through the `styles` attribute.
* Below are the default values:
*
* <dl>
* <dt>color</dt><dd>The color of the fill. The default value is determined by the order of the series on the
* graph. The color will be retrieved from the following array:
* `["#66007f", "#a86f41", "#295454", "#996ab2", "#e8cdb7", "#90bdbd","#000000","#c3b8ca", "#968373", "#678585"]`
* </dd>
* <dt>alpha</dt><dd>Number between 0 and 1 that indicates the opacity of the fill. The default value is 1</dd>
* </dl>
*
* @attribute area
* @type Object
*/
area: {
lazyAdd: false,
getter: function()
{
return this.get("styles").area;
},
setter: function(val)
{
this.set("styles", {area:val});
}
}
/**
* Style properties for the series. Contains a key indexed hash of the following:
* <dl>
* <dt>marker</dt><dd>Style properties for the markers in the series. Specific style attributes are listed
* <a href="#attr_marker">here</a>.</dd>
* <dt>line</dt><dd>Style properties for the lines in the series. Specific
* style attributes are listed <a href="#attr_line">here</a>.</dd>
* <dt>area</dt><dd>Style properties for the area fills in the series. Specific style attributes are listed
* <a href="#attr_area">here</a>.</dd>
* </dl>
*
* @attribute styles
* @type Object
*/
}
});
}, '3.14.1', {"requires": ["series-cartesian", "series-line-util", "series-plot-util", "series-fill-util"]});<|fim▁end|> | drawSeries: function()
{
if(this.get("showAreaFill")) |
<|file_name|>DetachStaticIpResultJsonUnmarshaller.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lightsail.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.lightsail.model.*;<|fim▁hole|>
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* DetachStaticIpResult JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DetachStaticIpResultJsonUnmarshaller implements Unmarshaller<DetachStaticIpResult, JsonUnmarshallerContext> {
public DetachStaticIpResult unmarshall(JsonUnmarshallerContext context) throws Exception {
DetachStaticIpResult detachStaticIpResult = new DetachStaticIpResult();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
return detachStaticIpResult;
}
while (true) {
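            // Walk the JSON token stream: copy the "operations" array into
            // the result when it appears, and stop once parsing steps back
            // out of the result object.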
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("operations", targetDepth)) {
context.nextToken();
detachStaticIpResult.setOperations(new ListUnmarshaller<Operation>(OperationJsonUnmarshaller.getInstance()).unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return detachStaticIpResult;
}
private static DetachStaticIpResultJsonUnmarshaller instance;
public static DetachStaticIpResultJsonUnmarshaller getInstance() {
if (instance == null)
instance = new DetachStaticIpResultJsonUnmarshaller();
return instance;
}
}<|fim▁end|> | import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*; |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># coding=utf-8
import datetime
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from django.core.signing import Signer
from djorm_pgfulltext.models import SearchManager
from djorm_pgfulltext.fields import VectorField
from urllib import quote_plus
from django.utils.translation import ugettext_lazy as _
class Category(models.Model):
name = models.CharField(max_length=255)
def __unicode__(self):
return self.name
class Meta:
ordering = ["name"]
verbose_name_plural = _("categories")
class Submission(models.Model):
def user_display_name(self):
return self.voter.user_display_name()
category = models.ForeignKey(Category)
idea = models.TextField(verbose_name=_('Question'))
headline = models.TextField(null=True, blank=True)
followup = models.TextField(null=True, blank=True)
citation = models.URLField(null=True, blank=True, db_index=True,
verbose_name=_("Optional link to full proposal or reference"))
citation_verified = models.BooleanField(default=False, db_index=True)
voter = models.ForeignKey("Voter")
created_at = models.DateTimeField(db_index=True)
ip_address = models.CharField(max_length=255, db_index=True)
editors_pick = models.BooleanField(default=False)<|fim▁hole|> has_duplicates = models.BooleanField(default=False)
duplicate_of = models.ForeignKey('opendebates.Submission', null=True, blank=True,
related_name="duplicates")
votes = models.IntegerField(default=0, db_index=True)
score = models.FloatField(default=0, db_index=True)
rank = models.FloatField(default=0, db_index=True)
random_id = models.FloatField(default=0, db_index=True)
search_index = VectorField()
keywords = models.TextField(null=True, blank=True)
objects = SearchManager(fields=["idea", "keywords"],
auto_update_search_field=True)
source = models.CharField(max_length=255, null=True, blank=True)
def get_recent_votes(self):
timespan = datetime.datetime.now() - datetime.timedelta(1)
return Vote.objects.filter(submission=self, created_at__gte=timespan).count()
def get_duplicates(self):
if not self.has_duplicates:
return None
return Submission.objects.select_related(
"voter", "category", "voter__user").filter(
approved=True, duplicate_of=self)
def __unicode__(self):
return self.idea
@models.permalink
def get_absolute_url(self):
return "vote", [self.id]
def my_tweet_text(self):
return _(u"Vote for my progressive idea for @ThinkBigUS #BigIdeasProject. 30 leaders in Congress will see top ideas!")
def tweet_text(self):
text = _(u"Let's make sure 30 leaders in Congress see this #BigIdea about %(category_name)s - please vote and RT!" % {"category_name": quote_plus(self.category.name)})
if self.voter.twitter_handle:
text += u" h/t @%s" % self.voter.twitter_handle
return text
def facebook_text(self):
if len(self.idea) > 240:
return self.idea[:240] + u'…'
return self.idea
def facebook_url(self):
return u"https://www.facebook.com/sharer/sharer.php?&u=%(idea_url)s" % {
"idea_url": quote_plus(self.really_absolute_url()),
}
def really_absolute_url(self):
return settings.SITE_DOMAIN_WITH_PROTOCOL + self.get_absolute_url()
def email_subject_text(self):
return _("Vote+for+my+Big+Idea!")
def email_body_text(self):
return _("I+posted+an+idea+on+The+Big+Ideas+Project+--+30+members+of+Congress+will+see+the+top+20+ideas!+Please+click+here+to+see+it+and+vote+on+my+idea+--+and+share+it+with+your+friends!")
def email_url(self):
return u"mailto:?subject=%s&body=%s" % (self.email_subject_text(), self.email_body_text(), self.really_absolute_url())
def twitter_url(self):
return u"https://twitter.com/intent/tweet?url=%(SITE_DOMAIN)s%(idea_url)s&text=%(tweet_text)s" % {
"SITE_DOMAIN": quote_plus(settings.SITE_DOMAIN_WITH_PROTOCOL),
"idea_url": quote_plus(self.get_absolute_url()),
"tweet_text": quote_plus(self.tweet_text()),
}
class ZipCode(models.Model):
zip = models.CharField(max_length=10, unique=True)
city = models.CharField(max_length=255, null=True, blank=True)
state = models.CharField(max_length=255, null=True, blank=True)
class Voter(models.Model):
def user_display_name(self):
voter = self
if voter.display_name:
return voter.display_name
if not voter.user:
name = _(u"Somebody")
else:
user = voter.user
name = u"%s" % user.first_name
if user.last_name:
name = u"%s %s." % (name, user.last_name[0])
if not name or not name.strip():
name = _(u"Somebody")
if voter.state:
name = _(u"%(name)s from %(state)s" % {"name": name, "state": voter.state})
return name
email = models.EmailField(unique=True)
zip = models.CharField(max_length=10, db_index=True)
state = models.CharField(max_length=255, null=True, blank=True)
user = models.OneToOneField(User, null=True, blank=True, related_name="voter")
source = models.CharField(max_length=255, null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
display_name = models.CharField(max_length=255, null=True, blank=True)
twitter_handle = models.CharField(max_length=255, null=True, blank=True)
unsubscribed = models.BooleanField(default=False)
def __unicode__(self):
return self.email
def account_token(self):
return Voter.make_account_token(self.email)
@classmethod
def make_account_token(cls, email):
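        """Sign the voter's email with Django's Signer.

        The token has the form "<email>:<signature>" (the signature is an
        HMAC derived from SECRET_KEY), so it can later be verified with
        Signer().unsign().
        """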
signer = Signer()
value = signer.sign(email)
return value
class Vote(models.Model):
submission = models.ForeignKey(Submission)
voter = models.ForeignKey(Voter)
ip_address = models.CharField(max_length=255, db_index=True)
request_headers = models.TextField(null=True, blank=True)
original_merged_submission = models.ForeignKey(Submission, null=True, blank=True,
related_name="votes_merged_elsewhere")
class Meta:
unique_together = [("submission", "voter")]
created_at = models.DateTimeField(db_index=True)
source = models.CharField(max_length=255, null=True, blank=True)
class Candidate(models.Model):
first_name = models.CharField(max_length=255, null=True, blank=True)
last_name = models.CharField(max_length=255, null=True, blank=True)
current_title = models.CharField(max_length=255, null=True, blank=True)
bio = models.TextField(default='', null=True, blank=True)
website = models.URLField(null=True, blank=True, db_index=True)
facebook = models.URLField(null=True, blank=True, db_index=True)
twitter_handle = models.CharField(max_length=16, null=True, blank=True)
display_name = models.CharField(max_length=255, null=True, blank=True,
help_text=_("Defaults to first_name last_name."))
created_at = models.DateTimeField(auto_now_add=True)
def save(self, *args, **kwargs):
if not self.display_name:
self.display_name = u'{0} {1}'.format(self.first_name, self.last_name)
super(Candidate, self).save(*args, **kwargs)
def __unicode__(self):
return self.display_name
from djangohelpers.lib import register_admin
register_admin(Category)
register_admin(Submission)
register_admin(Voter)
register_admin(Vote)
register_admin(Candidate)<|fim▁end|> | approved = models.BooleanField(default=False) |
<|file_name|>0009_auto_20160828_1131.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-08-28 18:31<|fim▁hole|>
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('benchmark', '0008_benchmarkdefinition_commit_keyword_updated'),
]
operations = [
migrations.AlterField(
model_name='benchmarkexecutionentry',
name='status',
field=models.IntegerField(choices=[(0, b'Ready'), (1, b'In_Progress'), (2, b'Finished'), (3, b'Finished_With_Errors')], default=0),
),
]<|fim▁end|> | from __future__ import unicode_literals |
<|file_name|>units.rs<|end_file_name|><|fim▁begin|>// Lumol, an extensible molecular simulation engine
// Copyright (C) Lumol's contributors — BSD license
//! This module allows converting from and to the internal unit system.
//!
//! Internal units are:
//!
//! - Angstrom (A) for distances;
//! - femtosecond (fs) for time;
//! - Unified atomic mass unit (u or Da) for mass;
//! - Kelvin (K) for temperature;
//! - Number of particles for quantity of matter;
//! - radian (rad) for angles;
//!
//! Other units are derived from these primitive units. For example, the
//! internal unit for energy is 1e-4 kJ/mol.
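//!
//! # Example
//!
//! A minimal sketch of the conversion helpers defined below, mirroring the
//! doc-tests on `from` and `to`:
//!
//! ```
//! use lumol_core::units;
//! // 1 nm is 10 internal length units (angstroms)...
//! assert_eq!(units::from(1.0, "nm").unwrap(), 10.0);
//! // ...and internal values convert back the same way.
//! assert_eq!(units::to(10.0, "nm").unwrap(), 1.0);
//! ```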
use std::error::Error;
use std::fmt;
use std::num;
// Using a separated module because lazy_static does not support pub(crate)
mod detail {
use consts::{BOHR_RADIUS, NA};
use std::collections::BTreeMap;
use std::f64::consts::PI;
// Atomic mass unit in kg
const U_IN_KG: f64 = 1.660538782e-27;
/// Get the conversion factors from a string unit to the internal units.
lazy_static!{
pub static ref FACTORS: BTreeMap<&'static str, f64> = {
let mut map = BTreeMap::new();
// Distances units.
assert!(map.insert("A", 1.0).is_none());
assert!(map.insert("nm", 10.0).is_none());
assert!(map.insert("pm", 1e-2).is_none());
assert!(map.insert("fm", 1e-5).is_none());
assert!(map.insert("m", 1e10).is_none());
assert!(map.insert("bohr", BOHR_RADIUS).is_none());
// Time units.
assert!(map.insert("fs", 1.0).is_none());
assert!(map.insert("ps", 1e3).is_none());
assert!(map.insert("ns", 1e6).is_none());
// Mass units.
assert!(map.insert("u", 1.0).is_none());
assert!(map.insert("Da", 1.0).is_none());
assert!(map.insert("kDa", 1.0).is_none());
assert!(map.insert("g", 1e-3 / U_IN_KG).is_none());
assert!(map.insert("kg", 1.0 / U_IN_KG).is_none());
// Temperature units.
assert!(map.insert("K", 1.0).is_none());
// Quantity of matter units.
assert!(map.insert("mol", NA).is_none());
// Angle units.
assert!(map.insert("rad", 1.0).is_none());
assert!(map.insert("deg", PI / 180.0).is_none());
// Energy units.
assert!(map.insert("J", 1e-10 / U_IN_KG).is_none());
assert!(map.insert("kJ", 1e-7 / U_IN_KG).is_none());
assert!(map.insert("kcal", 4.184 * 1e-7 / U_IN_KG).is_none());
assert!(map.insert("eV", 1.60217653e-19 * 1e-10 / U_IN_KG).is_none());
assert!(map.insert("H", 4.35974417e-18 * 1e-10 / U_IN_KG).is_none());
assert!(map.insert("Ry", 4.35974417e-18 / 2.0 * 1e-10 / U_IN_KG).is_none());
// Force unit.
assert!(map.insert("N", 1e-20 / U_IN_KG).is_none());
// Pressure units.
assert!(map.insert("Pa", 1e-40 / U_IN_KG).is_none());
assert!(map.insert("kPa", 1e-37 / U_IN_KG).is_none());
assert!(map.insert("MPa", 1e-34 / U_IN_KG).is_none());
assert!(map.insert("bar", 1e-35 / U_IN_KG).is_none());
assert!(map.insert("atm", 101325.0 * 1e-40 / U_IN_KG).is_none());
return map;
};
}
}
pub(crate) use self::detail::FACTORS;
/// Possible error causes when parsing a unit string.
#[derive(Debug)]
pub enum ParseError {
/// Error while parsing a power in `x^y` expressions
Power(num::ParseIntError),
    /// Error while parsing the value part of a unit string
Value(num::ParseFloatError),
/// Parentheses are not balanced in this unit
ParenthesesMismatch,
/// This unit was not found
NotFound {
/// The unit that created this error
unit: String,
},
/// Any other error
MalformedExpr(String),
}
impl From<num::ParseIntError> for ParseError {
fn from(err: num::ParseIntError) -> ParseError {
ParseError::Power(err)
}
}
impl From<num::ParseFloatError> for ParseError {
fn from(err: num::ParseFloatError) -> ParseError {
ParseError::Value(err)
}
}
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ParseError::Power(ref err) => err.fmt(f),
ParseError::Value(ref err) => err.fmt(f),
            ParseError::ParenthesesMismatch => write!(f, "Parentheses are not balanced."),
ParseError::NotFound { ref unit } => write!(f, "Unit '{}' not found.", unit),
ParseError::MalformedExpr(ref err) => write!(f, "Malformed expression: {}", err),
}
}
}
impl Error for ParseError {
fn description(&self) -> &str {
match *self {
ParseError::Power(ref err) => err.description(),
ParseError::Value(ref err) => err.description(),
ParseError::ParenthesesMismatch => "Parentheses are not equilibrated.",
ParseError::NotFound { .. } => "Unit not found.",
ParseError::MalformedExpr(..) => "Malformed expression",
}
}
}
/// Possible tokens in unit strings
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Token {
/// Left parentheses
LParen,
/// Right parentheses
RParen,
/// '*' token
Mul,
/// '/' token
Div,
/// '^' token
Pow,
/// Any other whitespaces separated value
Value(String),
}
impl Token {
/// What is the precedence of a specific token
fn precedence(&self) -> usize {
match *self {
Token::LParen | Token::RParen => 0,
Token::Div | Token::Mul => 10,
Token::Pow => 20,
Token::Value(..) => internal_error!("invalid call to UnitTok::precedence for values"),
}
}
/// Get the string used to build this token in tokenize
fn as_str(&self) -> &str {
match *self {
Token::LParen => "(",
Token::RParen => ")",
Token::Div => "/",
Token::Mul => "*",
Token::Pow => "^",
Token::Value(ref value) => value,
}
}
}
/// Transform a string to a stream of tokens
fn tokenize(unit: &str) -> Vec<Token> {
let mut tokens = Vec::new();
let mut token = String::new();
for c in unit.chars() {
match c {
'*' | '/' | '^' | '(' | ')' => {
if !token.is_empty() {
tokens.push(Token::Value(token.clone()));
token.clear();
}
match c {
'*' => tokens.push(Token::Mul),
'/' => tokens.push(Token::Div),
'^' => tokens.push(Token::Pow),
'(' => tokens.push(Token::LParen),
')' => tokens.push(Token::RParen),
_ => internal_error!("invalid unit operator"),
}
}
other if !other.is_whitespace() => {
token.push(other);
}
_ => assert!(c.is_whitespace()),
}
}
// Last token
if !token.is_empty() {
tokens.push(Token::Value(token));
}
return tokens;
}
static MISSING_OPERATOR: &'static str = "Oops, sorry explorer, but you fell \
into a space-time hole. We are missing an operator here";
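// Worked example (illustrative, not from the original source): for the
// input "kJ/mol/deg^2", `tokenize` yields [kJ, /, mol, /, deg, ^, 2];
// `shunting_yard` reorders it into the reverse-polish stream
// [kJ, mol, /, deg, 2, ^, /]; and `read_expr` then folds that into
// Div(Div(kJ, mol), Pow(deg, 2)) for evaluation.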
/// Create the AST for unit expression using the Shunting-Yard algorithm.<|fim▁hole|>/// See /// https://en.wikipedia.org/wiki/Shunting-yard_algorithm for a
/// description of the algorithm.
#[allow(trivial_casts)]
fn shunting_yard(tokens: Vec<Token>) -> Result<Vec<Token>, ParseError> {
let mut operators = Vec::new();
let mut output = Vec::new();
for token in tokens {
match token {
Token::Value(..) => output.push(token),
Token::Mul | Token::Div | Token::Pow => {
while !operators.is_empty() {
// The cast is useless here, but rustc can't figure out
// the type of the expression after the call to `expect`
let top_operator =
(operators.last().expect(MISSING_OPERATOR) as &Token).clone();
// All the operators are left-associative
if token.precedence() <= top_operator.precedence() {
output.push(operators.pop().expect(MISSING_OPERATOR));
} else {
break;
}
}
operators.push(token);
}
Token::LParen => operators.push(token),
Token::RParen => {
while !operators.is_empty() && operators.last() != Some(&Token::LParen) {
output.push(operators.pop().expect(MISSING_OPERATOR))
}
if operators.is_empty() || operators.last() != Some(&Token::LParen) {
return Err(ParseError::ParenthesesMismatch);
} else {
let _ = operators.pop();
}
}
}
}
while !operators.is_empty() {
match *operators.last().expect(MISSING_OPERATOR) {
Token::LParen | Token::RParen => return Err(ParseError::ParenthesesMismatch),
_ => output.push(operators.pop().expect(MISSING_OPERATOR)),
}
}
return Ok(output);
}
/// Possible members in unit expressions
#[derive(Debug, PartialEq)]
enum UnitExpr {
/// A single value
Val(f64),
/// Multiplication of left-hand side by right-hand side
Mul(Box<UnitExpr>, Box<UnitExpr>),
/// Division of left-hand side by right-hand side
Div(Box<UnitExpr>, Box<UnitExpr>),
/// Take the power of the expr by the `i32` value
Pow(Box<UnitExpr>, i32),
}
impl UnitExpr {
/// Recursively evaluate an unit expression
fn eval(&self) -> f64 {
match *self {
UnitExpr::Val(v) => v,
UnitExpr::Mul(ref lhs, ref rhs) => lhs.eval() * rhs.eval(),
UnitExpr::Div(ref lhs, ref rhs) => lhs.eval() / rhs.eval(),
UnitExpr::Pow(ref expr, pow) => expr.eval().powi(pow),
}
}
/// Parse a string, and generate the corresponding unit expression
fn parse(unit: &str) -> Result<UnitExpr, ParseError> {
let tokens = tokenize(unit);
let mut stream = try!(shunting_yard(tokens));
let ast = try!(read_expr(&mut stream));
if stream.is_empty() {
Ok(ast)
} else {
let remaining = stream.iter().map(|t| t.as_str()).collect::<Vec<_>>().join(" ");
return Err(ParseError::MalformedExpr(
format!("remaining values after the end of the unit: {}", remaining),
));
}
}
}
/// Read and pop (recursively) a single expression from the `stream`.
/// The `stream` must be in reverse polish notation.
fn read_expr(stream: &mut Vec<Token>) -> Result<UnitExpr, ParseError> {
if let Some(token) = stream.pop() {
match token {
Token::Value(unit) => {
match FACTORS.get(&*unit) {
Some(&value) => Ok(UnitExpr::Val(value)),
None => Err(ParseError::NotFound { unit: unit }),
}
}
Token::Mul => {
let rhs = try!(read_expr(stream).map_err(|err| {
ParseError::MalformedExpr(format!("Error in unit at the right of '*': {}", err))
}));
let lhs = try!(read_expr(stream).map_err(|err| {
ParseError::MalformedExpr(format!("Error in unit at the left of '*': {}", err))
}));
Ok(UnitExpr::Mul(Box::new(lhs), Box::new(rhs)))
}
Token::Div => {
let rhs = try!(read_expr(stream).map_err(|err| {
ParseError::MalformedExpr(format!("Error in unit at the right of '/': {}", err))
}));
let lhs = try!(read_expr(stream).map_err(|err| {
ParseError::MalformedExpr(format!("Error in unit at the left of '/': {}", err))
}));
Ok(UnitExpr::Div(Box::new(lhs), Box::new(rhs)))
}
Token::Pow => {
let pow = match stream.pop() {
Some(pow) => {
match pow {
Token::Value(value) => try!(value.parse()),
_ => {
return Err(ParseError::MalformedExpr(
format!("Invalid value after ^: {}", pow.as_str()),
))
}
}
}
None => {
return Err(
ParseError::MalformedExpr(String::from("Missing value after '^'")),
)
}
};
let expr = try!(read_expr(stream).map_err(|err| {
ParseError::MalformedExpr(format!("Error in unit at the left of '*': {}", err))
}));
Ok(UnitExpr::Pow(Box::new(expr), pow))
}
Token::LParen | Token::RParen => {
internal_error!("there should not be any parenthese here")
}
}
} else {
Err(ParseError::MalformedExpr(String::from("missing a value")))
}
}
/// Convert the numeric value `val` from the unit `unit` to the internal unit.
///
/// ```
/// use lumol_core::units;
/// let internal = units::from(10.0, "A").unwrap();
/// assert!(internal == 10.0);
/// ```
pub fn from(value: f64, unit: &str) -> Result<f64, ParseError> {
let unit = try!(UnitExpr::parse(unit));
return Ok(unit.eval() * value);
}
/// Parse the string `val` and convert it to the corresponding internal unit
///
/// ```
/// use lumol_core::units;
/// let internal = units::from_str("10 A").unwrap();
/// assert!(internal == 10.0);
/// ```
pub fn from_str(value: &str) -> Result<f64, ParseError> {
let unit = value.split_whitespace().skip(1).collect::<Vec<&str>>().join(" ");
let unit = if unit.is_empty() {
UnitExpr::Val(1.0)
} else {
try!(UnitExpr::parse(&unit))
};
let value = value.split_whitespace().take(1).collect::<Vec<&str>>()[0];
let value = try!(value.parse::<f64>());
return Ok(unit.eval() * value);
}
/// Convert the numeric value `val` (in internal units) to the unit `unit`.
///
/// ```
/// use lumol_core::units;
/// let real = units::to(10.0, "A").unwrap();
/// assert!(real == 10.0);
/// ```
pub fn to(value: f64, unit: &str) -> Result<f64, ParseError> {
let unit = try!(UnitExpr::parse(unit));
return Ok(value / unit.eval());
}
#[cfg(test)]
mod test {
use super::*;
use super::{Token, UnitExpr};
use super::{shunting_yard, tokenize};
#[test]
fn tokens() {
assert_eq!(tokenize("(")[0], Token::LParen);
assert_eq!(tokenize(")")[0], Token::RParen);
assert_eq!(tokenize("*")[0], Token::Mul);
assert_eq!(tokenize("/")[0], Token::Div);
assert_eq!(tokenize("^")[0], Token::Pow);
assert_eq!(tokenize("foo")[0], Token::Value(String::from("foo")));
assert_eq!(tokenize("45")[0], Token::Value(String::from("45")));
assert_eq!(tokenize("(bar/m").len(), 4);
assert_eq!(tokenize(" ( bar\t/\n m").len(), 4);
}
fn ast_str(unit: &str) -> Result<String, ParseError> {
let tokens = tokenize(unit);
let ast = try!(shunting_yard(tokens));
return Ok(ast.iter().map(|t| t.as_str()).collect::<Vec<_>>().join(" "));
}
#[test]
fn ast() {
assert_eq!(ast_str("").unwrap(), "");
assert_eq!(ast_str("()").unwrap(), "");
assert_eq!(ast_str("foo").unwrap(), "foo");
assert_eq!(ast_str("foo*bar").unwrap(), "foo bar *");
assert_eq!(ast_str("foo / bar").unwrap(), "foo bar /");
assert_eq!(ast_str("foo^4").unwrap(), "foo 4 ^");
assert_eq!(ast_str("bar/foo ^ 4").unwrap(), "bar foo 4 ^ /");
assert_eq!(ast_str("k*bar /foo^ 4").unwrap(), "k bar * foo 4 ^ /");
}
#[test]
fn ast_errors() {
assert!(ast_str("(").is_err());
assert!(ast_str(")").is_err());
assert!(ast_str("(bar/m").is_err());
assert!(ast_str("m/K)").is_err());
}
#[test]
fn eval() {
assert_eq!(UnitExpr::parse("A").unwrap(), UnitExpr::Val(1.0));
assert_eq!(UnitExpr::parse("nm").unwrap(), UnitExpr::Val(10.0));
assert_eq!(UnitExpr::parse("bohr/fs").unwrap().eval(), 0.52917720859);
assert_eq!(UnitExpr::parse("(Ry / rad^-3 )").unwrap().eval(), 0.13127498789124938);
assert_eq!(UnitExpr::parse("bar/(m * fs^2)").unwrap().eval(), 6.0221417942167636e-19);
assert_eq!(UnitExpr::parse("kJ/mol/deg^2").unwrap().eval(), 0.3282806352310398);
assert_ulps_eq!(UnitExpr::parse("kcal/mol/A^2").unwrap().eval(), 4.184e-4, epsilon = 1e-9);
}
#[test]
    fn parsing_errors() {
assert!(UnitExpr::parse("m^4-8").is_err());
assert!(UnitExpr::parse("foo ^ bar").is_err());
assert!(UnitExpr::parse("m^z4").is_err());
assert!(UnitExpr::parse("HJK").is_err());
}
#[test]
fn unit_from_str() {
assert_eq!(from_str("10.0 A").unwrap(), 10.0);
assert_eq!(from_str("10 A").unwrap(), 10.0);
assert_eq!(from_str("1e1 A").unwrap(), 10.0);
assert_eq!(from_str("10").unwrap(), 10.0);
assert!(from_str("10a.0 bar").is_err());
assert!(from_str("h10").is_err());
}
#[test]
fn unit_to() {
assert_eq!(to(25.0, "m").unwrap(), 2.5e-9);
assert_eq!(to(25.0, "bar").unwrap(), 4.1513469550000005e9);
assert_eq!(to(25.0, "kJ/mol").unwrap(), 249999.99982494753);
}
}<|fim▁end|> | /// |
<|file_name|>ng_module_integration_spec.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ANALYZE_FOR_ENTRY_COMPONENTS, CUSTOM_ELEMENTS_SCHEMA, Compiler, Component, ComponentFactoryResolver, Directive, HostBinding, Inject, Injectable, InjectionToken, Injector, Input, NgModule, NgModuleRef, Optional, Pipe, Provider, Self, Type, forwardRef, getModuleFactory} from '@angular/core';
import {Console} from '@angular/core/src/console';
import {ComponentFixture, TestBed, inject} from '@angular/core/testing';
import {expect} from '@angular/platform-browser/testing/src/matchers';
import {InternalNgModuleRef} from '../../src/linker/ng_module_factory';
import {clearModulesForTest} from '../../src/linker/ng_module_factory_loader';
import {stringify} from '../../src/util';
class Engine {}
class BrokenEngine {
constructor() { throw new Error('Broken Engine'); }
}
class DashboardSoftware {}
@Injectable()
class Dashboard {
constructor(software: DashboardSoftware) {}
}
class TurboEngine extends Engine {}
const CARS = new InjectionToken<Car[]>('Cars');
@Injectable()
class Car {
engine: Engine;
constructor(engine: Engine) { this.engine = engine; }
}
@Injectable()
class CarWithOptionalEngine {
engine: Engine;
constructor(@Optional() engine: Engine) { this.engine = engine; }
}
@Injectable()
class CarWithDashboard {
engine: Engine;
dashboard: Dashboard;
constructor(engine: Engine, dashboard: Dashboard) {
this.engine = engine;
this.dashboard = dashboard;
}
}
@Injectable()
class SportsCar extends Car {
engine: Engine;
constructor(engine: Engine) { super(engine); }
}
@Injectable()
class CarWithInject {
engine: Engine;
constructor(@Inject(TurboEngine) engine: Engine) { this.engine = engine; }
}
@Injectable()
class CyclicEngine {
constructor(car: Car) {}
}
class NoAnnotations {
constructor(secretDependency: any) {}
}
function factoryFn(a: any) {}
@Component({selector: 'comp', template: ''})
class SomeComp {
}
@Directive({selector: '[someDir]'})
class SomeDirective {
@HostBinding('title') @Input()
someDir: string;
}
@Pipe({name: 'somePipe'})
class SomePipe {
transform(value: string): any { return `transformed ${value}`; }
}
@Component({selector: 'comp', template: `<div [someDir]="'someValue' | somePipe"></div>`})
class CompUsingModuleDirectiveAndPipe {
}
class DummyConsole implements Console {
public warnings: string[] = [];
log(message: string) {}
warn(message: string) { this.warnings.push(message); }
}
export function main() {
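  // Run the whole suite twice: once with the JIT compiler and once with
  // useJit disabled, so that both compilation paths stay covered.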
describe('jit', () => { declareTests({useJit: true}); });
describe('no jit', () => { declareTests({useJit: false}); });
}
function declareTests({useJit}: {useJit: boolean}) {
describe('NgModule', () => {
let compiler: Compiler;
let injector: Injector;
let console: DummyConsole;
beforeEach(() => {
console = new DummyConsole();
TestBed.configureCompiler(
{useJit: useJit, providers: [{provide: Console, useValue: console}]});
});
beforeEach(inject([Compiler, Injector], (_compiler: Compiler, _injector: Injector) => {
compiler = _compiler;
injector = _injector;
}));
function createModule<T>(
moduleType: Type<T>, parentInjector?: Injector | null): NgModuleRef<T> {
return compiler.compileModuleSync(moduleType).create(parentInjector || null);
}
function createComp<T>(compType: Type<T>, moduleType: Type<any>): ComponentFixture<T> {
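      // Helper: compile the module, resolve the component's factory through
      // that module's own resolver, and wrap the instance in a fixture.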
const ngModule = createModule(moduleType, injector);
const cf = ngModule.componentFactoryResolver.resolveComponentFactory(compType) !;
const comp = cf.create(Injector.NULL);
return new ComponentFixture(comp, null !, false);
}
describe('errors', () => {
it('should error when exporting a directive that was neither declared nor imported', () => {
@NgModule({exports: [SomeDirective]})
class SomeModule {
}
expect(() => createModule(SomeModule))
.toThrowError(
`Can't export directive ${stringify(SomeDirective)} from ${stringify(SomeModule)} as it was neither declared nor imported!`);
});
it('should error when exporting a pipe that was neither declared nor imported', () => {
@NgModule({exports: [SomePipe]})
class SomeModule {
}
expect(() => createModule(SomeModule))
.toThrowError(
`Can't export pipe ${stringify(SomePipe)} from ${stringify(SomeModule)} as it was neither declared nor imported!`);
});
it('should error if a directive is declared in more than 1 module', () => {
@NgModule({declarations: [SomeDirective]})
class Module1 {
}
@NgModule({declarations: [SomeDirective]})
class Module2 {
}
createModule(Module1);
expect(() => createModule(Module2))
.toThrowError(
`Type ${stringify(SomeDirective)} is part of the declarations of 2 modules: ${stringify(Module1)} and ${stringify(Module2)}! ` +
`Please consider moving ${stringify(SomeDirective)} to a higher module that imports ${stringify(Module1)} and ${stringify(Module2)}. ` +
`You can also create a new NgModule that exports and includes ${stringify(SomeDirective)} then import that NgModule in ${stringify(Module1)} and ${stringify(Module2)}.`);
});
      it('should error if a directive is declared in more than 1 module even if the module declaring it is imported',
() => {
@NgModule({declarations: [SomeDirective], exports: [SomeDirective]})
class Module1 {
}
@NgModule({declarations: [SomeDirective], imports: [Module1]})
class Module2 {
}
expect(() => createModule(Module2))
.toThrowError(
`Type ${stringify(SomeDirective)} is part of the declarations of 2 modules: ${stringify(Module1)} and ${stringify(Module2)}! ` +
`Please consider moving ${stringify(SomeDirective)} to a higher module that imports ${stringify(Module1)} and ${stringify(Module2)}. ` +
`You can also create a new NgModule that exports and includes ${stringify(SomeDirective)} then import that NgModule in ${stringify(Module1)} and ${stringify(Module2)}.`);
});
it('should error if a pipe is declared in more than 1 module', () => {
@NgModule({declarations: [SomePipe]})
class Module1 {
}
@NgModule({declarations: [SomePipe]})
class Module2 {
}
createModule(Module1);
expect(() => createModule(Module2))
.toThrowError(
`Type ${stringify(SomePipe)} is part of the declarations of 2 modules: ${stringify(Module1)} and ${stringify(Module2)}! ` +
`Please consider moving ${stringify(SomePipe)} to a higher module that imports ${stringify(Module1)} and ${stringify(Module2)}. ` +
`You can also create a new NgModule that exports and includes ${stringify(SomePipe)} then import that NgModule in ${stringify(Module1)} and ${stringify(Module2)}.`);
});
      it('should error if a pipe is declared in more than 1 module even if the module declaring it is imported',
() => {
@NgModule({declarations: [SomePipe], exports: [SomePipe]})
class Module1 {
}
@NgModule({declarations: [SomePipe], imports: [Module1]})
class Module2 {
}
expect(() => createModule(Module2))
.toThrowError(
`Type ${stringify(SomePipe)} is part of the declarations of 2 modules: ${stringify(Module1)} and ${stringify(Module2)}! ` +
`Please consider moving ${stringify(SomePipe)} to a higher module that imports ${stringify(Module1)} and ${stringify(Module2)}. ` +
`You can also create a new NgModule that exports and includes ${stringify(SomePipe)} then import that NgModule in ${stringify(Module1)} and ${stringify(Module2)}.`);
});
});
describe('schemas', () => {
it('should error on unknown bound properties on custom elements by default', () => {
@Component({template: '<some-element [someUnknownProp]="true"></some-element>'})
class ComponentUsingInvalidProperty {
}
@NgModule({declarations: [ComponentUsingInvalidProperty]})
class SomeModule {
}
expect(() => createModule(SomeModule)).toThrowError(/Can't bind to 'someUnknownProp'/);
});
it('should not error on unknown bound properties on custom elements when using the CUSTOM_ELEMENTS_SCHEMA',
() => {
@Component({template: '<some-element [someUnknownProp]="true"></some-element>'})
class ComponentUsingInvalidProperty {
}
@NgModule(
{schemas: [CUSTOM_ELEMENTS_SCHEMA], declarations: [ComponentUsingInvalidProperty]})
class SomeModule {
}
expect(() => createModule(SomeModule)).not.toThrow();
});
});
describe('id', () => {
const token = 'myid';
@NgModule({id: token})
class SomeModule {
}
@NgModule({id: token})
class SomeOtherModule {
}
afterEach(() => clearModulesForTest());
it('should register loaded modules', () => {
createModule(SomeModule);
const factory = getModuleFactory(token);
expect(factory).toBeTruthy();
expect(factory.moduleType).toBe(SomeModule);
});
it('should throw when registering a duplicate module', () => {
createModule(SomeModule);
expect(() => createModule(SomeOtherModule)).toThrowError(/Duplicate module registered/);
});
});
describe('entryComponents', () => {
it('should create ComponentFactories in root modules', () => {
@NgModule({declarations: [SomeComp], entryComponents: [SomeComp]})
class SomeModule {
}
const ngModule = createModule(SomeModule);
expect(ngModule.componentFactoryResolver.resolveComponentFactory(SomeComp) !.componentType)
.toBe(SomeComp);
expect(ngModule.injector.get(ComponentFactoryResolver)
.resolveComponentFactory(SomeComp)
.componentType)
.toBe(SomeComp);
});
it('should throw if we cannot find a module associated with a module-level entryComponent', () => {
@Component({template: ''})
class SomeCompWithEntryComponents {
}
@NgModule({declarations: [], entryComponents: [SomeCompWithEntryComponents]})
class SomeModule {
}
expect(() => createModule(SomeModule))
.toThrowError(
'Component SomeCompWithEntryComponents is not part of any NgModule or the module has not been imported into your module.');
});
it('should throw if we cannot find a module associated with a component-level entryComponent',
() => {
@Component({template: '', entryComponents: [SomeComp]})
class SomeCompWithEntryComponents {
}
@NgModule({declarations: [SomeCompWithEntryComponents]})
class SomeModule {
}
expect(() => createModule(SomeModule))
.toThrowError(
'Component SomeComp is not part of any NgModule or the module has not been imported into your module.');
});
it('should create ComponentFactories via ANALYZE_FOR_ENTRY_COMPONENTS', () => {
@NgModule({
declarations: [SomeComp],
providers: [{
provide: ANALYZE_FOR_ENTRY_COMPONENTS,
multi: true,
useValue: [{a: 'b', component: SomeComp}]
}]
})
class SomeModule {
}
const ngModule = createModule(SomeModule);
expect(ngModule.componentFactoryResolver.resolveComponentFactory(SomeComp) !.componentType)
.toBe(SomeComp);
expect(ngModule.injector.get(ComponentFactoryResolver)
.resolveComponentFactory(SomeComp)
.componentType)
.toBe(SomeComp);
});
it('should create ComponentFactories in imported modules', () => {
@NgModule({declarations: [SomeComp], entryComponents: [SomeComp]})
class SomeImportedModule {
}
@NgModule({imports: [SomeImportedModule]})
class SomeModule {
}
const ngModule = createModule(SomeModule);
expect(ngModule.componentFactoryResolver.resolveComponentFactory(SomeComp) !.componentType)
.toBe(SomeComp);
expect(ngModule.injector.get(ComponentFactoryResolver)
.resolveComponentFactory(SomeComp)
.componentType)
.toBe(SomeComp);
});
it('should create ComponentFactories if the component was imported', () => {
@NgModule({declarations: [SomeComp], exports: [SomeComp]})
class SomeImportedModule {
}
@NgModule({imports: [SomeImportedModule], entryComponents: [SomeComp]})
class SomeModule {
}
const ngModule = createModule(SomeModule);
expect(ngModule.componentFactoryResolver.resolveComponentFactory(SomeComp) !.componentType)
.toBe(SomeComp);
expect(ngModule.injector.get(ComponentFactoryResolver)
.resolveComponentFactory(SomeComp)
.componentType)
.toBe(SomeComp);
});
});
describe('bootstrap components', () => {
it('should create ComponentFactories', () => {
@NgModule({declarations: [SomeComp], bootstrap: [SomeComp]})
class SomeModule {
}
const ngModule = createModule(SomeModule);
expect(ngModule.componentFactoryResolver.resolveComponentFactory(SomeComp) !.componentType)
.toBe(SomeComp);
});
it('should store the ComponentFactories in the NgModuleInjector', () => {
@NgModule({declarations: [SomeComp], bootstrap: [SomeComp]})
class SomeModule {
}
const ngModule = <InternalNgModuleRef<any>>createModule(SomeModule);
expect(ngModule._bootstrapComponents.length).toBe(1);
expect(ngModule._bootstrapComponents[0]).toBe(SomeComp);
});
});
describe('directives and pipes', () => {
describe('declarations', () => {
it('should be supported in root modules', () => {
@NgModule({
declarations: [CompUsingModuleDirectiveAndPipe, SomeDirective, SomePipe],
entryComponents: [CompUsingModuleDirectiveAndPipe]
})
class SomeModule {
}
const compFixture = createComp(CompUsingModuleDirectiveAndPipe, SomeModule);
compFixture.detectChanges();
expect(compFixture.debugElement.children[0].properties['title'])
.toBe('transformed someValue');
});
it('should be supported in imported modules', () => {
@NgModule({
declarations: [CompUsingModuleDirectiveAndPipe, SomeDirective, SomePipe],
entryComponents: [CompUsingModuleDirectiveAndPipe]
})
class SomeImportedModule {
}
@NgModule({imports: [SomeImportedModule]})
class SomeModule {
}
const compFixture = createComp(CompUsingModuleDirectiveAndPipe, SomeModule);
compFixture.detectChanges();
expect(compFixture.debugElement.children[0].properties['title'])
.toBe('transformed someValue');
});
it('should be supported in nested components', () => {
@Component({
selector: 'parent',
template: '<comp></comp>',
})
class ParentCompUsingModuleDirectiveAndPipe {
}
@NgModule({
declarations: [
ParentCompUsingModuleDirectiveAndPipe, CompUsingModuleDirectiveAndPipe, SomeDirective,
SomePipe
],
entryComponents: [ParentCompUsingModuleDirectiveAndPipe]
})
class SomeModule {
}
const compFixture = createComp(ParentCompUsingModuleDirectiveAndPipe, SomeModule);
compFixture.detectChanges();
expect(compFixture.debugElement.children[0].children[0].properties['title'])
.toBe('transformed someValue');
});
});
describe('import/export', () => {
it('should support exported directives and pipes', () => {
@NgModule({declarations: [SomeDirective, SomePipe], exports: [SomeDirective, SomePipe]})
class SomeImportedModule {
}
@NgModule({
declarations: [CompUsingModuleDirectiveAndPipe],
imports: [SomeImportedModule],
entryComponents: [CompUsingModuleDirectiveAndPipe]
})
class SomeModule {
}
const compFixture = createComp(CompUsingModuleDirectiveAndPipe, SomeModule);
compFixture.detectChanges();
expect(compFixture.debugElement.children[0].properties['title'])
.toBe('transformed someValue');
});
it('should support exported directives and pipes if the module is wrapped into an `ModuleWithProviders`',
() => {
@NgModule(
{declarations: [SomeDirective, SomePipe], exports: [SomeDirective, SomePipe]})
class SomeImportedModule {
}
@NgModule({
declarations: [CompUsingModuleDirectiveAndPipe],
imports: [{ngModule: SomeImportedModule}],
entryComponents: [CompUsingModuleDirectiveAndPipe]
})
class SomeModule {
}
const compFixture = createComp(CompUsingModuleDirectiveAndPipe, SomeModule);
compFixture.detectChanges();
expect(compFixture.debugElement.children[0].properties['title'])
.toBe('transformed someValue');
});
it('should support reexported modules', () => {
@NgModule({declarations: [SomeDirective, SomePipe], exports: [SomeDirective, SomePipe]})
class SomeReexportedModule {
}
@NgModule({exports: [SomeReexportedModule]})
class SomeImportedModule {
}
@NgModule({
declarations: [CompUsingModuleDirectiveAndPipe],
imports: [SomeImportedModule],
entryComponents: [CompUsingModuleDirectiveAndPipe]
})
class SomeModule {
}
const compFixture = createComp(CompUsingModuleDirectiveAndPipe, SomeModule);
compFixture.detectChanges();
expect(compFixture.debugElement.children[0].properties['title'])
.toBe('transformed someValue');
});
it('should support exporting individual directives of an imported module', () => {
@NgModule({declarations: [SomeDirective, SomePipe], exports: [SomeDirective, SomePipe]})
class SomeReexportedModule {
}
@NgModule({imports: [SomeReexportedModule], exports: [SomeDirective, SomePipe]})
class SomeImportedModule {
}
@NgModule({
declarations: [CompUsingModuleDirectiveAndPipe],
imports: [SomeImportedModule],
entryComponents: [CompUsingModuleDirectiveAndPipe]
})
class SomeModule {
}
const compFixture = createComp(CompUsingModuleDirectiveAndPipe, SomeModule);
compFixture.detectChanges();
expect(compFixture.debugElement.children[0].properties['title'])
.toBe('transformed someValue');
});
it('should not use non exported pipes of an imported module', () => {
@NgModule({
declarations: [SomePipe],
})
class SomeImportedModule {
}
@NgModule({
declarations: [CompUsingModuleDirectiveAndPipe],
imports: [SomeImportedModule],
entryComponents: [CompUsingModuleDirectiveAndPipe]
})
class SomeModule {
}
expect(() => createComp(SomeComp, SomeModule))
.toThrowError(/The pipe 'somePipe' could not be found/);
});
it('should not use non exported directives of an imported module', () => {
@NgModule({
declarations: [SomeDirective],
})
class SomeImportedModule {
}
@NgModule({
declarations: [CompUsingModuleDirectiveAndPipe, SomePipe],
imports: [SomeImportedModule],
entryComponents: [CompUsingModuleDirectiveAndPipe]
})
class SomeModule {
}
expect(() => createComp(SomeComp, SomeModule)).toThrowError(/Can't bind to 'someDir'/);
});
});
});
describe('providers', function() {
let moduleType: any = null;
function createInjector(providers: Provider[], parent?: Injector | null): Injector {
@NgModule({providers: providers})
class SomeModule {
}
moduleType = SomeModule;
return createModule(SomeModule, parent).injector;
}
<|fim▁hole|> () => { expect(createInjector([]).get(moduleType)).toBeAnInstanceOf(moduleType); });
it('should instantiate a class without dependencies', () => {
const injector = createInjector([Engine]);
const engine = injector.get(Engine);
expect(engine).toBeAnInstanceOf(Engine);
});
it('should resolve dependencies based on type information', () => {
const injector = createInjector([Engine, Car]);
const car = injector.get(Car);
expect(car).toBeAnInstanceOf(Car);
expect(car.engine).toBeAnInstanceOf(Engine);
});
it('should resolve dependencies based on @Inject annotation', () => {
const injector = createInjector([TurboEngine, Engine, CarWithInject]);
const car = injector.get(CarWithInject);
expect(car).toBeAnInstanceOf(CarWithInject);
expect(car.engine).toBeAnInstanceOf(TurboEngine);
});
it('should throw when no type and not @Inject (class case)', () => {
expect(() => createInjector([NoAnnotations]))
.toThrowError('Can\'t resolve all parameters for NoAnnotations: (?).');
});
it('should throw when no type and not @Inject (factory case)', () => {
expect(() => createInjector([{provide: 'someToken', useFactory: factoryFn}]))
.toThrowError('Can\'t resolve all parameters for factoryFn: (?).');
});
it('should cache instances', () => {
const injector = createInjector([Engine]);
const e1 = injector.get(Engine);
const e2 = injector.get(Engine);
expect(e1).toBe(e2);
});
it('should provide to a value', () => {
const injector = createInjector([{provide: Engine, useValue: 'fake engine'}]);
const engine = injector.get(Engine);
expect(engine).toEqual('fake engine');
});
it('should provide to a factory', () => {
function sportsCarFactory(e: Engine) { return new SportsCar(e); }
const injector =
createInjector([Engine, {provide: Car, useFactory: sportsCarFactory, deps: [Engine]}]);
const car = injector.get(Car);
expect(car).toBeAnInstanceOf(SportsCar);
expect(car.engine).toBeAnInstanceOf(Engine);
});
      it('should support providing null', () => {
const injector = createInjector([{provide: Engine, useValue: null}]);
const engine = injector.get(Engine);
expect(engine).toBeNull();
});
it('should provide to an alias', () => {
const injector = createInjector([
Engine, {provide: SportsCar, useClass: SportsCar},
{provide: Car, useExisting: SportsCar}
]);
const car = injector.get(Car);
const sportsCar = injector.get(SportsCar);
expect(car).toBeAnInstanceOf(SportsCar);
expect(car).toBe(sportsCar);
});
it('should support multiProviders', () => {
const injector = createInjector([
Engine, {provide: CARS, useClass: SportsCar, multi: true},
{provide: CARS, useClass: CarWithOptionalEngine, multi: true}
]);
const cars = injector.get(CARS);
expect(cars.length).toEqual(2);
expect(cars[0]).toBeAnInstanceOf(SportsCar);
expect(cars[1]).toBeAnInstanceOf(CarWithOptionalEngine);
});
it('should support multiProviders that are created using useExisting', () => {
const injector = createInjector(
[Engine, SportsCar, {provide: CARS, useExisting: SportsCar, multi: true}]);
const cars = injector.get(CARS);
expect(cars.length).toEqual(1);
expect(cars[0]).toBe(injector.get(SportsCar));
});
it('should throw when the aliased provider does not exist', () => {
const injector = createInjector([{provide: 'car', useExisting: SportsCar}]);
const e = `No provider for ${stringify(SportsCar)}!`;
expect(() => injector.get('car')).toThrowError(e);
});
it('should handle forwardRef in useExisting', () => {
const injector = createInjector([
{provide: 'originalEngine', useClass: forwardRef(() => Engine)},
{provide: 'aliasedEngine', useExisting: <any>forwardRef(() => 'originalEngine')}
]);
expect(injector.get('aliasedEngine')).toBeAnInstanceOf(Engine);
});
it('should support overriding factory dependencies', () => {
const injector = createInjector(
[Engine, {provide: Car, useFactory: (e: Engine) => new SportsCar(e), deps: [Engine]}]);
const car = injector.get(Car);
expect(car).toBeAnInstanceOf(SportsCar);
expect(car.engine).toBeAnInstanceOf(Engine);
});
it('should support optional dependencies', () => {
const injector = createInjector([CarWithOptionalEngine]);
const car = injector.get(CarWithOptionalEngine);
expect(car.engine).toEqual(null);
});
it('should flatten passed-in providers', () => {
const injector = createInjector([[[Engine, Car]]]);
const car = injector.get(Car);
expect(car).toBeAnInstanceOf(Car);
});
it('should use the last provider when there are multiple providers for same token', () => {
const injector = createInjector(
[{provide: Engine, useClass: Engine}, {provide: Engine, useClass: TurboEngine}]);
expect(injector.get(Engine)).toBeAnInstanceOf(TurboEngine);
});
it('should use non-type tokens', () => {
const injector = createInjector([{provide: 'token', useValue: 'value'}]);
expect(injector.get('token')).toEqual('value');
});
it('should throw when given invalid providers', () => {
expect(() => createInjector(<any>['blah']))
.toThrowError(
`Invalid provider for the NgModule 'SomeModule' - only instances of Provider and Type are allowed, got: [?blah?]`);
});
it('should throw when given blank providers', () => {
expect(() => createInjector(<any>[null, {provide: 'token', useValue: 'value'}]))
.toThrowError(
`Invalid provider for the NgModule 'SomeModule' - only instances of Provider and Type are allowed, got: [?null?, ...]`);
});
it('should provide itself', () => {
const parent = createInjector([]);
const child = createInjector([], parent);
expect(child.get(Injector)).toBe(child);
});
describe('injecting lazy providers into an eager provider via Injector.get', () => {
it('should inject providers that were declared before it', () => {
@NgModule({
providers: [
{provide: 'lazy', useFactory: () => 'lazyValue'},
{
provide: 'eager',
useFactory: (i: Injector) => `eagerValue: ${i.get('lazy')}`,
deps: [Injector]
},
]
})
class MyModule {
// NgModule is eager, which makes all of its deps eager
constructor(@Inject('eager') eager: any) {}
}
expect(createModule(MyModule).injector.get('eager')).toBe('eagerValue: lazyValue');
});
it('should inject providers that were declared after it', () => {
@NgModule({
providers: [
{
provide: 'eager',
useFactory: (i: Injector) => `eagerValue: ${i.get('lazy')}`,
deps: [Injector]
},
{provide: 'lazy', useFactory: () => 'lazyValue'},
]
})
class MyModule {
// NgModule is eager, which makes all of its deps eager
constructor(@Inject('eager') eager: any) {}
}
expect(createModule(MyModule).injector.get('eager')).toBe('eagerValue: lazyValue');
});
});
it('should throw when no provider defined', () => {
const injector = createInjector([]);
expect(() => injector.get('NonExisting')).toThrowError('No provider for NonExisting!');
});
it('should throw when trying to instantiate a cyclic dependency', () => {
expect(() => createInjector([Car, {provide: Engine, useClass: CyclicEngine}]))
.toThrowError(/Cannot instantiate cyclic dependency! Car/g);
});
it('should support null values', () => {
const injector = createInjector([{provide: 'null', useValue: null}]);
expect(injector.get('null')).toBe(null);
});
describe('child', () => {
it('should load instances from parent injector', () => {
const parent = createInjector([Engine]);
const child = createInjector([], parent);
const engineFromParent = parent.get(Engine);
const engineFromChild = child.get(Engine);
expect(engineFromChild).toBe(engineFromParent);
});
it('should not use the child providers when resolving the dependencies of a parent provider',
() => {
const parent = createInjector([Car, Engine]);
const child = createInjector([{provide: Engine, useClass: TurboEngine}], parent);
const carFromChild = child.get(Car);
expect(carFromChild.engine).toBeAnInstanceOf(Engine);
});
it('should create new instance in a child injector', () => {
const parent = createInjector([Engine]);
const child = createInjector([{provide: Engine, useClass: TurboEngine}], parent);
const engineFromParent = parent.get(Engine);
const engineFromChild = child.get(Engine);
expect(engineFromParent).not.toBe(engineFromChild);
expect(engineFromChild).toBeAnInstanceOf(TurboEngine);
});
});
      describe('dependency resolution', () => {
describe('@Self()', () => {
it('should return a dependency from self', () => {
const inj = createInjector([
Engine,
{provide: Car, useFactory: (e: Engine) => new Car(e), deps: [[Engine, new Self()]]}
]);
expect(inj.get(Car)).toBeAnInstanceOf(Car);
});
      it('should throw when the requested provider is not available on self', () => {
expect(() => createInjector([{
provide: Car,
useFactory: (e: Engine) => new Car(e),
deps: [[Engine, new Self()]]
}]))
.toThrowError(/No provider for Engine/g);
});
});
describe('default', () => {
it('should not skip self', () => {
const parent = createInjector([Engine]);
const child = createInjector(
[
{provide: Engine, useClass: TurboEngine},
{provide: Car, useFactory: (e: Engine) => new Car(e), deps: [Engine]}
],
parent);
expect(child.get(Car).engine).toBeAnInstanceOf(TurboEngine);
});
});
});
describe('lifecycle', () => {
it('should instantiate modules eagerly', () => {
let created = false;
@NgModule()
class ImportedModule {
constructor() { created = true; }
}
@NgModule({imports: [ImportedModule]})
class SomeModule {
}
createModule(SomeModule);
expect(created).toBe(true);
});
it('should instantiate providers that are not used by a module lazily', () => {
let created = false;
createInjector([{
provide: 'someToken',
useFactory: () => {
created = true;
return true;
}
}]);
expect(created).toBe(false);
});
it('should support ngOnDestroy on any provider', () => {
let destroyed = false;
class SomeInjectable {
ngOnDestroy() { destroyed = true; }
}
@NgModule({providers: [SomeInjectable]})
class SomeModule {
// Inject SomeInjectable to make it eager...
constructor(i: SomeInjectable) {}
}
const moduleRef = createModule(SomeModule);
expect(destroyed).toBe(false);
moduleRef.destroy();
expect(destroyed).toBe(true);
});
it('should support ngOnDestroy for lazy providers', () => {
let created = false;
let destroyed = false;
class SomeInjectable {
constructor() { created = true; }
ngOnDestroy() { destroyed = true; }
}
@NgModule({providers: [SomeInjectable]})
class SomeModule {
}
let moduleRef = createModule(SomeModule);
expect(created).toBe(false);
expect(destroyed).toBe(false);
// no error if the provider was not yet created
moduleRef.destroy();
expect(created).toBe(false);
expect(destroyed).toBe(false);
moduleRef = createModule(SomeModule);
moduleRef.injector.get(SomeInjectable);
expect(created).toBe(true);
moduleRef.destroy();
expect(destroyed).toBe(true);
});
});
describe('imported and exported modules', () => {
it('should add the providers of imported modules', () => {
@NgModule({providers: [{provide: 'token1', useValue: 'imported'}]})
class ImportedModule {
}
@NgModule({imports: [ImportedModule]})
class SomeModule {
}
const injector = createModule(SomeModule).injector;
expect(injector.get(SomeModule)).toBeAnInstanceOf(SomeModule);
expect(injector.get(ImportedModule)).toBeAnInstanceOf(ImportedModule);
expect(injector.get('token1')).toBe('imported');
});
it('should add the providers of imported ModuleWithProviders', () => {
@NgModule()
class ImportedModule {
}
@NgModule({
imports: [
{ngModule: ImportedModule, providers: [{provide: 'token1', useValue: 'imported'}]}
]
})
class SomeModule {
}
const injector = createModule(SomeModule).injector;
expect(injector.get(SomeModule)).toBeAnInstanceOf(SomeModule);
expect(injector.get(ImportedModule)).toBeAnInstanceOf(ImportedModule);
expect(injector.get('token1')).toBe('imported');
});
it('should overwrite the providers of imported modules', () => {
@NgModule({providers: [{provide: 'token1', useValue: 'imported'}]})
class ImportedModule {
}
@NgModule(
{providers: [{provide: 'token1', useValue: 'direct'}], imports: [ImportedModule]})
class SomeModule {
}
const injector = createModule(SomeModule).injector;
expect(injector.get('token1')).toBe('direct');
});
it('should overwrite the providers of imported ModuleWithProviders', () => {
@NgModule()
class ImportedModule {
}
@NgModule({
providers: [{provide: 'token1', useValue: 'direct'}],
imports: [
{ngModule: ImportedModule, providers: [{provide: 'token1', useValue: 'imported'}]}
]
})
class SomeModule {
}
const injector = createModule(SomeModule).injector;
expect(injector.get('token1')).toBe('direct');
});
it('should overwrite the providers of imported modules on the second import level', () => {
@NgModule({providers: [{provide: 'token1', useValue: 'imported'}]})
class ImportedModuleLevel2 {
}
@NgModule({
providers: [{provide: 'token1', useValue: 'direct'}],
imports: [ImportedModuleLevel2]
})
class ImportedModuleLevel1 {
}
@NgModule({imports: [ImportedModuleLevel1]})
class SomeModule {
}
const injector = createModule(SomeModule).injector;
expect(injector.get('token1')).toBe('direct');
});
it('should add the providers of exported modules', () => {
@NgModule({providers: [{provide: 'token1', useValue: 'exported'}]})
class ExportedValue {
}
@NgModule({exports: [ExportedValue]})
class SomeModule {
}
const injector = createModule(SomeModule).injector;
expect(injector.get(SomeModule)).toBeAnInstanceOf(SomeModule);
expect(injector.get(ExportedValue)).toBeAnInstanceOf(ExportedValue);
expect(injector.get('token1')).toBe('exported');
});
it('should overwrite the providers of exported modules', () => {
@NgModule({providers: [{provide: 'token1', useValue: 'exported'}]})
class ExportedModule {
}
@NgModule(
{providers: [{provide: 'token1', useValue: 'direct'}], exports: [ExportedModule]})
class SomeModule {
}
const injector = createModule(SomeModule).injector;
expect(injector.get('token1')).toBe('direct');
});
it('should overwrite the providers of imported modules by following imported modules',
() => {
@NgModule({providers: [{provide: 'token1', useValue: 'imported1'}]})
class ImportedModule1 {
}
@NgModule({providers: [{provide: 'token1', useValue: 'imported2'}]})
class ImportedModule2 {
}
@NgModule({imports: [ImportedModule1, ImportedModule2]})
class SomeModule {
}
const injector = createModule(SomeModule).injector;
expect(injector.get('token1')).toBe('imported2');
});
it('should overwrite the providers of exported modules by following exported modules',
() => {
@NgModule({providers: [{provide: 'token1', useValue: 'exported1'}]})
class ExportedModule1 {
}
@NgModule({providers: [{provide: 'token1', useValue: 'exported2'}]})
class ExportedModule2 {
}
@NgModule({exports: [ExportedModule1, ExportedModule2]})
class SomeModule {
}
const injector = createModule(SomeModule).injector;
expect(injector.get('token1')).toBe('exported2');
});
it('should overwrite the providers of imported modules by exported modules', () => {
@NgModule({providers: [{provide: 'token1', useValue: 'imported'}]})
class ImportedModule {
}
@NgModule({providers: [{provide: 'token1', useValue: 'exported'}]})
class ExportedModule {
}
@NgModule({imports: [ImportedModule], exports: [ExportedModule]})
class SomeModule {
}
const injector = createModule(SomeModule).injector;
expect(injector.get('token1')).toBe('exported');
});
it('should not overwrite the providers if a module was already used on the same level',
() => {
@NgModule({providers: [{provide: 'token1', useValue: 'imported1'}]})
class ImportedModule1 {
}
@NgModule({providers: [{provide: 'token1', useValue: 'imported2'}]})
class ImportedModule2 {
}
@NgModule({imports: [ImportedModule1, ImportedModule2, ImportedModule1]})
class SomeModule {
}
const injector = createModule(SomeModule).injector;
expect(injector.get('token1')).toBe('imported2');
});
it('should not overwrite the providers if a module was already used on a child level',
() => {
@NgModule({providers: [{provide: 'token1', useValue: 'imported1'}]})
class ImportedModule1 {
}
@NgModule({imports: [ImportedModule1]})
class ImportedModule3 {
}
@NgModule({providers: [{provide: 'token1', useValue: 'imported2'}]})
class ImportedModule2 {
}
@NgModule({imports: [ImportedModule3, ImportedModule2, ImportedModule1]})
class SomeModule {
}
const injector = createModule(SomeModule).injector;
expect(injector.get('token1')).toBe('imported2');
});
it('should throw when given invalid providers in an imported ModuleWithProviders', () => {
@NgModule()
class ImportedModule1 {
}
@NgModule({imports: [{ngModule: ImportedModule1, providers: [<any>'broken']}]})
class SomeModule {
}
expect(() => createModule(SomeModule).injector)
.toThrowError(
`Invalid provider for the NgModule 'ImportedModule1' - only instances of Provider and Type are allowed, got: [?broken?]`);
});
});
});
});
}<|fim▁end|> | it('should provide the module', |
<|file_name|>wifi_data_provider.cc<|end_file_name|><|fim▁begin|>// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "services/device/geolocation/wifi_data_provider.h"
#include "base/bind.h"
#include "base/callback.h"
#include "base/location.h"
#include "base/threading/thread_task_runner_handle.h"
namespace device {
WifiDataProvider::WifiDataProvider()
: client_task_runner_(base::ThreadTaskRunnerHandle::Get()) {
DCHECK(client_task_runner_);
}
WifiDataProvider::~WifiDataProvider() = default;
void WifiDataProvider::AddCallback(WifiDataUpdateCallback* callback) {
callbacks_.insert(callback);
}
bool WifiDataProvider::RemoveCallback(WifiDataUpdateCallback* callback) {
return callbacks_.erase(callback) == 1;
}
bool WifiDataProvider::has_callbacks() const {
return !callbacks_.empty();
}<|fim▁hole|>}
bool WifiDataProvider::CalledOnClientThread() const {
return client_task_runner()->BelongsToCurrentThread();
}
void WifiDataProvider::DoRunCallbacks() {
// It's possible that all the callbacks went away whilst this task was
// pending. This is fine; the loop will be a no-op.
CallbackSet::const_iterator iter = callbacks_.begin();
while (iter != callbacks_.end()) {
WifiDataUpdateCallback* callback = *iter;
++iter; // Advance iter before running, in case callback unregisters.
callback->Run();
}
}
} // namespace device<|fim▁end|> |
void WifiDataProvider::RunCallbacks() {
client_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&WifiDataProvider::DoRunCallbacks, this)); |
<|file_name|>test_certificates.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
"""
Certificates Tests.
"""
import itertools
import json
import ddt
import mock
import six
from django.conf import settings
from django.test.utils import override_settings
from opaque_keys.edx.keys import AssetKey
from six.moves import range
from cms.djangoapps.contentstore.tests.utils import CourseTestCase
from cms.djangoapps.contentstore.utils import get_lms_link_for_certificate_web_view, reverse_course_url
from common.djangoapps.course_modes.tests.factories import CourseModeFactory
from common.djangoapps.student.models import CourseEnrollment
from common.djangoapps.student.roles import CourseInstructorRole, CourseStaffRole
from common.djangoapps.student.tests.factories import UserFactory
from common.djangoapps.util.testing import EventTestMixin, UrlResetMixin
from xmodule.contentstore.content import StaticContent
from xmodule.contentstore.django import contentstore
from xmodule.exceptions import NotFoundError
from ..certificates import CERTIFICATE_SCHEMA_VERSION, CertificateManager
FEATURES_WITH_CERTS_ENABLED = settings.FEATURES.copy()
FEATURES_WITH_CERTS_ENABLED['CERTIFICATES_HTML_VIEW'] = True
CERTIFICATE_JSON = {
u'name': u'Test certificate',
u'description': u'Test description',
u'is_active': True,
u'version': CERTIFICATE_SCHEMA_VERSION,
}
CERTIFICATE_JSON_WITH_SIGNATORIES = {
u'name': u'Test certificate',
u'description': u'Test description',
u'version': CERTIFICATE_SCHEMA_VERSION,
u'course_title': 'Course Title Override',
u'is_active': True,
u'signatories': [
{
"name": "Bob Smith",
"title": "The DEAN.",
"signature_image_path": "/c4x/test/CSS101/asset/Signature.png"
}
]
}
C4X_SIGNATORY_PATH = '/c4x/test/CSS101/asset/Signature{}.png'
SIGNATORY_PATH = 'asset-v1:test+CSS101+SP2017+type@asset+block@Signature{}.png'
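# The two path formats above cover the old c4x-style asset keys and the newer
# asset-v1 opaque keys; the tests below are parametrized over both via ddt.data.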
# pylint: disable=no-member
class HelperMethods(object):
"""
Mixin that provides useful methods for certificate configuration tests.
"""
def _create_fake_images(self, asset_keys):
"""
Creates fake image files for a list of asset_keys.
"""
for asset_key_string in asset_keys:
asset_key = AssetKey.from_string(asset_key_string)
content = StaticContent(
asset_key, "Fake asset", "image/png", "data",
)
contentstore().save(content)
def _add_course_certificates(self, count=1, signatory_count=0, is_active=False,
asset_path_format=C4X_SIGNATORY_PATH):
"""
        Create one or more certificates for the course.
"""
signatories = [
{
'name': 'Name ' + str(i),
'title': 'Title ' + str(i),
'signature_image_path': asset_path_format.format(i),
'id': i
} for i in range(signatory_count)
]
# create images for signatory signatures except the last signatory
self._create_fake_images(signatory['signature_image_path'] for signatory in signatories[:-1])
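        # The last signatory is left without an image, which lets tests cover
        # the path where a signatory's signature file is missing.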
certificates = [
{
'id': i,
'name': 'Name ' + str(i),
'description': 'Description ' + str(i),
'signatories': signatories,
'version': CERTIFICATE_SCHEMA_VERSION,
'is_active': is_active
} for i in range(count)
]
self.course.certificates = {'certificates': certificates}
self.save_course()
# pylint: disable=no-member
class CertificatesBaseTestCase(object):
"""
Mixin with base test cases for the certificates.
"""
def _remove_ids(self, content):
"""
Remove ids from the response. We cannot predict IDs, because they're
generated randomly.
        We use this method to clean up the response when creating a new certificate.
"""
certificate_id = content.pop("id")
return certificate_id
def test_required_fields_are_absent(self):
"""
Test required fields are absent.
"""
bad_jsons = [
# must have name of the certificate
{
u'description': 'Test description',
u'version': CERTIFICATE_SCHEMA_VERSION
},
# an empty json
{},
]
for bad_json in bad_jsons:
response = self.client.post(
self._url(),
data=json.dumps(bad_json),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEqual(response.status_code, 400)
self.assertNotIn("Location", response)
content = json.loads(response.content.decode('utf-8'))
self.assertIn("error", content)
def test_invalid_json(self):
"""
Test invalid json handling.
"""
# Invalid JSON.
invalid_json = u"{u'name': 'Test Name', u'description': 'Test description'," \
u" u'version': " + str(CERTIFICATE_SCHEMA_VERSION) + ", []}"
response = self.client.post(
self._url(),
data=invalid_json,
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEqual(response.status_code, 400)
self.assertNotIn("Location", response)
content = json.loads(response.content.decode('utf-8'))
self.assertIn("error", content)
def test_certificate_data_validation(self):
        # Test certificate schema version
json_data_1 = {
u'version': 100,
u'name': u'Test certificate',
u'description': u'Test description'
}
with self.assertRaises(Exception) as context:
CertificateManager.validate(json_data_1)
self.assertIn(
"Unsupported certificate schema version: 100. Expected version: 1.",
str(context.exception)
)
        # Test certificate name is missing
json_data_2 = {
u'version': CERTIFICATE_SCHEMA_VERSION,
u'description': u'Test description'
}
with self.assertRaises(Exception) as context:
CertificateManager.validate(json_data_2)
self.assertIn('must have name of the certificate', str(context.exception))
@ddt.ddt
@override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
class CertificatesListHandlerTestCase(
EventTestMixin, CourseTestCase, CertificatesBaseTestCase, HelperMethods, UrlResetMixin
):
"""
Test cases for certificates_list_handler.
"""
def setUp(self): # lint-amnesty, pylint: disable=arguments-differ
"""
Set up CertificatesListHandlerTestCase.
"""
super(CertificatesListHandlerTestCase, self).setUp('cms.djangoapps.contentstore.views.certificates.tracker') # lint-amnesty, pylint: disable=super-with-arguments
self.reset_urls()
def _url(self):
"""
Return url for the handler.
"""
return reverse_course_url('certificates_list_handler', self.course.id)
def test_can_create_certificate(self):
"""
Test that you can create a certificate.
"""
expected = {
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'Test certificate',
u'description': u'Test description',
u'is_active': True,
u'signatories': []
}
response = self.client.ajax_post(
self._url(),
data=CERTIFICATE_JSON
)
self.assertEqual(response.status_code, 201)
self.assertIn("Location", response)
content = json.loads(response.content.decode('utf-8'))
certificate_id = self._remove_ids(content)
self.assertEqual(content, expected)
self.assert_event_emitted(
'edx.certificate.configuration.created',
course_id=six.text_type(self.course.id),
configuration_id=certificate_id,
)
def test_cannot_create_certificate_if_user_has_no_write_permissions(self):
"""
        Tests that a user without write permissions on the course is not able to create a certificate.
"""
user = UserFactory()
self.client.login(username=user.username, password='test')
response = self.client.ajax_post(
self._url(),
data=CERTIFICATE_JSON
)
self.assertEqual(response.status_code, 403)
@override_settings(LMS_BASE=None)
def test_no_lms_base_for_certificate_web_view_link(self):
test_link = get_lms_link_for_certificate_web_view(
course_key=self.course.id,
mode='honor'
)
self.assertEqual(test_link, None)
@override_settings(LMS_BASE="lms_base_url")
def test_lms_link_for_certificate_web_view(self):
test_url = "//lms_base_url/certificates/" \
"course/" + six.text_type(self.course.id) + '?preview=honor'
link = get_lms_link_for_certificate_web_view(
course_key=self.course.id,
mode='honor'
)
self.assertEqual(link, test_url)
@mock.patch.dict('django.conf.settings.FEATURES', {'CERTIFICATES_HTML_VIEW': True})
def test_certificate_info_in_response(self):
"""
        Test that the certificate is created and rendered properly with a non-audit course mode.
"""
CourseModeFactory.create(course_id=self.course.id, mode_slug='verified')
response = self.client.ajax_post(
self._url(),
data=CERTIFICATE_JSON_WITH_SIGNATORIES
)
self.assertEqual(response.status_code, 201)
# in html response
result = self.client.get_html(self._url())
self.assertContains(result, 'Test certificate')
self.assertContains(result, 'Test description')
# in JSON response
response = self.client.get_json(self._url())
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(len(data), 1)
self.assertEqual(data[0]['name'], 'Test certificate')
self.assertEqual(data[0]['description'], 'Test description')
self.assertEqual(data[0]['version'], CERTIFICATE_SCHEMA_VERSION)
@mock.patch.dict('django.conf.settings.FEATURES', {'CERTIFICATES_HTML_VIEW': True})
def test_certificate_info_not_in_response(self):
"""
        Test that the certificate is not rendered for an audit-only course mode.
"""
response = self.client.ajax_post(
self._url(),
data=CERTIFICATE_JSON_WITH_SIGNATORIES
)
self.assertEqual(response.status_code, 201)
# in html response
result = self.client.get_html(self._url())
self.assertNotContains(result, 'Test certificate')
def test_unsupported_http_accept_header(self):
"""
Test if not allowed header present in request.
"""
response = self.client.get(
self._url(),
HTTP_ACCEPT="text/plain",
)
self.assertEqual(response.status_code, 406)
def test_certificate_unsupported_method(self):
"""
Unit Test: test_certificate_unsupported_method
"""
resp = self.client.put(self._url())
self.assertEqual(resp.status_code, 405)
def test_not_permitted(self):
"""
        Test that when the user has no read access to the course, a permission denied error is raised.
"""
test_user_client, test_user = self.create_non_staff_authed_user_client()
CourseEnrollment.enroll(test_user, self.course.id)
response = test_user_client.ajax_post(
self._url(),
data=CERTIFICATE_JSON
)
self.assertContains(response, "error", status_code=403)
def test_audit_course_mode_is_skipped(self):
"""
Tests audit course mode is skipped when rendering certificates page.
"""
CourseModeFactory.create(course_id=self.course.id)
CourseModeFactory.create(course_id=self.course.id, mode_slug='verified')
response = self.client.get_html(
self._url(),
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'verified')
self.assertNotContains(response, 'audit')
def test_audit_only_disables_cert(self):
"""
        Tests that certificates are disabled when the course has only the audit mode.
"""<|fim▁hole|> self._url(),
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'This course does not use a mode that offers certificates.')
self.assertNotContains(response, 'This module is not enabled.')
self.assertNotContains(response, 'Loading')
@ddt.data(
['audit', 'verified'],
['verified'],
['audit', 'verified', 'credit'],
['verified', 'credit'],
['professional']
)
def test_non_audit_enables_cert(self, slugs):
"""
        Tests that certificates are enabled when the course has at least one non-audit mode.
"""
for slug in slugs:
CourseModeFactory.create(course_id=self.course.id, mode_slug=slug)
response = self.client.get_html(
self._url(),
)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'This course does not use a mode that offers certificates.')
self.assertNotContains(response, 'This module is not enabled.')
self.assertContains(response, 'Loading')
def test_assign_unique_identifier_to_certificates(self):
"""
Test certificates have unique ids
"""
self._add_course_certificates(count=2)
json_data = {
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'New test certificate',
u'description': u'New test description',
u'is_active': True,
u'signatories': []
}
response = self.client.post(
self._url(),
data=json.dumps(json_data),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
new_certificate = json.loads(response.content.decode('utf-8'))
for prev_certificate in self.course.certificates['certificates']:
self.assertNotEqual(new_certificate.get('id'), prev_certificate.get('id'))
@ddt.ddt
@override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
class CertificatesDetailHandlerTestCase(
EventTestMixin, CourseTestCase, CertificatesBaseTestCase, HelperMethods, UrlResetMixin
):
"""
Test cases for CertificatesDetailHandlerTestCase.
"""
_id = 0
def setUp(self): # pylint: disable=arguments-differ
"""
Set up CertificatesDetailHandlerTestCase.
"""
super(CertificatesDetailHandlerTestCase, self).setUp('cms.djangoapps.contentstore.views.certificates.tracker') # lint-amnesty, pylint: disable=super-with-arguments
self.reset_urls()
def _url(self, cid=-1):
"""
Return url for the handler.
"""
cid = cid if cid > 0 else self._id
return reverse_course_url(
'certificates_detail_handler',
self.course.id,
kwargs={'certificate_id': cid},
)
def test_can_create_new_certificate_if_it_does_not_exist(self):
"""
PUT/POST new certificate.
"""
expected = {
u'id': 666,
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'Test certificate',
u'description': u'Test description',
u'is_active': True,
u'course_title': u'Course Title Override',
u'signatories': []
}
response = self.client.put(
self._url(cid=666),
data=json.dumps(expected),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(content, expected)
self.assert_event_emitted(
'edx.certificate.configuration.created',
course_id=six.text_type(self.course.id),
configuration_id=666,
)
def test_can_edit_certificate(self):
"""
Edit certificate, check its id and modified fields.
"""
self._add_course_certificates(count=2)
expected = {
u'id': 1,
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'New test certificate',
u'description': u'New test description',
u'is_active': True,
u'course_title': u'Course Title Override',
u'signatories': []
}
response = self.client.put(
self._url(cid=1),
data=json.dumps(expected),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(content, expected)
self.assert_event_emitted(
'edx.certificate.configuration.modified',
course_id=six.text_type(self.course.id),
configuration_id=1,
)
self.reload_course()
# Verify that certificate is properly updated in the course.
course_certificates = self.course.certificates['certificates']
self.assertEqual(len(course_certificates), 2)
self.assertEqual(course_certificates[1].get('name'), u'New test certificate')
self.assertEqual(course_certificates[1].get('description'), 'New test description')
def test_can_edit_certificate_without_is_active(self):
"""
        Tests that the user can edit a certificate when the is_active attribute is not present
        for the given certificate. Old courses might not have the is_active attribute in certificate data.
"""
certificates = [
{
'id': 1,
'name': 'certificate with is_active',
'description': 'Description ',
'signatories': [],
'version': CERTIFICATE_SCHEMA_VERSION,
}
]
self.course.certificates = {'certificates': certificates}
self.save_course()
expected = {
u'id': 1,
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'New test certificate',
u'description': u'New test description',
u'is_active': True,
u'course_title': u'Course Title Override',
u'signatories': []
}
response = self.client.post(
self._url(cid=1),
data=json.dumps(expected),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 201)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(content, expected)
@ddt.data(C4X_SIGNATORY_PATH, SIGNATORY_PATH)
def test_can_delete_certificate_with_signatories(self, signatory_path):
"""
Delete certificate
"""
self._add_course_certificates(count=2, signatory_count=1, asset_path_format=signatory_path)
response = self.client.delete(
self._url(cid=1),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 204)
self.assert_event_emitted(
'edx.certificate.configuration.deleted',
course_id=six.text_type(self.course.id),
configuration_id='1',
)
self.reload_course()
# Verify that certificates are properly updated in the course.
certificates = self.course.certificates['certificates']
self.assertEqual(len(certificates), 1)
self.assertEqual(certificates[0].get('name'), 'Name 0')
self.assertEqual(certificates[0].get('description'), 'Description 0')
def test_can_delete_certificate_with_slash_prefix_signatory(self):
"""
Delete certificate
"""
self._add_course_certificates(count=2, signatory_count=1, asset_path_format="/" + SIGNATORY_PATH)
response = self.client.delete(
self._url(cid=1),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 204)
self.assert_event_emitted(
'edx.certificate.configuration.deleted',
course_id=six.text_type(self.course.id),
configuration_id='1',
)
self.reload_course()
# Verify that certificates are properly updated in the course.
certificates = self.course.certificates['certificates']
self.assertEqual(len(certificates), 1)
self.assertEqual(certificates[0].get('name'), 'Name 0')
self.assertEqual(certificates[0].get('description'), 'Description 0')
@ddt.data("not_a_valid_asset_key{}.png", "/not_a_valid_asset_key{}.png")
def test_can_delete_certificate_with_invalid_signatory(self, signatory_path):
"""
Delete certificate
"""
self._add_course_certificates(count=2, signatory_count=1, asset_path_format=signatory_path)
response = self.client.delete(
self._url(cid=1),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 204)
self.assert_event_emitted(
'edx.certificate.configuration.deleted',
course_id=six.text_type(self.course.id),
configuration_id='1',
)
self.reload_course()
# Verify that certificates are properly updated in the course.
certificates = self.course.certificates['certificates']
self.assertEqual(len(certificates), 1)
self.assertEqual(certificates[0].get('name'), 'Name 0')
self.assertEqual(certificates[0].get('description'), 'Description 0')
@ddt.data(C4X_SIGNATORY_PATH, SIGNATORY_PATH)
def test_delete_certificate_without_write_permissions(self, signatory_path):
"""
Tests certificate deletion without write permission on course.
"""
self._add_course_certificates(count=2, signatory_count=1, asset_path_format=signatory_path)
user = UserFactory()
self.client.login(username=user.username, password='test')
response = self.client.delete(
self._url(cid=1),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 403)
@ddt.data(C4X_SIGNATORY_PATH, SIGNATORY_PATH)
def test_delete_certificate_without_global_staff_permissions(self, signatory_path):
"""
Tests deletion of an active certificate without global staff permission on course.
"""
self._add_course_certificates(count=2, signatory_count=1, is_active=True, asset_path_format=signatory_path)
user = UserFactory()
for role in [CourseInstructorRole, CourseStaffRole]:
role(self.course.id).add_users(user)
self.client.login(username=user.username, password='test')
response = self.client.delete(
self._url(cid=1),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 403)
@ddt.data(C4X_SIGNATORY_PATH, SIGNATORY_PATH)
def test_update_active_certificate_without_global_staff_permissions(self, signatory_path):
"""
Tests update of an active certificate without global staff permission on course.
"""
self._add_course_certificates(count=2, signatory_count=1, is_active=True, asset_path_format=signatory_path)
cert_data = {
u'id': 1,
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'New test certificate',
u'description': u'New test description',
u'course_title': u'Course Title Override',
u'org_logo_path': '',
u'is_active': False,
u'signatories': []
}
user = UserFactory()
for role in [CourseInstructorRole, CourseStaffRole]:
role(self.course.id).add_users(user)
self.client.login(username=user.username, password='test')
response = self.client.put(
self._url(cid=1),
data=json.dumps(cert_data),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 403)
def test_delete_non_existing_certificate(self):
"""
        Try to delete a non-existing certificate. It should return status code 404 Not Found.
"""
self._add_course_certificates(count=2)
response = self.client.delete(
self._url(cid=100),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 404)
@ddt.data(C4X_SIGNATORY_PATH, SIGNATORY_PATH)
def test_can_delete_signatory(self, signatory_path):
"""
Delete an existing certificate signatory
"""
self._add_course_certificates(count=2, signatory_count=3, asset_path_format=signatory_path)
certificates = self.course.certificates['certificates']
signatory = certificates[1].get("signatories")[1]
image_asset_location = AssetKey.from_string(signatory['signature_image_path'])
content = contentstore().find(image_asset_location)
self.assertIsNotNone(content)
test_url = '{}/signatories/1'.format(self._url(cid=1))
response = self.client.delete(
test_url,
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 204)
self.reload_course()
# Verify that certificates are properly updated in the course.
certificates = self.course.certificates['certificates']
self.assertEqual(len(certificates[1].get("signatories")), 2)
# make sure signatory signature image is deleted too
self.assertRaises(NotFoundError, contentstore().find, image_asset_location)
@ddt.data(C4X_SIGNATORY_PATH, SIGNATORY_PATH)
def test_deleting_signatory_without_signature(self, signatory_path):
"""
        Delete a signatory whose signature image is already removed or does not exist.
"""
self._add_course_certificates(count=2, signatory_count=4, asset_path_format=signatory_path)
test_url = '{}/signatories/3'.format(self._url(cid=1))
response = self.client.delete(
test_url,
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 204)
def test_delete_signatory_non_existing_certificate(self):
"""
        Try to delete a non-existing certificate signatory. It should return status code 404 Not Found.
"""
self._add_course_certificates(count=2)
test_url = '{}/signatories/1'.format(self._url(cid=100))
response = self.client.delete(
test_url,
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 404)
@ddt.data(C4X_SIGNATORY_PATH, SIGNATORY_PATH)
def test_certificate_activation_success(self, signatory_path):
"""
Activate and Deactivate the course certificate
"""
test_url = reverse_course_url('certificate_activation_handler', self.course.id)
self._add_course_certificates(count=1, signatory_count=2, asset_path_format=signatory_path)
is_active = True
for i in range(2):
if i == 1:
is_active = not is_active
response = self.client.post(
test_url,
data=json.dumps({"is_active": is_active}),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEqual(response.status_code, 200)
course = self.store.get_course(self.course.id)
certificates = course.certificates['certificates']
self.assertEqual(certificates[0].get('is_active'), is_active)
cert_event_type = 'activated' if is_active else 'deactivated'
self.assert_event_emitted(
'.'.join(['edx.certificate.configuration', cert_event_type]),
course_id=six.text_type(self.course.id),
)
@ddt.data(*itertools.product([True, False], [C4X_SIGNATORY_PATH, SIGNATORY_PATH]))
@ddt.unpack
def test_certificate_activation_without_write_permissions(self, activate, signatory_path):
"""
        Tests that certificate activation and deactivation are not allowed if the user
        does not have write permissions on the course.
"""
test_url = reverse_course_url('certificate_activation_handler', self.course.id)
self._add_course_certificates(count=1, signatory_count=2, asset_path_format=signatory_path)
user = UserFactory()
self.client.login(username=user.username, password='test')
response = self.client.post(
test_url,
data=json.dumps({"is_active": activate}),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEqual(response.status_code, 403)
@ddt.data(C4X_SIGNATORY_PATH, SIGNATORY_PATH)
def test_certificate_activation_failure(self, signatory_path):
"""
        Certificate activation should fail when the user has no read access to the course;
        a permission denied error should be raised.
"""
test_url = reverse_course_url('certificate_activation_handler', self.course.id)
test_user_client, test_user = self.create_non_staff_authed_user_client()
CourseEnrollment.enroll(test_user, self.course.id)
self._add_course_certificates(count=1, signatory_count=2, asset_path_format=signatory_path)
response = test_user_client.post(
test_url,
data=json.dumps({"is_active": True}),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 403)
course = self.store.get_course(self.course.id)
certificates = course.certificates['certificates']
self.assertEqual(certificates[0].get('is_active'), False)<|fim▁end|> | CourseModeFactory.create(course_id=self.course.id, mode_slug='audit')
response = self.client.get_html( |
<|file_name|>RuleAdministratorImpl.java<|end_file_name|><|fim▁begin|>package com.zxinsight.classifier.ruleengine.admin;
import java.rmi.RemoteException;
import java.util.Map;
import javax.rules.admin.LocalRuleExecutionSetProvider;
import javax.rules.admin.RuleAdministrator;
import javax.rules.admin.RuleExecutionSet;
import javax.rules.admin.RuleExecutionSetDeregistrationException;
import javax.rules.admin.RuleExecutionSetProvider;
import javax.rules.admin.RuleExecutionSetRegisterException;
@SuppressWarnings("rawtypes")
public class RuleAdministratorImpl implements RuleAdministrator {
@Override
public void deregisterRuleExecutionSet(String bindUri, Map properties)
throws RuleExecutionSetDeregistrationException, RemoteException {
RuleExecutionSetRepository repository = RuleExecutionSetRepository
.getInstance();
if (repository.getRuleExecutionSet(bindUri) == null) {
throw new RuleExecutionSetDeregistrationException(
"no execution set bound to: " + bindUri);
}
repository.unregisterRuleExecutionSet(bindUri);
}
@Override
public LocalRuleExecutionSetProvider getLocalRuleExecutionSetProvider(
Map properties) throws RemoteException {
return new LocalRuleExecutionSetProviderImple();
}
<|fim▁hole|> @Override
public RuleExecutionSetProvider getRuleExecutionSetProvider(Map properties)
throws RemoteException {
return new RuleExecutionSetProviderImpl();
}
@Override
public void registerRuleExecutionSet(String bindUri,
RuleExecutionSet ruleExecutionSet, Map properties)
throws RuleExecutionSetRegisterException, RemoteException {
RuleExecutionSetRepository repository = RuleExecutionSetRepository
.getInstance();
repository.registerRuleExecutionSet(bindUri, ruleExecutionSet);
}
}<|fim▁end|> | |
<|file_name|>test_errors.py<|end_file_name|><|fim▁begin|>from django.test import TestCase
from corehq.apps.receiverwrapper import submit_form_locally
from couchforms.models import XFormError
class CaseProcessingErrorsTest(TestCase):
def test_no_case_id(self):
"""
submit form with a case block that has no case_id
check that
- it errors
- the form is not saved under its original id
- an XFormError is saved with the original id as orig_id
- the error was logged (<-- is this hard to test?)
<data xmlns="example.com/foo"><|fim▁hole|> </data>
"""
submit_form_locally(
"""<data xmlns="example.com/foo">
<meta>
<instanceID>abc-easy-as-123</instanceID>
</meta>
<case case_id="" xmlns="http://commcarehq.org/case/transaction/v2">
<update><foo>bar</foo></update>
</case>
</data>""",
'my_very_special_domain',
)
xform_errors = XFormError.view(
'domain/docs',
startkey=['my_very_special_domain', 'XFormError'],
endkey=['my_very_special_domain', 'XFormError', {}],
reduce=False,
include_docs=True,
).all()
related_errors = [xform_error for xform_error in xform_errors
if xform_error.get_id == 'abc-easy-as-123']
self.assertEqual(len(related_errors), 1)
related_error = related_errors[0]
self.assertEqual(related_error.problem,
'IllegalCaseId: case_id must not be empty')
def test_uses_referrals(self):
"""
submit form with a case block that uses referrals
check that
- it errors
- the form is not saved under its original id
- an XFormError is saved with the original id as orig_id
"""
submit_form_locally(
"""<data xmlns="example.com/foo">
<meta>
<instanceID>abc-easy-as-456</instanceID>
</meta>
<case case_id="123" xmlns="http://commcarehq.org/case/transaction/v2">
<referral>
<referral_id>456</referral_id>
<open>
<referral_types>t1 t2</referral_types>
</open>
</referral>
</case>
</data>""",
'my_very_special_domain',
)
xform_errors = XFormError.view(
'domain/docs',
startkey=['my_very_special_domain', 'XFormError'],
endkey=['my_very_special_domain', 'XFormError', {}],
reduce=False,
include_docs=True,
).all()
related_errors = [xform_error for xform_error in xform_errors
if xform_error.get_id == 'abc-easy-as-456']
self.assertEqual(len(related_errors), 1)
related_error = related_errors[0]
self.assertEqual(related_error.problem,
'UsesReferrals: Sorry, referrals are no longer supported!')<|fim▁end|> | <case case_id="">
<update><foo>bar</foo></update>
</case> |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>"""Copyright 2008 Orbitz WorldWide<|fim▁hole|>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
from django.conf.urls import url
from . import views
urlpatterns = [
url('^get_data?$', views.get_data, name='events_get_data'),
url(r'(?P<event_id>\d+)/$', views.detail, name='events_detail'),
url('^$', views.view_events, name='events'),
]<|fim▁end|> | |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "remakery.settings")
from django.core.management import execute_from_command_line
<|fim▁hole|> execute_from_command_line(sys.argv)<|fim▁end|> | |
<|file_name|>qgsrendercontext.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
qgsrendercontext.cpp
--------------------
begin : March 16, 2008
copyright : (C) 2008 by Marco Hugentobler
email : marco dot hugentobler at karto dot baug dot ethz dot ch
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#include "qgsrendercontext.h"
#include "qgsmapsettings.h"
#include "qgsexpression.h"
#include "qgsvectorlayer.h"
#include "qgsfeaturefilterprovider.h"
#include "qgslogger.h"
#include "qgspoint.h"
#define POINTS_TO_MM 2.83464567
#define INCH_TO_MM 25.4
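// 1 inch = 25.4 mm and 1 inch = 72 points, so POINTS_TO_MM above is the number
// of points per millimetre: 72 / 25.4 ≈ 2.83464567.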
QgsRenderContext::QgsRenderContext()
: mFlags( DrawEditingInfo | UseAdvancedEffects | DrawSelection | UseRenderingOptimization )
{
mVectorSimplifyMethod.setSimplifyHints( QgsVectorSimplifyMethod::NoSimplification );
// For RenderMetersInMapUnits support, when rendering in Degrees, the Ellipsoid must be set
// - for Previews/Icons the default Extent can be used
mDistanceArea.setEllipsoid( mDistanceArea.sourceCrs().ellipsoidAcronym() );
}
QgsRenderContext::QgsRenderContext( const QgsRenderContext &rh )
: mFlags( rh.mFlags )
, mPainter( rh.mPainter )
, mCoordTransform( rh.mCoordTransform )
, mDistanceArea( rh.mDistanceArea )
, mExtent( rh.mExtent )
, mOriginalMapExtent( rh.mOriginalMapExtent )
, mMapToPixel( rh.mMapToPixel )
, mRenderingStopped( rh.mRenderingStopped )
, mScaleFactor( rh.mScaleFactor )
, mRendererScale( rh.mRendererScale )
, mLabelingEngine( rh.mLabelingEngine )
, mSelectionColor( rh.mSelectionColor )
, mVectorSimplifyMethod( rh.mVectorSimplifyMethod )
, mExpressionContext( rh.mExpressionContext )
, mGeometry( rh.mGeometry )
, mFeatureFilterProvider( rh.mFeatureFilterProvider ? rh.mFeatureFilterProvider->clone() : nullptr )
, mSegmentationTolerance( rh.mSegmentationTolerance )
, mSegmentationToleranceType( rh.mSegmentationToleranceType )
, mTransformContext( rh.mTransformContext )
, mPathResolver( rh.mPathResolver )
, mTextRenderFormat( rh.mTextRenderFormat )
#ifdef QGISDEBUG
, mHasTransformContext( rh.mHasTransformContext )
#endif
{
}
QgsRenderContext &QgsRenderContext::operator=( const QgsRenderContext &rh )
{
mFlags = rh.mFlags;
mPainter = rh.mPainter;
mCoordTransform = rh.mCoordTransform;
mExtent = rh.mExtent;
mOriginalMapExtent = rh.mOriginalMapExtent;
mMapToPixel = rh.mMapToPixel;
mRenderingStopped = rh.mRenderingStopped;
mScaleFactor = rh.mScaleFactor;
mRendererScale = rh.mRendererScale;
mLabelingEngine = rh.mLabelingEngine;
mSelectionColor = rh.mSelectionColor;
mVectorSimplifyMethod = rh.mVectorSimplifyMethod;
mExpressionContext = rh.mExpressionContext;
mGeometry = rh.mGeometry;
mFeatureFilterProvider.reset( rh.mFeatureFilterProvider ? rh.mFeatureFilterProvider->clone() : nullptr );
mSegmentationTolerance = rh.mSegmentationTolerance;
mSegmentationToleranceType = rh.mSegmentationToleranceType;
mDistanceArea = rh.mDistanceArea;
mTransformContext = rh.mTransformContext;
mPathResolver = rh.mPathResolver;
mTextRenderFormat = rh.mTextRenderFormat;
#ifdef QGISDEBUG
mHasTransformContext = rh.mHasTransformContext;
#endif
return *this;
}
QgsRenderContext QgsRenderContext::fromQPainter( QPainter *painter )
{
QgsRenderContext context;
context.setPainter( painter );
if ( painter && painter->device() )
{
context.setScaleFactor( painter->device()->logicalDpiX() / 25.4 );
}
else
{
context.setScaleFactor( 3.465 ); //assume 88 dpi as standard value
}
if ( painter && painter->renderHints() & QPainter::Antialiasing )
{
context.setFlag( QgsRenderContext::Antialiasing, true );
}
return context;
}
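// Minimal usage sketch (hypothetical device, illustrative only):
//   QImage image( 256, 256, QImage::Format_ARGB32_Premultiplied );
//   QPainter painter( &image );
//   QgsRenderContext context = QgsRenderContext::fromQPainter( &painter );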
QgsCoordinateTransformContext QgsRenderContext::transformContext() const
{
#ifdef QGISDEBUG
if ( !mHasTransformContext )
QgsDebugMsgLevel( QStringLiteral( "No QgsCoordinateTransformContext context set for transform" ), 4 );
#endif
return mTransformContext;
}
void QgsRenderContext::setTransformContext( const QgsCoordinateTransformContext &context )
{
mTransformContext = context;
#ifdef QGISDEBUG
mHasTransformContext = true;
#endif<|fim▁hole|> mFlags = flags;
}
void QgsRenderContext::setFlag( QgsRenderContext::Flag flag, bool on )
{
if ( on )
mFlags |= flag;
else
mFlags &= ~flag;
}
QgsRenderContext::Flags QgsRenderContext::flags() const
{
return mFlags;
}
bool QgsRenderContext::testFlag( QgsRenderContext::Flag flag ) const
{
return mFlags.testFlag( flag );
}
QgsRenderContext QgsRenderContext::fromMapSettings( const QgsMapSettings &mapSettings )
{
QgsRenderContext ctx;
ctx.setMapToPixel( mapSettings.mapToPixel() );
ctx.setExtent( mapSettings.visibleExtent() );
ctx.setMapExtent( mapSettings.visibleExtent() );
ctx.setFlag( DrawEditingInfo, mapSettings.testFlag( QgsMapSettings::DrawEditingInfo ) );
ctx.setFlag( ForceVectorOutput, mapSettings.testFlag( QgsMapSettings::ForceVectorOutput ) );
ctx.setFlag( UseAdvancedEffects, mapSettings.testFlag( QgsMapSettings::UseAdvancedEffects ) );
ctx.setFlag( UseRenderingOptimization, mapSettings.testFlag( QgsMapSettings::UseRenderingOptimization ) );
ctx.setCoordinateTransform( QgsCoordinateTransform() );
ctx.setSelectionColor( mapSettings.selectionColor() );
ctx.setFlag( DrawSelection, mapSettings.testFlag( QgsMapSettings::DrawSelection ) );
ctx.setFlag( DrawSymbolBounds, mapSettings.testFlag( QgsMapSettings::DrawSymbolBounds ) );
ctx.setFlag( RenderMapTile, mapSettings.testFlag( QgsMapSettings::RenderMapTile ) );
ctx.setFlag( Antialiasing, mapSettings.testFlag( QgsMapSettings::Antialiasing ) );
ctx.setFlag( RenderPartialOutput, mapSettings.testFlag( QgsMapSettings::RenderPartialOutput ) );
ctx.setFlag( RenderPreviewJob, mapSettings.testFlag( QgsMapSettings::RenderPreviewJob ) );
ctx.setScaleFactor( mapSettings.outputDpi() / 25.4 ); // = pixels per mm
ctx.setRendererScale( mapSettings.scale() );
ctx.setExpressionContext( mapSettings.expressionContext() );
ctx.setSegmentationTolerance( mapSettings.segmentationTolerance() );
ctx.setSegmentationToleranceType( mapSettings.segmentationToleranceType() );
ctx.mDistanceArea.setSourceCrs( mapSettings.destinationCrs(), mapSettings.transformContext() );
ctx.mDistanceArea.setEllipsoid( mapSettings.ellipsoid() );
ctx.setTransformContext( mapSettings.transformContext() );
ctx.setPathResolver( mapSettings.pathResolver() );
ctx.setTextRenderFormat( mapSettings.textRenderFormat() );
//this flag is only for stopping during the current rendering progress,
//so must be false at every new render operation
ctx.setRenderingStopped( false );
return ctx;
}
bool QgsRenderContext::forceVectorOutput() const
{
return mFlags.testFlag( ForceVectorOutput );
}
bool QgsRenderContext::useAdvancedEffects() const
{
return mFlags.testFlag( UseAdvancedEffects );
}
void QgsRenderContext::setUseAdvancedEffects( bool enabled )
{
setFlag( UseAdvancedEffects, enabled );
}
bool QgsRenderContext::drawEditingInformation() const
{
return mFlags.testFlag( DrawEditingInfo );
}
bool QgsRenderContext::showSelection() const
{
return mFlags.testFlag( DrawSelection );
}
void QgsRenderContext::setCoordinateTransform( const QgsCoordinateTransform &t )
{
mCoordTransform = t;
}
void QgsRenderContext::setDrawEditingInformation( bool b )
{
setFlag( DrawEditingInfo, b );
}
void QgsRenderContext::setForceVectorOutput( bool force )
{
setFlag( ForceVectorOutput, force );
}
void QgsRenderContext::setShowSelection( const bool showSelection )
{
setFlag( DrawSelection, showSelection );
}
bool QgsRenderContext::useRenderingOptimization() const
{
return mFlags.testFlag( UseRenderingOptimization );
}
void QgsRenderContext::setUseRenderingOptimization( bool enabled )
{
setFlag( UseRenderingOptimization, enabled );
}
void QgsRenderContext::setFeatureFilterProvider( const QgsFeatureFilterProvider *ffp )
{
if ( ffp )
{
mFeatureFilterProvider.reset( ffp->clone() );
}
else
{
mFeatureFilterProvider.reset( nullptr );
}
}
const QgsFeatureFilterProvider *QgsRenderContext::featureFilterProvider() const
{
return mFeatureFilterProvider.get();
}
double QgsRenderContext::convertToPainterUnits( double size, QgsUnitTypes::RenderUnit unit, const QgsMapUnitScale &scale ) const
{
double conversionFactor = 1.0;
switch ( unit )
{
case QgsUnitTypes::RenderMillimeters:
conversionFactor = mScaleFactor;
break;
case QgsUnitTypes::RenderPoints:
conversionFactor = mScaleFactor / POINTS_TO_MM;
break;
case QgsUnitTypes::RenderInches:
conversionFactor = mScaleFactor * INCH_TO_MM;
break;
case QgsUnitTypes::RenderMetersInMapUnits:
{
size = convertMetersToMapUnits( size );
unit = QgsUnitTypes::RenderMapUnits;
// Fall through to RenderMapUnits with size in meters converted to size in MapUnits
FALLTHROUGH
}
case QgsUnitTypes::RenderMapUnits:
{
double mup = scale.computeMapUnitsPerPixel( *this );
if ( mup > 0 )
{
conversionFactor = 1.0 / mup;
}
else
{
conversionFactor = 1.0;
}
break;
}
case QgsUnitTypes::RenderPixels:
conversionFactor = 1.0;
break;
case QgsUnitTypes::RenderUnknownUnit:
case QgsUnitTypes::RenderPercentage:
//no sensible value
conversionFactor = 1.0;
break;
}
double convertedSize = size * conversionFactor;
if ( unit == QgsUnitTypes::RenderMapUnits )
{
//check max/min size
if ( scale.minSizeMMEnabled )
convertedSize = std::max( convertedSize, scale.minSizeMM * mScaleFactor );
if ( scale.maxSizeMMEnabled )
convertedSize = std::min( convertedSize, scale.maxSizeMM * mScaleFactor );
}
return convertedSize;
}
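// Worked example: for a 96 dpi device, mScaleFactor = 96 / 25.4 ≈ 3.78 pixels/mm,
// so a size of 2 in RenderMillimeters converts to about 7.56 painter pixels.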
double QgsRenderContext::convertToMapUnits( double size, QgsUnitTypes::RenderUnit unit, const QgsMapUnitScale &scale ) const
{
double mup = mMapToPixel.mapUnitsPerPixel();
switch ( unit )
{
case QgsUnitTypes::RenderMetersInMapUnits:
{
size = convertMetersToMapUnits( size );
// Fall through to RenderMapUnits with values of meters converted to MapUnits
FALLTHROUGH
}
case QgsUnitTypes::RenderMapUnits:
{
// check scale
double minSizeMU = std::numeric_limits<double>::lowest();
if ( scale.minSizeMMEnabled )
{
minSizeMU = scale.minSizeMM * mScaleFactor * mup;
}
if ( !qgsDoubleNear( scale.minScale, 0.0 ) )
{
minSizeMU = std::max( minSizeMU, size * ( mRendererScale / scale.minScale ) );
}
size = std::max( size, minSizeMU );
double maxSizeMU = std::numeric_limits<double>::max();
if ( scale.maxSizeMMEnabled )
{
maxSizeMU = scale.maxSizeMM * mScaleFactor * mup;
}
if ( !qgsDoubleNear( scale.maxScale, 0.0 ) )
{
maxSizeMU = std::min( maxSizeMU, size * ( mRendererScale / scale.maxScale ) );
}
size = std::min( size, maxSizeMU );
return size;
}
case QgsUnitTypes::RenderMillimeters:
{
return size * mScaleFactor * mup;
}
case QgsUnitTypes::RenderPoints:
{
return size * mScaleFactor * mup / POINTS_TO_MM;
}
case QgsUnitTypes::RenderInches:
{
return size * mScaleFactor * mup * INCH_TO_MM;
}
case QgsUnitTypes::RenderPixels:
{
return size * mup;
}
case QgsUnitTypes::RenderUnknownUnit:
case QgsUnitTypes::RenderPercentage:
//no sensible value
return 0.0;
}
return 0.0;
}
double QgsRenderContext::convertFromMapUnits( double sizeInMapUnits, QgsUnitTypes::RenderUnit outputUnit ) const
{
double mup = mMapToPixel.mapUnitsPerPixel();
switch ( outputUnit )
{
case QgsUnitTypes::RenderMetersInMapUnits:
{
return sizeInMapUnits / convertMetersToMapUnits( 1.0 );
}
case QgsUnitTypes::RenderMapUnits:
{
return sizeInMapUnits;
}
case QgsUnitTypes::RenderMillimeters:
{
return sizeInMapUnits / ( mScaleFactor * mup );
}
case QgsUnitTypes::RenderPoints:
{
return sizeInMapUnits / ( mScaleFactor * mup / POINTS_TO_MM );
}
case QgsUnitTypes::RenderInches:
{
return sizeInMapUnits / ( mScaleFactor * mup * INCH_TO_MM );
}
case QgsUnitTypes::RenderPixels:
{
return sizeInMapUnits / mup;
}
case QgsUnitTypes::RenderUnknownUnit:
case QgsUnitTypes::RenderPercentage:
//no sensible value
return 0.0;
}
return 0.0;
}
double QgsRenderContext::convertMetersToMapUnits( double meters ) const
{
switch ( mDistanceArea.sourceCrs().mapUnits() )
{
case QgsUnitTypes::DistanceMeters:
return meters;
case QgsUnitTypes::DistanceDegrees:
{
QgsPointXY pointCenter = mExtent.center();
// The Extent is in the sourceCrs(), when different from destinationCrs()
// - the point must be transformed, since DistanceArea uses the destinationCrs()
// Note: the default QgsCoordinateTransform() : authid() will return an empty String
if ( !mCoordTransform.isShortCircuited() )
{
pointCenter = mCoordTransform.transform( pointCenter );
}
return mDistanceArea.measureLineProjected( pointCenter, meters );
}
case QgsUnitTypes::DistanceKilometers:
case QgsUnitTypes::DistanceFeet:
case QgsUnitTypes::DistanceNauticalMiles:
case QgsUnitTypes::DistanceYards:
case QgsUnitTypes::DistanceMiles:
case QgsUnitTypes::DistanceCentimeters:
case QgsUnitTypes::DistanceMillimeters:
case QgsUnitTypes::DistanceUnknownUnit:
return ( meters * QgsUnitTypes::fromUnitToUnitFactor( QgsUnitTypes::DistanceMeters, mDistanceArea.sourceCrs().mapUnits() ) );
}
return meters;
}<|fim▁end|> | }
void QgsRenderContext::setFlags( QgsRenderContext::Flags flags )
{ |
<|file_name|>SimpleObjectRepository.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package au.com.scds.agric.dom.simple;
import java.util.List;
import org.apache.isis.applib.annotation.DomainService;
import org.apache.isis.applib.annotation.NatureOfService;
import org.apache.isis.applib.query.QueryDefault;
import org.apache.isis.applib.services.registry.ServiceRegistry2;
import org.apache.isis.applib.services.repository.RepositoryService;
@DomainService(
nature = NatureOfService.DOMAIN,
repositoryFor = SimpleObject.class
)
public class SimpleObjectRepository {
public List<SimpleObject> listAll() {
return repositoryService.allInstances(SimpleObject.class);
}
public List<SimpleObject> findByName(final String name) {
return repositoryService.allMatches(
new QueryDefault<>(
SimpleObject.class,
"findByName",
"name", name));
}
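    // create() follows the usual Apache Isis pattern: domain services are injected
    // into the new instance before persisting, so the entity can use them immediately.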
public SimpleObject create(final String name) {
final SimpleObject object = new SimpleObject(name);
serviceRegistry.injectServicesInto(object);
repositoryService.persist(object);
return object;
}
@javax.inject.Inject
RepositoryService repositoryService;
@javax.inject.Inject<|fim▁hole|>}<|fim▁end|> | ServiceRegistry2 serviceRegistry; |
<|file_name|>PersistenceResourcePostReturn.js<|end_file_name|><|fim▁begin|>"use strict";
tutao.provide('tutao.entity.base.PersistenceResourcePostReturn');
/**
* @constructor
* @param {Object=} data The json data to store in this entity.
*/
tutao.entity.base.PersistenceResourcePostReturn = function(data) {
if (data) {
this.updateData(data);
} else {
this.__format = "0";
this._generatedId = null;
this._permissionListId = null;
}
this._entityHelper = new tutao.entity.EntityHelper(this);
this.prototype = tutao.entity.base.PersistenceResourcePostReturn.prototype;
};
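// Usage sketch (illustrative field values only):
//   var ret = new tutao.entity.base.PersistenceResourcePostReturn(
//       { _format: "0", generatedId: "someGeneratedId", permissionListId: "someListId" });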
/**
* Updates the data of this entity.
* @param {Object=} data The json data to store in this entity.
*/
tutao.entity.base.PersistenceResourcePostReturn.prototype.updateData = function(data) {
this.__format = data._format;
this._generatedId = data.generatedId;
this._permissionListId = data.permissionListId;
};
/**
* The version of the model this type belongs to.
* @const
*/
tutao.entity.base.PersistenceResourcePostReturn.MODEL_VERSION = '1';
/**
* The encrypted flag.
* @const
*/
tutao.entity.base.PersistenceResourcePostReturn.prototype.ENCRYPTED = false;
/**
 * Provides the data of this instance as an object that can be converted to json.
* @return {Object} The json object.
*/
tutao.entity.base.PersistenceResourcePostReturn.prototype.toJsonData = function() {
return {<|fim▁hole|> permissionListId: this._permissionListId
};
};
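// For illustration only: a serialized instance produced by toJsonData() has
// the shape (values hypothetical)
//   { "_format": "0", "generatedId": "someId", "permissionListId": "someListId" }
// which mirrors the json layout consumed by updateData() above.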
/**
* The id of the PersistenceResourcePostReturn type.
*/
tutao.entity.base.PersistenceResourcePostReturn.prototype.TYPE_ID = 0;
/**
* The id of the generatedId attribute.
*/
tutao.entity.base.PersistenceResourcePostReturn.prototype.GENERATEDID_ATTRIBUTE_ID = 2;
/**
* The id of the permissionListId attribute.
*/
tutao.entity.base.PersistenceResourcePostReturn.prototype.PERMISSIONLISTID_ATTRIBUTE_ID = 3;
/**
* Sets the format of this PersistenceResourcePostReturn.
* @param {string} format The format of this PersistenceResourcePostReturn.
*/
tutao.entity.base.PersistenceResourcePostReturn.prototype.setFormat = function(format) {
this.__format = format;
return this;
};
/**
* Provides the format of this PersistenceResourcePostReturn.
* @return {string} The format of this PersistenceResourcePostReturn.
*/
tutao.entity.base.PersistenceResourcePostReturn.prototype.getFormat = function() {
return this.__format;
};
/**
* Sets the generatedId of this PersistenceResourcePostReturn.
* @param {string} generatedId The generatedId of this PersistenceResourcePostReturn.
*/
tutao.entity.base.PersistenceResourcePostReturn.prototype.setGeneratedId = function(generatedId) {
this._generatedId = generatedId;
return this;
};
/**
* Provides the generatedId of this PersistenceResourcePostReturn.
* @return {string} The generatedId of this PersistenceResourcePostReturn.
*/
tutao.entity.base.PersistenceResourcePostReturn.prototype.getGeneratedId = function() {
return this._generatedId;
};
/**
* Sets the permissionListId of this PersistenceResourcePostReturn.
* @param {string} permissionListId The permissionListId of this PersistenceResourcePostReturn.
*/
tutao.entity.base.PersistenceResourcePostReturn.prototype.setPermissionListId = function(permissionListId) {
this._permissionListId = permissionListId;
return this;
};
/**
* Provides the permissionListId of this PersistenceResourcePostReturn.
* @return {string} The permissionListId of this PersistenceResourcePostReturn.
*/
tutao.entity.base.PersistenceResourcePostReturn.prototype.getPermissionListId = function() {
return this._permissionListId;
};
/**
* Provides the entity helper of this entity.
* @return {tutao.entity.EntityHelper} The entity helper.
*/
tutao.entity.base.PersistenceResourcePostReturn.prototype.getEntityHelper = function() {
return this._entityHelper;
};<|fim▁end|> | _format: this.__format,
generatedId: this._generatedId, |
<|file_name|>attr.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::AttrBinding;
use dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;
use dom::bindings::codegen::InheritTypes::NodeCast;
use dom::bindings::global;
use dom::bindings::js::{JS, JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::element::{Element, AttributeHandlers};
use dom::node::Node;
use dom::window::Window;
use dom::virtualmethods::vtable_for;
use devtools_traits::AttrInfo;
use servo_util::str::{DOMString, split_html_space_chars};
use string_cache::{Atom, Namespace};
use std::cell::Ref;
use std::mem;
pub enum AttrSettingType {
FirstSetAttr,
ReplacedAttr,
}
#[deriving(PartialEq, Clone)]
#[jstraceable]
pub enum AttrValue {
StringAttrValue(DOMString),
TokenListAttrValue(DOMString, Vec<Atom>),
UIntAttrValue(DOMString, u32),
AtomAttrValue(Atom),
}
impl AttrValue {
pub fn from_tokenlist(tokens: DOMString) -> AttrValue {
let atoms = split_html_space_chars(tokens.as_slice())
.map(|token| Atom::from_slice(token)).collect();
TokenListAttrValue(tokens, atoms)
}
pub fn from_u32(string: DOMString, default: u32) -> AttrValue {
let result: u32 = from_str(string.as_slice()).unwrap_or(default);
UIntAttrValue(string, result)
}
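// Note: `from_str` returns `None` for unparsable input, so e.g.
// AttrValue::from_u32("abc".to_string(), 1) keeps the raw string "abc"
// while storing the fallback value 1 as the parsed u32.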
pub fn from_atomic(string: DOMString) -> AttrValue {
let value = Atom::from_slice(string.as_slice());
AtomAttrValue(value)
}
pub fn tokens<'a>(&'a self) -> Option<&'a [Atom]> {
match *self {
TokenListAttrValue(_, ref tokens) => Some(tokens.as_slice()),
_ => None
}
}
}
impl Str for AttrValue {
fn as_slice<'a>(&'a self) -> &'a str {
match *self {
StringAttrValue(ref value) |
TokenListAttrValue(ref value, _) |
UIntAttrValue(ref value, _) => value.as_slice(),
AtomAttrValue(ref value) => value.as_slice(),
}
}
}
#[dom_struct]
pub struct Attr {
reflector_: Reflector,
local_name: Atom,
value: DOMRefCell<AttrValue>,
name: Atom,
namespace: Namespace,
prefix: Option<DOMString>,
/// the element that owns this attribute.
owner: Option<JS<Element>>,
}
impl Reflectable for Attr {
fn reflector<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
}
impl Attr {
fn new_inherited(local_name: Atom, value: AttrValue,
name: Atom, namespace: Namespace,
prefix: Option<DOMString>, owner: Option<JSRef<Element>>) -> Attr {
Attr {
reflector_: Reflector::new(),
local_name: local_name,
value: DOMRefCell::new(value),
name: name,
namespace: namespace,
prefix: prefix,
owner: owner.map(|o| JS::from_rooted(o)),
}
}
pub fn new(window: JSRef<Window>, local_name: Atom, value: AttrValue,
name: Atom, namespace: Namespace,
prefix: Option<DOMString>, owner: Option<JSRef<Element>>) -> Temporary<Attr> {
reflect_dom_object(box Attr::new_inherited(local_name, value, name, namespace, prefix, owner),
&global::Window(window), AttrBinding::Wrap)
}
#[inline]
pub fn name<'a>(&'a self) -> &'a Atom {
&self.name
}
#[inline]
pub fn namespace<'a>(&'a self) -> &'a Namespace {
&self.namespace
}
#[inline]
pub fn prefix<'a>(&'a self) -> &'a Option<DOMString> {
&self.prefix
}
}
impl<'a> AttrMethods for JSRef<'a, Attr> {
fn LocalName(self) -> DOMString {
self.local_name().as_slice().to_string()
}
fn Value(self) -> DOMString {
self.value().as_slice().to_string()
}
fn SetValue(self, value: DOMString) {
match self.owner {
None => {
*self.value.borrow_mut() = StringAttrValue(value)
}
Some(o) => {
let owner = o.root();
let value = owner.parse_attribute(&self.namespace, self.local_name(), value);
self.set_value(ReplacedAttr, value, *owner);
}
}
}
fn TextContent(self) -> DOMString {
self.Value()
}
fn SetTextContent(self, value: DOMString) {
self.SetValue(value)
}
fn NodeValue(self) -> DOMString {
self.Value()
}
fn SetNodeValue(self, value: DOMString) {
self.SetValue(value)
}
fn Name(self) -> DOMString {
self.name.as_slice().to_string()
}
fn GetNamespaceURI(self) -> Option<DOMString> {
let Namespace(ref atom) = self.namespace;
match atom.as_slice() {
"" => None,
url => Some(url.to_string()),
}
}
fn GetPrefix(self) -> Option<DOMString> {
self.prefix.clone()
}
fn GetOwnerElement(self) -> Option<Temporary<Element>> {
self.owner.map(|o| Temporary::new(o))
}
fn Specified(self) -> bool {
true // Always returns true
}
}
pub trait AttrHelpers<'a> {
fn set_value(self, set_type: AttrSettingType, value: AttrValue, owner: JSRef<Element>);
fn value(self) -> Ref<'a, AttrValue>;
fn local_name(self) -> &'a Atom;
fn summarize(self) -> AttrInfo;
}
impl<'a> AttrHelpers<'a> for JSRef<'a, Attr> {
fn set_value(self, set_type: AttrSettingType, value: AttrValue, owner: JSRef<Element>) {
assert!(Some(owner) == self.owner.map(|o| *o.root()));
let node: JSRef<Node> = NodeCast::from_ref(owner);
let namespace_is_null = self.namespace == ns!("");
match set_type {
ReplacedAttr if namespace_is_null => vtable_for(&node).before_remove_attr(self),
_ => ()
}
*self.value.borrow_mut() = value;
if namespace_is_null {
vtable_for(&node).after_set_attr(self)
}
}
fn value(self) -> Ref<'a, AttrValue> {
self.extended_deref().value.borrow()
}
fn local_name(self) -> &'a Atom {
&self.extended_deref().local_name<|fim▁hole|> fn summarize(self) -> AttrInfo {
let Namespace(ref ns) = self.namespace;
AttrInfo {
namespace: ns.as_slice().to_string(),
name: self.Name(),
value: self.Value(),
}
}
}
pub trait AttrHelpersForLayout {
unsafe fn value_ref_forever(&self) -> &'static str;
unsafe fn value_atom_forever(&self) -> Option<Atom>;
unsafe fn value_tokens_forever(&self) -> Option<&'static [Atom]>;
unsafe fn local_name_atom_forever(&self) -> Atom;
}
impl AttrHelpersForLayout for Attr {
#[inline]
unsafe fn value_ref_forever(&self) -> &'static str {
// This transmute is used to cheat the lifetime restriction.
let value = mem::transmute::<&AttrValue, &AttrValue>(self.value.borrow_for_layout());
value.as_slice()
}
#[inline]
unsafe fn value_atom_forever(&self) -> Option<Atom> {
let value = self.value.borrow_for_layout();
match *value {
AtomAttrValue(ref val) => Some(val.clone()),
_ => None,
}
}
#[inline]
unsafe fn value_tokens_forever(&self) -> Option<&'static [Atom]> {
// This transmute is used to cheat the lifetime restriction.
let value = mem::transmute::<&AttrValue, &AttrValue>(self.value.borrow_for_layout());
match *value {
TokenListAttrValue(_, ref tokens) => Some(tokens.as_slice()),
_ => None,
}
}
#[inline]
unsafe fn local_name_atom_forever(&self) -> Atom {
self.local_name.clone()
}
}<|fim▁end|> | }
|
<|file_name|>findAllRefsForObjectLiteralProperties.ts<|end_file_name|><|fim▁begin|>/// <reference path='fourslash.ts'/>
////var x = {
//// [|{| "isWriteAccess": true, "isDefinition": true |}property|]: {}
////};
////
////x.[|property|];
<|fim▁hole|>////
////let {[|property|]: pVar} = x;
verify.singleReferenceGroup("(property) property: {}");<|fim▁end|> | |
<|file_name|>TrafficManager.java<|end_file_name|><|fim▁begin|>package com.mapswithme.maps.maplayer.traffic;
import androidx.annotation.MainThread;
import androidx.annotation.NonNull;
import com.mapswithme.util.log.Logger;
import com.mapswithme.util.log.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
@MainThread
public enum TrafficManager
{
INSTANCE;
private final static String TAG = TrafficManager.class.getSimpleName();
@NonNull
private final Logger mLogger = LoggerFactory.INSTANCE.getLogger(LoggerFactory.Type.TRAFFIC);
@NonNull
private final TrafficState.StateChangeListener mStateChangeListener = new TrafficStateListener();
@NonNull
private TrafficState mState = TrafficState.DISABLED;
@NonNull
private final List<TrafficCallback> mCallbacks = new ArrayList<>();
private boolean mInitialized = false;
public void initialize()
{
mLogger.d(TAG, "Initialization of traffic manager and setting the listener for traffic state changes");
TrafficState.nativeSetListener(mStateChangeListener);
mInitialized = true;
}
public void toggle()
{
checkInitialization();
if (isEnabled())
disable();
else
enable();
}
private void enable()
{
mLogger.d(TAG, "Enable traffic");
TrafficState.nativeEnable();
}
private void disable()
{
checkInitialization();
mLogger.d(TAG, "Disable traffic");
TrafficState.nativeDisable();
}
public boolean isEnabled()
{
checkInitialization();
return TrafficState.nativeIsEnabled();
}
public void attach(@NonNull TrafficCallback callback)
{
checkInitialization();
if (mCallbacks.contains(callback))
{
throw new IllegalStateException("A callback '" + callback
+ "' is already attached. Check that the 'detachAll' method was called.");
}<|fim▁hole|> mLogger.d(TAG, "Attach callback '" + callback + "'");
mCallbacks.add(callback);
postPendingState();
}
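// A freshly attached callback is immediately brought up to date: attach()
// replays the current state through postPendingState(), so subscribers need
// not wait for the next native state-change event.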
private void postPendingState()
{
mStateChangeListener.onTrafficStateChanged(mState.ordinal());
}
public void detachAll()
{
checkInitialization();
if (mCallbacks.isEmpty())
{
mLogger.w(TAG, "There are no attached callbacks. Invoke the 'detachAll' method " +
"only when it's really needed!", new Throwable());
return;
}
for (TrafficCallback callback : mCallbacks)
mLogger.d(TAG, "Detach callback '" + callback + "'");
mCallbacks.clear();
}
private void checkInitialization()
{
if (!mInitialized)
throw new AssertionError("Traffic manager is not initialized!");
}
public void setEnabled(boolean enabled)
{
checkInitialization();
if (isEnabled() == enabled)
return;
if (enabled)
enable();
else
disable();
}
private class TrafficStateListener implements TrafficState.StateChangeListener
{
@Override
@MainThread
public void onTrafficStateChanged(int index)
{
TrafficState newTrafficState = TrafficState.values()[index];
mLogger.d(TAG, "onTrafficStateChanged current state = " + mState
+ " new value = " + newTrafficState);
if (mState == newTrafficState)
return;
mState = newTrafficState;
mState.activate(mCallbacks);
}
}
public interface TrafficCallback
{
void onEnabled();
void onDisabled();
void onWaitingData();
void onOutdated();
void onNetworkError();
void onNoData();
void onExpiredData();
void onExpiredApp();
}
}<|fim▁end|> | |
<|file_name|>bufferstream.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for bufferstream v0.6.2
// Project: https://github.com/dodo/node-bufferstream
// Definitions by: Bart van der Schoor <https://github.com/Bartvds>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/// <reference path="../node/node.d.ts" />
declare module 'bufferstream' {
import stream = require('stream');
export = BufferStream;
class BufferStream extends stream.Duplex {
constructor(options?: BufferStream.Opts);
/*
different buffer behaviors can be triggered by size:
none when output drains, bufferstream drains too
flexible buffers everything that it gets and does not pipe it out
<number> TODO buffer has given size. buffers everything until buffer is full. when buffer is full then the stream will drain
*/
setSize(size: string): void; // can be one of ['none', 'flexible', <number>]
setSize(size: number): void; // can be one of ['none', 'flexible', <number>]
/*
enables stream buffering (default)
*/
enable(): void;
/*
flushes buffer and disables stream buffering. BufferStream now pipes all data as long as the output is accepting data. when the output is draining BufferStream will buffer all input temporarily.
token[s] buffer splitters (should be String or Buffer)
disables given tokens. won't flush until no splitter tokens are left.
*/
disable(): void;
disable(token: string, ...tokens: string[]): void;
disable(tokens: string[]): void; // Array
disable(token: Buffer, ...tokens: Buffer[]): void;
disable(tokens: Buffer[]): void; // Array
/*
each time BufferStream finds a splitter token in the input data it will emit a split event. this also works for binary data.
token[s] buffer splitters (should be String or Buffer)
*/
split(token: string, ...tokens: string[]): void;
split(tokens: string[]): void; // Array
split(token: Buffer, ...tokens: Buffer[]): void;
split(tokens: Buffer[]): void; // Array
/*
returns Buffer.
*/
getBuffer(): Buffer;
/*
returns Buffer.
*/
buffer: Buffer;
/*
shortcut for buffer.toString()
*/
toString(): string;
/*
shortcut for buffer.length
*/
length: number;
}
namespace BufferStream {
export interface Opts {
/*
default encoding for writing strings
*/
encoding?: string;
/*
if true and the source is a child_process the stream will block the entire process (timeouts won't work anymore, but splitting and listening on data still works, because they work sync)
*/
blocking?: boolean;
/*
defines buffer level or sets buffer to given size (see ↓setSize for more)
*/
size?: any;<|fim▁hole|> */
disabled?: boolean;
/*
short form for:
split(token, function (chunk) {emit('data', chunk)})
*/
// String or Buffer
split?: any;
}
export var fn: {warn: boolean};
}
}
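// A minimal usage sketch against these typings (the readable source and the
// handler body are hypothetical):
//
//   import BufferStream = require('bufferstream');
//   const buffered = new BufferStream({ size: 'flexible' });
//   buffered.split('\n');
//   buffered.on('split', (chunk: Buffer) => console.log(chunk.toString()));
//   someReadable.pipe(buffered);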
declare module 'bufferstream/postbuffer' {
import http = require('http');
import BufferStream = require('bufferstream');
class PostBuffer extends BufferStream {
/*
useful if you want to get all the post data from an http server request and do some db request beforehand.
http client buffer
*/
constructor(req: http.IncomingMessage);
/*
set a callback to get all post data from a http server request
*/
onEnd(callback: (data: any) => void): void;
/*
pumps data into another stream to allow incoming streams given options will be passed to Stream.pipe
*/
pipe(stream: NodeJS.WritableStream, options?: BufferStream.Opts): NodeJS.ReadableStream;
}
export = PostBuffer;
}<|fim▁end|> | /*
immediately call disable |
<|file_name|>v-mask.js<|end_file_name|><|fim▁begin|>(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = global || self, factory(global.VueMask = {}));
}(this, function (exports) { 'use strict';
function _typeof(obj) {
if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") {
_typeof = function (obj) {
return typeof obj;
};
} else {
_typeof = function (obj) {
return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
};
}
return _typeof(obj);
}
function _defineProperty(obj, key, value) {
if (key in obj) {
Object.defineProperty(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
}
function ownKeys(object, enumerableOnly) {
var keys = Object.keys(object);
if (Object.getOwnPropertySymbols) {
var symbols = Object.getOwnPropertySymbols(object);
if (enumerableOnly) symbols = symbols.filter(function (sym) {
return Object.getOwnPropertyDescriptor(object, sym).enumerable;
});
keys.push.apply(keys, symbols);
}
return keys;
}
function _objectSpread2(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i] != null ? arguments[i] : {};
if (i % 2) {
ownKeys(source, true).forEach(function (key) {
_defineProperty(target, key, source[key]);
});
} else if (Object.getOwnPropertyDescriptors) {
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
} else {
ownKeys(source).forEach(function (key) {
Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
});
}
}
return target;
}
var placeholderChar = '_';
var strFunction = 'function';
var emptyArray = [];
function convertMaskToPlaceholder() {
var mask = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : emptyArray;
var placeholderChar$1 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : placeholderChar;
if (!isArray(mask)) {
throw new Error('Text-mask:convertMaskToPlaceholder; The mask property must be an array.');
}
if (mask.indexOf(placeholderChar$1) !== -1) {
throw new Error('Placeholder character must not be used as part of the mask. Please specify a character ' + 'that is not present in your mask as your placeholder character.\n\n' + "The placeholder character that was received is: ".concat(JSON.stringify(placeholderChar$1), "\n\n") + "The mask that was received is: ".concat(JSON.stringify(mask)));
}
return mask.map(function (char) {
return char instanceof RegExp ? placeholderChar$1 : char;
}).join('');
}
function isArray(value) {
return Array.isArray && Array.isArray(value) || value instanceof Array;
}
var strCaretTrap = '[]';
function processCaretTraps(mask) {
var indexes = [];
var indexOfCaretTrap;
while (indexOfCaretTrap = mask.indexOf(strCaretTrap), indexOfCaretTrap !== -1) {
indexes.push(indexOfCaretTrap);
mask.splice(indexOfCaretTrap, 1);
}
return {
maskWithoutCaretTraps: mask,
indexes: indexes
};
}
var emptyArray$1 = [];
var emptyString = '';
function conformToMask() {
var rawValue = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : emptyString;
var mask = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : emptyArray$1;
var config = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
if (!isArray(mask)) {
if (_typeof(mask) === strFunction) {
mask = mask(rawValue, config);
mask = processCaretTraps(mask).maskWithoutCaretTraps;
} else {
throw new Error('Text-mask:conformToMask; The mask property must be an array.');
}
}
var _config$guide = config.guide,
guide = _config$guide === void 0 ? true : _config$guide,
_config$previousConfo = config.previousConformedValue,
previousConformedValue = _config$previousConfo === void 0 ? emptyString : _config$previousConfo,
_config$placeholderCh = config.placeholderChar,
placeholderChar$1 = _config$placeholderCh === void 0 ? placeholderChar : _config$placeholderCh,
_config$placeholder = config.placeholder,
placeholder = _config$placeholder === void 0 ? convertMaskToPlaceholder(mask, placeholderChar$1) : _config$placeholder,
currentCaretPosition = config.currentCaretPosition,
keepCharPositions = config.keepCharPositions;
var suppressGuide = guide === false && previousConformedValue !== undefined;
var rawValueLength = rawValue.length;
var previousConformedValueLength = previousConformedValue.length;
var placeholderLength = placeholder.length;
var maskLength = mask.length;
var editDistance = rawValueLength - previousConformedValueLength;
var isAddition = editDistance > 0;
var indexOfFirstChange = currentCaretPosition + (isAddition ? -editDistance : 0);
var indexOfLastChange = indexOfFirstChange + Math.abs(editDistance);
if (keepCharPositions === true && !isAddition) {
var compensatingPlaceholderChars = emptyString;
for (var i = indexOfFirstChange; i < indexOfLastChange; i++) {
if (placeholder[i] === placeholderChar$1) {
compensatingPlaceholderChars += placeholderChar$1;
}
}
rawValue = rawValue.slice(0, indexOfFirstChange) + compensatingPlaceholderChars + rawValue.slice(indexOfFirstChange, rawValueLength);
}
var rawValueArr = rawValue.split(emptyString).map(function (char, i) {
return {
char: char,
isNew: i >= indexOfFirstChange && i < indexOfLastChange
};
});
for (var _i = rawValueLength - 1; _i >= 0; _i--) {
var char = rawValueArr[_i].char;
if (char !== placeholderChar$1) {
var shouldOffset = _i >= indexOfFirstChange && previousConformedValueLength === maskLength;
if (char === placeholder[shouldOffset ? _i - editDistance : _i]) {
rawValueArr.splice(_i, 1);
}
}
}
var conformedValue = emptyString;
var someCharsRejected = false;
placeholderLoop: for (var _i2 = 0; _i2 < placeholderLength; _i2++) {
var charInPlaceholder = placeholder[_i2];
if (charInPlaceholder === placeholderChar$1) {
if (rawValueArr.length > 0) {
while (rawValueArr.length > 0) {
var _rawValueArr$shift = rawValueArr.shift(),
rawValueChar = _rawValueArr$shift.char,
isNew = _rawValueArr$shift.isNew;
if (rawValueChar === placeholderChar$1 && suppressGuide !== true) {
conformedValue += placeholderChar$1;
continue placeholderLoop;
} else if (mask[_i2].test(rawValueChar)) {
if (keepCharPositions !== true || isNew === false || previousConformedValue === emptyString || guide === false || !isAddition) {
conformedValue += rawValueChar;
} else {
var rawValueArrLength = rawValueArr.length;
var indexOfNextAvailablePlaceholderChar = null;
for (var _i3 = 0; _i3 < rawValueArrLength; _i3++) {
var charData = rawValueArr[_i3];
if (charData.char !== placeholderChar$1 && charData.isNew === false) {
break;
}
if (charData.char === placeholderChar$1) {
indexOfNextAvailablePlaceholderChar = _i3;
break;
}
}
if (indexOfNextAvailablePlaceholderChar !== null) {
conformedValue += rawValueChar;
rawValueArr.splice(indexOfNextAvailablePlaceholderChar, 1);
} else {
_i2--;
}
}
continue placeholderLoop;
} else {
someCharsRejected = true;
}
}
}
if (suppressGuide === false) {
conformedValue += placeholder.substr(_i2, placeholderLength);
}
break;
} else {
conformedValue += charInPlaceholder;
}
}
if (suppressGuide && isAddition === false) {
var indexOfLastFilledPlaceholderChar = null;
for (var _i4 = 0; _i4 < conformedValue.length; _i4++) {
if (placeholder[_i4] === placeholderChar$1) {
indexOfLastFilledPlaceholderChar = _i4;
}
}
if (indexOfLastFilledPlaceholderChar !== null) {
conformedValue = conformedValue.substr(0, indexOfLastFilledPlaceholderChar + 1);
} else {
conformedValue = emptyString;
}
}
return {
conformedValue: conformedValue,
meta: {
someCharsRejected: someCharsRejected
}
};
}
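// Roughly, for a US phone mask this yields (sketch, guide disabled):
//   conformToMask('5551234',
//     ['(', /\d/, /\d/, /\d/, ')', ' ', /\d/, /\d/, /\d/, '-', /\d/, /\d/, /\d/, /\d/],
//     { guide: false }).conformedValue  // -> '(555) 123-4'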
var NEXT_CHAR_OPTIONAL = {
__nextCharOptional__: true
};
var defaultMaskReplacers = {
'#': /\d/,
A: /[a-z]/i,
N: /[a-z0-9]/i,
'?': NEXT_CHAR_OPTIONAL,
X: /./
};
var stringToRegexp = function stringToRegexp(str) {
var lastSlash = str.lastIndexOf('/');
return new RegExp(str.slice(1, lastSlash), str.slice(lastSlash + 1));
};
var makeRegexpOptional = function makeRegexpOptional(charRegexp) {
return stringToRegexp(charRegexp.toString().replace(/.(\/)[gmiyus]{0,6}$/, function (match) {
return match.replace('/', '?/');
}));
};
var escapeIfNeeded = function escapeIfNeeded(char) {
return '[\\^$.|?*+()'.indexOf(char) > -1 ? "\\".concat(char) : char;
};
var charRegexp = function charRegexp(char) {<|fim▁hole|>
var isRegexp = function isRegexp(entity) {
return entity instanceof RegExp;
};
var castToRegexp = function castToRegexp(char) {
return isRegexp(char) ? char : charRegexp(char);
};
function stringMaskToRegExpMask(stringMask) {
return stringMask.split('').map(function (char, index, array) {
var maskChar = defaultMaskReplacers[char] || char;
var previousChar = array[index - 1];
var previousMaskChar = defaultMaskReplacers[previousChar] || previousChar;
if (maskChar === NEXT_CHAR_OPTIONAL) {
return null;
}
if (previousMaskChar === NEXT_CHAR_OPTIONAL) {
return makeRegexpOptional(castToRegexp(maskChar));
}
return maskChar;
}).filter(Boolean);
}
var trigger = function trigger(el, type) {
var e = document.createEvent('HTMLEvents');
e.initEvent(type, true, true);
el.dispatchEvent(e);
};
var queryInputElementInside = function queryInputElementInside(el) {
return el instanceof HTMLInputElement ? el : el.querySelector('input') || el;
};
var inBrowser = typeof window !== 'undefined';
var UA = inBrowser && window.navigator.userAgent.toLowerCase();
var isEdge = UA && UA.indexOf('edge/') > 0;
var isAndroid = UA && UA.indexOf('android') > 0;
var isChrome = UA && /chrome\/\d+/.test(UA) && !isEdge;
function createOptions() {
var elementOptions = new Map();
var defaultOptions = {
previousValue: '',
mask: []
};
function get(el) {
return elementOptions.get(el) || _objectSpread2({}, defaultOptions);
}
function partiallyUpdate(el, newOptions) {
elementOptions.set(el, _objectSpread2({}, get(el), {}, newOptions));
}
function remove(el) {
elementOptions.delete(el);
}
return {
partiallyUpdate: partiallyUpdate,
remove: remove,
get: get
};
}
var options = createOptions();
function triggerInputUpdate(el) {
var fn = trigger.bind(null, el, 'input');
if (isAndroid && isChrome) {
setTimeout(fn, 0);
} else {
fn();
}
}
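// Dispatching the synthetic 'input' event via setTimeout on Android Chrome
// defers it to the next tick, presumably to avoid clashing with the virtual
// keyboard's own input handling; elsewhere it fires synchronously.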
function updateValue(el) {
var force = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
var value = el.value;
var _options$get = options.get(el),
previousValue = _options$get.previousValue,
mask = _options$get.mask;
var isValueChanged = value !== previousValue;
var isLengthIncreased = value.length > previousValue.length;
var isUpdateNeeded = value && isValueChanged && isLengthIncreased;
if (force || isUpdateNeeded) {
var _conformToMask = conformToMask(value, mask, {
guide: false
}),
conformedValue = _conformToMask.conformedValue;
el.value = conformedValue;
triggerInputUpdate(el);
}
options.partiallyUpdate(el, {
previousValue: value
});
}
function updateMask(el, mask) {
options.partiallyUpdate(el, {
mask: stringMaskToRegExpMask(mask)
});
}
var directive = {
bind: function bind(el, _ref) {
var value = _ref.value;
el = queryInputElementInside(el);
updateMask(el, value);
updateValue(el);
},
componentUpdated: function componentUpdated(el, _ref2) {
var value = _ref2.value,
oldValue = _ref2.oldValue;
el = queryInputElementInside(el);
var isMaskChanged = value !== oldValue;
if (isMaskChanged) {
updateMask(el, value);
}
updateValue(el, isMaskChanged);
},
unbind: function unbind(el) {
el = queryInputElementInside(el);
options.remove(el);
}
};
var plugin = (function (Vue) {
Vue.directive('mask', directive);
});
exports.VueMaskDirective = directive;
exports.VueMaskPlugin = plugin;
exports.default = plugin;
Object.defineProperty(exports, '__esModule', { value: true });
}));<|fim▁end|> | return new RegExp("/[".concat(escapeIfNeeded(char), "]/"));
}; |
<|file_name|>transformed_distribution_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for TransformedDistribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from scipy import stats
from tensorflow.contrib import distributions
from tensorflow.contrib import linalg
from tensorflow.contrib.distributions.python.ops import bijectors
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
bs = bijectors
ds = distributions
la = linalg
class TransformedDistributionTest(test.TestCase):
def _cls(self):
return ds.TransformedDistribution
def testTransformedDistribution(self):
g = ops.Graph()
with g.as_default():
mu = 3.0
sigma = 2.0
# Note: the Jacobian callable only works for this example; more generally
# you may or may not need a reduce_sum.
log_normal = self._cls()(
distribution=ds.Normal(loc=mu, scale=sigma),
bijector=bs.Exp(event_ndims=0))
sp_dist = stats.lognorm(s=sigma, scale=np.exp(mu))
# sample
sample = log_normal.sample(100000, seed=235)
self.assertAllEqual([], log_normal.event_shape)
with self.test_session(graph=g):
self.assertAllEqual([], log_normal.event_shape_tensor().eval())
self.assertAllClose(
sp_dist.mean(), np.mean(sample.eval()), atol=0.0, rtol=0.05)
# pdf, log_pdf, cdf, etc...
# The mean of the lognormal is around 148.
test_vals = np.linspace(0.1, 1000., num=20).astype(np.float32)
for func in [[log_normal.log_prob, sp_dist.logpdf],
[log_normal.prob, sp_dist.pdf],
[log_normal.log_cdf, sp_dist.logcdf],
[log_normal.cdf, sp_dist.cdf],
[log_normal.survival_function, sp_dist.sf],
[log_normal.log_survival_function, sp_dist.logsf]]:
actual = func[0](test_vals)
expected = func[1](test_vals)
with self.test_session(graph=g):
self.assertAllClose(expected, actual.eval(), atol=0, rtol=0.01)
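# The assertions above follow the change-of-variables identity for Y = exp(X)
# with X ~ Normal(mu, sigma): log p_Y(y) = log p_X(log y) - log y, which is
# exactly the lognormal density that scipy's stats.lognorm evaluates.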
def testCachedSamplesWithoutInverse(self):
with self.test_session() as sess:
mu = 3.0
sigma = 0.02
log_normal = self._cls()(
distribution=ds.Normal(loc=mu, scale=sigma),
bijector=bs.Exp(event_ndims=0))
sample = log_normal.sample(1)
sample_val, log_pdf_val = sess.run([sample, log_normal.log_prob(sample)])
self.assertAllClose(
stats.lognorm.logpdf(sample_val, s=sigma, scale=np.exp(mu)),
log_pdf_val,
atol=1e-2)
def testShapeChangingBijector(self):
with self.test_session():
softmax = bs.SoftmaxCentered()
standard_normal = ds.Normal(loc=0., scale=1.)
multi_logit_normal = self._cls()(
distribution=standard_normal,
bijector=softmax)
x = [[-np.log(3.), 0.],
[np.log(3), np.log(5)]]
y = softmax.forward(x).eval()
expected_log_pdf = (stats.norm(loc=0., scale=1.).logpdf(x) -
np.sum(np.log(y), axis=-1))
self.assertAllClose(expected_log_pdf,
multi_logit_normal.log_prob(y).eval())
self.assertAllClose(
[1, 2, 3, 2],
array_ops.shape(multi_logit_normal.sample([1, 2, 3])).eval())
self.assertAllEqual([2], multi_logit_normal.event_shape)
self.assertAllEqual([2], multi_logit_normal.event_shape_tensor().eval())
def testEntropy(self):
with self.test_session():
shift = np.array([[-1, 0, 1], [-1, -2, -3]], dtype=np.float32)
diag = np.array([[1, 2, 3], [2, 3, 2]], dtype=np.float32)
actual_mvn_entropy = np.concatenate([
[stats.multivariate_normal(shift[i], np.diag(diag[i]**2)).entropy()]
for i in range(len(diag))])
fake_mvn = self._cls()(
ds.MultivariateNormalDiag(
loc=array_ops.zeros_like(shift),
scale_diag=array_ops.ones_like(diag),
validate_args=True),
bs.AffineLinearOperator(
shift,
scale=la.LinearOperatorDiag(diag, is_non_singular=True),
validate_args=True),
validate_args=True)
self.assertAllClose(actual_mvn_entropy,
fake_mvn.entropy().eval())
class ScalarToMultiTest(test.TestCase):
def _cls(self):
return ds.TransformedDistribution
def setUp(self):
self._shift = np.array([-1, 0, 1], dtype=np.float32)
self._tril = np.array([[[1., 0, 0],
[2, 1, 0],
[3, 2, 1]],
[[2, 0, 0],
[3, 2, 0],
[4, 3, 2]]],
dtype=np.float32)
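# The tests below push a scalar (or diag-MVN) base distribution through an
# Affine bijector built from self._shift and self._tril; overriding
# batch_shape/event_shape is what turns that base into a [2]-batch, [3]-event
# "fake" multivariate normal.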
def _testMVN(self,
base_distribution_class,
base_distribution_kwargs,
batch_shape=(),
event_shape=(),
not_implemented_message=None):
with self.test_session() as sess:
# Overriding shapes must be compatible w/bijector; most bijectors are
# batch_shape agnostic and only care about event_ndims.
# In the case of `Affine`, if we got it wrong then it would fire an
# exception due to incompatible dimensions.
batch_shape_pl = array_ops.placeholder(<|fim▁hole|> feed_dict = {batch_shape_pl: np.array(batch_shape, dtype=np.int32),
event_shape_pl: np.array(event_shape, dtype=np.int32)}
fake_mvn_dynamic = self._cls()(
distribution=base_distribution_class(validate_args=True,
**base_distribution_kwargs),
bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
batch_shape=batch_shape_pl,
event_shape=event_shape_pl,
validate_args=True)
fake_mvn_static = self._cls()(
distribution=base_distribution_class(validate_args=True,
**base_distribution_kwargs),
bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
batch_shape=batch_shape,
event_shape=event_shape,
validate_args=True)
actual_mean = np.tile(self._shift, [2, 1]) # Affine elided this tile.
actual_cov = np.matmul(self._tril, np.transpose(self._tril, [0, 2, 1]))
def actual_mvn_log_prob(x):
return np.concatenate([
[stats.multivariate_normal(
actual_mean[i], actual_cov[i]).logpdf(x[:, i, :])]
for i in range(len(actual_cov))]).T
actual_mvn_entropy = np.concatenate([
[stats.multivariate_normal(
actual_mean[i], actual_cov[i]).entropy()]
for i in range(len(actual_cov))])
self.assertAllEqual([3], fake_mvn_static.event_shape)
self.assertAllEqual([2], fake_mvn_static.batch_shape)
self.assertAllEqual(tensor_shape.TensorShape(None),
fake_mvn_dynamic.event_shape)
self.assertAllEqual(tensor_shape.TensorShape(None),
fake_mvn_dynamic.batch_shape)
x = fake_mvn_static.sample(5, seed=0).eval()
for unsupported_fn in (fake_mvn_static.log_cdf,
fake_mvn_static.cdf,
fake_mvn_static.survival_function,
fake_mvn_static.log_survival_function):
with self.assertRaisesRegexp(NotImplementedError,
not_implemented_message):
unsupported_fn(x)
num_samples = 5e3
for fake_mvn, feed_dict in ((fake_mvn_static, {}),
(fake_mvn_dynamic, feed_dict)):
# Ensure sample works by checking first, second moments.
y = fake_mvn.sample(int(num_samples), seed=0)
x = y[0:5, ...]
sample_mean = math_ops.reduce_mean(y, 0)
centered_y = array_ops.transpose(y - sample_mean, [1, 2, 0])
sample_cov = math_ops.matmul(
centered_y, centered_y, transpose_b=True) / num_samples
[
sample_mean_,
sample_cov_,
x_,
fake_event_shape_,
fake_batch_shape_,
fake_log_prob_,
fake_prob_,
fake_entropy_,
] = sess.run([
sample_mean,
sample_cov,
x,
fake_mvn.event_shape_tensor(),
fake_mvn.batch_shape_tensor(),
fake_mvn.log_prob(x),
fake_mvn.prob(x),
fake_mvn.entropy(),
], feed_dict=feed_dict)
self.assertAllClose(actual_mean, sample_mean_, atol=0.1, rtol=0.1)
self.assertAllClose(actual_cov, sample_cov_, atol=0., rtol=0.1)
# Ensure all other functions work as intended.
self.assertAllEqual([5, 2, 3], x_.shape)
self.assertAllEqual([3], fake_event_shape_)
self.assertAllEqual([2], fake_batch_shape_)
self.assertAllClose(actual_mvn_log_prob(x_), fake_log_prob_,
atol=0., rtol=1e-6)
self.assertAllClose(np.exp(actual_mvn_log_prob(x_)), fake_prob_,
atol=0., rtol=1e-5)
self.assertAllClose(actual_mvn_entropy, fake_entropy_,
atol=0., rtol=1e-6)
def testScalarBatchScalarEvent(self):
self._testMVN(
base_distribution_class=ds.Normal,
base_distribution_kwargs={"loc": 0., "scale": 1.},
batch_shape=[2],
event_shape=[3],
not_implemented_message="not implemented when overriding event_shape")
def testScalarBatchNonScalarEvent(self):
self._testMVN(
base_distribution_class=ds.MultivariateNormalDiag,
base_distribution_kwargs={"loc": [0., 0., 0.],
"scale_diag": [1., 1, 1]},
batch_shape=[2],
not_implemented_message="not implemented")
with self.test_session():
# Can't override event_shape for scalar batch, non-scalar event.
with self.assertRaisesRegexp(ValueError, "base distribution not scalar"):
self._cls()(
distribution=ds.MultivariateNormalDiag(loc=[0.], scale_diag=[1.]),
bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
batch_shape=[2],
event_shape=[3],
validate_args=True)
def testNonScalarBatchScalarEvent(self):
self._testMVN(
base_distribution_class=ds.Normal,
base_distribution_kwargs={"loc": [0., 0], "scale": [1., 1]},
event_shape=[3],
not_implemented_message="not implemented when overriding event_shape")
with self.test_session():
# Can't override batch_shape for non-scalar batch, scalar event.
with self.assertRaisesRegexp(ValueError, "base distribution not scalar"):
self._cls()(
distribution=ds.Normal(loc=[0.], scale=[1.]),
bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
batch_shape=[2],
event_shape=[3],
validate_args=True)
def testNonScalarBatchNonScalarEvent(self):
with self.test_session():
# Can't override event_shape and/or batch_shape for non_scalar batch,
# non-scalar event.
with self.assertRaisesRegexp(ValueError, "base distribution not scalar"):
self._cls()(
distribution=ds.MultivariateNormalDiag(loc=[[0.]],
scale_diag=[[1.]]),
bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
batch_shape=[2],
event_shape=[3],
validate_args=True)
if __name__ == "__main__":
test.main()<|fim▁end|> | dtypes.int32, name="dynamic_batch_shape")
event_shape_pl = array_ops.placeholder(
dtypes.int32, name="dynamic_event_shape") |
<|file_name|>gotoDefinitionInObjectBindingPattern2.ts<|end_file_name|><|fim▁begin|>/// <reference path='fourslash.ts' />
//// var p0 = ({a/*1*/a}) => {console.log(aa)};
//// function f2({ a/*a1*/1, b/*b1*/1 }: { /*a1_dest*/a1: number, /*b1_dest*/b1: number } = { a1: 0, b1: 0 }) {}
verify.goToDefinition("1", []);<|fim▁hole|>verify.goToDefinition("a1", "a1_dest");
verify.goToDefinition("b1", "b1_dest");<|fim▁end|> | |
<|file_name|>vcx.rs<|end_file_name|><|fim▁begin|>use utils::version_constants;
use libc::c_char;
use utils::cstring::CStringUtils;
use utils::libindy::{wallet, pool};
use utils::error;
use settings;
use std::ffi::CString;
use utils::threadpool::spawn;
use error::prelude::*;
/// Initializes VCX with config settings
///
/// example configuration is in libvcx/sample_config/config.json
///
/// #Params
/// command_handle: command handle to map callback to user context.
///
/// config: config as a json string to populate config attributes
///
/// cb: Callback that provides error status of initialization
///
/// #Returns
/// Error code as a u32
#[no_mangle]
pub extern fn vcx_init_with_config(command_handle: u32,
config: *const c_char,
cb: Option<extern fn(xcommand_handle: u32, err: u32)>) -> u32 {
info!("vcx_init_with_config >>>");
check_useful_c_str!(config,VcxErrorKind::InvalidOption);
check_useful_c_callback!(cb, VcxErrorKind::InvalidOption);
trace!("vcx_init(command_handle: {}, config: {:?})",
command_handle, config);
if config == "ENABLE_TEST_MODE" {
settings::set_config_value(settings::CONFIG_ENABLE_TEST_MODE, "true");
settings::set_defaults();
} else {
match settings::process_config_string(&config) {
Err(e) => {
error!("Invalid configuration specified: {}", e);
return e.into();
}
Ok(_) => (),
}
};
_finish_init(command_handle, cb)
}
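// A minimal config string accepted here, assembled from the fields exercised
// in the tests below (all values are placeholders):
//
//   {
//     "wallet_name": "my_wallet",
//     "wallet_key": "my_key",
//     "wallet_key_derivation": "RAW"
//   }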
/// Initializes VCX with config file
///
/// An example file is at libvcx/sample_config/config.json
///
/// #Params
/// command_handle: command handle to map callback to user context.
///
/// config_path: path to a config file to populate config attributes
///
/// cb: Callback that provides error status of initialization
///
/// #Returns
/// Error code as a u32
#[no_mangle]
pub extern fn vcx_init(command_handle: u32,
config_path: *const c_char,
cb: Option<extern fn(xcommand_handle: u32, err: u32)>) -> u32 {
info!("vcx_init >>>");
check_useful_c_callback!(cb, VcxErrorKind::InvalidOption);
trace!("vcx_init(command_handle: {}, config_path: {:?})",
command_handle, config_path);
if !config_path.is_null() {
check_useful_c_str!(config_path,VcxErrorKind::InvalidOption);
if config_path == "ENABLE_TEST_MODE" {
settings::set_config_value(settings::CONFIG_ENABLE_TEST_MODE, "true");
settings::set_defaults();
} else {
match settings::process_config_file(&config_path) {
Err(e) => {
return VcxError::from_msg(VcxErrorKind::InvalidConfiguration, "Cannot initialize with given config path.").into();
}
Ok(_) => {
match settings::validate_payment_method() {
Ok(_) => (),
Err(e) => return e.into()
}
}
};
}
} else {
error!("Cannot initialize with given config path: config path is null.");
return VcxError::from_msg(VcxErrorKind::InvalidConfiguration, "Cannot initialize with given config path: config path is null.").into();
}
_finish_init(command_handle, cb)
}
fn _finish_init(command_handle: u32, cb: extern fn(xcommand_handle: u32, err: u32)) -> u32 {
::utils::threadpool::init();
settings::log_settings();
if wallet::get_wallet_handle() > 0 {
error!("Library was already initialized");
return VcxError::from_msg(VcxErrorKind::AlreadyInitialized, "Library was already initialized").into();
}
// Wallet name was already validated
let wallet_name = match settings::get_config_value(settings::CONFIG_WALLET_NAME) {
Ok(x) => x,
Err(_) => {
trace!("Using default wallet: {}", settings::DEFAULT_WALLET_NAME.to_string());
settings::set_config_value(settings::CONFIG_WALLET_NAME, settings::DEFAULT_WALLET_NAME);
settings::DEFAULT_WALLET_NAME.to_string()
}
};
let wallet_type = settings::get_config_value(settings::CONFIG_WALLET_TYPE).ok();
let storage_config = settings::get_config_value(settings::CONFIG_WALLET_STORAGE_CONFIG).ok();
let storage_creds = settings::get_config_value(settings::CONFIG_WALLET_STORAGE_CREDS).ok();
trace!("libvcx version: {}{}", version_constants::VERSION, version_constants::REVISION);
spawn(move || {
if settings::get_config_value(settings::CONFIG_GENESIS_PATH).is_ok() {
match ::utils::libindy::init_pool() {
Ok(_) => (),
Err(e) => {
error!("Init Pool Error {}.", e);
return Ok(cb(command_handle, e.into()));
}
}
}
match wallet::open_wallet(&wallet_name, wallet_type.as_ref().map(String::as_str),
storage_config.as_ref().map(String::as_str), storage_creds.as_ref().map(String::as_str)) {
Ok(_) => {
debug!("Init Wallet Successful");
cb(command_handle, error::SUCCESS.code_num);
}
Err(e) => {
error!("Init Wallet Error {}.", e);
cb(command_handle, e.into());
}
}
Ok(())
});
error::SUCCESS.code_num
}
lazy_static! {
pub static ref VERSION_STRING: CString = CString::new(format!("{}{}", version_constants::VERSION, version_constants::REVISION)).unwrap();
}
#[no_mangle]
pub extern fn vcx_version() -> *const c_char {
info!("vcx_version >>>");
VERSION_STRING.as_ptr()
}
/// Reset libvcx to a pre-configured state, releasing/deleting any handles and freeing memory
///
/// libvcx will be inoperable and must be initialized again with vcx_init_with_config
///
/// #Params
/// delete: specify whether wallet/pool should be deleted
///
/// #Returns
/// Success
#[no_mangle]
pub extern fn vcx_shutdown(delete: bool) -> u32 {
info!("vcx_shutdown >>>");
trace!("vcx_shutdown(delete: {})", delete);
match wallet::close_wallet() {
Ok(_) => {}
Err(_) => {}
};
match pool::close() {
Ok(_) => {}
Err(_) => {}
};
::schema::release_all();
::connection::release_all();
::issuer_credential::release_all();
::credential_def::release_all();
::proof::release_all();
::disclosed_proof::release_all();
::credential::release_all();
if delete {
let pool_name = settings::get_config_value(settings::CONFIG_POOL_NAME)
.unwrap_or(settings::DEFAULT_POOL_NAME.to_string());
let wallet_name = settings::get_config_value(settings::CONFIG_WALLET_NAME)
.unwrap_or(settings::DEFAULT_WALLET_NAME.to_string());
let wallet_type = settings::get_config_value(settings::CONFIG_WALLET_TYPE).ok();
match wallet::delete_wallet(&wallet_name, wallet_type.as_ref().map(String::as_str), None, None) {
Ok(_) => (),
Err(_) => (),
};
match pool::delete(&pool_name) {
Ok(_) => (),
Err(_) => (),
};
}
settings::clear_config();
trace!("vcx_shutdown(delete: {})", delete);
error::SUCCESS.code_num
}
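// Shutdown closes the wallet and pool handles, clears every in-memory object
// store (schemas, connections, credentials, proofs, ...), and, when `delete`
// is true, also removes the wallet and the pool ledger config from disk.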
#[no_mangle]
pub extern fn vcx_error_c_message(error_code: u32) -> *const c_char {
info!("vcx_error_c_message >>>");
trace!("vcx_error_message(error_code: {})", error_code);
error::error_c_message(&error_code).as_ptr()
}
#[no_mangle]
pub extern fn vcx_update_institution_info(name: *const c_char, logo_url: *const c_char) -> u32 {
info!("vcx_update_institution_info >>>");<|fim▁hole|> check_useful_c_str!(name, VcxErrorKind::InvalidConfiguration);
check_useful_c_str!(logo_url, VcxErrorKind::InvalidConfiguration);
trace!("vcx_update_institution_info(name: {}, logo_url: {})", name, logo_url);
settings::set_config_value(::settings::CONFIG_INSTITUTION_NAME, &name);
settings::set_config_value(::settings::CONFIG_INSTITUTION_LOGO_URL, &logo_url);
error::SUCCESS.code_num
}
#[no_mangle]
pub extern fn vcx_mint_tokens(seed: *const c_char, fees: *const c_char) {
info!("vcx_mint_tokens >>>");
// TODO: CHECK
let seed = if !seed.is_null() {
match CStringUtils::c_str_to_string(seed) {
Ok(opt_val) => opt_val.map(String::from),
Err(_) => return ()
}
} else {
None
};
let fees = if !fees.is_null() {
match CStringUtils::c_str_to_string(fees) {
Ok(opt_val) => opt_val.map(String::from),
Err(_) => return ()
}
} else {
None
};
trace!("vcx_mint_tokens(seed: {:?}, fees: {:?})", seed, fees);
::utils::libindy::payments::mint_tokens_and_set_fees(None, None, fees, seed).unwrap_or_default();
}
/// Get details for last occurred error.
///
/// This function should be called in two places to handle both cases of error occurrence:
/// 1) synchronous - in the same application thread
/// 2) asynchronous - inside of function callback
///
/// NOTE: Error is stored until the next one occurs in the same execution thread or until asynchronous callback finished.
/// Returning pointer has the same lifetime.
///
/// #Params
/// * `error_json_p` - Reference that will contain error details (if any error has occurred before)
/// in the format:
/// {
/// "backtrace": Optional<str> - error backtrace.
/// Collecting of backtrace can be enabled by setting environment variable `RUST_BACKTRACE=1`
/// "message": str - human-readable error description
/// }
///
#[no_mangle]
pub extern fn vcx_get_current_error(error_json_p: *mut *const c_char) {
trace!("vcx_get_current_error >>> error_json_p: {:?}", error_json_p);
let error = get_current_error_c_json();
unsafe { *error_json_p = error };
trace!("vcx_get_current_error: <<<");
}
#[cfg(test)]
mod tests {
use super::*;
use std::time::Duration;
use std::ptr;
use std::thread;
use utils::{
libindy::{
wallet::{import, tests::export_test_wallet, tests::delete_import_wallet_path},
pool::get_pool_handle
},
get_temp_dir_path
};
use api::return_types_u32;
fn create_config_util(logging: Option<&str>) -> String {
json!({"agency_did" : "72x8p4HubxzUK1dwxcc5FU",
"remote_to_sdk_did" : "UJGjM6Cea2YVixjWwHN9wq",
"sdk_to_remote_did" : "AB3JM851T4EQmhh8CdagSP",
"sdk_to_remote_verkey" : "888MFrZjXDoi2Vc8Mm14Ys112tEZdDegBZZoembFEATE",
"institution_name" : "evernym enterprise",
"agency_verkey" : "91qMFrZjXDoi2Vc8Mm14Ys112tEZdDegBZZoembFEATE",
"remote_to_sdk_verkey" : "91qMFrZjXDoi2Vc8Mm14Ys112tEZdDegBZZoembFEATE",
"genesis_path": get_temp_dir_path(Some("pool1.txn")).to_str().unwrap(),
"payment_method": "null"}).to_string()
}
#[cfg(feature = "agency")]
#[cfg(feature = "pool_tests")]
#[test]
fn test_init_with_file() {
init!("ledger");
wallet::close_wallet().unwrap();
pool::close().unwrap();
let config_path_buf = get_temp_dir_path(Some("test_init.json"));
let config_path = config_path_buf.to_str().unwrap();
let content = create_config_util(Some("true"));
settings::write_config_to_file(&content, config_path).unwrap();
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init(cb.command_handle,
CString::new(config_path).unwrap().into_raw(),
Some(cb.get_callback())),
error::SUCCESS.code_num);
cb.receive(Some(Duration::from_secs(10))).unwrap();
// Assert pool was initialized
assert_ne!(get_pool_handle().unwrap(), 0);
}
#[cfg(feature = "agency")]
#[cfg(feature = "pool_tests")]
#[test]
fn test_init_with_file_no_payment_method() {
init!("false");
settings::clear_config();
let config_path_buf = get_temp_dir_path(Some("test_init.json"));
let config_path = config_path_buf.to_str().unwrap();
let content = json!({
"wallet_name": settings::DEFAULT_WALLET_NAME,
"wallet_key": settings::DEFAULT_WALLET_KEY,
"wallet_key_derivation": settings::DEFAULT_WALLET_KEY_DERIVATION,
}).to_string();
settings::write_config_to_file(&content, config_path).unwrap();
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init(cb.command_handle,
CString::new(config_path).unwrap().into_raw(),
Some(cb.get_callback())),
error::MISSING_PAYMENT_METHOD.code_num);
}
#[cfg(feature = "agency")]
#[cfg(feature = "pool_tests")]
#[test]
fn test_init_with_config() {
init!("ledger");
wallet::close_wallet().unwrap();
pool::close().unwrap();
let content = create_config_util(None);
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content).unwrap().into_raw(),
Some(cb.get_callback())),
error::SUCCESS.code_num);
cb.receive(Some(Duration::from_secs(10))).unwrap();
// Assert pool was initialized
assert_ne!(get_pool_handle().unwrap(), 0);
}
#[cfg(feature = "pool_tests")]
#[test]
fn test_init_fails_when_open_pool_fails() {
settings::set_defaults();
vcx_shutdown(true);
use std::fs;
use std::io::Write;
settings::set_config_value(settings::CONFIG_ENABLE_TEST_MODE, "false");
settings::set_config_value(settings::CONFIG_WALLET_KEY, settings::DEFAULT_WALLET_KEY);
// Write invalid genesis.txn
let mut f = fs::File::create(get_temp_dir_path(Some(::utils::constants::GENESIS_PATH)).to_str().unwrap()).unwrap();
f.write_all("{}".as_bytes()).unwrap();
f.flush().unwrap();
f.sync_all().unwrap();
let wallet_name = "test_init_fails_when_open_pool_fails";
wallet::create_wallet(wallet_name, None, None, None).unwrap();
let content = create_config_util(None);
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content).unwrap().into_raw(),
Some(cb.get_callback())),
error::SUCCESS.code_num);
let rc = cb.receive(Some(Duration::from_secs(10)));
thread::sleep(Duration::from_secs(1));
assert!(rc.is_err());
assert_eq!(get_pool_handle().unwrap_err().kind(), VcxErrorKind::NoPoolOpen);
assert_eq!(wallet::get_wallet_handle(), 0);
wallet::delete_wallet(wallet_name, None, None, None).unwrap();
}
#[test]
fn test_init_can_be_called_with_no_pool_config() {
init!("false");
wallet::close_wallet().unwrap();
let content = json!({
"wallet_name": settings::DEFAULT_WALLET_NAME,
"wallet_key": settings::DEFAULT_WALLET_KEY,
"wallet_key_derivation": settings::DEFAULT_WALLET_KEY_DERIVATION,
}).to_string();
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content).unwrap().into_raw(),
Some(cb.get_callback())),
error::SUCCESS.code_num);
cb.receive(Some(Duration::from_secs(10))).unwrap();
// assert that pool was never initialized
assert!(get_pool_handle().is_err());
}
#[test]
fn test_init_fails_with_no_wallet_key() {
settings::set_defaults();
vcx_shutdown(true);
let content = json!({
"wallet_name": settings::DEFAULT_WALLET_NAME,
}).to_string();
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content).unwrap().into_raw(),
Some(cb.get_callback())),
error::MISSING_WALLET_KEY.code_num);
}
#[test]
fn test_config_with_no_wallet_uses_default() {
init!("false");
vcx_shutdown(false);
thread::sleep(Duration::from_secs(1));
assert!(settings::get_config_value(settings::CONFIG_WALLET_NAME).is_err());
let content = json!({
"wallet_key": "key",
}).to_string();
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content).unwrap().into_raw(),
Some(cb.get_callback())),
error::SUCCESS.code_num);
let err = cb.receive(Some(Duration::from_secs(10)));
// Assert default wallet name
assert_eq!(settings::get_config_value(settings::CONFIG_WALLET_NAME).unwrap(), settings::DEFAULT_WALLET_NAME);
}
#[cfg(feature = "pool_tests")]
#[test]
fn test_vcx_init_with_default_values() {
init!("ledger");
wallet::close_wallet().unwrap();
pool::close().unwrap();
let content = "{}".to_string();
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content).unwrap().into_raw(),
Some(cb.get_callback())),
error::SUCCESS.code_num);
cb.receive(Some(Duration::from_secs(10))).unwrap();
}
#[cfg(feature = "agency")]
#[cfg(feature = "pool_tests")]
#[test]
fn test_vcx_init_called_twice_fails() {
init!("ledger");
wallet::close_wallet().unwrap();
pool::close().unwrap();
let content = "{}";
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content).unwrap().into_raw(),
Some(cb.get_callback())),
error::SUCCESS.code_num);
cb.receive(Some(Duration::from_secs(10))).unwrap();
// Repeat call
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content).unwrap().into_raw(),
Some(cb.get_callback())),
error::ALREADY_INITIALIZED.code_num);
}
#[cfg(feature = "agency")]
#[cfg(feature = "pool_tests")]
#[test]
fn test_vcx_init_called_twice_passes_after_shutdown() {
init!("ledger");
wallet::close_wallet().unwrap();
pool::close().unwrap();
let content = format!(r#"{{"wallet_name":"{}"}}"#, settings::DEFAULT_WALLET_NAME);
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content.clone()).unwrap().into_raw(),
Some(cb.get_callback())),
error::SUCCESS.code_num);
cb.receive(Some(Duration::from_secs(10))).unwrap();
//Assert config values were set correctly
assert_eq!(settings::get_config_value("wallet_name").unwrap(), settings::DEFAULT_WALLET_NAME);
//Verify shutdown was successful
vcx_shutdown(true);
assert_eq!(settings::get_config_value("wallet_name").unwrap_err().kind(), VcxErrorKind::InvalidConfiguration);
// Init for the second time works
::utils::devsetup::tests::setup_ledger_env();
wallet::close_wallet().unwrap();
pool::close().unwrap();
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content).unwrap().into_raw(),
Some(cb.get_callback())),
error::SUCCESS.code_num);
cb.receive(Some(Duration::from_secs(10))).unwrap();
vcx_shutdown(true);
}
#[cfg(feature = "agency")]
#[cfg(feature = "pool_tests")]
#[test]
fn test_init_fails_with_open_wallet() {
init!("ledger");
let config_path_buf = get_temp_dir_path(Some("test_init.json"));
let config_path = config_path_buf.to_str().unwrap();
let content = create_config_util(None);
settings::write_config_to_file(&content, config_path).unwrap();
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init(cb.command_handle,
CString::new(config_path).unwrap().into_raw(),
Some(cb.get_callback())),
error::ALREADY_INITIALIZED.code_num);
}
#[test]
fn test_init_after_importing_wallet_success() {
settings::set_defaults();
teardown!("false");
let export_path = export_test_wallet();
vcx_shutdown(true);
let import_config = json!({
settings::CONFIG_WALLET_NAME: settings::DEFAULT_WALLET_NAME,
settings::CONFIG_WALLET_KEY: settings::DEFAULT_WALLET_KEY,
settings::CONFIG_WALLET_KEY_DERIVATION: settings::DEFAULT_WALLET_KEY_DERIVATION,
settings::CONFIG_WALLET_BACKUP_KEY: settings::DEFAULT_WALLET_BACKUP_KEY,
settings::CONFIG_EXPORTED_WALLET_PATH: export_path,
}).to_string();
import(&import_config).unwrap();
let content = json!({
"wallet_name": settings::DEFAULT_WALLET_NAME,
"wallet_key": settings::DEFAULT_WALLET_KEY,
"wallet_key_derivation": settings::DEFAULT_WALLET_KEY_DERIVATION,
}).to_string();
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content).unwrap().into_raw(),
Some(cb.get_callback())),
error::SUCCESS.code_num);
cb.receive(Some(Duration::from_secs(10))).unwrap();
delete_import_wallet_path(export_path);
vcx_shutdown(true);
}
#[test]
fn test_init_with_imported_wallet_fails_with_different_params() {
settings::set_defaults();
teardown!("false");
let export_path = export_test_wallet();
vcx_shutdown(true);
let import_config = json!({
settings::CONFIG_WALLET_NAME: settings::DEFAULT_WALLET_NAME,
settings::CONFIG_WALLET_KEY: settings::DEFAULT_WALLET_KEY,
settings::CONFIG_WALLET_KEY_DERIVATION: settings::DEFAULT_WALLET_KEY_DERIVATION,
settings::CONFIG_EXPORTED_WALLET_PATH: export_path,
settings::CONFIG_WALLET_BACKUP_KEY: settings::DEFAULT_WALLET_BACKUP_KEY,
}).to_string();
import(&import_config).unwrap();
let content = json!({
"wallet_name": "different_wallet_name",
"wallet_key": settings::DEFAULT_WALLET_KEY,
"wallet_key_derivation": settings::DEFAULT_WALLET_KEY_DERIVATION,
}).to_string();
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content).unwrap().into_raw(),
Some(cb.get_callback())),
error::SUCCESS.code_num);
assert_eq!(cb.receive(Some(Duration::from_secs(10))).err(), Some(error::WALLET_NOT_FOUND.code_num));
delete_import_wallet_path(export_path);
settings::set_config_value(settings::CONFIG_WALLET_NAME, settings::DEFAULT_WALLET_NAME);
vcx_shutdown(true);
}
#[test]
fn test_import_after_init_fails() {
settings::set_defaults();
teardown!("false");
let export_path = export_test_wallet();
vcx_shutdown(false);
let content = json!({
"wallet_name": settings::DEFAULT_WALLET_NAME,
"wallet_key": settings::DEFAULT_WALLET_KEY,
"wallet_key_derivation": settings::DEFAULT_WALLET_KEY_DERIVATION,
}).to_string();
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content).unwrap().into_raw(),
Some(cb.get_callback())),
error::SUCCESS.code_num);
cb.receive(Some(Duration::from_secs(10))).unwrap();
let import_config = json!({
settings::CONFIG_WALLET_NAME: settings::DEFAULT_WALLET_NAME,
settings::CONFIG_WALLET_KEY: settings::DEFAULT_WALLET_KEY,
settings::CONFIG_EXPORTED_WALLET_PATH: export_path,
settings::CONFIG_WALLET_BACKUP_KEY: settings::DEFAULT_WALLET_BACKUP_KEY,
}).to_string();
assert_eq!(import(&import_config).unwrap_err().kind(), VcxErrorKind::DuplicationWallet);
delete_import_wallet_path(export_path);
vcx_shutdown(true);
}
#[test]
fn test_init_bad_path() {
use utils::libindy::pool::get_pool_handle;
init!("false");
let config_path = "";
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init(cb.command_handle,
CString::new(config_path).unwrap().into_raw(),
Some(cb.get_callback())),
error::INVALID_OPTION.code_num);
match get_pool_handle() {
Ok(_) => { pool::close().unwrap(); }
Err(_) => {}
};
}
// This test now fails: you must provide a path to a valid config.
#[test]
fn test_init_no_config_path() {
init!("true");
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init(cb.command_handle,
ptr::null(),
Some(cb.get_callback())),
error::INVALID_CONFIGURATION.code_num);
}
#[test]
fn test_shutdown_with_no_previous_config() {
vcx_shutdown(true);
vcx_shutdown(false);
}
#[test]
fn test_shutdown() {
init!("true");
let data = r#"["name","male"]"#;
let connection = ::connection::tests::build_test_connection();
let credentialdef = ::credential_def::create_new_credentialdef("SID".to_string(), "NAME".to_string(), "4fUDR9R7fjwELRvH9JT6HH".to_string(), "id".to_string(), "tag".to_string(), "{}".to_string()).unwrap();
let issuer_credential = ::issuer_credential::issuer_credential_create(credentialdef, "1".to_string(), "8XFh8yBzrpJQmNyZzgoTqB".to_owned(), "credential_name".to_string(), "{\"attr\":\"value\"}".to_owned(), 1).unwrap();
let proof = ::proof::create_proof("1".to_string(), "[]".to_string(), "[]".to_string(), r#"{"support_revocation":false}"#.to_string(), "Optional".to_owned()).unwrap();
let schema = ::schema::create_new_schema("5", "VsKV7grR1BUE29mG2Fm2kX".to_string(), "name".to_string(), "0.1".to_string(), data.to_string()).unwrap();
let disclosed_proof = ::disclosed_proof::create_proof("id", ::utils::constants::PROOF_REQUEST_JSON).unwrap();
let credential = ::credential::credential_create_with_offer("name", ::utils::constants::CREDENTIAL_OFFER_JSON).unwrap();
vcx_shutdown(true);
assert_eq!(::connection::release(connection).unwrap_err().kind(), VcxErrorKind::InvalidConnectionHandle);
assert_eq!(::issuer_credential::release(issuer_credential).unwrap_err().kind(), VcxErrorKind::InvalidIssuerCredentialHandle);
assert_eq!(::schema::release(schema).unwrap_err().kind(), VcxErrorKind::InvalidSchemaHandle);
assert_eq!(::proof::release(proof).unwrap_err().kind(), VcxErrorKind::InvalidProofHandle);
assert_eq!(::credential_def::release(credentialdef).unwrap_err().kind(), VcxErrorKind::InvalidCredDefHandle);
assert_eq!(::credential::release(credential).unwrap_err().kind(), VcxErrorKind::InvalidCredentialHandle);
assert_eq!(::disclosed_proof::release(disclosed_proof).unwrap_err().kind(), VcxErrorKind::InvalidDisclosedProofHandle);
assert_eq!(wallet::get_wallet_handle(), 0);
}
#[test]
fn test_error_c_message() {
init!("true");
let c_message = CStringUtils::c_str_to_string(vcx_error_c_message(0)).unwrap().unwrap();
assert_eq!(c_message, error::SUCCESS.message);
let c_message = CStringUtils::c_str_to_string(vcx_error_c_message(1001)).unwrap().unwrap();
assert_eq!(c_message, error::UNKNOWN_ERROR.message);
let c_message = CStringUtils::c_str_to_string(vcx_error_c_message(100100)).unwrap().unwrap();
assert_eq!(c_message, error::UNKNOWN_ERROR.message);
let c_message = CStringUtils::c_str_to_string(vcx_error_c_message(1021)).unwrap().unwrap();
assert_eq!(c_message, error::INVALID_ATTRIBUTES_STRUCTURE.message);
}
#[test]
fn test_vcx_version() {
let return_version = CStringUtils::c_str_to_string(vcx_version()).unwrap().unwrap();
assert!(return_version.len() > 5);
}
#[test]
fn test_vcx_update_institution_info() {
init!("true");
let new_name = "new_name";
let new_url = "http://www.evernym.com";
assert_ne!(new_name, &settings::get_config_value(::settings::CONFIG_INSTITUTION_NAME).unwrap());
assert_ne!(new_url, &settings::get_config_value(::settings::CONFIG_INSTITUTION_LOGO_URL).unwrap());
assert_eq!(error::SUCCESS.code_num, vcx_update_institution_info(CString::new(new_name.to_string()).unwrap().into_raw(),
CString::new(new_url.to_string()).unwrap().into_raw()));
assert_eq!(new_name, &settings::get_config_value(::settings::CONFIG_INSTITUTION_NAME).unwrap());
assert_eq!(new_url, &settings::get_config_value(::settings::CONFIG_INSTITUTION_LOGO_URL).unwrap());
::settings::set_defaults();
}
// This test is ignored because it sets up logging, which can only be done
// once per process.
#[ignore]
#[cfg(feature = "agency")]
#[cfg(feature = "pool_tests")]
#[test]
fn test_init_with_logging_config() {
init!("ledger");
wallet::close_wallet().unwrap();
pool::close().unwrap();
let content = create_config_util(Some("debug"));
let cb = return_types_u32::Return_U32::new().unwrap();
assert_eq!(vcx_init_with_config(cb.command_handle,
CString::new(content).unwrap().into_raw(),
Some(cb.get_callback())),
error::SUCCESS.code_num);
cb.receive(Some(Duration::from_secs(10))).unwrap();
assert_ne!(get_pool_handle().unwrap(), 0);
debug!("This statement should log");
}
#[test]
fn get_current_error_works_for_no_error() {
let mut error_json_p: *const c_char = ptr::null();
vcx_get_current_error(&mut error_json_p);
assert_eq!(None, CStringUtils::c_str_to_string(error_json_p).unwrap());
}
#[test]
fn get_current_error_works_for_sync_error() {
::api::utils::vcx_provision_agent(ptr::null());
let mut error_json_p: *const c_char = ptr::null();
vcx_get_current_error(&mut error_json_p);
assert!(CStringUtils::c_str_to_string(error_json_p).unwrap().is_some());
}
#[test]
fn get_current_error_works_for_async_error() {
extern fn cb(_storage_handle: u32,
_err: u32,
_config: *const c_char) {
let mut error_json_p: *const c_char = ptr::null();
vcx_get_current_error(&mut error_json_p);
assert!(CStringUtils::c_str_to_string(error_json_p).unwrap().is_some());
}
let config = CString::new("{}").unwrap();
::api::utils::vcx_agent_provision_async(0, config.as_ptr(), Some(cb));
::std::thread::sleep(::std::time::Duration::from_secs(1));
}
}<|fim▁end|> | |
<|file_name|>IsProcessValid.cpp<|end_file_name|><|fim▁begin|>#include <windows.h>
#include "NativeCore.hpp"
bool RC_CallConv IsProcessValid(RC_Pointer handle)
{
if (handle == nullptr)
{
return false;<|fim▁hole|> }
const auto retn = WaitForSingleObject(handle, 0);
if (retn == WAIT_FAILED)
{
return false;
}
return retn == WAIT_TIMEOUT;
}<|fim▁end|> | |
<|file_name|>logger.rs<|end_file_name|><|fim▁begin|>extern crate env_logger;
extern crate log_panics;
extern crate log;
#[cfg(target_os = "android")]
extern crate android_logger;
extern crate libc;
use self::env_logger::Builder as EnvLoggerBuilder;
use self::log::{LevelFilter, Level};
use std::env;
use std::io::Write;
#[cfg(target_os = "android")]
use self::android_logger::Filter;
use log::{Record, Metadata};
use self::libc::{c_void, c_char};
use std::ffi::CString;
use std::ptr;
use errors::prelude::*;
use utils::ctypes;
pub static mut LOGGER_STATE: LoggerState = LoggerState::Default;
pub enum LoggerState {<|fim▁hole|>}
impl LoggerState {
pub fn get(&self) -> (*const c_void, Option<EnabledCB>, Option<LogCB>, Option<FlushCB>) {
match self {
LoggerState::Default => (ptr::null(), Some(LibindyDefaultLogger::enabled), Some(LibindyDefaultLogger::log), Some(LibindyDefaultLogger::flush)),
LoggerState::Custom => unsafe { (CONTEXT, ENABLED_CB, LOG_CB, FLUSH_CB) },
}
}
}
pub type EnabledCB = extern fn(context: *const c_void,
level: u32,
target: *const c_char) -> bool;
pub type LogCB = extern fn(context: *const c_void,
level: u32,
target: *const c_char,
message: *const c_char,
module_path: *const c_char,
file: *const c_char,
line: u32);
pub type FlushCB = extern fn(context: *const c_void);
static mut CONTEXT: *const c_void = ptr::null();
static mut ENABLED_CB: Option<EnabledCB> = None;
static mut LOG_CB: Option<LogCB> = None;
static mut FLUSH_CB: Option<FlushCB> = None;
pub struct LibindyLogger {
context: *const c_void,
enabled: Option<EnabledCB>,
log: LogCB,
flush: Option<FlushCB>,
}
impl LibindyLogger {
fn new(context: *const c_void, enabled: Option<EnabledCB>, log: LogCB, flush: Option<FlushCB>) -> Self {
LibindyLogger { context, enabled, log, flush }
}
}
impl log::Log for LibindyLogger {
fn enabled(&self, metadata: &Metadata) -> bool {
if let Some(enabled_cb) = self.enabled {
let level = metadata.level() as u32;
let target = CString::new(metadata.target()).unwrap();
enabled_cb(self.context,
level,
target.as_ptr(),
)
} else { true }
}
fn log(&self, record: &Record) {
let log_cb = self.log;
let level = record.level() as u32;
let target = CString::new(record.target()).unwrap();
let message = CString::new(record.args().to_string()).unwrap();
let module_path = record.module_path().map(|a| CString::new(a).unwrap());
let file = record.file().map(|a| CString::new(a).unwrap());
let line = record.line().unwrap_or(0);
log_cb(self.context,
level,
target.as_ptr(),
message.as_ptr(),
module_path.as_ref().map(|p| p.as_ptr()).unwrap_or(ptr::null()),
file.as_ref().map(|p| p.as_ptr()).unwrap_or(ptr::null()),
line,
)
}
fn flush(&self) {
if let Some(flush_cb) = self.flush {
flush_cb(self.context)
}
}
}
unsafe impl Sync for LibindyLogger {}
unsafe impl Send for LibindyLogger {}
impl LibindyLogger {
pub fn init(context: *const c_void, enabled: Option<EnabledCB>, log: LogCB, flush: Option<FlushCB>) -> Result<(), IndyError> {
let logger = LibindyLogger::new(context, enabled, log, flush);
log::set_boxed_logger(Box::new(logger))?;
log::set_max_level(LevelFilter::Trace);
unsafe {
LOGGER_STATE = LoggerState::Custom;
CONTEXT = context;
ENABLED_CB = enabled;
LOG_CB = Some(log);
FLUSH_CB = flush
};
Ok(())
}
}
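// A minimal usage sketch (the callback name `my_log_cb` is hypothetical; a
// real consumer passes FFI callbacks supplied by the host application):
//
//     LibindyLogger::init(ptr::null(), None, my_log_cb, None)?;
//     info!("now routed through the custom logger");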
pub struct LibindyDefaultLogger;
impl LibindyDefaultLogger {
pub fn init(pattern: Option<String>) -> Result<(), IndyError> {
let pattern = pattern.or(env::var("RUST_LOG").ok());
log_panics::init(); // Logging of panics is essential for Android, since Android does not log native-code output to stdout
if cfg!(target_os = "android") {
#[cfg(target_os = "android")]
let log_filter = match pattern {
Some(val) => match val.to_lowercase().as_ref() {
"error" => Filter::default().with_min_level(log::Level::Error),
"warn" => Filter::default().with_min_level(log::Level::Warn),
"info" => Filter::default().with_min_level(log::Level::Info),
"debug" => Filter::default().with_min_level(log::Level::Debug),
"trace" => Filter::default().with_min_level(log::Level::Trace),
_ => Filter::default().with_min_level(log::Level::Error),
},
None => Filter::default().with_min_level(log::Level::Error)
};
// Set logging to off when deploying a production Android app.
#[cfg(target_os = "android")]
android_logger::init_once(log_filter);
info!("Logging for Android");
} else {
EnvLoggerBuilder::new()
.format(|buf, record| writeln!(buf, "{:>5}|{:<30}|{:>35}:{:<4}| {}", record.level(), record.target(), record.file().get_or_insert(""), record.line().get_or_insert(0), record.args()))
.filter(None, LevelFilter::Off)
.parse(pattern.as_ref().map(String::as_str).unwrap_or(""))
.try_init()?;
}
unsafe { LOGGER_STATE = LoggerState::Default };
Ok(())
}
extern fn enabled(_context: *const c_void,
level: u32,
target: *const c_char) -> bool {
let level = get_level(level);
let target = ctypes::c_str_to_string(target).unwrap().unwrap();
let metadata: Metadata = Metadata::builder()
.level(level)
.target(&target)
.build();
log::logger().enabled(&metadata)
}
extern fn log(_context: *const c_void,
level: u32,
target: *const c_char,
args: *const c_char,
module_path: *const c_char,
file: *const c_char,
line: u32) {
let target = ctypes::c_str_to_string(target).unwrap().unwrap();
let args = ctypes::c_str_to_string(args).unwrap().unwrap();
let module_path = ctypes::c_str_to_string(module_path).unwrap();
let file = ctypes::c_str_to_string(file).unwrap();
let level = get_level(level);
log::logger().log(
&Record::builder()
.args(format_args!("{}", args))
.level(level)
.target(&target)
.module_path(module_path)
.file(file)
.line(Some(line))
.build(),
);
}
extern fn flush(_context: *const c_void) {
log::logger().flush()
}
}
fn get_level(level: u32) -> Level {
match level {
1 => Level::Error,
2 => Level::Warn,
3 => Level::Info,
4 => Level::Debug,
5 => Level::Trace,
_ => unreachable!(),
}
}
#[macro_export]
macro_rules! try_log {
($expr:expr) => (match $expr {
Ok(val) => val,
Err(err) => {
error!("try_log! | {}", err);
return Err(From::from(err))
}
})
}
macro_rules! _map_err {
($lvl:expr, $expr:expr) => (
|err| {
log!($lvl, "{} - {}", $expr, err);
err
}
);
($lvl:expr) => (
|err| {
log!($lvl, "{}", err);
err
}
)
}
#[macro_export]
macro_rules! map_err_err {
() => ( _map_err!(::log::Level::Error) );
($($arg:tt)*) => ( _map_err!(::log::Level::Error, $($arg)*) )
}
#[macro_export]
macro_rules! map_err_trace {
() => ( _map_err!(::log::Level::Trace) );
($($arg:tt)*) => ( _map_err!(::log::Level::Trace, $($arg)*) )
}
#[macro_export]
macro_rules! map_err_info {
() => ( _map_err!(::log::Level::Info) );
($($arg:tt)*) => ( _map_err!(::log::Level::Info, $($arg)*) )
}
#[cfg(debug_assertions)]
#[macro_export]
macro_rules! secret {
($val:expr) => {{ $val }};
}
#[cfg(not(debug_assertions))]
#[macro_export]
macro_rules! secret {
($val:expr) => {{ "_" }};
}<|fim▁end|> | Default,
Custom |
<|file_name|>logIn.js<|end_file_name|><|fim▁begin|>/*jslint browser: true*/
/*global $, jQuery, alert*/
<|fim▁hole|> dateFormat: 'yy-mm-dd',
inline: true,
showOtherMonths: true
});
});
$(document).ready(function () {
$("input[name='rep_password']").focusout(function () {
var p1 = $('input[name="password"]').val(), p2 = $('input[name="rep_password"]').val();
if (p1 !== p2) {
$('#passDM').show(300);
} else if (p1 === "") {
$('#passDM').show(300);
} else {
$('#passDM').hide(300);
}
});
});
$(document).ready(function () {
$("input[name=password]").focusin(function () {
$('#passDM').hide(300);
});
$("input[name=rep_password]").focusin(function () {
$('#passDM').hide(300);
});
});
}(jQuery));<|fim▁end|> | (function ($) {
"use strict";
$(document).ready(function () {
$("input[name=dob]").datepicker({
|
<|file_name|>itemdemo.js<|end_file_name|><|fim▁begin|>import * as React from 'react';
import {Row,Col,Table,Code,Items,Item} from 'yrui';
import thead from './thead';
let items=[{
key:'style',
expr:'Sets the items style',
type:'object',
values:'-',
default:'-',
}];
let item=[{
key:'border',
expr:'Sets the border style',
type:'string',
values:'-',
default:'-',
}];
const code=`
<Items>
<Item>
<h2>items configuration</h2>
<Table thead={thead} tbody={items} />
</Item>
<Item>
<h2>item configuration</h2>
<Table thead={thead} tbody={item} />
</Item>
</Items>
`;
export default class ItemsDemo extends React.Component{
render(){
return(
<Items>
<Item>
<h2>Code example</h2>
<Code title="input" code={code} /><|fim▁hole|> </Item>
<Item>
<Row gutter={8}>
<Col span={6} sm={12}>
<h2>Parameter description</h2>
<Table thead={thead} tbody={items} noBorder={true} />
</Col>
<Col span={6} sm={12}>
<h2>Parameter description</h2>
<Table thead={thead} tbody={item} noBorder={true} />
</Col>
</Row>
</Item>
</Items>
);
}
}<|fim▁end|> | |
<|file_name|>AssetRoleManagerTest.java<|end_file_name|><|fim▁begin|>package org.tll.canyon.service;
import java.util.List;
import java.util.ArrayList;
import org.jmock.Mock;
import org.springframework.orm.ObjectRetrievalFailureException;
import org.tll.canyon.dao.AssetRoleDao;
import org.tll.canyon.model.AssetRole;
import org.tll.canyon.service.BaseManagerTestCase;
import org.tll.canyon.service.impl.AssetRoleManagerImpl;
public class AssetRoleManagerTest extends BaseManagerTestCase {
private final String assetRoleId = "1";
private AssetRoleManagerImpl assetRoleManager = new AssetRoleManagerImpl();
private Mock assetRoleDao = null;
protected void setUp() throws Exception {
super.setUp();
assetRoleDao = new Mock(AssetRoleDao.class);
assetRoleManager.setAssetRoleDao((AssetRoleDao) assetRoleDao.proxy());
}
protected void tearDown() throws Exception {
super.tearDown();
assetRoleManager = null;
}
public void testGetAssetRoles() throws Exception {
List results = new ArrayList();
AssetRole assetRole = new AssetRole();
results.add(assetRole);
// set expected behavior on dao
assetRoleDao.expects(once()).method("getAssetRoles")
.will(returnValue(results));
List assetRoles = assetRoleManager.getAssetRoles(null);
assertTrue(assetRoles.size() == 1);
assetRoleDao.verify();
}
<|fim▁hole|> assetRoleDao.expects(once()).method("getAssetRole")
.will(returnValue(new AssetRole()));
AssetRole assetRole = assetRoleManager.getAssetRole(assetRoleId);
assertTrue(assetRole != null);
assetRoleDao.verify();
}
public void testSaveAssetRole() throws Exception {
AssetRole assetRole = new AssetRole();
// set expected behavior on dao
assetRoleDao.expects(once()).method("saveAssetRole")
.with(same(assetRole)).isVoid();
assetRoleManager.saveAssetRole(assetRole);
assetRoleDao.verify();
}
public void testAddAndRemoveAssetRole() throws Exception {
AssetRole assetRole = new AssetRole();
// set required fields
// set expected behavior on dao
assetRoleDao.expects(once()).method("saveAssetRole")
.with(same(assetRole)).isVoid();
assetRoleManager.saveAssetRole(assetRole);
assetRoleDao.verify();
// reset expectations
assetRoleDao.reset();
assetRoleDao.expects(once()).method("removeAssetRole").with(eq(new Long(assetRoleId)));
assetRoleManager.removeAssetRole(assetRoleId);
assetRoleDao.verify();
// reset expectations
assetRoleDao.reset();
// remove
Exception ex = new ObjectRetrievalFailureException(AssetRole.class, assetRole.getId());
assetRoleDao.expects(once()).method("removeAssetRole").isVoid();
assetRoleDao.expects(once()).method("getAssetRole").will(throwException(ex));
assetRoleManager.removeAssetRole(assetRoleId);
try {
assetRoleManager.getAssetRole(assetRoleId);
fail("AssetRole with identifier '" + assetRoleId + "' found in database");
} catch (ObjectRetrievalFailureException e) {
assertNotNull(e.getMessage());
}
assetRoleDao.verify();
}
}<|fim▁end|> | public void testGetAssetRole() throws Exception {
// set expected behavior on dao |
<|file_name|>vcardupdate.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2006-2009 by Jakob Schroeter <[email protected]>
This file is part of the gloox library. http://camaya.net/gloox
This software is distributed under a license. The full license
agreement can be found in the file LICENSE in this distribution.
This software may not be copied, modified, sold or distributed
other than expressed in the named license agreement.
This software is distributed without any warranty.
*/
#include "vcardupdate.h"
#include "tag.h"
namespace gloox
{
VCardUpdate::VCardUpdate()
: StanzaExtension( ExtVCardUpdate ),
m_notReady( true ), m_noImage( true ), m_valid( true )
{
}
VCardUpdate::VCardUpdate( const std::string& hash )
: StanzaExtension( ExtVCardUpdate ),
m_hash( hash ), m_notReady( false ), m_noImage( false ), m_valid( true )
{
if( m_hash.empty() )
{
m_noImage = true;
m_valid = false;
}
}
VCardUpdate::VCardUpdate( const Tag* tag )
: StanzaExtension( ExtVCardUpdate ),
m_notReady( true ), m_noImage( true ), m_valid( false )
{
if( tag && tag->name() == "x" && tag->hasAttribute( XMLNS, XMLNS_X_VCARD_UPDATE ) )
{
m_valid = true;
if( tag->hasChild( "photo" ) )
{
m_notReady = false;
m_hash = tag->findChild( "photo" )->cdata();
if( !m_hash.empty() )
m_noImage = false;
}
}
}
VCardUpdate::~VCardUpdate()
{
}
const std::string& VCardUpdate::filterString() const
{
static const std::string filter = "/presence/x[@xmlns='" + XMLNS_X_VCARD_UPDATE + "']";
return filter;
}
Tag* VCardUpdate::tag() const
{
if( !m_valid )<|fim▁hole|>
Tag* x = new Tag( "x", XMLNS, XMLNS_X_VCARD_UPDATE );
if( !m_notReady )
{
Tag* p = new Tag( x, "photo" );
if( !m_noImage )
p->setCData( m_hash );
}
return x;
}
}<|fim▁end|> | return 0; |
<|file_name|>tally-frontend.go<|end_file_name|><|fim▁begin|>package main
import (
"flag"
"log"
"net/http"
"os"
"path"
"time"
"github.com/emef/tally/frontend"
)
func main() {
defaultStaticDir := path.Join(
os.Getenv("GOPATH"), "src/github.com/emef/tally/frontend/static")
staticDir := flag.String(
"static_dir", defaultStaticDir, "Directory to static assets")
port := flag.String("port", ":8000", "Port to run http frontend")
endpoint := flag.String("endpoint", ":5020", "Endpoint to tally backend")
flag.Parse()
config := &frontend.FrontendConfig{
StaticDir: *staticDir, BackendEndpoint: *endpoint}
frontendServer, _ := frontend.NewFrontendServer(config)
println("using static dir", *staticDir)
srv := &http.Server{
Handler: frontendServer.Router,<|fim▁hole|> log.Fatal(srv.ListenAndServe())
}<|fim▁end|> | Addr: *port,
WriteTimeout: 15 * time.Second,
ReadTimeout: 15 * time.Second}
|
<|file_name|>menus.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from gluon import current
from s3 import *
from s3layouts import *
try:
from .layouts import *
except ImportError:
pass
import s3menus as default
red_cross_filter = {"organisation.organisation_type_id$name" : "Red Cross / Red Crescent"}
# =============================================================================
class S3MainMenu(default.S3MainMenu):
""" Custom Application Main Menu """
# -------------------------------------------------------------------------
@classmethod
def menu(cls):
""" Compose Menu """
# Modules menus
main_menu = MM()(
cls.menu_modules(),
)
# Additional menus
current.menu.personal = cls.menu_personal()
current.menu.dashboard = cls.menu_dashboard()
return main_menu
# -------------------------------------------------------------------------
@classmethod
def menu_modules(cls):
""" Custom Modules Menu """
T = current.T
return [
homepage("gis")(
),
homepage("hrm", "org", name=T("Staff"),
vars=dict(group="staff"))(
MM("Staff", c="hrm", f="staff"),
MM("Teams", c="hrm", f="group"),
MM("National Societies", c="org", f="organisation",
vars = red_cross_filter),
MM("Offices", c="org", f="office"),
MM("Job Titles", c="hrm", f="job_title"),
#MM("Skill List", c="hrm", f="skill"),
MM("Training Events", c="hrm", f="training_event"),
MM("Training Courses", c="hrm", f="course"),
MM("Certificate List", c="hrm", f="certificate"),
),
homepage("vol", name=T("Volunteers"))(
MM("Volunteers", c="vol", f="volunteer"),
MM("Teams", c="vol", f="group"),
MM("Volunteer Roles", c="vol", f="job_title"),
MM("Programs", c="vol", f="programme"),
#MM("Skill List", c="vol", f="skill"),
MM("Training Events", c="vol", f="training_event"),
MM("Training Courses", c="vol", f="course"),
MM("Certificate List", c="vol", f="certificate"),
),
homepage("member")(
MM("Members", c="member", f="membership"),
),
homepage("inv", "supply", "req")(
MM("Warehouses", c="inv", f="warehouse"),
MM("Received Shipments", c="inv", f="recv"),
MM("Sent Shipments", c="inv", f="send"),
MM("Items", c="supply", f="item"),
MM("Item Catalogs", c="supply", f="catalog"),
MM("Item Categories", c="supply", f="item_category"),
M("Requests", c="req", f="req")(),
#M("Commitments", f="commit")(),
),
homepage("asset")(
MM("Assets", c="asset", f="asset"),
MM("Items", c="asset", f="item"),
),
homepage("survey")(
MM("Assessment Templates", c="survey", f="template"),
MM("Disaster Assessments", c="survey", f="series"),
),
homepage("project")(
MM("Projects", c="project", f="project"),
MM("Communities", c="project", f="location"),
),
homepage("vulnerability")(
MM("Map", c="vulnerability", f="index"),
),
homepage("event", "irs")(
MM("Events", c="event", f="event"),
MM("Incident Reports", c="irs", f="ireport"),
),
homepage("deploy", name="RDRT")(
MM("Missions", c="deploy", f="mission", m="summary"),
MM("Members", c="deploy", f="human_resource", m="summary"),
),
]
# -------------------------------------------------------------------------
@classmethod
def menu_dashboard(cls):
""" Dashboard Menu (at bottom of page) """
DB = S3DashBoardMenuLayout
request = current.request
if request.controller == "vol":
dashboard = DB()(
DB("VOLUNTEERS",
c="vol",
image = "graphic_staff_wide.png",
title = "Volunteers")(
DB("Manage Volunteer Data", f="volunteer"),
DB("Manage Teams Data", f="group"),
),
DB("CATALOGS",
c="hrm",
image="graphic_catalogue.png",
title="Catalogs")(
DB("Certificates", f="certificate"),
DB("Training Courses", f="course"),
#DB("Skills", f="skill"),
DB("Job Titles", f="job_title")
))
elif request.controller in ("hrm", "org"):
dashboard = DB()(
DB("STAFF",
c="hrm",
image = "graphic_staff_wide.png",
title = "Staff")(
DB("Manage Staff Data", f="staff"),
DB("Manage Teams Data", f="group"),
),
DB("OFFICES",
c="org",
image = "graphic_office.png",
title = "Offices")(
DB("Manage Offices Data", f="office"),
DB("Manage National Society Data", f="organisation",
vars=red_cross_filter
),
),
DB("CATALOGS",
c="hrm",
image="graphic_catalogue.png",
title="Catalogs")(
DB("Certificates", f="certificate"),
DB("Training Courses", f="course"),
#DB("Skills", f="skill"),
DB("Job Titles", f="job_title")
))
elif request.controller == "default" and request.function == "index":
dashboard = DB(_id="dashboard")(
DB("Staff", c="hrm", f="staff", m="search",
image = "graphic_staff.png",
title = "Staff",
text = "Add new and manage existing staff."),
DB("Volunteers", c="vol", f="volunteer", m="search",
image = "graphic_volunteers.png",
title = "Volunteers",
text = "Add new and manage existing volunteers."),
DB("Members", c="member", f="index",
image = "graphic_members.png",
title = "Members",
text = "Add new and manage existing members."),
DB("Warehouses", c="inv", f="index",
image = "graphic_warehouse.png",
title = "Warehouses",
text = "Stocks and relief items."),
DB("Assets", c="asset", f="index",
image = "graphic_assets.png",
title = "Assests",
text = "Manage office inventories and assets."),
DB("Assessments", c="survey", f="index",
image = "graphic_assessments.png",
title = "Assessments",
text = "Design, deploy & analyze surveys."),
DB("Projects", c="project", f="index",
image = "graphic_tools.png",
title = "Projects",
text = "Tracking and analysis of Projects and Activities.")
)
else:
dashboard = None
return dashboard
# -------------------------------------------------------------------------
@classmethod
def menu_personal(cls):
""" Custom Personal Menu """
auth = current.auth
s3 = current.response.s3
settings = current.deployment_settings
# Language selector
menu_lang = ML("Language", right=True)
for language in s3.l10n_languages.items():
code, name = language
menu_lang(
ML(name, translate=False, lang_code=code, lang_name=name)
)
if not auth.is_logged_in():
request = current.request
login_next = URL(args=request.args, vars=request.vars)
if request.controller == "default" and \
request.function == "user" and \
"_next" in request.get_vars:
login_next = request.get_vars["_next"]
self_registration = settings.get_security_self_registration()
menu_personal = MP()(
MP("Register", c="default", f="user",
m="register", check=self_registration),
MP("Login", c="default", f="user",
m="login", vars=dict(_next=login_next)),
MP("Lost Password", c="default", f="user",
m="retrieve_password"),
menu_lang
)
else:
s3_has_role = auth.s3_has_role
is_org_admin = lambda i: s3_has_role("ORG_ADMIN") and \
not s3_has_role("ADMIN")
menu_personal = MP()(
MP("Administration", c="admin", f="index",
check=s3_has_role("ADMIN")),
MP("Administration", c="admin", f="user",
check=is_org_admin),
MP("Profile", c="default", f="person"),
MP("Change Password", c="default", f="user",
m="change_password"),
MP("Logout", c="default", f="user",
m="logout"),
menu_lang,
)
return menu_personal
# =============================================================================
class S3OptionsMenu(default.S3OptionsMenu):
""" Custom Controller Menus """
# -------------------------------------------------------------------------
def hrm(self):
""" HRM Human Resource Management """
session = current.session
s3 = current.session.s3
ADMIN = s3.system_roles.ADMIN
if "hrm" not in s3:
current.s3db.hrm_vars()
hrm_vars = s3.hrm<|fim▁hole|> SECTORS = "Clusters" if current.deployment_settings.get_ui_label_cluster() \
else "Sectors"
manager_mode = lambda i: hrm_vars.mode is None
personal_mode = lambda i: hrm_vars.mode is not None
is_org_admin = lambda i: hrm_vars.orgs and True or \
ADMIN in s3.roles
is_super_editor = lambda i: current.auth.s3_has_role("staff_super") or \
current.auth.s3_has_role("vol_super")
staff = {"group": "staff"}
return M()(
M("Staff", c="hrm", f=("staff", "person"),
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Import", f="person", m="import",
vars=staff, p="create"),
),
M("Staff & Volunteers (Combined)",
c="hrm", f="human_resource", m="summary",
check=[manager_mode, is_super_editor]),
M("Teams", c="hrm", f="group",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Search Members", f="group_membership"),
M("Import", f="group_membership", m="import"),
),
M("National Societies", c="org",
f="organisation",
vars=red_cross_filter,
check=manager_mode)(
M("New", m="create",
vars=red_cross_filter
),
M("List All",
vars=red_cross_filter
),
M("Search", m="search",
vars=red_cross_filter
),
M("Import", m="import", p="create", check=is_org_admin)
),
M("Offices", c="org", f="office",
check=manager_mode)(
M("New", m="create"),
M("List All"),
#M("Search", m="search"),
M("Import", m="import", p="create"),
),
M("Department Catalog", c="hrm", f="department",
check=manager_mode)(
M("New", m="create"),
M("List All"),
),
M("Job Title Catalog", c="hrm", f="job_title",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Import", m="import", p="create", check=is_org_admin),
),
#M("Skill Catalog", f="skill",
#check=manager_mode)(
#M("New", m="create"),
#M("List All"),
##M("Skill Provisions", f="skill_provision"),
#),
M("Training Events", c="hrm", f="training_event",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Search Training Participants", f="training"),
M("Import Participant List", f="training", m="import"),
),
M("Reports", c="hrm", f="staff", m="report",
check=manager_mode)(
M("Staff Report", m="report"),
M("Expiring Staff Contracts Report",
vars=dict(expiring="1")),
M("Training Report", f="training", m="report2"),
),
M("Training Course Catalog", c="hrm", f="course",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Import", m="import", p="create", check=is_org_admin),
M("Course Certificates", f="course_certificate"),
),
M("Certificate Catalog", c="hrm", f="certificate",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Import", m="import", p="create", check=is_org_admin),
#M("Skill Equivalence", f="certificate_skill"),
),
M("Organization Types", c="org", f="organisation_type",
restrict=[ADMIN],
check=manager_mode)(
M("New", m="create"),
M("List All"),
),
M("Office Types", c="org", f="office_type",
restrict=[ADMIN],
check=manager_mode)(
M("New", m="create"),
M("List All"),
),
#M("Facility Types", c="org", f="facility_type",
# restrict=[ADMIN],
# check=manager_mode)(
# M("New", m="create"),
# M("List All"),
#),
M(SECTORS, f="sector", c="org", restrict=[ADMIN],
check=manager_mode)(
M("New", m="create"),
M("List All"),
),
#M("My Profile", c="hrm", f="person",
# check=personal_mode, vars=dict(mode="personal")),
# This provides the link to switch to the manager mode:
M("Human Resources", c="hrm", f="index",
check=[personal_mode, is_org_admin]),
# This provides the link to switch to the personal mode:
#M("Personal Profile", c="hrm", f="person",
# check=manager_mode, vars=dict(mode="personal"))
)
# -------------------------------------------------------------------------
def vol(self):
""" Volunteer Management """
s3 = current.session.s3
ADMIN = s3.system_roles.ADMIN
# Custom conditions for the check-hook, as lambdas in order
# to have them checked only immediately before rendering:
manager_mode = lambda i: s3.hrm.mode is None
personal_mode = lambda i: s3.hrm.mode is not None
is_org_admin = lambda i: s3.hrm.orgs and True or \
ADMIN in s3.roles
is_super_editor = lambda i: current.auth.s3_has_role("vol_super") or \
current.auth.s3_has_role("staff_super")
settings = current.deployment_settings
show_programmes = lambda i: settings.get_hrm_vol_experience() == "programme"
show_tasks = lambda i: settings.has_module("project") and \
settings.get_project_mode_task()
teams = settings.get_hrm_teams()
use_teams = lambda i: teams
check_org_dependent_field = lambda tablename, fieldname: \
settings.set_org_dependent_field(tablename, fieldname,
enable_field = False)
return M(c="vol")(
M("Volunteers", f="volunteer",
check=[manager_mode])(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Import", f="person", m="import",
vars={"group":"volunteer"}, p="create"),
),
M("Staff & Volunteers (Combined)",
c="vol", f="human_resource", m="summary",
check=[manager_mode, is_super_editor]),
M(teams, f="group",
check=[manager_mode, use_teams])(
M("New", m="create"),
M("List All"),
M("Search Members", f="group_membership"),
M("Import", f="group_membership", m="import"),
),
#M("Department Catalog", f="department",
# check=manager_mode)(
# M("New", m="create"),
# M("List All"),
#),
M("Volunteer Role Catalog", f="job_title",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Import", m="import", p="create", check=is_org_admin),
),
#M("Skill Catalog", f="skill",
# check=manager_mode)(
# M("New", m="create"),
# M("List All"),
# #M("Skill Provisions", f="skill_provision"),
#),
M("Training Events", f="training_event",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Search Training Participants", f="training"),
M("Import Participant List", f="training", m="import"),
),
M("Training Course Catalog", f="course",
check=manager_mode)(
M("New", m="create"),
M("List All"),
#M("Course Certificates", f="course_certificate"),
),
M("Certificate Catalog", f="certificate",
check=manager_mode)(
M("New", m="create"),
M("List All"),
#M("Skill Equivalence", f="certificate_skill"),
),
M("Programs", f="programme",
check=[manager_mode, show_programmes])(
M("New", m="create"),
M("List All"),
M("Import Hours", f="programme_hours", m="import"),
),
M("Awards", f="award",
check=[manager_mode, is_org_admin])(
M("New", m="create"),
M("List All"),
),
M("Volunteer Cluster Type", f="cluster_type",
check = check_org_dependent_field("vol_volunteer_cluster",
"vol_cluster_type_id"))(
M("New", m="create"),
M("List All"),
),
M("Volunteer Cluster", f="cluster",
check = check_org_dependent_field("vol_volunteer_cluster",
"vol_cluster_id"))(
M("New", m="create"),
M("List All"),
),
M("Volunteer Cluster Position", f="cluster_position",
check = check_org_dependent_field("vol_volunteer_cluster",
"vol_cluster_position_id"))(
M("New", m="create"),
M("List All"),
),
M("Reports", f="volunteer", m="report",
check=manager_mode)(
M("Volunteer Report", m="report"),
M("Hours by Role Report", f="programme_hours", m="report2",
vars=Storage(rows="job_title_id",
cols="month",
fact="sum(hours)"),
check=show_programmes),
M("Hours by Program Report", f="programme_hours", m="report2",
vars=Storage(rows="programme_id",
cols="month",
fact="sum(hours)"),
check=show_programmes),
M("Training Report", f="training", m="report2"),
),
#M("My Profile", f="person",
# check=personal_mode, vars=dict(mode="personal")),
M("My Tasks", f="task",
check=[personal_mode, show_tasks],
vars=dict(mode="personal",
mine=1)),
# This provides the link to switch to the manager mode:
M("Volunteer Management", f="index",
check=[personal_mode, is_org_admin]),
# This provides the link to switch to the personal mode:
#M("Personal Profile", f="person",
# check=manager_mode, vars=dict(mode="personal"))
)
# -------------------------------------------------------------------------
def inv(self):
""" INV / Inventory """
ADMIN = current.session.s3.system_roles.ADMIN
current.s3db.inv_recv_crud_strings()
crud_strings = current.response.s3.crud_strings
inv_recv_list = crud_strings.inv_recv.title_list
inv_recv_search = crud_strings.inv_recv.title_search
use_commit = lambda i: current.deployment_settings.get_req_use_commit()
return M()(
#M("Home", f="index"),
M("Warehouses", c="inv", f="warehouse")(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Import", m="import", p="create"),
),
M("Warehouse Stock", c="inv", f="inv_item")(
M("Search", f="inv_item", m="search"),
M("Search Shipped Items", f="track_item", m="search"),
M("Adjust Stock Levels", f="adj"),
#M("Kitting", f="kit"),
M("Import", f="inv_item", m="import", p="create"),
),
M("Reports", c="inv", f="inv_item")(
M("Warehouse Stock", f="inv_item",m="report"),
#M("Expiration Report", c="inv", f="track_item",
# m="search", vars=dict(report="exp")),
#M("Monetization Report", c="inv", f="inv_item",
# m="search", vars=dict(report="mon")),
#M("Utilization Report", c="inv", f="track_item",
# m="search", vars=dict(report="util")),
#M("Summary of Incoming Supplies", c="inv", f="track_item",
# m="search", vars=dict(report="inc")),
# M("Summary of Releases", c="inv", f="track_item",
# m="search", vars=dict(report="rel")),
),
M(inv_recv_list, c="inv", f="recv")(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
),
M("Sent Shipments", c="inv", f="send")(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Search Shipped Items", f="track_item", m="search"),
),
M("Items", c="supply", f="item")(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Report", m="report"),
M("Import", f="catalog_item", m="import", p="create"),
),
# Catalog Items moved to be next to the Item Categories
#M("Catalog Items", c="supply", f="catalog_item")(
#M("New", m="create"),
#M("List All"),
#M("Search", m="search"),
#),
#M("Brands", c="supply", f="brand",
# restrict=[ADMIN])(
# M("New", m="create"),
# M("List All"),
#),
M("Catalogs", c="supply", f="catalog")(
M("New", m="create"),
M("List All"),
#M("Search", m="search"),
),
M("Item Categories", c="supply", f="item_category",
restrict=[ADMIN])(
M("New", m="create"),
M("List All"),
),
M("Suppliers", c="inv", f="supplier")(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Import", m="import", p="create"),
),
M("Facilities", c="inv", f="facility")(
M("New", m="create", t="org_facility"),
M("List All"),
#M("Search", m="search"),
),
M("Facility Types", c="inv", f="facility_type",
restrict=[ADMIN])(
M("New", m="create"),
M("List All"),
#M("Search", m="search"),
),
M("Requests", c="req", f="req")(
M("New", m="create"),
M("List All"),
M("Requested Items", f="req_item"),
#M("Search Requested Items", f="req_item", m="search"),
),
M("Commitments", c="req", f="commit", check=use_commit)(
M("List All")
),
)
# -------------------------------------------------------------------------
def irs(self):
""" IRS Incident Reporting """
return M()(
M("Events", c="event", f="event")(
M("New", m="create"),
M("List All"),
),
M("Incident Reports", c="irs", f="ireport")(
M("New", m="create"),
M("List All"),
M("Open Incidents", vars={"open": 1}),
M("Map", m="map"),
M("Timeline", args="timeline"),
M("Report", m="report2")
),
M("Incident Categories", c="irs", f="icategory",
check=current.auth.s3_has_role(current.session.s3.system_roles.ADMIN))(
M("New", m="create"),
M("List All"),
),
M("Reports", c="irs", f="ireport", m="report")(
M("Incidents", m="report"),
),
)
# -------------------------------------------------------------------------
def org(self):
""" Organisation Management """
# Same as HRM
return self.hrm()
# -------------------------------------------------------------------------
def req(self):
""" Organisation Management """
# Same as Inventory
return self.inv()
# -------------------------------------------------------------------------
def event(self):
""" Event Management """
# Same as IRS
return self.irs()
# -------------------------------------------------------------------------
def deploy(self):
""" RDRT Alerting and Deployments """
return M()(
M("Missions",
c="deploy", f="mission", m="summary")(
M("New", m="create"),
),
M("Alerts",
c="deploy", f="alert")(
M("New", m="create"),
M("InBox",
c="deploy", f="email_inbox",
),
M("Settings",
c="deploy", f="email_channel",
),
),
M("RDRT Members",
c="deploy", f="human_resource", m="summary")(
M("Add Member", c="deploy", f="application", m="select"),
M("Import Members", c="deploy", f="person", m="import"),
),
)
# END =========================================================================<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from app import db
class Alternative(db.Model):
id = db.Column(db.Integer, primary_key=True)
experiment = db.Column(db.String(500), unique=True)
copy = db.Column(db.String(2500))
def __init__(self, id, experiment, copy):<|fim▁hole|> self.id = id
self.experiment = experiment
self.copy = copy
def __repr__(self):
return "<Alt {0} {1} {2}>".format(self.id, self.experiment, self.copy)<|fim▁end|> | |
<|file_name|>Help.cpp<|end_file_name|><|fim▁begin|>#include "stdafx.h"
#include "Help.h"
namespace LiteCppDB_Console_Commands
{
DataAccess Help::getAccess() noexcept
{
return DataAccess::None;
}
bool Help::IsCommand(LiteCppDB::StringScanner& s) noexcept
{
return s.Scan("help[[:s:]]*").length() > 0;
}
void Help::Execute(LiteCppDB::LiteEngine engine, LiteCppDB::StringScanner& s, LiteCppDB_Console::Display d, LiteCppDB_Console::InputCommand input, LiteCppDB_Console::Env env) noexcept
{
const auto full = s.Match("full");
if (!full)
{
d.WriteHelp();
d.WriteHelp("Basic Shell Commands - try `help full` for all commands");
d.WriteHelp("=======================================================");
d.WriteHelp("> open <filename>|<connectionString>", "Open/Crete a new database");
d.WriteHelp("> show collections", "List all collections inside database");
d.WriteHelp("> db.<collection>.insert <jsonDoc>", "Insert a new document into collection");
d.WriteHelp("> db.<collection>.update <jsonDoc>", "Update a document inside collection");
d.WriteHelp("> db.<collection>.delete <filter>", "Delete documents using a filter clausule (see find)");
d.WriteHelp("> db.<collection>.find <filter> [skip N][limit N]", "Show filtered documents based on index search");
d.WriteHelp("> db.<collection>.count <filter>", "Show count rows according query filter");
d.WriteHelp("> db.<collection>.ensureIndex <field> [true|{options}]", "Create a new index document field. For unique key, use true");
d.WriteHelp("> db.<collection>.indexes", "List all indexes in this collection");
d.WriteHelp("<filter> = <field> [=|>|>=|<|<=|!=|like|between] <jsonValue>", "Filter query syntax");
d.WriteHelp("<filter> = (<filter> [and|or] <filter> [and|or] ...)", "Multi queries syntax");
d.WriteHelp("Try:");
d.WriteHelp(" > db.customers.insert { _id:1, name:\"John Doe\", age: 37 }");
d.WriteHelp(" > db.customers.ensureIndex name");
d.WriteHelp(" > db.customers.find name like \"John\"");
d.WriteHelp(" > db.customers.find name like \"John\" and _id between [0, 100] limit 10");
}
else
{
d.WriteHelp("Shell commands");
d.WriteHelp("==============");
d.WriteHelp("> open <filename>|<connectionString>", "Open a new database");
d.WriteHelp("> run <filename>", "Run commands inside filename");
d.WriteHelp("> pretty on|off", "Turns on/off pretty json format");
d.WriteHelp("> timer", "Show timer before prompt");
d.WriteHelp("> ed", "Open notepad with last command to edit and execute");
d.WriteHelp("> spool on|off", "Spool all output in a spool file");
d.WriteHelp("> -- comment", "Do nothing, its just a comment");
d.WriteHelp("> /<command>/", "Support for multi line command");
d.WriteHelp("> debug on|off", "Enabled debug messages from dbengine");
d.WriteHelp("> upgrade <connectionString>", "Upgrade an old datafile (LiteDB v2) to new LiteDB v3 format.");<|fim▁hole|> d.WriteHelp("> version", "Show LiteDB version");
d.WriteHelp("> exit", "Close LiteDB shell");
d.WriteHelp();
d.WriteHelp("Collections commands");
d.WriteHelp("====================");
d.WriteHelp("> show collections", "List all collections inside database");
d.WriteHelp("> db.<collection>.insert <jsonDoc>", "Insert a new document into collection");
d.WriteHelp("> db.<collection>.update <jsonDoc>", "Update a document inside collection");
d.WriteHelp("> db.<collection>.delete <filter>", "Delete documents using a filter clausule (see find)");
d.WriteHelp("> db.<collection>.bulk <filename>", "Bulk insert a json file as documents");
d.WriteHelp("> db.<collection>.find [skip N][limit N]", "Show all documents. Can limit/skip results");
d.WriteHelp("> db.<collection>.find <filter> [skip N][limit N]", "Show filtered documents based on index search. See <filter> syntax below");
d.WriteHelp("> db.<collection>.count <filter>", "Show count rows according query filter");
d.WriteHelp("> db.<collection>.ensureIndex <field> [unique]", "Create a new index document field");
d.WriteHelp("> db.<collection>.indexes", "List all indexes in this collection");
d.WriteHelp("> db.<collection>.drop", "Drop collection and destroy all documents inside");
d.WriteHelp("> db.<collection>.dropIndex <field>", "Drop a index and make index area free to use with another index");
d.WriteHelp("> db.<collection>.rename <newCollectionName>", "Rename a collection");
d.WriteHelp("> db.<collection>.min <field>", "Returns min/first value from collection using index field");
d.WriteHelp("> db.<collection>.max <field>", "Returns max/last value from collection using index field");
d.WriteHelp("<filter> = <field> [=|>|>=|<|<=|!=|like|contains|in|between] <jsonValue>", "Filter query syntax");
d.WriteHelp("<filter> = (<filter> [and|or] <filter> [and|or] ...)", "Multi queries syntax");
d.WriteHelp("<jsonDoc> = {_id: ... , key: value, key1: value1 }", "Represent a json (extended version) for a BsonDocument. See special data types");
d.WriteHelp("Json Date", "{ field: { $date :\"2015-01-01T23:59:59Z\"} }");
d.WriteHelp("Json Guid", "{ field: { $guid :\"3a1c34b3-9f66-4d8e-975a-d545d898a4ba\"} }");
d.WriteHelp("Json Int64", "{ field: { $numberLong :\"1234556788997\"} }");
d.WriteHelp("Json Decimal", "{ field: { $numberDecimal :\"123.456789\"} }");
d.WriteHelp("Json Binary", "{ field: { $binary :\"base64 byte array\"} }");
d.WriteHelp();
d.WriteHelp("File storage commands");
d.WriteHelp("=====================");
d.WriteHelp("> fs.find", "List all files on database");
d.WriteHelp("> fs.find <fileId>", "List file info from a key. Supports * for starts with key");
d.WriteHelp("> fs.upload <fileId> <filename>", "Insert a new file inside database");
d.WriteHelp("> fs.download <fileId> <filename>", "Save a file to disk passing a file key and filename");
d.WriteHelp("> fs.update <fileId> {key:value}", "Update metadata file");
d.WriteHelp("> fs.delete <fileId>", "Remove a file inside database");
d.WriteHelp();
d.WriteHelp("Other commands");
d.WriteHelp("==============");
d.WriteHelp("> db.userversion [N]", "Get/Set user database file version");
d.WriteHelp("> db.shrink [password]", "Reduce database removing empty pages and change password (optional)");
}
}
}<|fim▁end|> | |
<|file_name|>toaiff.py<|end_file_name|><|fim▁begin|>"""Convert "arbitrary" sound files to AIFF (Apple and SGI's audio format).
Input may be compressed.
Uncompressed file type may be AIFF, WAV, VOC, 8SVX, NeXT/Sun, and others.
An exception is raised if the file is not of a recognized type.
Returned filename is either the input filename or a temporary filename;
in the latter case the caller must ensure that it is removed.
Other temporary files used are removed by the function.
"""
import os
import tempfile
import pipes
import sndhdr
__all__ = ["error", "toaiff"]
table = {}
t = pipes.Template()
t.append('sox -t au - -t aiff -r 8000 -', '--')
table['au'] = t
# XXX The following is actually sub-optimal.
# XXX The HCOM sampling rate can be 22k, 22k/2, 22k/3 or 22k/4.
# XXX We must force the output sampling rate else the SGI won't play
# XXX files sampled at 5.5k or 7.333k; however this means that files
# XXX sampled at 11k are unnecessarily expanded.
# XXX Similar comments apply to some other file types.
t = pipes.Template()
t.append('sox -t hcom - -t aiff -r 22050 -', '--')
table['hcom'] = t
t = pipes.Template()
t.append('sox -t voc - -t aiff -r 11025 -', '--')
table['voc'] = t
t = pipes.Template()
t.append('sox -t wav - -t aiff -', '--')
table['wav'] = t
t = pipes.Template()
t.append('sox -t 8svx - -t aiff -r 16000 -', '--')
table['8svx'] = t
t = pipes.Template()
t.append('sox -t sndt - -t aiff -r 16000 -', '--')
table['sndt'] = t
t = pipes.Template()
t.append('sox -t sndr - -t aiff -r 16000 -', '--')
table['sndr'] = t
uncompress = pipes.Template()
uncompress.append('uncompress', '--')
class error(Exception):
pass
def toaiff(filename):
temps = []
ret = None
try:
ret = _toaiff(filename, temps)
finally:
for temp in temps[:]:
if temp != ret:
try:
os.unlink(temp)
except os.error:
pass
temps.remove(temp)
return ret
def _toaiff(filename, temps):<|fim▁hole|> if filename[-2:] == '.Z':
fname = tempfile.mktemp()
temps.append(fname)
sts = uncompress.copy(filename, fname)
if sts:
raise error, filename + ': uncompress failed'
else:
fname = filename
try:
ftype = sndhdr.whathdr(fname)
if ftype:
ftype = ftype[0] # All we're interested in
except IOError, msg:
if type(msg) == type(()) and len(msg) == 2 and \
type(msg[0]) == type(0) and type(msg[1]) == type(''):
msg = msg[1]
if type(msg) != type(''):
msg = `msg`
raise error, filename + ': ' + msg
if ftype == 'aiff':
return fname
if ftype is None or not table.has_key(ftype):
raise error, \
filename + ': unsupported audio file type ' + `ftype`
temp = tempfile.mktemp()
temps.append(temp)
sts = table[ftype].copy(fname, temp)
if sts:
raise error, filename + ': conversion to aiff failed'
return temp<|fim▁end|> | |
<|file_name|>network.py<|end_file_name|><|fim▁begin|>import requests
from PIL import Image
from course import *
<|fim▁hole|>from io import BytesIO
url_course = 'https://wx.idsbllp.cn/redapi2/api/kebiao'
url_stu_info = 'https://we.cqu.pt/api/others/student.php?key='
url_photo = 'https://we.cqu.pt/api/others/photos.php?id='
data = {
'stuNum': -1,
'week': -1
}
header = {
'API_APP': 'android',
'Content-Type': 'application/x-www-form-urlencoded'
}
def get_courses(stu_num, week=0, offset=0):
data['stuNum'] = stu_num
data['week'] = week
response = requests.post(url_course, data=data).json()
now_week = get_week(response['nowWeek'], offset)
courses = response['data']
courses = filter(lambda x: now_week in x['week'] and is_on_time(x['hash_day'], offset), courses)
this_week_course = list(map(lambda x: Course(x['course'], x['teacher'], x['classroom'], x['lesson']), courses))
return ''.join(i.get_course() for i in this_week_course)
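# Example (hypothetical student number; week=0 asks the backend for the
# current week, and offset shifts the day being checked):
#   print(get_courses('2016001234'))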
def get_name_by_stu_num(stu_num):
resp = requests.get(url_stu_info + str(stu_num)).json()
return resp['data']['rows'][0]['xm'] if resp['data']['total'] == 1 else 'You entered the wrong student number'
def get_stu_infos_by_info(info):
return requests.get(url_stu_info + info).json()['data']['rows']
def get_photo(stu_num_photo):
photo_url = requests.get(url_photo + str(stu_num_photo)).json()['data']
photo = requests.get(photo_url).content
image = Image.open(BytesIO(photo))
bio = BytesIO()  # start from an empty buffer; PIL writes the re-encoded JPEG here
bio.name = 'image.jpeg'
image.save(bio, 'JPEG')
bio.seek(0)
return bio<|fim▁end|> | from utils import is_on_time, get_week
|
<|file_name|>block_volume_entry.go<|end_file_name|><|fim▁begin|>//
// Copyright (c) 2017 The heketi Authors
//
// This file is licensed to you under your choice of the GNU Lesser
// General Public License, version 3 or any later version (LGPLv3 or
// later), or the GNU General Public License, version 2 (GPLv2), in all
// cases as published by the Free Software Foundation.
//
package glusterfs
import (
"bytes"
"encoding/gob"
"fmt"
"github.com/boltdb/bolt"
"github.com/heketi/heketi/executors"
wdb "github.com/heketi/heketi/pkg/db"
"github.com/heketi/heketi/pkg/glusterfs/api"
"github.com/heketi/heketi/pkg/idgen"
"github.com/heketi/heketi/pkg/sortedstrings"
"github.com/lpabon/godbc"
)
type BlockVolumeEntry struct {
Info api.BlockVolumeInfo
Pending PendingItem
}
func BlockVolumeList(tx *bolt.Tx) ([]string, error) {
list := EntryKeys(tx, BOLTDB_BUCKET_BLOCKVOLUME)
if list == nil {
return nil, ErrAccessList
}
return list, nil
}
func NewVolumeEntryForBlockHosting(clusters []string) (*VolumeEntry, error) {
var msg api.VolumeCreateRequest
msg.Clusters = clusters
msg.Durability.Type = api.DurabilityReplicate
msg.Size = BlockHostingVolumeSize
msg.Durability.Replicate.Replica = 3
msg.Block = true
vol := NewVolumeEntryFromRequest(&msg)
if !CreateBlockHostingVolumes {
return nil, fmt.Errorf("Block Hosting Volume Creation is " +
"disabled. Create a Block hosting volume and try " +
"again.")
}
if uint64(msg.Size)*GB < vol.Durability.MinVolumeSize() {
return nil, fmt.Errorf("Requested volume size (%v GB) is "+
"smaller than the minimum supported volume size (%v)",
msg.Size, vol.Durability.MinVolumeSize())
}
return vol, nil
}
func NewBlockVolumeEntry() *BlockVolumeEntry {
entry := &BlockVolumeEntry{}
return entry
}
func NewBlockVolumeEntryFromRequest(req *api.BlockVolumeCreateRequest) *BlockVolumeEntry {
godbc.Require(req != nil)
vol := NewBlockVolumeEntry()
vol.Info.Id = idgen.GenUUID()
vol.Info.Size = req.Size
vol.Info.Auth = req.Auth<|fim▁hole|> vol.Info.Name = req.Name
}
// If Clusters is zero, then it will be assigned during volume creation
vol.Info.Clusters = req.Clusters
vol.Info.Hacount = req.Hacount
return vol
}
func NewBlockVolumeEntryFromId(tx *bolt.Tx, id string) (*BlockVolumeEntry, error) {
godbc.Require(tx != nil)
entry := NewBlockVolumeEntry()
err := EntryLoad(tx, entry, id)
if err != nil {
return nil, err
}
return entry, nil
}
func (v *BlockVolumeEntry) BucketName() string {
return BOLTDB_BUCKET_BLOCKVOLUME
}
func (v *BlockVolumeEntry) Visible() bool {
return v.Pending.Id == ""
}
func (v *BlockVolumeEntry) Save(tx *bolt.Tx) error {
godbc.Require(tx != nil)
godbc.Require(len(v.Info.Id) > 0)
return EntrySave(tx, v, v.Info.Id)
}
func (v *BlockVolumeEntry) Delete(tx *bolt.Tx) error {
return EntryDelete(tx, v, v.Info.Id)
}
func (v *BlockVolumeEntry) NewInfoResponse(tx *bolt.Tx) (*api.BlockVolumeInfoResponse, error) {
godbc.Require(tx != nil)
info := api.NewBlockVolumeInfoResponse()
info.Id = v.Info.Id
info.Cluster = v.Info.Cluster
info.BlockVolume = v.Info.BlockVolume
info.Size = v.Info.Size
info.Name = v.Info.Name
info.Hacount = v.Info.Hacount
info.BlockHostingVolume = v.Info.BlockHostingVolume
return info, nil
}
func (v *BlockVolumeEntry) Marshal() ([]byte, error) {
var buffer bytes.Buffer
enc := gob.NewEncoder(&buffer)
err := enc.Encode(*v)
return buffer.Bytes(), err
}
func (v *BlockVolumeEntry) Unmarshal(buffer []byte) error {
dec := gob.NewDecoder(bytes.NewReader(buffer))
err := dec.Decode(v)
if err != nil {
return err
}
return nil
}
func (v *BlockVolumeEntry) eligibleClustersAndVolumes(db wdb.RODB) (
possibleClusters []string, volumes []*VolumeEntry, e error) {
if len(v.Info.Clusters) == 0 {
err := db.View(func(tx *bolt.Tx) error {
var err error
possibleClusters, err = ClusterList(tx)
return err
})
if err != nil {
e = err
return
}
} else {
possibleClusters = v.Info.Clusters
}
// find clusters that support block volumes
cr := clusterReq{allowCreate: false, allowBlock: true}
possibleClusters, e = eligibleClusters(db, cr, possibleClusters)
if e != nil {
return
}
logger.Debug("Using the following clusters: %+v", possibleClusters)
var possibleVolumes []string
for _, clusterId := range possibleClusters {
err := db.View(func(tx *bolt.Tx) error {
c, err := NewClusterEntryFromId(tx, clusterId)
if err != nil {
return err
}
for _, vol := range c.Info.Volumes {
volEntry, err := NewVolumeEntryFromId(tx, vol)
if err != nil {
return err
}
if volEntry.Info.Block && volEntry.Pending.Id == "" {
possibleVolumes = append(possibleVolumes, vol)
}
}
return err
})
if err != nil {
e = err
return
}
}
logger.Debug("Using the following possible block hosting volumes: %+v", possibleVolumes)
for _, vol := range possibleVolumes {
err := db.View(func(tx *bolt.Tx) error {
volEntry, err := NewVolumeEntryFromId(tx, vol)
if err != nil {
return err
}
if ok, err := canHostBlockVolume(tx, v, volEntry); ok {
volumes = append(volumes, volEntry)
} else if err != nil {
return err
}
return nil
})
if err != nil {
e = err
return
}
}
if len(volumes) == 0 {
// now filter out any clusters that can't support an additional BHV
possibleClusters, e = eligibleClusters(
db, clusterReq{allowCreate: true, allowBlock: true},
possibleClusters)
}
return
}
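// Selection flow summary (illustrative, not from the original source):
// 1. start from v.Info.Clusters, or every cluster when none are given;
// 2. keep clusters that allow block volumes;
// 3. collect their non-pending, block-enabled file volumes;
// 4. keep volumes that canHostBlockVolume accepts;
// 5. if none qualify, re-filter clusters for ones where a new block
//    hosting volume may be created.
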
func (v *BlockVolumeEntry) Create(db wdb.DB,
executor executors.Executor) (e error) {
return RunOperation(
NewBlockVolumeCreateOperation(v, db),
executor)
}
func (v *BlockVolumeEntry) saveNewEntry(db wdb.DB) error {
return db.Update(func(tx *bolt.Tx) error {
err := v.Save(tx)
if err != nil {
return err
}
cluster, err := NewClusterEntryFromId(tx, v.Info.Cluster)
if err != nil {
return err
}
cluster.BlockVolumeAdd(v.Info.Id)
err = cluster.Save(tx)
if err != nil {
return err
}
volume, err := NewVolumeEntryFromId(tx, v.Info.BlockHostingVolume)
if err != nil {
return err
}
if err := volume.ModifyFreeSize(-v.Info.Size); err != nil {
return err
}
logger.Debug("Reduced free size on volume %v by %v",
volume.Info.Id, v.Info.Size)
volume.BlockVolumeAdd(v.Info.Id)
err = volume.Save(tx)
if err != nil {
return err
}
return err
})
}
func (v *BlockVolumeEntry) blockHostingVolumeName(db wdb.RODB) (name string, e error) {
e = db.View(func(tx *bolt.Tx) error {
volume, err := NewVolumeEntryFromId(tx, v.Info.BlockHostingVolume)
if err != nil {
logger.LogError("Unable to load block hosting volume: %v", err)
return err
}
name = volume.Info.Name
return nil
})
return
}
func (v *BlockVolumeEntry) deleteBlockVolumeExec(db wdb.RODB,
hvname string,
executor executors.Executor) error {
executorhost, err := GetVerifiedManageHostname(db, executor, v.Info.Cluster)
if err != nil {
return err
}
logger.Debug("Using executor host [%v]", executorhost)
return v.destroyFromHost(executor, hvname, executorhost)
}
// destroyFromHost removes the block volume using the provided
// executor, block hosting volume name, and host.
func (v *BlockVolumeEntry) destroyFromHost(
executor executors.Executor, hvname, h string) error {
err := executor.BlockVolumeDestroy(h, hvname, v.Info.Name)
if _, ok := err.(*executors.VolumeDoesNotExistErr); ok {
logger.Warning(
"Block volume %v (%v) does not exist: assuming already deleted",
v.Info.Id, v.Info.Name)
} else if err != nil {
logger.LogError("Unable to delete volume: %v", err)
return err
}
return nil
}
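// Example of the idempotent delete behavior (sketch): if the block volume
// was already removed on the storage side, BlockVolumeDestroy returns
// *executors.VolumeDoesNotExistErr and destroyFromHost treats that as
// success instead of failing the whole operation.
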
func (v *BlockVolumeEntry) removeComponents(db wdb.DB, keepSize bool) error {
return db.Update(func(tx *bolt.Tx) error {
// Remove volume from cluster
cluster, err := NewClusterEntryFromId(tx, v.Info.Cluster)
if err != nil {
logger.Err(err)
// Do not return here.. keep going
}
cluster.BlockVolumeDelete(v.Info.Id)
err = cluster.Save(tx)
if err != nil {
logger.Err(err)
// Do not return here.. keep going
}
blockHostingVolume, err := NewVolumeEntryFromId(tx, v.Info.BlockHostingVolume)
if err != nil {
logger.Err(err)
// Do not return here.. keep going
}
blockHostingVolume.BlockVolumeDelete(v.Info.Id)
if !keepSize {
if err := blockHostingVolume.ModifyFreeSize(v.Info.Size); err != nil {
return err
}
}
err = blockHostingVolume.Save(tx)
if err != nil {
logger.Err(err)
// Do not return here.. keep going
}
if err := v.Delete(tx); err != nil {
logger.Err(err)
}
return nil
})
}
func (v *BlockVolumeEntry) Destroy(db wdb.DB, executor executors.Executor) error {
logger.Info("Destroying volume %v", v.Info.Id)
return RunOperation(
NewBlockVolumeDeleteOperation(v, db),
executor)
}
// canHostBlockVolume returns true if the existing volume entry object
// can host the incoming block volume. It returns false (and nil error) if
// the volume is incompatible. It returns false, and an error if the
// database operation fails.
func canHostBlockVolume(tx *bolt.Tx, bv *BlockVolumeEntry, vol *VolumeEntry) (bool, error) {
if vol.Info.BlockInfo.Restriction != api.Unrestricted {
logger.Warning("Block hosting volume %v usage is restricted: %v",
vol.Info.Id, vol.Info.BlockInfo.Restriction)
return false, nil
}
if vol.Info.BlockInfo.FreeSize < bv.Info.Size {
logger.Warning("Free size %v is less than the requested block volume size %v",
vol.Info.BlockInfo.FreeSize, bv.Info.Size)
return false, nil
}
for _, blockvol := range vol.Info.BlockInfo.BlockVolumes {
existingbv, err := NewBlockVolumeEntryFromId(tx, blockvol)
if err != nil {
return false, err
}
if bv.Info.Name == existingbv.Info.Name {
logger.Warning("Name %v already in use in file volume %v",
bv.Info.Name, vol.Info.Name)
return false, nil
}
}
return true, nil
}
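// Usage sketch (hypothetical, for illustration only):
//
//  err := db.View(func(tx *bolt.Tx) error {
//      ok, err := canHostBlockVolume(tx, bv, vol)
//      if err != nil {
//          return err
//      }
//      if ok {
//          // vol has enough free block space and no name clash
//      }
//      return nil
//  })
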
func (v *BlockVolumeEntry) updateHosts(hosts []string) {
v.Info.BlockVolume.Hosts = hosts
}
// hosts returns a node-to-host mapping for all nodes suitable
// for running commands related to this block volume
func (v *BlockVolumeEntry) hosts(db wdb.RODB) (nodeHosts, error) {
var hosts nodeHosts
err := db.View(func(tx *bolt.Tx) error {
cluster, err := NewClusterEntryFromId(tx, v.Info.Cluster)
if err != nil {
return err
}
hosts, err = cluster.hosts(wdb.WrapTx(tx))
return err
})
return hosts, err
}
// hasPendingBlockHostingVolume returns true if the db contains pending
// block hosting volumes.
func hasPendingBlockHostingVolume(tx *bolt.Tx) (bool, error) {
pmap, err := MapPendingVolumes(tx)
if err != nil {
return false, err
}
// filter out any volumes that are not marked for block
for volId, popId := range pmap {
vol, err := NewVolumeEntryFromId(tx, volId)
if err != nil {
return false, err
}
if !vol.Info.Block {
// drop volumes that are not BHVs
delete(pmap, volId)
continue
}
pop, err := NewPendingOperationEntryFromId(tx, popId)
if err != nil {
return false, err
}
if pop.Status != NewOperation {
// drop pending operations that are not being worked on
// e.g. stale pending ops
delete(pmap, volId)
}
}
return (len(pmap) != 0), nil
}
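// Usage sketch (hypothetical): callers typically gate new block-volume
// work on this check inside a read transaction:
//
//  err := db.View(func(tx *bolt.Tx) error {
//      busy, err := hasPendingBlockHostingVolume(tx)
//      // retry or queue the request when busy is true
//      return err
//  })
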
// consistencyCheck ... verifies that a blockVolumeEntry is consistent with rest of the database.
// It is a method on blockVolumeEntry and needs rest of the database as its input.
func (v *BlockVolumeEntry) consistencyCheck(db Db) (response DbEntryCheckResponse) {
// No consistency check required for following attributes
// Id
// Name
// Size
// HaCount
// Auth
// PendingId
if v.Pending.Id != "" {
response.Pending = true
if _, found := db.PendingOperations[v.Pending.Id]; !found {
response.Inconsistencies = append(response.Inconsistencies, fmt.Sprintf("BlockVolume %v marked pending but no pending op %v", v.Info.Id, v.Pending.Id))
}
// TODO: Validate back the pending operations' relationship to the blockVolume
// This is skipped because some of it is handled in auto cleanup code.
}
// Cluster
if clusterEntry, found := db.Clusters[v.Info.Cluster]; !found {
response.Inconsistencies = append(response.Inconsistencies, fmt.Sprintf("BlockVolume %v unknown cluster %v", v.Info.Id, v.Info.Cluster))
} else {
if !sortedstrings.Has(clusterEntry.Info.BlockVolumes, v.Info.Id) {
response.Inconsistencies = append(response.Inconsistencies, fmt.Sprintf("BlockVolume %v no link back to blockVolume from cluster %v", v.Info.Id, v.Info.Cluster))
}
// TODO: Check if BlockVolume Hosts belong to the cluster.
}
// Volume
if volumeEntry, found := db.Volumes[v.Info.BlockHostingVolume]; !found {
response.Inconsistencies = append(response.Inconsistencies, fmt.Sprintf("BlockVolume %v unknown volume %v", v.Info.Id, v.Info.BlockHostingVolume))
} else {
if !sortedstrings.Has(volumeEntry.Info.BlockInfo.BlockVolumes, v.Info.Id) {
response.Inconsistencies = append(response.Inconsistencies, fmt.Sprintf("BlockVolume %v no link back to blockVolume from volume %v", v.Info.Id, v.Info.BlockHostingVolume))
}
// TODO: Check if BlockVolume Hosts belong to the volume.
}
return
}<|fim▁end|> |
if req.Name == "" {
vol.Info.Name = "blockvol_" + vol.Info.Id
} else { |
<|file_name|>string.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Daniel P. Clark & array_tool Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
/// A grapheme iterator that produces the bytes for each grapheme.
#[derive(Debug)]
pub struct GraphemeBytesIter<'a> {
source: &'a str,
offset: usize,
grapheme_count: usize,
}
impl<'a> GraphemeBytesIter<'a> {
/// Creates a new grapheme iterator from a string source.
pub fn new(source: &'a str) -> GraphemeBytesIter<'a> {
GraphemeBytesIter {
source: source,
offset: 0,
grapheme_count: 0,
}
}
}
impl<'a> Iterator for GraphemeBytesIter<'a> {
type Item = &'a [u8];
fn next(&mut self) -> Option<&'a [u8]> {
let mut result: Option<&[u8]> = None;
let mut idx = self.offset;
for _ in self.offset..self.source.len() {
idx += 1;
if self.offset < self.source.len() {
if self.source.is_char_boundary(idx) {
let slice: &[u8] = self.source[self.offset..idx].as_bytes();
self.grapheme_count += 1;
self.offset = idx;
result = Some(slice);
break
}
}
}
result
}
}
impl<'a> ExactSizeIterator for GraphemeBytesIter<'a> {
fn len(&self) -> usize {
self.source[self.offset..].chars().count()
}
}
/// ToGraphemeBytesIter - create an iterator to return bytes for each grapheme in a string.
pub trait ToGraphemeBytesIter<'a> {
/// Returns a GraphemeBytesIter which you may iterate over.
///
/// # Example
/// ```
/// use array_tool::string::ToGraphemeBytesIter;
///
/// let string = "a s—d féZ";
/// let mut graphemes = string.grapheme_bytes_iter();
/// graphemes.skip(3).next();
/// ```
///
/// # Output
/// ```text
/// [226, 128, 148]
/// ```
fn grapheme_bytes_iter(&'a self) -> GraphemeBytesIter<'a>;
}
impl<'a> ToGraphemeBytesIter<'a> for str {
fn grapheme_bytes_iter(&'a self) -> GraphemeBytesIter<'a> {
GraphemeBytesIter::new(&self)
}
}
/// Squeeze - squeezes duplicate characters down to one each
pub trait Squeeze {
/// # Example
/// ```
/// use array_tool::string::Squeeze;
///
/// "yellow moon".squeeze("");
/// ```
///
/// # Output
/// ```text
/// "yelow mon"
/// ```
fn squeeze(&self, targets: &'static str) -> String;
}
impl Squeeze for str {
fn squeeze(&self, targets: &'static str) -> String {
let mut output = Vec::<u8>::with_capacity(self.len());
let everything: bool = targets.is_empty();
let chars = targets.grapheme_bytes_iter().collect::<Vec<&[u8]>>();
let mut last: &[u8] = &[0];
for character in self.grapheme_bytes_iter() {
if last != character {
output.extend_from_slice(character);
} else if !(everything || chars.contains(&character)) {
output.extend_from_slice(character);
}
last = character;
}
String::from_utf8(output).expect("squeeze failed to render String!")
}
}
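// Additional example (a sketch, not from the original docs): when targets
// are given, only runs of those characters collapse:
//
// use array_tool::string::Squeeze;
// assert_eq!("aaabbbccc".squeeze("b"), "aaabccc");
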
/// Justify - expand line to given width.
pub trait Justify {
/// # Example
/// ```
/// use array_tool::string::Justify;
///
/// "asd asdf asd".justify_line(14);
/// ```
///
/// # Output
/// ```text
/// "asd asdf asd"
/// ```
fn justify_line(&self, width: usize) -> String;
}
impl Justify for str {
fn justify_line(&self, width: usize) -> String {
if self.is_empty() { return self.to_string() };
let trimmed = self.trim();
let len = trimmed.chars().count();
if len >= width { return self.to_string(); };
let difference = width - len;<|fim▁hole|> let iter = trimmed.split_whitespace();
let spaces = iter.count() - 1;
let mut iter = trimmed.split_whitespace().peekable();
if spaces == 0 { return self.to_string(); }
let mut obj = String::with_capacity(trimmed.len() + spaces);
let div = difference / spaces;
let mut remainder = difference % spaces;
while let Some(x) = iter.next() {
obj.push_str( x );
let val = if remainder > 0 {
remainder = remainder - 1;
div + 1
} else { div };
for _ in 0..val+1 {
if let Some(_) = iter.peek() { // Don't add spaces if last word
obj.push_str( " " );
}
}
}
obj
}
}
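// Worked example (sketch): "asd asdf asd" is 12 chars; justified to width
// 16, the 4 extra spaces split evenly over the 2 gaps:
//
// use array_tool::string::Justify;
// assert_eq!("asd asdf asd".justify_line(16), "asd   asdf   asd");
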
/// Substitute string character for each index given.
pub trait SubstMarks {
/// # Example
/// ```
/// use array_tool::string::SubstMarks;
///
/// "asdf asdf asdf".subst_marks(vec![0,5,8], "Z");
/// ```
///
/// # Output
/// ```text
/// "Zsdf ZsdZ asdf"
/// ```
fn subst_marks(&self, marks: Vec<usize>, chr: &'static str) -> String;
}
impl SubstMarks for str {
fn subst_marks(&self, marks: Vec<usize>, chr: &'static str) -> String {
let mut output = Vec::<u8>::with_capacity(self.len());
let mut count = 0;
let mut last = 0;
for i in 0..self.len() {
let idx = i + 1;
if self.is_char_boundary(idx) {
if marks.contains(&count) {
count += 1;
last = idx;
output.extend_from_slice(chr.as_bytes());
continue
}
let slice: &[u8] = self[last..idx].as_bytes();
output.extend_from_slice(slice);
count += 1;
last = idx
}
}
String::from_utf8(output).expect("subst_marks failed to render String!")
}
}
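// Additional example (sketch): marks index characters, not bytes, so
// vec![1, 3] replaces the 2nd and 4th characters:
//
// use array_tool::string::SubstMarks;
// assert_eq!("abcdef".subst_marks(vec![1, 3], "-"), "a-c-ef");
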
/// After whitespace
pub trait AfterWhitespace {
/// Given offset method will seek from there to end of string to find the first
/// non white space. Resulting value is counted from offset.
///
/// # Example
/// ```
/// use array_tool::string::AfterWhitespace;
///
/// assert_eq!(
/// "asdf asdf asdf".seek_end_of_whitespace(6),
/// Some(9)
/// );
/// ```
fn seek_end_of_whitespace(&self, offset: usize) -> Option<usize>;
}
impl AfterWhitespace for str {
fn seek_end_of_whitespace(&self, offset: usize) -> Option<usize> {
if self.len() < offset { return None; };
let mut seeker = self[offset..self.len()].chars();
let mut val = None;
let mut indx = 0;
while let Some(x) = seeker.next() {
if x.ne(&" ".chars().next().unwrap()) {
val = Some(indx);
break;
}
indx += 1;
}
val
}
}
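// Additional example (sketch): starting inside a run of spaces, the
// returned index is counted from the given offset:
//
// use array_tool::string::AfterWhitespace;
// assert_eq!("ab   cd".seek_end_of_whitespace(2), Some(3));
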
/// Word wrapping
pub trait WordWrap {
/// White space is treated as valid content and new lines will only be swapped in for
/// the last white space character at the end of the given width. White space may reach beyond
/// the width you've provided. You will need to trim end of lines in your own output (e.g.
/// splitting string at each new line and printing the line with trim_right). Or just trust
/// that lines that are beyond the width are just white space and only print the width -
/// ignoring tailing white space.
///
/// # Example
/// ```
/// use array_tool::string::WordWrap;
///
/// "asd asdf asd".word_wrap(8);
/// ```
///
/// # Output
/// ```text
/// "asd asdf\nasd"
/// ```
fn word_wrap(&self, width: usize) -> String;
}
// No need to worry about character encoding since we're only checking for the
// space and new line characters.
impl WordWrap for &'static str {
fn word_wrap(&self, width: usize) -> String {
let mut markers = vec![];
fn wordwrap(t: &'static str, chunk: usize, offset: usize, mrkrs: &mut Vec<usize>) -> String {
match t[offset..*vec![offset+chunk,t.len()].iter().min().unwrap()].rfind("\n") {
None => {
match t[offset..*vec![offset+chunk,t.len()].iter().min().unwrap()].rfind(" ") {
Some(x) => {
let mut eows = x; // end of white space
if offset+chunk < t.len() { // check if white space continues
match t.seek_end_of_whitespace(offset+x) {
Some(a) => {
if a.ne(&0) {
eows = x+a-1;
}
},
None => {},
}
}
if offset+chunk < t.len() { // safe to seek ahead by 1 or not end of string
if !["\n".chars().next().unwrap(), " ".chars().next().unwrap()].contains(
&t[offset+eows+1..offset+eows+2].chars().next().unwrap()
) {
mrkrs.push(offset+eows)
}
};
wordwrap(t, chunk, offset+eows+1, mrkrs)
},
None => {
if offset+chunk < t.len() { // String may continue
wordwrap(t, chunk, offset+1, mrkrs) // Recurse + 1 until next space
} else {
use string::SubstMarks;
return t.subst_marks(mrkrs.to_vec(), "\n")
}
},
}
},
Some(x) => {
wordwrap(t, chunk, offset+x+1, mrkrs)
},
}
};
wordwrap(self, width+1, 0, &mut markers)
}
}<|fim▁end|> | |
<|file_name|>chordangle.rs<|end_file_name|><|fim▁begin|>/*
Copyright 2015 Google Inc. All rights reserved.
Copyright 2017 Jihyun Yu. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
use std;
use std::f64::consts::PI;
use crate::consts::*;
use crate::s1::angle::*;
use float_extras::f64::nextafter;
/// ChordAngle represents the angle subtended by a chord (i.e., the straight
/// line segment connecting two points on the sphere). Its representation
/// makes it very efficient for computing and comparing distances, but unlike
/// Angle it is only capable of representing angles between 0 and π radians.
/// Generally, ChordAngle should only be used in loops where many angles need
/// to be calculated and compared. Otherwise it is simpler to use Angle.
///
/// ChordAngle loses some accuracy as the angle approaches π radians.
/// Specifically, the representation of (π - x) radians has an error of about
/// (1e-15 / x), with a maximum error of about 2e-8 radians (about 13cm on the
/// Earth's surface). For comparison, for angles up to π/2 radians (10000km)
/// the worst-case representation error is about 2e-16 radians (1 nanometer),
/// which is about the same as Angle.
///
/// ChordAngles are represented by the squared chord length, which can
/// range from 0 to 4. Positive infinity represents an infinite squared length.
#[derive(Clone, Copy, PartialEq, PartialOrd, Debug, Default)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct ChordAngle(pub f64);
/// NEGATIVE represents a chord angle smaller than the zero angle.
/// The only valid operations on a NegativeChordAngle are comparisons and
/// Angle conversions.
pub const NEGATIVE: ChordAngle = ChordAngle(-1f64);
/// RIGHT represents a chord angle of 90 degrees (a "right angle").
pub const RIGHT: ChordAngle = ChordAngle(2f64);
/// STRAIGHT represents a chord angle of 180 degrees (a "straight angle").
/// This is the maximum finite chord angle.
pub const STRAIGHT: ChordAngle = ChordAngle(4f64);
// MAXLENGTH2 is the square of the maximum length allowed in a ChordAngle.
pub const MAXLENGTH2: f64 = 4.0;
impl<'a> From<&'a Angle> for ChordAngle {
/// returns a ChordAngle from the given Angle.
fn from(a: &'a Angle) -> Self {
if a.rad() < 0. {
NEGATIVE
} else if a.is_infinite() {
ChordAngle::inf()
} else {
let l = 2. * (0.5 * a.rad().min(PI)).sin();
ChordAngle(l * l)
}
}
}
impl From<Angle> for ChordAngle {
/// returns a ChordAngle from the given Angle.
fn from(a: Angle) -> Self {
ChordAngle::from(&a)
}
}
impl<'a> From<&'a Deg> for ChordAngle {
fn from(a: &'a Deg) -> Self {
Angle::from(a).into()
}
}
impl From<Deg> for ChordAngle {
fn from(a: Deg) -> Self {
Angle::from(&a).into()
}
}
impl<'a> From<&'a ChordAngle> for Angle {
/// converts this ChordAngle to an Angle.
fn from(ca: &'a ChordAngle) -> Self {
if ca.0 < 0. {
Rad(-1.).into()
} else if ca.is_infinite() {
Angle::inf()
} else {
Rad(2. * (0.5 * ca.0.sqrt()).asin()).into()
}
}
}
impl From<ChordAngle> for Angle {
/// converts this ChordAngle to an Angle.
fn from(ca: ChordAngle) -> Self {
Angle::from(&ca)
}
}
impl<'a, 'b> std::ops::Add<&'a ChordAngle> for &'b ChordAngle {
type Output = ChordAngle;
/// add adds the other ChordAngle to this one and returns the resulting value.
/// This method assumes the ChordAngles are not special.
fn add(self, other: &'a ChordAngle) -> Self::Output {
// Note that this method (and Sub) is much more efficient than converting
// the ChordAngle to an Angle and adding those and converting back. It
// requires only one square root plus a few additions and multiplications.
if other.0 == 0.0 {
// Optimization for the common case where b is an error tolerance
// parameter that happens to be set to zero.
*self
} else if self.0 + other.0 >= 4. {
// Clamp the angle sum to at most 180 degrees.
STRAIGHT
} else {
// Let a and b be the (non-squared) chord lengths, and let c = a+b.
// Let A, B, and C be the corresponding half-angles (a = 2*sin(A), etc).
// Then the formula below can be derived from c = 2 * sin(A+B) and the
// relationships sin(A+B) = sin(A)*cos(B) + sin(B)*cos(A)
// cos(X) = sqrt(1 - sin^2(X))
let x = self.0 * (1. - 0.25 * other.0);
let y = other.0 * (1. - 0.25 * self.0);
ChordAngle(4f64.min(x + y + 2f64 * (x * y).sqrt()))
}
}
}
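// Worked check of the formula above (sketch): adding two right angles,
// self.0 = other.0 = 2, gives x = y = 2 * (1 - 0.5) = 1 and
// x + y + 2*sqrt(x*y) = 4, i.e. STRAIGHT, as expected for 90° + 90°.
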
impl std::ops::Add<ChordAngle> for ChordAngle {
type Output = ChordAngle;
fn add(self, other: ChordAngle) -> Self::Output {
&self + &other
}
}
impl std::ops::Sub<ChordAngle> for ChordAngle {
type Output = ChordAngle;
/// sub subtracts the other ChordAngle from this one and returns the resulting<|fim▁hole|> } else if self.0 <= other.0 {
ChordAngle(0f64)
} else {
let x = self.0 * (1. - 0.25 * other.0);
let y = other.0 * (1. - 0.25 * self.0);
ChordAngle(0f64.max(x + y - 2. * (x * y).sqrt()))
}
}
}
impl ChordAngle {
/// inf returns a chord angle larger than any finite chord angle.
/// The only valid operations on an InfChordAngle are comparisons and Angle conversions.
pub fn inf() -> Self {
ChordAngle(std::f64::INFINITY)
}
/// is_infinite reports whether this ChordAngle is infinite.
pub fn is_infinite(&self) -> bool {
self.0.is_infinite()
}
/// from_squared_length returns a ChordAngle from the squared chord length.
/// Note that the argument is automatically clamped to a maximum of 4.0 to
/// handle possible roundoff errors. The argument must be non-negative.
pub fn from_squared_length(length2: f64) -> Self {
if length2 > 4. {
STRAIGHT
} else {
ChordAngle(length2)
}
}
/// expanded returns a new ChordAngle that has been adjusted by the given error
/// bound (which can be positive or negative). Error should be the value
/// returned by either MaxPointError or MaxAngleError. For example:
/// let a = ChordAngle::from_points(x, y)
/// let a1 = a.expanded(a.max_point_error())
pub fn expanded(&self, e: f64) -> Self {
// If the angle is special, don't change it. Otherwise clamp it to the valid range.
if self.is_special() {
*self
} else {
ChordAngle(0f64.max(4f64.min(self.0 + e)))
}
}
/// is_special reports whether this ChordAngle is one of the special cases.
pub fn is_special(&self) -> bool {
self.0 < 0. || self.0.is_infinite()
}
/// is_valid reports whether this ChordAngle is valid or not.
pub fn is_valid(&self) -> bool {
self.0 >= 0. && self.0 <= 4. || self.is_special()
}
pub fn max(self, other: Self) -> Self {
if self.0 < other.0 {
other
} else {
self
}
}
/// max_point_error returns the maximum error size for a ChordAngle constructed
/// from 2 Points x and y, assuming that x and y are normalized to within the
/// bounds guaranteed by s2.Point.Normalize. The error is defined with respect to
/// the true distance after the points are projected to lie exactly on the sphere.
pub fn max_point_error(&self) -> f64 {
// There is a relative error of (2.5*DBL_EPSILON) when computing the squared
// distance, plus an absolute error of (16 * DBL_EPSILON**2) because the
// lengths of the input points may differ from 1 by up to (2*DBL_EPSILON) each.
2.5 * DBL_EPSILON * self.0 + 16. * DBL_EPSILON * DBL_EPSILON
}
/// max_angle_error returns the maximum error for a ChordAngle constructed
/// as an Angle distance.
pub fn max_angle_error(&self) -> f64 {
DBL_EPSILON * self.0
}
/// sin returns the sine of this chord angle. This method is more efficient
/// than converting to Angle and performing the computation.
pub fn sin(&self) -> f64 {
self.sin2().sqrt()
}
/// sin2 returns the square of the sine of this chord angle.
/// It is more efficient than Sin.
pub fn sin2(&self) -> f64 {
// Let a be the (non-squared) chord length, and let A be the corresponding
// half-angle (a = 2*sin(A)). The formula below can be derived from:
// sin(2*A) = 2 * sin(A) * cos(A)
// cos^2(A) = 1 - sin^2(A)
// This is much faster than converting to an angle and computing its sine.
self.0 * (1. - 0.25 * self.0)
}
/// cos returns the cosine of this chord angle. This method is more efficient
/// than converting to Angle and performing the computation.
pub fn cos(&self) -> f64 {
// cos(2*A) = cos^2(A) - sin^2(A) = 1 - 2*sin^2(A)
1.0 - 0.5 * self.0
}
/// tan returns the tangent of this chord angle.
pub fn tan(&self) -> f64 {
self.sin() / self.cos()
}
pub fn successor(&self) -> Self {
if self.0 >= MAXLENGTH2 {
return ChordAngle::inf();
} else if self.0 < 0. {
return ChordAngle(0.);
} else {
return ChordAngle(nextafter(self.0, 10.));
}
}
}
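// Worked example for the trig helpers (sketch): a right angle has squared
// chord length 2, so sin2 = 2 * (1 - 0.25 * 2) = 1 and cos = 1 - 0.5 * 2 = 0,
// matching the unit-circle values checked in the tests below.
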
#[cfg(test)]
mod tests {
use super::*;
fn test_chordangle_basics_case(ca1: ChordAngle, ca2: ChordAngle, less_than: bool, equal: bool) {
assert_eq!(less_than, ca1 < ca2);
assert_eq!(equal, ca1 == ca2);
}
#[test]
fn test_chordangle_basics() {
let zero = ChordAngle::default();
test_chordangle_basics_case(NEGATIVE, NEGATIVE, false, true);
test_chordangle_basics_case(NEGATIVE, zero, true, false);
test_chordangle_basics_case(NEGATIVE, STRAIGHT, true, false);
test_chordangle_basics_case(NEGATIVE, ChordAngle::inf(), true, false);
test_chordangle_basics_case(zero, zero, false, true);
test_chordangle_basics_case(zero, STRAIGHT, true, false);
test_chordangle_basics_case(zero, ChordAngle::inf(), true, false);
test_chordangle_basics_case(STRAIGHT, STRAIGHT, false, true);
test_chordangle_basics_case(STRAIGHT, ChordAngle::inf(), true, false);
test_chordangle_basics_case(ChordAngle::inf(), ChordAngle::inf(), false, true);
test_chordangle_basics_case(ChordAngle::inf(), ChordAngle::inf(), false, true);
}
fn test_chordangle_is_functions_case(
ca: ChordAngle,
is_neg: bool,
is_zero: bool,
is_inf: bool,
is_special: bool,
) {
assert_eq!(is_neg, ca.0 < 0.);
assert_eq!(is_zero, ca.0 == 0.);
assert_eq!(is_inf, ca.is_infinite());
assert_eq!(is_special, ca.is_special());
}
#[test]
fn test_chordangle_is_functions() {
let zero: ChordAngle = Default::default();
test_chordangle_is_functions_case(zero, false, true, false, false);
test_chordangle_is_functions_case(NEGATIVE, true, false, false, true);
test_chordangle_is_functions_case(zero, false, true, false, false);
test_chordangle_is_functions_case(STRAIGHT, false, false, false, false);
test_chordangle_is_functions_case(ChordAngle::inf(), false, false, true, true);
}
#[test]
fn test_chordangle_from_angle() {
let angles = vec![
Angle::from(Rad(0.)),
Angle::from(Rad(1.)),
Angle::from(Rad(-1.)),
Angle::from(Rad(PI)),
];
for angle in angles.into_iter() {
let ca = ChordAngle::from(angle);
let got = Angle::from(ca);
assert_eq!(got, angle);
}
assert_eq!(STRAIGHT, ChordAngle::from(Angle::from(Rad(PI))));
assert_eq!(Angle::inf(), Angle::from(ChordAngle::from(Angle::inf())));
}
fn chordangle_eq(a: ChordAngle, b: ChordAngle) {
assert_f64_eq!(a.0, b.0);
}
#[test]
fn test_chordangle_arithmetic() {
let zero = ChordAngle::default();
let deg_30 = ChordAngle::from(Deg(30.));
let deg_60 = ChordAngle::from(Deg(60.));
let deg_90 = ChordAngle::from(Deg(90.));
let deg_120 = ChordAngle::from(Deg(120.));
let deg_180 = STRAIGHT;
chordangle_eq(zero + zero, zero);
chordangle_eq(deg_60 + zero, deg_60);
chordangle_eq(zero + deg_60, deg_60);
chordangle_eq(deg_30 + deg_60, deg_90);
chordangle_eq(deg_60 + deg_30, deg_90);
chordangle_eq(deg_180 + zero, deg_180);
chordangle_eq(deg_60 + deg_30, deg_90);
chordangle_eq(deg_90 + deg_90, deg_180);
chordangle_eq(deg_120 + deg_90, deg_180);
chordangle_eq(deg_120 + deg_120, deg_180);
chordangle_eq(deg_30 + deg_180, deg_180);
chordangle_eq(deg_180 + deg_180, deg_180);
chordangle_eq(zero - zero, zero);
chordangle_eq(deg_60 - deg_60, zero);
chordangle_eq(deg_180 - deg_180, zero);
chordangle_eq(zero - deg_60, zero);
chordangle_eq(deg_30 - deg_90, zero);
chordangle_eq(deg_90 - deg_30, deg_60);
chordangle_eq(deg_90 - deg_60, deg_30);
chordangle_eq(deg_180 - zero, deg_180);
}
#[test]
fn test_chordangle_trigonometry() {
let iters = 40usize;
for i in 0..(iters + 1) {
let radians = PI * (i as f64) / (iters as f64);
let chordangle = ChordAngle::from(Angle::from(Rad(radians)));
assert_f64_eq!(radians.sin(), chordangle.sin());
assert_f64_eq!(radians.cos(), chordangle.cos());
// Since tan(x) is unbounded near pi/4, we map the result back to an
// angle before comparing. The assertion is that the result is equal to
// the tangent of a nearby angle.
assert_f64_eq!(radians.tan().atan(), chordangle.tan().atan());
}
let angle_90 = ChordAngle::from_squared_length(2.);
let angle_180 = ChordAngle::from_squared_length(4.);
assert_f64_eq!(1., angle_90.sin());
assert_f64_eq!(0., angle_90.cos());
assert!(angle_90.tan().is_infinite());
assert_f64_eq!(0., angle_180.sin());
assert_f64_eq!(-1., angle_180.cos());
assert_f64_eq!(0., angle_180.tan());
}
#[test]
fn test_chordangle_expanded() {
let zero = ChordAngle::default();
assert_eq!(NEGATIVE.expanded(5.), NEGATIVE.expanded(5.));
assert_eq!(ChordAngle::inf().expanded(-5.), ChordAngle::inf());
assert_eq!(zero.expanded(-5.), zero);
assert_eq!(
ChordAngle::from_squared_length(1.25).expanded(0.25),
ChordAngle::from_squared_length(1.5)
);
assert_eq!(
ChordAngle::from_squared_length(0.75).expanded(0.25),
ChordAngle::from_squared_length(1.)
);
}
}<|fim▁end|> | /// value. This method assumes the ChordAngles are not special.
fn sub(self, other: ChordAngle) -> Self::Output {
if other.0 == 0.0 {
self |
<|file_name|>Raspistill.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import sys
import time
from subprocess import call
# add the project folder to the Python path
sys.path.append('../../')
from library.components.SensorModule import SensorModule as Sensor
from library.components.MetaData import MetaData as MetaData
class Raspistill(Sensor):
def __init__(self):
super(Raspistill, self).__init__()
# ISO100
iso100MetaData = MetaData('ISO100')
iso100MetaData.setValueCallback(self.getIso100)
iso100MetaData.setUnitCallback(self.getUnit)
self.addMetaData(iso100MetaData)
# ISO200
iso200MetaData = MetaData('ISO200')
iso200MetaData.setValueCallback(self.getIso200)
iso200MetaData.setUnitCallback(self.getUnit)
self.addMetaData(iso200MetaData)
# ISO400'<|fim▁hole|> self.addMetaData(iso400MetaData)
# ISO800
iso800MetaData = MetaData('ISO800')
iso800MetaData.setValueCallback(self.getIso800)
iso800MetaData.setUnitCallback(self.getUnit)
self.addMetaData(iso800MetaData)
def getIso100(self):
filename = "photos/" + str(time.time()) + "-iso100.jpg"
call(["raspistill", "--ISO", "100", "-o", filename])
return str(filename)
def getIso200(self):
filename = "photos/" + str(time.time()) + "-iso200.jpg"
call(["raspistill", "--ISO", "200", "-o", filename])
return str(filename)
def getIso400(self):
filename = "photos/" + str(time.time()) + "-iso400.jpg"
call(["raspistill", "--ISO", "400", "-o", filename])
return str(filename)
def getIso800(self):
filename = "photos/" + str(time.time()) + "-iso800.jpg"
call(["raspistill", "--ISO", "800", "-o", filename])
return str(filename)
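# Each getIsoNNN method shells out to the Raspberry Pi camera CLI; the
# equivalent command line (illustrative) is:
#   raspistill --ISO 800 -o photos/<timestamp>-iso800.jpg
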
def getUnit(self):
return " Photo"
def getMetaData(self):
return super(Raspistill, self).getMetaData()<|fim▁end|> | iso400MetaData = MetaData('ISO400')
iso400MetaData.setValueCallback(self.getIso400)
iso400MetaData.setUnitCallback(self.getUnit) |
<|file_name|>filelog.go<|end_file_name|><|fim▁begin|>// Copyright (C) 2010, Kyle Lemons <[email protected]>. All rights reserved.
package log4go
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"<|fim▁hole|>
// This log writer sends output to a file
type FileLogWriter struct {
rec chan *LogRecord
rot chan bool
// The opened file
filename string
file *os.File
// The logging format
format string
// File header/trailer
header, trailer string
// Rotate at linecount
maxlines int
maxlines_curlines int
// Rotate at size
maxsize int
maxsize_cursize int
maxtotalsize int64
// Rotate daily
daily bool
daily_opendate int
// Keep old logfiles (.001, .002, etc)
rotate bool
}
// This is the FileLogWriter's output method
func (w *FileLogWriter) LogWrite(rec *LogRecord) {
w.rec <- rec
}
func (w *FileLogWriter) Close() {
close(w.rec)
}
// NewFileLogWriter creates a new LogWriter which writes to the given file and
// has rotation enabled if rotate is true.
//
// If rotate is true, any time a new log file is opened, the old one is renamed
// with a .### extension to preserve it. The various Set* methods can be used
// to configure log rotation based on lines, size, and daily.
//
// The standard log-line format is:
// [%D %T] [%L] (%S) %M
func NewFileLogWriter(fname string, rotate bool) *FileLogWriter {
w := &FileLogWriter{
rec: make(chan *LogRecord, LogBufferLength),
rot: make(chan bool),
filename: fname,
format: "[%D %T] [%L] (%S) %M",
rotate: rotate,
}
// open the file for the first time
if err := w.intRotate(); err != nil {
fmt.Fprintf(os.Stderr, "FileLogWriter(%q): %s\n", w.filename, err)
return nil
}
go func() {
defer func() {
if w.file != nil {
fmt.Fprint(w.file, FormatLogRecord(w.trailer, &LogRecord{Created: time.Now()}))
w.file.Close()
}
}()
for {
select {
case <-w.rot:
if err := w.intRotate(); err != nil {
fmt.Fprintf(os.Stderr, "FileLogWriter(%q): %s\n", w.filename, err)
return
}
case rec, ok := <-w.rec:
if !ok {
return
}
now := time.Now()
if (w.maxlines > 0 && w.maxlines_curlines >= w.maxlines) ||
(w.maxsize > 0 && w.maxsize_cursize >= w.maxsize) ||
(w.daily && now.Day() != w.daily_opendate) {
if err := w.intRotate(); err != nil {
fmt.Fprintf(os.Stderr, "FileLogWriter(%q): %s\n", w.filename, err)
return
}
}
// Perform the write
n, err := fmt.Fprint(w.file, FormatLogRecord(w.format, rec))
if err != nil {
fmt.Fprintf(os.Stderr, "FileLogWriter(%q): %s\n", w.filename, err)
return
}
// Update the counts
w.maxlines_curlines++
w.maxsize_cursize += n
}
}
}()
return w
}
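// Usage sketch (hypothetical values): the Set* methods below chain off the
// constructor, e.g.
//
//  w := NewFileLogWriter("app.log", true).
//      SetRotateSize(1 << 20).
//      SetRotateDaily(true)
//  defer w.Close()
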
// Request that the logs rotate
func (w *FileLogWriter) Rotate() {
w.rot <- true
}
// If this is called in a threaded context, it MUST be synchronized
func (w *FileLogWriter) intRotate() error {
// Close any log file that may be open
if w.file != nil {
fmt.Fprint(w.file, FormatLogRecord(w.trailer, &LogRecord{Created: time.Now()}))
w.file.Close()
}
canReset := true
// If we are keeping log files, rename it
if w.rotate {
info, err := os.Lstat(w.filename)
if err == nil { // file exists
// new name
ext := filepath.Ext(w.filename)
fname := w.filename + fmt.Sprintf(".%04d%02d%02d.%02d%02d%02d.%03d%s",
info.ModTime().Year(), info.ModTime().Month(), info.ModTime().Day(),
info.ModTime().Hour(), info.ModTime().Minute(), info.ModTime().Second(),
info.ModTime().Nanosecond()/1000000,
ext)
// Rename the file to its newfound home
err = os.Rename(w.filename, fname)
if err != nil {
fmt.Printf("Rotation failed: %s\n", err)
canReset = false // will retry again next time...
}
}
}
w.checkTotalSize()
// Open the log file
fd, err := os.OpenFile(w.filename, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0660)
if err != nil {
return err
}
w.file = fd
now := time.Now()
fmt.Fprint(w.file, FormatLogRecord(w.header, &LogRecord{Created: now}))
if canReset {
// Set the daily open date to the current date
w.daily_opendate = now.Day()
// initialize rotation values
w.maxlines_curlines = 0
w.maxsize_cursize = 0
}
return nil
}
type ByDateASC []os.FileInfo
func (a ByDateASC) Len() int { return len(a) }
func (a ByDateASC) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a ByDateASC) Less(i, j int) bool { return a[i].ModTime().Before(a[j].ModTime()) }
func (w *FileLogWriter) checkTotalSize() {
if w.maxtotalsize == 0 {
// no file cleanup
return
}
dir := filepath.Dir(w.filename)
base := filepath.Base(w.filename)
infos, err := ioutil.ReadDir(dir)
if err != nil {
return
}
var totalsize int64
matchedFiles := make([]os.FileInfo, 0)
for _, info := range infos {
if !info.IsDir() && strings.HasPrefix(info.Name(), base) {
// one of our file
matchedFiles = append(matchedFiles, info)
totalsize += info.Size()
}
}
sort.Sort(ByDateASC(matchedFiles))
for _, info := range matchedFiles {
if w.maxtotalsize > totalsize {
break
}
totalsize -= info.Size()
os.Remove(filepath.Join(dir, info.Name()))
}
}
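// Worked example (sketch): with maxtotalsize = 10MB and rotated files of
// 4MB, 4MB and 3MB (oldest first), the oldest 4MB file is removed, leaving
// 7MB, which is under the cap.
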
// Set the logging format (chainable). Must be called before the first log
// message is written.
func (w *FileLogWriter) SetFormat(format string) *FileLogWriter {
w.format = format
return w
}
// Set the logfile header and footer (chainable). Must be called before the first log
// message is written. These are formatted similar to the FormatLogRecord (e.g.
// you can use %D and %T in your header/footer for date and time).
func (w *FileLogWriter) SetHeadFoot(head, foot string) *FileLogWriter {
w.header, w.trailer = head, foot
if w.maxlines_curlines == 0 {
fmt.Fprint(w.file, FormatLogRecord(w.header, &LogRecord{Created: time.Now()}))
}
return w
}
// Set rotate at linecount (chainable). Must be called before the first log
// message is written.
func (w *FileLogWriter) SetRotateLines(maxlines int) *FileLogWriter {
//fmt.Fprintf(os.Stderr, "FileLogWriter.SetRotateLines: %v\n", maxlines)
w.maxlines = maxlines
return w
}
// Set rotate at size (chainable). Must be called before the first log message
// is written.
func (w *FileLogWriter) SetRotateSize(maxsize int) *FileLogWriter {
//fmt.Fprintf(os.Stderr, "FileLogWriter.SetRotateSize: %v\n", maxsize)
w.maxsize = maxsize
return w
}
func (w *FileLogWriter) SetMaxTotalSize(max int64) *FileLogWriter {
w.maxtotalsize = max
return w
}
// Set rotate daily (chainable). Must be called before the first log message is
// written.
func (w *FileLogWriter) SetRotateDaily(daily bool) *FileLogWriter {
//fmt.Fprintf(os.Stderr, "FileLogWriter.SetRotateDaily: %v\n", daily)
w.daily = daily
return w
}
// SetRotate changes whether or not the old logs are kept. (chainable) Must be
// called before the first log message is written. If rotate is false, the
// files are overwritten; otherwise, they are rotated to another file before the
// new log is opened.
func (w *FileLogWriter) SetRotate(rotate bool) *FileLogWriter {
//fmt.Fprintf(os.Stderr, "FileLogWriter.SetRotate: %v\n", rotate)
w.rotate = rotate
return w
}
// NewXMLLogWriter is a utility method for creating a FileLogWriter set up to
// output XML record log messages instead of line-based ones.
func NewXMLLogWriter(fname string, rotate bool) *FileLogWriter {
return NewFileLogWriter(fname, rotate).SetFormat(
` <record level="%L">
<timestamp>%D %T</timestamp>
<source>%S</source>
<message>%M</message>
</record>`).SetHeadFoot("<log created=\"%D %T\">", "</log>")
}<|fim▁end|> | "sort"
"strings"
"time"
) |
<|file_name|>isup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|>import re
import sys
from urllib import urlopen
def isup(domain):
resp = urlopen("http://www.isup.me/%s" % domain).read()
return "%s" % ("UP" if re.search("It's just you.", resp,
re.DOTALL) else "DOWN")
if __name__ == '__main__':
if len(sys.argv) > 1:
print "\n".join(isup(d) for d in sys.argv[1:])
else:
print "usage: %s domain1 [domain2 .. domainN]" % sys.argv[0]<|fim▁end|> | |
<|file_name|>PlotTools.java<|end_file_name|><|fim▁begin|>package edu.indiana.soic.spidal.dapwc;
public class PlotTools
{
// TODO - fix PlotTools
/* public static void CreatePlotWithCenters(String centerFile, String pointsFile, String clusterNumberFile, int numberOfCenterPointsToIncludeInEachCenterType, String centerPlotFile, String plotDescription)
{
*//* Generate all types of center clusters per cluster
*
* Center clusters are,
* 1. Original Min Mean
* 2. MDS Min Mean
* 3. MDS Center of Gravity (CoG)
* 4. Overall Best
* 5. Bucket Fraction 0
* Bucket Fraction 1 and so on
*
* Number of center points to include in each center type = n
* n <= N, which is the number of center points found for each center type by PWC
* N is specified through NumberOfCenters parameter in PWC
*
* Assumes a center file from a PWC center finding run
* Assumes a points file, which has each point mapped to its cluster in the format
* PointNumber<TAB>Xcoord<TAB>Ycoord<TAB>Zcoord<TAB>ClusterNumber
*//*
*//* Colors to use with PlotViz
reads color info from Matlab50.txt file *//*
java.util.ArrayList<Color> matlab50Colors = GenerateMatlab50Colors();
*//* XML elements to hold points and clusters to be used in PlotViz file *//*
XElement clustersElement = new XElement("clusters");
XElement pointsElement = new XElement("points");
*//* Hashtable mapping point number to a PlotVizPoint data structure for the points in the given points file *//*
java.util.Hashtable existingPointsTable = new java.util.Hashtable();
*//* Maximum number of points int the points file *//*
int maxpnum = 0;
*//* Maximum number of clusters that points are mapped to in the points file*//*
int maxcnum = 0;
edu.indiana.soic.spidal.general.Box<Integer> boxmaxpnum = new edu.indiana.soic.spidal.general.Box<Integer>(maxpnum);
edu.indiana.soic.spidal.general.Box<Integer> boxmaxcnum = new edu.indiana.soic.spidal.general.Box<Integer>(maxcnum);
ProcessPointsFile(pointsFile, clusterNumberFile, clustersElement, pointsElement, boxmaxpnum, boxmaxcnum, existingPointsTable, matlab50Colors);
maxpnum = boxmaxpnum.content;
maxcnum = boxmaxcnum.content;
*//* Table mapping each cluster (i.e. group) number to another table called method table
* method table maps each method (e.g. smallest distance mean, smallest MDS distance mean, etc.) name to the list center points for that particular method
* the order of points in the list is as same as in the given center file *//*
java.util.Hashtable groupTable = ProcessCenterFile(centerFile);
CreatePlotWithCentersInternal(centerPlotFile, plotDescription, clustersElement, pointsElement, maxpnum, existingPointsTable, maxcnum, matlab50Colors, groupTable, numberOfCenterPointsToIncludeInEachCenterType);
}
private static void CreatePlotWithCentersInternal(String centerPlotFile, String plotDescription, XElement clustersElement, XElement pointsElement, int maxpnum, java.util.Hashtable existingPointsTable, int maxcnum, java.util.ArrayList<Color> matlab50Colors, java.util.Hashtable groupTable, int numberOfCenterPointsToIncludeInEachCenterType)
{
++maxcnum;
for (DictionaryEntry groupToMethodTable : groupTable)
{
int group = (int)groupToMethodTable.Key; // group is the original cluster number
java.util.Hashtable methodTable = (java.util.Hashtable)groupToMethodTable.Value;
int methodCount = methodTable.size();
int tempCount = methodCount;
for (DictionaryEntry methodToCenterPoints : methodTable)
{
String method = (String)methodToCenterPoints.Key; // method is one of smallest distance mean, smallest MDS mean, etc.
// cluster number to be used in PlotViz for this center type
int methodNumber = methodCount - tempCount--;
int clusterNumberForCenterType = group * methodCount + methodNumber + maxcnum;
// cluster name to be used in PlotViz for this center type
String centerTypeName = group + "" + method + ".centerpoints";
// add an XML element to represent this center type as a cluster in PlotViz
clustersElement.Add(CreateClusterElement(clusterNumberForCenterType, centerTypeName, matlab50Colors.get(group % matlab50Colors.size()), false, 2.0, methodNumber));
java.util.ArrayList<CenterInfo> cps = (java.util.ArrayList<CenterInfo>)methodToCenterPoints.Value;
// Picking the topmost n point for each method
for (int i = 0; i < numberOfCenterPointsToIncludeInEachCenterType; i++)
{
CenterInfo cp = cps.get(i);
PlotVizPoint p = (PlotVizPoint)existingPointsTable.get(cp.getPnum());
pointsElement.Add(CreatePointElement(++maxpnum, clusterNumberForCenterType, ("cluster:" + group + "-idx:" + p.getIndex() + "method:" + method), p.getX(), p.getY(), p.getZ()));
}
}
}
XElement plotElement = CreatePlotElement(plotDescription, true);
XElement plotvizElement = new XElement("plotviz");
plotvizElement.Add(plotElement);
plotvizElement.Add(clustersElement);
plotvizElement.Add(pointsElement);
plotvizElement.Save(centerPlotFile);
}
private static void ProcessPointsFile(String pointsFile, String clusterNumberFile, XElement clusters, XElement points, edu.indiana.soic.spidal.general.Box<Integer> maxpnum, edu.indiana.soic.spidal.general.Box<Integer> maxcnum, java.util.Hashtable pointsTable, java.util.ArrayList<Color> matlab50Colors)
{
//C# TO JAVA CONVERTER NOTE: The following 'using' block is replaced by its Java equivalent:
// using (StreamReader preader = new StreamReader(pointsFile), creader = new StreamReader(clusterNumberFile))
StreamReader preader = new StreamReader(pointsFile);
StreamReader creader = new StreamReader(clusterNumberFile);
try
{
java.util.HashSet<Integer> clusterNumbers = new java.util.HashSet<Integer>();
maxpnum.content = -1;
while (!preader.EndOfStream)
{
String pline = preader.ReadLine();
String cline = creader.ReadLine();
if (!tangible.DotNetToJavaStringHelper.isNullOrEmpty(pline) && !tangible.DotNetToJavaStringHelper.isNullOrEmpty(cline))
{
PlotVizPoint p = ReadPointLine(pline.trim());
if (maxpnum.content < p.getIndex())
{
maxpnum.content = p.getIndex();
}
pointsTable.put(p.getIndex(), p);
int cnum = ReadCnum(cline);
p.setCluster(cnum);
if (!clusterNumbers.contains(p.getCluster()))
{
clusterNumbers.add(p.getCluster());
clusters.Add(CreateClusterElement(p.getCluster(), (new Integer(p.getCluster())).toString(CultureInfo.InvariantCulture), matlab50Colors.get(p.getCluster() % matlab50Colors.size()), true, 0.1, Glyphs.Hexagon2D));
}
points.Add(CreatePointElement(p.getIndex(), p.getCluster(), "", p.getX(), p.getY(), p.getZ()));
}
}
maxcnum.content = clusterNumbers.Max();
}
finally
{
preader.dispose();
creader.dispose();
}
}
private static int ReadCnum(String line)
{
char[] sep = new char[] {' ', '\t'};
String[] splits = line.split(sep, StringSplitOptions.RemoveEmptyEntries);
return splits.length == 2 ? Integer.parseInt(splits[1]) : splits.length == 5 ? Integer.parseInt(splits[4]) : 0;
}
private static java.util.ArrayList<Color> GenerateMatlab50Colors()
{
//C# TO JAVA CONVERTER NOTE: The following 'using' block is replaced by its Java equivalent:
// using (Stream stream = Assembly.GetExecutingAssembly().GetManifestResourceStream("Salsa.PairwiseClusteringTPL.Matlab50.txt"))
Stream stream = Assembly.GetExecutingAssembly().GetManifestResourceStream("Salsa.PairwiseClusteringTPL.Matlab50.txt");
try
{
if (stream != null)
{
//C# TO JAVA CONVERTER NOTE: The following 'using' block is replaced by its Java equivalent:
// using (StreamReader reader = new StreamReader(stream))
StreamReader reader = new StreamReader(stream);
try
{
java.util.ArrayList<Color> colors = new java.util.ArrayList<Color>();
char[] sep = new char[] {' ', '\t'};
String[] splits;
String split;
int startIdx = 3;
int r, g, b, a;
while (!reader.EndOfStream)
{
String line = reader.ReadLine();
if (!tangible.DotNetToJavaStringHelper.isNullOrEmpty(line))
{
splits = line.trim().split(java.util.regex.Pattern.quote(sep.toString()), -1);
split = splits[0];
r = Integer.parseInt(split.substring(startIdx, startIdx + (split.length() - (startIdx + 1))));
split = splits[1];
g = Integer.parseInt(split.substring(startIdx, startIdx + (split.length() - (startIdx + 1))));
split = splits[2];
b = Integer.parseInt(split.substring(startIdx, startIdx + (split.length() - (startIdx + 1))));
split = splits[3];
a = Integer.parseInt(split.substring(startIdx, startIdx + (split.length() - (startIdx + 1))));
colors.add(Color.FromArgb(a, r, g, b));
}
}
return colors;
}
finally
{
reader.dispose();
}
}
else
{
throw new RuntimeException("Unable to load embedded resource: Matlab50.txt");
}
}
finally
{
stream.dispose();
}
}
private static PlotVizPoint ReadPointLine(String line)
{
char[] sep = new char[] {' ', '\t'};
String[] splits = line.split(sep, StringSplitOptions.RemoveEmptyEntries);
PlotVizPoint p = new PlotVizPoint(Double.parseDouble(splits[1]), Double.parseDouble(splits[2]), Double.parseDouble(splits[3]), Integer.parseInt(splits[0]), Integer.parseInt(splits[4]));
return p;
}
private static CenterInfo ReadCenterLine(String line)
{
char[] sep = new char[] {' ', '\t'};
char[] eqsep = new char[] {'='};
String[] splits = line.split(sep, StringSplitOptions.RemoveEmptyEntries);
int pnum = Integer.parseInt(splits[0].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1]);
double measure = Double.parseDouble(splits[1].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1]);
int methodIdx = 2;<|fim▁hole|> double count = 0.0;
if (splits[2].startsWith("Count"))
{
methodIdx = 4;
count = Double.parseDouble(splits[2].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1]);
source = splits[3].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1];
}
String method = splits[methodIdx].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1];
int group = Integer.parseInt(splits[methodIdx + 1].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1]);
String seqName = splits[methodIdx + 2].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1];
for (int i = methodIdx + 3; i < splits.length - 4; ++i)
{
seqName += (" " + splits[i]);
}
int seqLength = Integer.parseInt(splits[splits.length - 4].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1]);
return new CenterInfo(pnum, measure, method, group, seqName, seqLength, source, count);
}
private static java.util.Hashtable ProcessCenterFile(String centerFile)
{
//C# TO JAVA CONVERTER NOTE: The following 'using' block is replaced by its Java equivalent:
// using (StreamReader reader = new StreamReader(centerFile))
StreamReader reader = new StreamReader(centerFile);
try
{
java.util.Hashtable groupTable = new java.util.Hashtable();
while (!reader.EndOfStream)
{
CenterInfo cp = ReadCenterLine(reader.ReadLine());
AddToGroupTable(groupTable, cp);
}
return groupTable;
}
finally
{
reader.dispose();
}
}
private static void AddToGroupTable(java.util.Hashtable groupTable, CenterInfo cp)
{
if (groupTable.containsKey(cp.getCluster()))
{
java.util.Hashtable methodTable = (java.util.Hashtable)groupTable.get(cp.getCluster());
if (methodTable.containsKey(cp.getMethod()))
{
// Need a list to maintain the order of points
java.util.ArrayList<CenterInfo> cps = (java.util.ArrayList<CenterInfo>)methodTable.get(cp.getMethod());
cps.add(cp);
}
else
{
// Need a list to maintain the order of points
java.util.ArrayList<CenterInfo> cps = new java.util.ArrayList<CenterInfo>(java.util.Arrays.asList(new CenterInfo[] {cp}));
methodTable.put(cp.getMethod(), cps);
}
}
else
{
// Need a list to maintain the order of points
java.util.ArrayList<CenterInfo> cps = new java.util.ArrayList<CenterInfo>(java.util.Arrays.asList(new CenterInfo[] {cp}));
java.util.Hashtable methodTable = new java.util.Hashtable();
methodTable.put(cp.getMethod(), cps);
groupTable.put(cp.getCluster(), methodTable);
}
}
private static XElement CreatePlotElement(String name, boolean glyphVisible)
{
XElement plot = new XElement("plot", new XElement("title", name), new XElement("pointsize", 1), new XElement("glyph", new XElement("visible", glyphVisible ? 1 : 0), new XElement("scale", 1)), new XElement("camera", new XElement("focumode", 0), new XElement("focus", new XAttribute("x", 0), new XAttribute("y", 0), new XAttribute("z", 0))));
return plot;
}
private static XElement CreateClusterElement(int key, String label, Color color, boolean isDefault, double size, int shape)
{
XElement cluster = new XElement("cluster", new XElement("key", key), new XElement("label", label), new XElement("visible", 1), new XElement("default", isDefault ? 1 : 0), new XElement("color", new XAttribute("r", color.R), new XAttribute("g", color.G), new XAttribute("b", color.B), new XAttribute("a", color.A)), new XElement("size", size), new XElement("shape", shape));
return cluster;
}
private static XElement CreatePointElement(int key, int clusterKey, String label, double x, double y, double z)
{
XElement point = new XElement("point", new XElement("key", key), new XElement("clusterkey", clusterKey), new XElement("label", label), new XElement("location", new XAttribute("x", x), new XAttribute("y", y), new XAttribute("z", z)));
return point;
}
//C# TO JAVA CONVERTER WARNING: Java does not allow user-defined value types. The behavior of this class will differ from the original:
//ORIGINAL LINE: struct Glyphs
private final static class Glyphs
{
public static int Triangle2D = 0;
public static int Rectangle2D = 1;
public static int Pentagon2D = 2;
public static int Hexagon2D = 3;
public static int Tetrahedron3D = 4;
public static int Cube3D = 5;
public static int Sphere3D = 6;
public static int Cylinder3D = 7;
}*/
}<|fim▁end|> | String source = ""; |
<|file_name|>test_idct.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/<|fim▁hole|>from essentia_test import *
class TestIDCT(TestCase):
def testInvalidParam(self):
self.assertConfigureFails(IDCT(), { 'inputSize': 0, 'outputSize': 2 })
self.assertConfigureFails(IDCT(), { 'inputSize': 6, 'outputSize': 0 })
def testRegression(self):
# values from Matlab/Octave
inputArray = [ 0.89442718, -0.60150099, -0.12078822, -0.37174806, 0.82789522]
expected = [ 0, 0, 1, 0, 1 ]
self.assertAlmostEqualVector(IDCT(outputSize=len(expected), inputSize = len(inputArray))(inputArray), expected, 1e-6)
def testLifteringRegression(self):
# DCT III and liftering computed using the PLP and RASTA Matlab toolbox.
# A big tolerance is necessary due to the smoothing caused by the smaller amount of bins in the DCT domain.
inputArray = [ 1.89736652, 0.95370573, 3.39358997, -3.35009956]
expected = [1, 1, 0, 0, 1]
self.assertAlmostEqualVector(IDCT(inputSize=len(inputArray),
outputSize=len(expected),
dctType = 3,
liftering = 22)(inputArray), expected, 1e0)
def testZero(self):
self.assertEqualVector(IDCT(outputSize=10)(zeros(5)), zeros(10))
def testInvalidInput(self):
self.assertComputeFails(IDCT(), []) # = testEmpty
self.assertComputeFails(IDCT(outputSize = 2, inputSize = 1), [ 0, 2, 4 ])
suite = allTests(TestIDCT)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)<|fim▁end|> | |
<|file_name|>scrollable.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license<|fim▁hole|>import {
getRtlScrollAxisType,
RtlScrollAxisType,
supportsScrollBehavior,
} from '@angular/cdk/platform';
import {Directive, ElementRef, NgZone, OnDestroy, OnInit, Optional} from '@angular/core';
import {fromEvent, Observable, Subject, Observer} from 'rxjs';
import {takeUntil} from 'rxjs/operators';
import {ScrollDispatcher} from './scroll-dispatcher';
export type _Without<T> = {[P in keyof T]?: never};
export type _XOR<T, U> = (_Without<T> & U) | (_Without<U> & T);
export type _Top = {top?: number};
export type _Bottom = {bottom?: number};
export type _Left = {left?: number};
export type _Right = {right?: number};
export type _Start = {start?: number};
export type _End = {end?: number};
export type _XAxis = _XOR<_XOR<_Left, _Right>, _XOR<_Start, _End>>;
export type _YAxis = _XOR<_Top, _Bottom>;
/**
* An extended version of ScrollToOptions that allows expressing scroll offsets relative to the
* top, bottom, left, right, start, or end of the viewport rather than just the top and left.
* Please note: the top and bottom properties are mutually exclusive, as are the left, right,
* start, and end properties.
*/
export type ExtendedScrollToOptions = _XAxis & _YAxis & ScrollOptions;
/**
* Sends an event when the directive's element is scrolled. Registers itself with the
* ScrollDispatcher service to include itself as part of its collection of scrolling events that it
* can be listened to through the service.
*/
@Directive({
selector: '[cdk-scrollable], [cdkScrollable]',
})
export class CdkScrollable implements OnInit, OnDestroy {
private readonly _destroyed = new Subject<void>();
private _elementScrolled: Observable<Event> = new Observable((observer: Observer<Event>) =>
this.ngZone.runOutsideAngular(() =>
fromEvent(this.elementRef.nativeElement, 'scroll')
.pipe(takeUntil(this._destroyed))
.subscribe(observer),
),
);
constructor(
protected elementRef: ElementRef<HTMLElement>,
protected scrollDispatcher: ScrollDispatcher,
protected ngZone: NgZone,
@Optional() protected dir?: Directionality,
) {}
ngOnInit() {
this.scrollDispatcher.register(this);
}
ngOnDestroy() {
this.scrollDispatcher.deregister(this);
this._destroyed.next();
this._destroyed.complete();
}
/** Returns observable that emits when a scroll event is fired on the host element. */
elementScrolled(): Observable<Event> {
return this._elementScrolled;
}
/** Gets the ElementRef for the viewport. */
getElementRef(): ElementRef<HTMLElement> {
return this.elementRef;
}
/**
* Scrolls to the specified offsets. This is a normalized version of the browser's native scrollTo
* method, since browsers are not consistent about what scrollLeft means in RTL. For this method
* left and right always refer to the left and right side of the scrolling container irrespective
* of the layout direction. start and end refer to left and right in an LTR context and vice-versa
* in an RTL context.
* @param options specifies the offsets to scroll to.
*/
scrollTo(options: ExtendedScrollToOptions): void {
const el = this.elementRef.nativeElement;
const isRtl = this.dir && this.dir.value == 'rtl';
// Rewrite start & end offsets as right or left offsets.
if (options.left == null) {
options.left = isRtl ? options.end : options.start;
}
if (options.right == null) {
options.right = isRtl ? options.start : options.end;
}
// Rewrite the bottom offset as a top offset.
if (options.bottom != null) {
(options as _Without<_Bottom> & _Top).top =
el.scrollHeight - el.clientHeight - options.bottom;
}
// Rewrite the right offset as a left offset.
if (isRtl && getRtlScrollAxisType() != RtlScrollAxisType.NORMAL) {
if (options.left != null) {
(options as _Without<_Left> & _Right).right =
el.scrollWidth - el.clientWidth - options.left;
}
if (getRtlScrollAxisType() == RtlScrollAxisType.INVERTED) {
options.left = options.right;
} else if (getRtlScrollAxisType() == RtlScrollAxisType.NEGATED) {
options.left = options.right ? -options.right : options.right;
}
} else {
if (options.right != null) {
(options as _Without<_Right> & _Left).left =
el.scrollWidth - el.clientWidth - options.right;
}
}
this._applyScrollToOptions(options);
}
private _applyScrollToOptions(options: ScrollToOptions): void {
const el = this.elementRef.nativeElement;
if (supportsScrollBehavior()) {
el.scrollTo(options);
} else {
if (options.top != null) {
el.scrollTop = options.top;
}
if (options.left != null) {
el.scrollLeft = options.left;
}
}
}
/**
* Measures the scroll offset relative to the specified edge of the viewport. This method can be
* used instead of directly checking scrollLeft or scrollTop, since browsers are not consistent
* about what scrollLeft means in RTL. The values returned by this method are normalized such that
* left and right always refer to the left and right side of the scrolling container irrespective
* of the layout direction. start and end refer to left and right in an LTR context and vice-versa
* in an RTL context.
* @param from The edge to measure from.
*/
measureScrollOffset(from: 'top' | 'left' | 'right' | 'bottom' | 'start' | 'end'): number {
const LEFT = 'left';
const RIGHT = 'right';
const el = this.elementRef.nativeElement;
if (from == 'top') {
return el.scrollTop;
}
if (from == 'bottom') {
return el.scrollHeight - el.clientHeight - el.scrollTop;
}
// Rewrite start & end as left or right offsets.
const isRtl = this.dir && this.dir.value == 'rtl';
if (from == 'start') {
from = isRtl ? RIGHT : LEFT;
} else if (from == 'end') {
from = isRtl ? LEFT : RIGHT;
}
if (isRtl && getRtlScrollAxisType() == RtlScrollAxisType.INVERTED) {
// For INVERTED, scrollLeft is (scrollWidth - clientWidth) when scrolled all the way left and
// 0 when scrolled all the way right.
if (from == LEFT) {
return el.scrollWidth - el.clientWidth - el.scrollLeft;
} else {
return el.scrollLeft;
}
} else if (isRtl && getRtlScrollAxisType() == RtlScrollAxisType.NEGATED) {
// For NEGATED, scrollLeft is -(scrollWidth - clientWidth) when scrolled all the way left and
// 0 when scrolled all the way right.
if (from == LEFT) {
return el.scrollLeft + el.scrollWidth - el.clientWidth;
} else {
return -el.scrollLeft;
}
} else {
// For NORMAL, as well as non-RTL contexts, scrollLeft is 0 when scrolled all the way left and
// (scrollWidth - clientWidth) when scrolled all the way right.
if (from == LEFT) {
return el.scrollLeft;
} else {
return el.scrollWidth - el.clientWidth - el.scrollLeft;
}
}
}
}<|fim▁end|> | */
import {Directionality} from '@angular/cdk/bidi'; |
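A hedged sketch of a call site for the normalized API above; CdkScrollable, Component, and ViewChild are real Angular APIs, while the component, selector, and template are illustrative assumptions:

import {Component, ViewChild} from '@angular/core';
import {CdkScrollable} from '@angular/cdk/scrolling';

@Component({
  selector: 'scroll-demo',
  template: `<div cdkScrollable style="width: 200px; overflow: auto">...</div>`,
})
export class ScrollDemo {
  @ViewChild(CdkScrollable) scrollable: CdkScrollable;

  jumpToEnd(): void {
    // 'end' resolves to right in LTR and left in RTL, so the caller never
    // branches on layout direction; the directive rewrites the offset into
    // whatever scrollLeft convention the browser actually implements.
    this.scrollable.scrollTo({end: 0});
    const remaining = this.scrollable.measureScrollOffset('end'); // 0 after the jump
  }
}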
<|file_name|>utils.js<|end_file_name|><|fim▁begin|>export function pluralize(count, word) {
return count === 1 ? word : word + 's';
}<|fim▁hole|> // based on https://github.com/JedWatson/classnames
let classes = '';
args.forEach(arg => {
if (arg) {
const argType = typeof arg;
if (argType === 'string' || argType === 'number') {
classes += ' ' + arg;
} else if (Array.isArray(arg)) {
classes += ' ' + classNames(...arg);
} else if (argType === 'object') {
Object.keys(arg).forEach(key => {
if (arg[key]) {
classes += ' ' + key;
}
});
}
}
});
return classes.substr(1);
}
export function uuid() {
let i, random;
let uuid = '';
for (i = 0; i < 32; i++) {
random = Math.random() * 16 | 0;
if (i === 8 || i === 12 || i === 16 || i === 20) {
uuid += '-';
}
uuid += (i === 12 ? 4 : (i === 16 ? (random & 3 | 8) : random))
.toString(16);
}
return uuid;
}<|fim▁end|> |
export function classNames(...args) { |
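The two special cases inside uuid() are the RFC 4122 version and variant fields, which the bit-twiddling obscures; a small restatement of just that rule (illustrative, not part of the file above):

// Hex digit 12 of a version-4 UUID is always '4'. Digit 16 keeps only the low
// two random bits and ORs in 0b1000, so it is one of 8, 9, a, or b -- the
// RFC 4122 variant marker.
function variantDigit(random: number): string {
  return ((random & 3) | 8).toString(16);
}

console.log(['8', '9', 'a', 'b'].includes(variantDigit(13))); // true for any 0-15 input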
<|file_name|>accountEditor_en.js<|end_file_name|><|fim▁begin|>Brick.util.Language.add('en',{'mod': {'{C#MODNAME}':{
'accounteditor': {
'widget': {
'1': 'Add account'
},
'editor': {
'1': 'Edit account',
'2': 'Create account',
'3': 'Save',
'4': 'Cancel',
'5': 'Close'
},
'row': {
'1': 'Remove this account',
'2': 'Remove',<|fim▁hole|> '7': 'Currency',
'8': 'Member account'
}
}
}}});<|fim▁end|> | '3': 'Account type',
'4': 'Title',
'5': 'Remark',
'6': 'Opening balance', |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2014 by SiegeLord
//
// All rights reserved. Distributed under ZLib. For full terms see the file LICENSE.
use std::env::var;
fn main()
{<|fim▁hole|> if var("CARGO_FEATURE_LINK_NONE").is_ok()
{
return;
}
let debug = match var("CARGO_FEATURE_LINK_DEBUG")
{
Err(_) => "",
Ok(_) => "-debug"
};
let static_ = match var("CARGO_FEATURE_LINK_STATIC")
{
Err(_) => "",
Ok(_) => "-static"
};
println!("cargo:rustc-flags=-l allegro_ttf{}{}", static_, debug);
}<|fim▁end|> | |
<|file_name|>xcrate-issue-43189-a.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|>
pub trait A {
fn a(&self) {}
}
impl A for () {}<|fim▁end|> | #![crate_type="lib"] |
<|file_name|>project.ts<|end_file_name|><|fim▁begin|>import { Language } from './language';
import { ProjectType } from './projecttype';
export class Project {
id_question_project: number;
description: string;
name: string;
active: number ;<|fim▁hole|> end_date: string;
end_text: string;
footer_message: string;
header_message: string;
id_language: number;
id_question_project_type: number;
language: Language[];
question_project_type: ProjectType[];
start_date: string;
update_date: string;
user_id: number;
welcome_text: string;
}<|fim▁end|> | create_date: string; |
<|file_name|>set-uuid.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
# requirements: partition /dev/sdc1 with swap
from storage import *
from storageitu import *
<|fim▁hole|>environment = Environment(False)
storage = Storage(environment)
storage.probe()
staging = storage.get_staging()
print(staging)
sdc1 = BlkDevice.find_by_name(staging, "/dev/sdc1")
blk_filesystem = sdc1.get_blk_filesystem()
blk_filesystem.set_uuid("7420b069-cd50-464e-b0b2-66c1fdc75bcd")
print(staging)
commit(storage)<|fim▁end|> |
set_logger(get_logfile_logger())
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#coding: latin1
label = 'Prototype of a homepage for the V.O.R.'
def populate(db):
import vor1<|fim▁hole|><|fim▁end|> | vor1.populate(db) |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>try:
from setuptools import setup
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup
import sys
test_suite = "tests"
tests_require = ["mongo-orchestration >= 0.2, < 0.4", "requests >= 2.5.1"]
if sys.version_info[:2] == (2, 6):
# Need unittest2 to run unittests in Python 2.6
tests_require.append("unittest2")
test_suite = "unittest2.collector"
try:
with open("README.rst", "r") as fd:
long_description = fd.read()
except IOError:
long_description = None # Install without README.rst
setup(name='hzkgelastic2-doc-manager',
version='0.2.1.dev0',
maintainer='mongodb',
description='Elastic2 plugin for mongo-connector',
long_description=long_description,<|fim▁hole|> install_requires=['mongo-connector >= 2.3.0', "elasticsearch>=2.0.0,<3.0.0"],
packages=["mongo_connector", "mongo_connector.doc_managers"],
extras_require={'aws': ['boto3 >= 1.4.0', 'requests-aws-sign >= 0.1.1']},
license="Apache License, Version 2.0",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Database",
"Topic :: Software Development :: Libraries :: Python Modules",
"Operating System :: Unix",
"Operating System :: MacOS :: MacOS X",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX"
],
keywords=['mongo-connector', "mongodb", "elastic", "elasticsearch"],
test_suite=test_suite,
tests_require=tests_require
)<|fim▁end|> | platforms=['any'],
author='anna herlihy',
author_email='[email protected]',
url='https://github.com/mongodb-labs/hzkgelastic2-doc-manager', |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import sys
from . import (
utils, env, defs, context, layers, parser, preprocessor, loader, analyzer,
generator)
LAYERS = (
(parser.Parser, "parse"),
(preprocessor.Preprocessor, "transform_ast"),
(loader.Loader, "expand_ast"),
(analyzer.Analyzer, "expand_ast"),
(generator.Generator, "expand_ast")
)
def _get_context_args_from_settings(string, settings):
return {
"main_file_hash": utils.get_string_hash(string),
"main_file_name": settings["main_file_name"],
"module_paths": settings["module_paths"],
"loaded_modules": settings["loaded_modules"],
"test_mode_on": settings["test_mode_on"],
"env": env.Env()
}
def _update_context_args():<|fim▁hole|>def compile_string(string, **settings):
context_args = _get_context_args_from_settings(string, settings)
current_ast = string
for layer_cls, method_name in LAYERS:
if settings["stop_before"] == layer_cls:
return current_ast
with context.new_context(**context_args):
layer = layer_cls()
if method_name == "parse":
current_ast = layer.parse(current_ast)
else:
new_ast = getattr(layers, method_name)(
current_ast, registry=layer.get_registry())
if new_ast is not None:
current_ast = list(new_ast)
if settings["stop_after"] == layer_cls:
return current_ast
context_args = _update_context_args()
return "\n".join(current_ast)
#return current_ast
def compile_file(in_file, **settings):
result = compile_string(
utils.read_file(in_file),
main_file_name=in_file.split("/")[-1].split(".")[0], **settings)
if settings["print_ast"]:
for node in result:
print(node)
sys.exit(0)
return result<|fim▁end|> | return {**context.modified_context_args(), **{"env": env.Env()}}
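For reference, a hedged driver sketch for compile_string: every keyword below is read somewhere in the module, but the values and the package name are illustrative assumptions.

import margolith  # placeholder name: the sample never names its own package

output = margolith.compile_string(
    "some source text",
    main_file_name="main",         # compile_file derives this from the file stem
    module_paths=["./modules"],
    loaded_modules=[],
    test_mode_on=False,
    stop_before=None,              # or a layer class, e.g. margolith.generator.Generator
    stop_after=None,               # short-circuits after that layer and returns the AST
)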
|