prompt (large_string, length 70–991k) | completion (large_string, length 0–1.02k)
---|---
<|file_name|>NodeContent.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _reactAddonsShallowCompare = require('react-addons-shallow-compare');
var _reactAddonsShallowCompare2 = _interopRequireDefault(_reactAddonsShallowCompare);
var _path = require('path');
var _path2 = _interopRequireDefault(_path);
var _NodeCaret = require('./NodeCaret');
var _NodeCaret2 = _interopRequireDefault(_NodeCaret);
var _styles = require('./styles');
var _styles2 = _interopRequireDefault(_styles);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var isDirectory = function isDirectory(type) {
return type === 'directory';
};
var Node = function (_Component) {
_inherits(Node, _Component);
function Node() {
_classCallCheck(this, Node);
var _this = _possibleConstructorReturn(this, Object.getPrototypeOf(Node).call(this));
_this.state = {};
return _this;
}
// shouldComponentUpdate(nextProps, nextState, nextContext) {<|fim▁hole|> //
// // console.log('update', shouldUpdate, nextProps.node.path)
//
// return shouldUpdate
// }
_createClass(Node, [{
key: 'render',
value: function render() {
var _this2 = this;
var _props = this.props;
var node = _props.node;
var metadata = _props.metadata;
var depth = _props.depth;
var type = node.type;
var name = node.name;
var path = node.path;
var expanded = metadata.expanded;
var selected = metadata.selected;
var hover = this.state.hover;
return _react2.default.createElement(
'div',
{ style: (0, _styles.getPaddedStyle)(depth, selected, hover),
onMouseEnter: function onMouseEnter() {
return _this2.setState({ hover: true });
},
onMouseLeave: function onMouseLeave() {
return _this2.setState({ hover: false });
}
},
isDirectory(type) && _react2.default.createElement(_NodeCaret2.default, {
expanded: expanded
}),
_react2.default.createElement(
'div',
{ style: _styles2.default.nodeText },
name
)
);
}
}]);
return Node;
}(_react.Component);
exports.default = Node;<|fim▁end|> | // const shouldUpdate = shallowCompare(this, nextProps, nextState) |
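For reference, the commented-out lifecycle hook in the row above would read roughly as below once re-enabled (a sketch assembled from the file's own react-addons-shallow-compare import and the completion comment, not part of the original file):

shouldComponentUpdate(nextProps, nextState) {
  // shallowCompare walks props and state one level deep and returns
  // true as soon as any top-level value differs, skipping re-renders otherwise
  const shouldUpdate = shallowCompare(this, nextProps, nextState);
  return shouldUpdate;
}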
<|file_name|>actions_path_test.go<|end_file_name|><|fim▁begin|>package horizon
import (
"testing"
. "github.com/smartystreets/goconvey/convey"
"github.com/stellar/horizon/test"
)
func TestPathActions(t *testing.T) {
test.LoadScenario("paths")
app := NewTestApp()
defer app.Close()
rh := NewRequestHelper(app)
Convey("Path Actions:", t, func() {
Convey("(no query args): GET /paths", func() {
w := rh.Get("/paths", test.RequestHelperNoop)
t.Log(w.Body.String())
So(w.Code, ShouldEqual, 400)
})
Convey("(happy path): GET /paths?{all args}", func() {
qs := "?destination_account=GAEDTJ4PPEFVW5XV2S7LUXBEHNQMX5Q2GM562RJGOQG7GVCE5H3HIB4V" +
"&source_account=GARSFJNXJIHO6ULUBK3DBYKVSIZE7SC72S5DYBCHU7DKL22UXKVD7MXP" +
"&destination_asset_type=credit_alphanum4" +
"&destination_asset_code=EUR" +
"&destination_asset_issuer=GDSBCQO34HWPGUGQSP3QBFEXVTSR2PW46UIGTHVWGWJGQKH3AFNHXHXN" +<|fim▁hole|>
w := rh.Get("/paths"+qs, test.RequestHelperNoop)
So(w.Code, ShouldEqual, 200)
t.Log(qs)
t.Log(w.Body.String())
So(w.Body, ShouldBePageOf, 3)
})
})
}<|fim▁end|> | "&destination_amount=10" |
<|file_name|>killmail.go<|end_file_name|><|fim▁begin|>package hammer
import (
"context"
"log"
"github.com/antihax/evedata/internal/datapackages"
)
func init() {
registerConsumer("killmail", killmailConsumer)
}
func killmailConsumer(s *Hammer, parameter interface{}) {
parameters := parameter.([]interface{})
hash := parameters[0].(string)
id := int32(parameters[1].(int))
known := s.inQueue.CheckWorkCompleted("evedata_known_kills", id)
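	// Deduplicate: killmails already recorded under "evedata_known_kills" are
	// skipped; SetWorkCompleted below marks this id once the ESI fetch succeeds.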
if known {<|fim▁hole|> }
kill, _, err := s.esi.ESI.KillmailsApi.GetKillmailsKillmailIdKillmailHash(context.Background(), hash, id, nil)
if err != nil {
log.Println(err)
return
}
s.inQueue.SetWorkCompleted("evedata_known_kills", id)
if err != nil {
log.Println(err)
return
}
// Send out the result, but ignore DUST stuff.
if kill.Victim.ShipTypeId < 65535 {
err = s.QueueResult(&datapackages.Killmail{Hash: hash, Kill: kill}, "killmail")
if err != nil {
log.Println(err)
return
}
}
err = s.AddCharacter(kill.Victim.CharacterId)
if err != nil {
log.Println(err)
return
}
err = s.AddAlliance(kill.Victim.AllianceId)
if err != nil {
log.Println(err)
return
}
err = s.AddCorporation(kill.Victim.CorporationId)
if err != nil {
log.Println(err)
return
}
for _, a := range kill.Attackers {
err = s.AddCharacter(a.CharacterId)
if err != nil {
log.Println(err)
return
}
err = s.AddAlliance(a.AllianceId)
if err != nil {
log.Println(err)
return
}
err = s.AddCorporation(a.CorporationId)
if err != nil {
log.Println(err)
return
}
}
}<|fim▁end|> | return |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for sticky-cluster 0.3
// Project: https://github.com/uqee/sticky-cluster
// Definitions by: Austin Turner <https://github.com/paustint>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3
/// <reference types="node"/>
import * as http from 'http';
declare namespace stickyCluster {
type InitializeFn = (callback: Callback) => void;
type Callback = (server: http.Server) => void;
interface Options {<|fim▁hole|> debug?: boolean;
prefix?: string;
env?: (index: number) => { stickycluster_worker_index: number };
hardShutdownDelay?: number;
errorHandler?: (err: any) => void;
}
}
declare function stickyCluster(callback: stickyCluster.InitializeFn, options?: stickyCluster.Options): void;
export = stickyCluster;<|fim▁end|> | concurrency?: number;
port?: number; |
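A minimal usage sketch implied by these typings (the handler body, concurrency, and port are illustrative assumptions, not values from the package):

import stickyCluster = require('sticky-cluster');
import * as http from 'http';

// Fork workers and keep each client pinned to one worker via the sticky balancer.
stickyCluster(
  (callback) => callback(http.createServer((req, res) => res.end('ok'))),
  { concurrency: 4, port: 3000 }
);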
<|file_name|>thread_pool.rs<|end_file_name|><|fim▁begin|>// Copyright 2020, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <https://opensource.org/licenses/MIT>
use glib_sys;
use translate::*;
use futures_channel::oneshot;
use std::future::Future;
use std::ptr;
<|fim▁hole|>unsafe impl Send for ThreadPool {}
unsafe impl Sync for ThreadPool {}
impl ThreadPool {
pub fn new_shared(max_threads: Option<u32>) -> Result<Self, ::Error> {
unsafe {
let mut err = ptr::null_mut();
let pool = glib_sys::g_thread_pool_new(
Some(spawn_func),
ptr::null_mut(),
max_threads.map(|v| v as i32).unwrap_or(-1),
glib_sys::GFALSE,
&mut err,
);
if pool.is_null() {
Err(from_glib_full(err))
} else {
Ok(ThreadPool(ptr::NonNull::new_unchecked(pool)))
}
}
}
pub fn new_exclusive(max_threads: u32) -> Result<Self, ::Error> {
unsafe {
let mut err = ptr::null_mut();
let pool = glib_sys::g_thread_pool_new(
Some(spawn_func),
ptr::null_mut(),
max_threads as i32,
glib_sys::GTRUE,
&mut err,
);
if pool.is_null() {
Err(from_glib_full(err))
} else {
Ok(ThreadPool(ptr::NonNull::new_unchecked(pool)))
}
}
}
pub fn push<F: FnOnce() + Send + 'static>(&self, func: F) -> Result<(), ::Error> {
unsafe {
let func: Box<dyn FnOnce() + Send + 'static> = Box::new(func);
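            // A Box<dyn FnOnce()> is a fat pointer (data + vtable), so it is boxed
            // again below to obtain a thin *mut that fits into a single C gpointer;
            // spawn_func unwraps both layers before invoking the closure.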
let func = Box::new(func);
let mut err = ptr::null_mut();
let func = Box::into_raw(func);
let ret: bool = from_glib(glib_sys::g_thread_pool_push(
self.0.as_ptr(),
func as *mut _,
&mut err,
));
if ret {
Ok(())
} else {
let _ = Box::from_raw(func);
Err(from_glib_full(err))
}
}
}
pub fn push_future<T: Send + 'static, F: FnOnce() -> T + Send + 'static>(
&self,
func: F,
) -> Result<impl Future<Output = T>, ::Error> {
use futures_util::future::FutureExt;
let (sender, receiver) = oneshot::channel();
self.push(move || {
let _ = sender.send(func());
})?;
Ok(receiver.map(|res| res.expect("Dropped before executing")))
}
pub fn set_max_threads(&self, max_threads: Option<u32>) -> Result<(), ::Error> {
unsafe {
let mut err = ptr::null_mut();
let ret: bool = from_glib(glib_sys::g_thread_pool_set_max_threads(
self.0.as_ptr(),
max_threads.map(|v| v as i32).unwrap_or(-1),
&mut err,
));
if ret {
Ok(())
} else {
Err(from_glib_full(err))
}
}
}
pub fn get_max_threads(&self) -> Option<u32> {
unsafe {
let max_threads = glib_sys::g_thread_pool_get_max_threads(self.0.as_ptr());
if max_threads == -1 {
None
} else {
Some(max_threads as u32)
}
}
}
pub fn get_num_threads(&self) -> u32 {
unsafe { glib_sys::g_thread_pool_get_num_threads(self.0.as_ptr()) }
}
pub fn get_unprocessed(&self) -> u32 {
unsafe { glib_sys::g_thread_pool_unprocessed(self.0.as_ptr()) }
}
pub fn set_max_unused_threads(max_threads: Option<u32>) {
unsafe {
glib_sys::g_thread_pool_set_max_unused_threads(
max_threads.map(|v| v as i32).unwrap_or(-1),
)
}
}
pub fn get_max_unused_threads() -> Option<u32> {
unsafe {
let max_unused_threads = glib_sys::g_thread_pool_get_max_unused_threads();
if max_unused_threads == -1 {
None
} else {
Some(max_unused_threads as u32)
}
}
}
pub fn get_num_unused_threads() -> u32 {
unsafe { glib_sys::g_thread_pool_get_num_unused_threads() }
}
pub fn stop_unused_threads() {
unsafe {
glib_sys::g_thread_pool_stop_unused_threads();
}
}
pub fn set_max_idle_time(max_idle_time: u32) {
unsafe { glib_sys::g_thread_pool_set_max_idle_time(max_idle_time) }
}
pub fn get_max_idle_time() -> u32 {
unsafe { glib_sys::g_thread_pool_get_max_idle_time() }
}
}
impl Drop for ThreadPool {
fn drop(&mut self) {
unsafe {
glib_sys::g_thread_pool_free(self.0.as_ptr(), glib_sys::GFALSE, glib_sys::GTRUE);
}
}
}
unsafe extern "C" fn spawn_func(func: glib_sys::gpointer, _data: glib_sys::gpointer) {
let func: Box<Box<dyn FnOnce()>> = Box::from_raw(func as *mut _);
func()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_push() {
use std::sync::mpsc;
let p = ThreadPool::new_exclusive(1).unwrap();
let (sender, receiver) = mpsc::channel();
p.push(move || {
sender.send(true).unwrap();
})
.unwrap();
assert_eq!(receiver.recv(), Ok(true));
}
#[test]
fn test_push_future() {
let c = ::MainContext::new();
let p = ThreadPool::new_shared(None).unwrap();
let fut = p.push_future(|| true).unwrap();
let res = c.block_on(fut);
assert!(res);
}
}<|fim▁end|> | #[derive(Debug)]
pub struct ThreadPool(ptr::NonNull<glib_sys::GThreadPool>);
|
<|file_name|>HeapLongBuffer.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2014 The Android Open Source Project
* Copyright (c) 2000, 2008, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.nio;
/**
* A read/write HeapLongBuffer.
*/
class HeapLongBuffer
extends LongBuffer {
// For speed these fields are actually declared in X-Buffer;
// these declarations are here as documentation
/*
protected final long[] hb;
protected final int offset;
*/
HeapLongBuffer(int cap, int lim) { // package-private
this(cap, lim, false);
}
HeapLongBuffer(int cap, int lim, boolean isReadOnly) { // package-private
super(-1, 0, lim, cap, new long[cap], 0);
this.isReadOnly = isReadOnly;
}
HeapLongBuffer(long[] buf, int off, int len) { // package-private
this(buf, off, len, false);
}
HeapLongBuffer(long[] buf, int off, int len, boolean isReadOnly) { // package-private
super(-1, off, off + len, buf.length, buf, 0);
this.isReadOnly = isReadOnly;
}
protected HeapLongBuffer(long[] buf,
int mark, int pos, int lim, int cap,
int off) {
this(buf, mark, pos, lim, cap, off, false);
}
protected HeapLongBuffer(long[] buf,
int mark, int pos, int lim, int cap,
int off, boolean isReadOnly) {
super(mark, pos, lim, cap, buf, off);
this.isReadOnly = isReadOnly;
}<|fim▁hole|> return new HeapLongBuffer(hb,
-1,
0,
this.remaining(),
this.remaining(),
this.position() + offset,
isReadOnly);
}
public LongBuffer duplicate() {
return new HeapLongBuffer(hb,
this.markValue(),
this.position(),
this.limit(),
this.capacity(),
offset,
isReadOnly);
}
public LongBuffer asReadOnlyBuffer() {
return new HeapLongBuffer(hb,
this.markValue(),
this.position(),
this.limit(),
this.capacity(),
offset, true);
}
protected int ix(int i) {
return i + offset;
}
public long get() {
return hb[ix(nextGetIndex())];
}
public long get(int i) {
return hb[ix(checkIndex(i))];
}
public LongBuffer get(long[] dst, int offset, int length) {
checkBounds(offset, length, dst.length);
if (length > remaining())
throw new BufferUnderflowException();
System.arraycopy(hb, ix(position()), dst, offset, length);
position(position() + length);
return this;
}
public boolean isDirect() {
return false;
}
public boolean isReadOnly() {
return isReadOnly;
}
public LongBuffer put(long x) {
if (isReadOnly) {
throw new ReadOnlyBufferException();
}
hb[ix(nextPutIndex())] = x;
return this;
}
public LongBuffer put(int i, long x) {
if (isReadOnly) {
throw new ReadOnlyBufferException();
}
hb[ix(checkIndex(i))] = x;
return this;
}
public LongBuffer put(long[] src, int offset, int length) {
if (isReadOnly) {
throw new ReadOnlyBufferException();
}
checkBounds(offset, length, src.length);
if (length > remaining())
throw new BufferOverflowException();
System.arraycopy(src, offset, hb, ix(position()), length);
position(position() + length);
return this;
}
public LongBuffer put(LongBuffer src) {
if (isReadOnly) {
throw new ReadOnlyBufferException();
}
if (src instanceof HeapLongBuffer) {
if (src == this)
throw new IllegalArgumentException();
HeapLongBuffer sb = (HeapLongBuffer) src;
int n = sb.remaining();
if (n > remaining())
throw new BufferOverflowException();
System.arraycopy(sb.hb, sb.ix(sb.position()),
hb, ix(position()), n);
sb.position(sb.position() + n);
position(position() + n);
} else if (src.isDirect()) {
int n = src.remaining();
if (n > remaining())
throw new BufferOverflowException();
src.get(hb, ix(position()), n);
position(position() + n);
} else {
super.put(src);
}
return this;
}
public LongBuffer compact() {
if (isReadOnly) {
throw new ReadOnlyBufferException();
}
System.arraycopy(hb, ix(position()), hb, ix(0), remaining());
position(remaining());
limit(capacity());
discardMark();
return this;
}
public ByteOrder order() {
return ByteOrder.nativeOrder();
}
}<|fim▁end|> |
public LongBuffer slice() { |
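The slice/duplicate implementations in this row share one backing array while keeping independent cursors; a small sketch of the observable difference (LongBuffer.allocate is the standard java.nio factory, values are arbitrary):

LongBuffer buf = LongBuffer.allocate(4);
buf.put(0, 42L).position(2);
LongBuffer dup = buf.duplicate(); // same window, independent position/limit/mark
LongBuffer sub = buf.slice();     // starts at buf's current position, so sub index 0 is buf index 2
sub.put(0, 7L);                   // visible via buf.get(2) because the long[] is shared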
<|file_name|>package.js<|end_file_name|><|fim▁begin|>require("../pc.v0");
require("util").puts(JSON.stringify({
"name": "pc",
"version": pc.version,
"description": "property creation for reusable d3.js code.",
"keywords": ["d3", "visualization"],
"homepage": "http://milroc.github.com/pc/",
"author": {"name": "Miles McCrocklin", "url": "http://www.milesmccrocklin.com" },
"repository": {"type": "git", "url": "http://github.com/milroc/pc.git"},
"devDependencies": {
"uglify-js": "1.2.6",<|fim▁hole|>}, null, 2));<|fim▁end|> | "vows": "0.6.0"
} |
<|file_name|>cmediaserverdelegate.cpp<|end_file_name|><|fim▁begin|>#include <vector>
#include <fstream>
#include <logger.h>
#include <iupnpdevicedelegate.h>
#include <cupnpservice.h>
#include <crapidxmlhelper.h>
#include "cmediaserverdelegate.h"
#define XSTR(x) #x
#define STR(x) XSTR(x)
#define UPNP_MEDIA_SERVER_DEVICE_TYPE "urn:schemas-upnp-org:device:MediaServer:1"
#define UPNP_MEDIA_SERVER_SERVICE_CDS "urn:schemas-upnp-org:service:ContentDirectory:1"
#define UPNP_MEDIA_SERVER_SERVICE_ID "urn:upnp-org:serviceId:ContentDirectory"
#define RESOURCE_MEDIA_SERVER_CDS_PATH STR(RESOURCE_PATH)"/resources/mediaServerCDS.xml"
#define MEDIA_SERVER_CDS_PATH "/service/mediaServerCDS.xml"
#define MEDIA_SERVER_CDC_PATH "/control/contentdirectory.xml"
//#define RESOURCE_MEDIA_SERVER_ROOT STR(RESOURCE_PATH)"/resources/mediaServerRoot.xml"
static std::map<std::string, CUPnPService *> l_serviceList;
CMediaServerDelegate::CMediaServerDelegate(const std::string &uuid,
const std::string &friendlyName,
const std::string &manufacturer,
const std::string &manufacturerUrl)
: m_uuid(uuid),
m_friendlyName(friendlyName),
m_manufacturer(manufacturer),
m_manufacturerUrl(manufacturerUrl)
{
registerServices();
}
CMediaServerDelegate::~CMediaServerDelegate()
{
auto service_it = l_serviceList.begin();
for(;service_it != l_serviceList.end(); service_it++)
{
delete service_it->second;
}
l_serviceList.clear();
}
const char *CMediaServerDelegate::getDeviceType() const
{
return UPNP_MEDIA_SERVER_DEVICE_TYPE;
}
const char *CMediaServerDelegate::getFriendlyName() const
{
return m_friendlyName.data();
}
const char *CMediaServerDelegate::getManufacturer() const
{
return m_manufacturer.data();
}
const char *CMediaServerDelegate::getManufacturerUrl() const
{
return m_manufacturerUrl.data();
}
const char *CMediaServerDelegate::getUuid() const
{
return m_uuid.data();
}
bool CMediaServerDelegate::onAction(const CUPnPAction &action)
{
return false;
}
std::map<std::string, CUPnPService *> CMediaServerDelegate::getServiceList() const
{
return l_serviceList;
}
bool CMediaServerDelegate::addService(CUPnPService *service)
{
if(l_serviceList.find(service->getType()) == l_serviceList.end())
{
l_serviceList.insert(std::pair<std::string, CUPnPService *>(service->getType(), service));
return true;
}
return false;
}
void CMediaServerDelegate::registerServices()
{
// There are three services in the MediaServer:1
// ContentDirectory:1.0 (required)
// ConnectionManager:1.0 (required)
// AVTransport:1.0 (optional)
registerService(UPNP_MEDIA_SERVER_SERVICE_CDS,
UPNP_MEDIA_SERVER_SERVICE_ID,
MEDIA_SERVER_CDS_PATH,
RESOURCE_MEDIA_SERVER_CDS_PATH,
MEDIA_SERVER_CDC_PATH);
//registerConnectionManager();
//AVTransport();
}
bool CMediaServerDelegate::registerService(const std::string &type,
const std::string &id,
const std::string &scpdServerPath,
const std::string &descrXmlPath,
const std::string &controlPath)
{
std::string xmlContent;
if(loadFile(descrXmlPath, xmlContent))
{
try
{
CUPnPService *service = CUPnPService::create(&(xmlContent)[0]);
if(service)
{
service->setType(type);
service->setId(id);
service->setSCPDPath(scpdServerPath);
service->setControlPath(controlPath);
return addService(service);
}
else
{
LOGGER_ERROR("Error while parsing device services.");
}
}
catch(const rapidxml::parse_error &err)
{
LOGGER_ERROR("XML parse error. what='" << err.what() << "'");
}
}
else<|fim▁hole|>
return false;
}
bool CMediaServerDelegate::loadFile(const std::string &filePath,
std::string &fileContent)
{
std::ifstream istream(filePath);
if(istream.is_open())
{
istream.seekg(0, std::ios::end);
size_t strSize = (1 + istream.tellg());
fileContent.reserve(strSize);
istream.seekg(0, std::ios::beg);
fileContent.assign(std::istreambuf_iterator<char>(istream),
std::istreambuf_iterator<char>());
fileContent[strSize-1] = '\0';
return true;
}
return false;
}<|fim▁end|> | {
LOGGER_ERROR("Error reading file. descrXmlPath=" << descrXmlPath);
} |
<|file_name|>CCashDbWorkerTH.hpp<|end_file_name|><|fim▁begin|>// This file was auto-generated by a plugin for IDA Pro. Generated code is for x64 only. Please don't change it manually.
#pragma once
#include <common/common.h>
#include <CashDbWorker.hpp>
#include <_param_cash_update.hpp>
START_ATF_NAMESPACE
struct CCashDbWorkerTH : CashDbWorker
{
public:
CCashDbWorkerTH();
void ctor_CCashDbWorkerTH();
void GetUseCashQueryStr(struct _param_cash_update* rParam, int nIdx, char* wszQuery, uint64_t tBufferSize);
~CCashDbWorkerTH();
void dtor_CCashDbWorkerTH();
};<|fim▁hole|><|fim▁end|> | END_ATF_NAMESPACE |
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
{
'name': 'Import OFX Bank Statement',
'category': 'Banking addons',
'version': '8.0.1.0.1',
'license': 'AGPL-3',
'author': 'OpenERP SA,'
'Odoo Community Association (OCA)',
'website': 'https://github.com/OCA/bank-statement-import',<|fim▁hole|> 'depends': [
'account_bank_statement_import'
],
'demo': [
'demo/demo_data.xml',
],
'external_dependencies': {
'python': ['ofxparse'],
},
'auto_install': False,
'installable': True,
}<|fim▁end|> | |
<|file_name|>pca9543a.py<|end_file_name|><|fim▁begin|>"""
Created on 19 Nov 2020
@author: Bruno Beloff ([email protected])
Two-Channel I2C-Bus Switch With Interrupt Logic and Reset
https://www.ti.com/product/PCA9543A
"""
<|fim▁hole|>
# --------------------------------------------------------------------------------------------------------------------
class PCA9543A(object):
"""
classdocs
"""
___I2C_ADDR = 0x70
# ----------------------------------------------------------------------------------------------------------------
def __init__(self):
"""
Constructor
"""
self.__addr = self.___I2C_ADDR
# ----------------------------------------------------------------------------------------------------------------
def enable(self, ch0, ch1):
ch0_en = 0x01 if ch0 else 0x00
ch1_en = 0x02 if ch1 else 0x00
ctrl = ch1_en | ch0_en
try:
I2C.Sensors.start_tx(self.__addr)
I2C.Sensors.write(ctrl)
finally:
I2C.Sensors.end_tx()
def read(self):
try:
I2C.Sensors.start_tx(self.__addr)
ctrl = I2C.Sensors.read(1)
finally:
I2C.Sensors.end_tx()
return ctrl
def reset(self):
self.enable(False, False)
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
try:
ctrl = "0x%02x" % self.read()
except OSError:
ctrl = None
return "PCA9543A:{addr:0x%02x, ctrl:%s}" % (self.__addr, ctrl)<|fim▁end|> | from scs_host.bus.i2c import I2C |
<|file_name|>lorem.ts<|end_file_name|><|fim▁begin|>import { LoremIpsum } from 'lorem-ipsum';
/**<|fim▁hole|> */
export const lorem = new LoremIpsum({
sentencesPerParagraph: {
max: 4,
min: 2,
},
wordsPerSentence: {
max: 16,
min: 4,
},
});<|fim▁end|> | * Method for generating placeholder text |
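A usage sketch for the generator configured above (generateWords, generateSentences and generateParagraphs are the lorem-ipsum package's methods; the counts are arbitrary):

const title = lorem.generateWords(3);
const blurb = lorem.generateSentences(2);
const body = lorem.generateParagraphs(1);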
<|file_name|>allprimes.py<|end_file_name|><|fim▁begin|># Prints all prime numbers in a given range
__author__ = "Dilipbobby"
<|fim▁hole|>lower = int(input("Enter lower range: "))
upper = int(input("Enter upper range: "))
# Condition to print all prime numbers between the given limits
for num in range(lower,upper + 1):
if num > 1:
for i in range(2,num):
if (num % i) == 0:
break
else:
print(num)<|fim▁end|> |
#Take the input from the user: |
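The row above tries every divisor from 2 to num-1; a common refinement, shown here as a separate sketch rather than a change to the original file, stops at the integer square root, since any composite n has a factor no larger than sqrt(n):

import math

def is_prime(n):
    if n < 2:
        return False
    for i in range(2, math.isqrt(n) + 1):  # math.isqrt: integer square root (Python 3.8+)
        if n % i == 0:
            return False
    return True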
<|file_name|>ptm_voronoi_cell.cpp<|end_file_name|><|fim▁begin|>// clang-format off
/*
Voro++ Copyright (c) 2008, The Regents of the University of California, through
Lawrence Berkeley National Laboratory (subject to receipt of any required
approvals from the U.S. Dept. of Energy). All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
(1) Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
(2) Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
(3) Neither the name of the University of California, Lawrence Berkeley
National Laboratory, U.S. Dept. of Energy nor the names of its contributors may
be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You are under no obligation whatsoever to provide any bug fixes, patches, or
upgrades to the features, functionality or performance of the source code
("Enhancements") to anyone; however, if you choose to make your Enhancements
available either publicly, or directly to Lawrence Berkeley National
Laboratory, without imposing a separate written license agreement for such
Enhancements, then you hereby grant the following license: a non-exclusive,
royalty-free perpetual license to install, use, modify, prepare derivative
works, incorporate into other computer software, distribute, and sublicense
such enhancements or derivative works thereof, in binary and source code form.
*/
// Voro++, a 3D cell-based Voronoi library
//
// Author : Chris H. Rycroft (LBL / UC Berkeley)
// Email : [email protected]
// Date : August 30th 2011
//
// Modified by PM Larsen for use in Polyhedral Template Matching
/** \file cell.cc
* \brief Function implementations for the voronoicell and related classes. */
#include <cmath>
#include <cstdio>
#include <cstdlib>
#include "ptm_voronoi_config.h"
#include "ptm_voronoi_cell.h"
namespace ptm_voro {
inline void voro_fatal_error(const char *p,int status) {
fprintf(stderr,"voro++: %s\n",p);
exit(status);
//return -1;//status;
}
/** Constructs a Voronoi cell and sets up the initial memory. */
voronoicell_base::voronoicell_base() :
current_vertices(init_vertices), current_vertex_order(init_vertex_order),
current_delete_size(init_delete_size), current_delete2_size(init_delete2_size),
ed(new int*[current_vertices]), nu(new int[current_vertices]),
pts(new double[3*current_vertices]), mem(new int[current_vertex_order]),
mec(new int[current_vertex_order]), mep(new int*[current_vertex_order]),
ds(new int[current_delete_size]), stacke(ds+current_delete_size),
ds2(new int[current_delete2_size]), stacke2(ds2+current_delete2_size),
current_marginal(init_marginal), marg(new int[current_marginal]) {
int i;
for (i=0;i<3;i++) {
mem[i]=init_n_vertices;mec[i]=0;
mep[i]=new int[init_n_vertices*((i<<1)+1)];
}
mem[3]=init_3_vertices;mec[3]=0;
mep[3]=new int[init_3_vertices*7];
for (i=4;i<current_vertex_order;i++) {
mem[i]=init_n_vertices;mec[i]=0;
mep[i]=new int[init_n_vertices*((i<<1)+1)];
}
}
/** The voronoicell destructor deallocates all the dynamic memory. */
voronoicell_base::~voronoicell_base() {
for (int i=current_vertex_order-1;i>=0;i--) if (mem[i]>0) delete [] mep[i];
delete [] marg;
delete [] ds2;delete [] ds;
delete [] mep;delete [] mec;
delete [] mem;delete [] pts;
delete [] nu;delete [] ed;
}
/** Ensures that enough memory is allocated prior to carrying out a copy.
* \param[in] vc a reference to the specialized version of the calling class.
* \param[in] vb a pointered to the class to be copied. */
template<class vc_class>
void voronoicell_base::check_memory_for_copy(vc_class &vc,voronoicell_base* vb) {
while (current_vertex_order<vb->current_vertex_order) add_memory_vorder(vc);
for (int i=0;i<current_vertex_order;i++) while (mem[i]<vb->mec[i]) add_memory(vc,i,ds2);
while (current_vertices<vb->p) add_memory_vertices(vc);
}
/** Increases the memory storage for a particular vertex order, by increasing
* the size of the of the corresponding mep array. If the arrays already exist,
* their size is doubled; if they don't exist, then new ones of size
* init_n_vertices are allocated. The routine also ensures that the pointers in
* the ed array are updated, by making use of the back pointers. For the cases
* where the back pointer has been temporarily overwritten in the marginal
* vertex code, the auxiliary delete stack is scanned to find out how to update
* the ed value. If the template has been instantiated with the neighbor
* tracking turned on, then the routine also reallocates the corresponding mne
* array.
* \param[in] i the order of the vertex memory to be increased. */
template<class vc_class>
void voronoicell_base::add_memory(vc_class &vc,int i,int *stackp2) {
int s=(i<<1)+1;
if (mem[i]==0) {
vc.n_allocate(i,init_n_vertices);
mep[i]=new int[init_n_vertices*s];
mem[i]=init_n_vertices;
#if VOROPP_VERBOSE >=2
fprintf(stderr,"Order %d vertex memory created\n",i);
#endif
} else {
int j=0,k,*l;
mem[i]<<=1;
if (mem[i]>max_n_vertices) voro_fatal_error("Point memory allocation exceeded absolute maximum",VOROPP_MEMORY_ERROR);
#if VOROPP_VERBOSE >=2
fprintf(stderr,"Order %d vertex memory scaled up to %d\n",i,mem[i]);
#endif
l=new int[s*mem[i]];
int m=0;
vc.n_allocate_aux1(i);
while (j<s*mec[i]) {
k=mep[i][j+(i<<1)];
if (k>=0) {
ed[k]=l+j;
vc.n_set_to_aux1_offset(k,m);
} else {
int *dsp;
for (dsp=ds2;dsp<stackp2;dsp++) {
if (ed[*dsp]==mep[i]+j) {
ed[*dsp]=l+j;
vc.n_set_to_aux1_offset(*dsp,m);
break;
}
}
if (dsp==stackp2) voro_fatal_error("Couldn't relocate dangling pointer",VOROPP_INTERNAL_ERROR);
#if VOROPP_VERBOSE >=3
fputs("Relocated dangling pointer",stderr);
#endif
}
for (k=0;k<s;k++,j++) l[j]=mep[i][j];
for (k=0;k<i;k++,m++) vc.n_copy_to_aux1(i,m);
}
delete [] mep[i];
mep[i]=l;
vc.n_switch_to_aux1(i);
}
}
/** Doubles the maximum number of vertices allowed, by reallocating the ed, nu,
* and pts arrays. If the allocation exceeds the absolute maximum set in
* max_vertices, then the routine exits with a fatal error. If the template has
* been instantiated with the neighbor tracking turned on, then the routine
* also reallocates the ne array. */
template<class vc_class>
void voronoicell_base::add_memory_vertices(vc_class &vc) {
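	// NOTE: in this PTM copy, running out of vertex storage is treated as fatal;
	// the printf/exit pair below makes the reallocation code after it unreachable.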
printf("nope: %d\n", current_vertices);
exit(3);
int i=(current_vertices<<1),j,**pp,*pnu;
if (i>max_vertices) voro_fatal_error("Vertex memory allocation exceeded absolute maximum",VOROPP_MEMORY_ERROR);
#if VOROPP_VERBOSE >=2
fprintf(stderr,"Vertex memory scaled up to %d\n",i);
#endif
double *ppts;
pp=new int*[i];
for (j=0;j<current_vertices;j++) pp[j]=ed[j];
delete [] ed;ed=pp;
vc.n_add_memory_vertices(i);
pnu=new int[i];
for (j=0;j<current_vertices;j++) pnu[j]=nu[j];
delete [] nu;nu=pnu;
ppts=new double[3*i];
for (j=0;j<3*current_vertices;j++) ppts[j]=pts[j];
delete [] pts;pts=ppts;
current_vertices=i;
}
/** Doubles the maximum allowed vertex order, by reallocating mem, mep, and mec
* arrays. If the allocation exceeds the absolute maximum set in
* max_vertex_order, then the routine causes a fatal error. If the template has
* been instantiated with the neighbor tracking turned on, then the routine
* also reallocates the mne array. */
template<class vc_class>
void voronoicell_base::add_memory_vorder(vc_class &vc) {
int i=(current_vertex_order<<1),j,*p1,**p2;
if (i>max_vertex_order) voro_fatal_error("Vertex order memory allocation exceeded absolute maximum",VOROPP_MEMORY_ERROR);
#if VOROPP_VERBOSE >=2
fprintf(stderr,"Vertex order memory scaled up to %d\n",i);
#endif
p1=new int[i];
for (j=0;j<current_vertex_order;j++) p1[j]=mem[j];
while (j<i) p1[j++]=0;
delete [] mem;mem=p1;
p2=new int*[i];
for (j=0;j<current_vertex_order;j++) p2[j]=mep[j];
delete [] mep;mep=p2;
p1=new int[i];
for (j=0;j<current_vertex_order;j++) p1[j]=mec[j];
while (j<i) p1[j++]=0;<|fim▁hole|> delete [] mec;mec=p1;
vc.n_add_memory_vorder(i);
current_vertex_order=i;
}
/** Doubles the size allocation of the main delete stack. If the allocation
* exceeds the absolute maximum set in max_delete_size, then routine causes a
* fatal error. */
void voronoicell_base::add_memory_ds(int *&stackp) {
current_delete_size<<=1;
if (current_delete_size>max_delete_size) voro_fatal_error("Delete stack 1 memory allocation exceeded absolute maximum",VOROPP_MEMORY_ERROR);
#if VOROPP_VERBOSE >=2
fprintf(stderr,"Delete stack 1 memory scaled up to %d\n",current_delete_size);
#endif
int *dsn=new int[current_delete_size],*dsnp=dsn,*dsp=ds;
while (dsp<stackp) *(dsnp++)=*(dsp++);
delete [] ds;ds=dsn;stackp=dsnp;
stacke=ds+current_delete_size;
}
/** Doubles the size allocation of the auxiliary delete stack. If the
* allocation exceeds the absolute maximum set in max_delete2_size, then the
* routine causes a fatal error. */
void voronoicell_base::add_memory_ds2(int *&stackp2) {
current_delete2_size<<=1;
if (current_delete2_size>max_delete2_size) voro_fatal_error("Delete stack 2 memory allocation exceeded absolute maximum",VOROPP_MEMORY_ERROR);
#if VOROPP_VERBOSE >=2
fprintf(stderr,"Delete stack 2 memory scaled up to %d\n",current_delete2_size);
#endif
int *dsn=new int[current_delete2_size],*dsnp=dsn,*dsp=ds2;
while (dsp<stackp2) *(dsnp++)=*(dsp++);
delete [] ds2;ds2=dsn;stackp2=dsnp;
stacke2=ds2+current_delete2_size;
}
/** Initializes a Voronoi cell as a rectangular box with the given dimensions.
* \param[in] (xmin,xmax) the minimum and maximum x coordinates.
* \param[in] (ymin,ymax) the minimum and maximum y coordinates.
* \param[in] (zmin,zmax) the minimum and maximum z coordinates. */
void voronoicell_base::init_base(double xmin,double xmax,double ymin,double ymax,double zmin,double zmax) {
for (int i=0;i<current_vertex_order;i++) mec[i]=0;
up=0;mec[3]=p=8;xmin*=2;xmax*=2;ymin*=2;ymax*=2;zmin*=2;zmax*=2;
*pts=xmin;pts[1]=ymin;pts[2]=zmin;
pts[3]=xmax;pts[4]=ymin;pts[5]=zmin;
pts[6]=xmin;pts[7]=ymax;pts[8]=zmin;
pts[9]=xmax;pts[10]=ymax;pts[11]=zmin;
pts[12]=xmin;pts[13]=ymin;pts[14]=zmax;
pts[15]=xmax;pts[16]=ymin;pts[17]=zmax;
pts[18]=xmin;pts[19]=ymax;pts[20]=zmax;
pts[21]=xmax;pts[22]=ymax;pts[23]=zmax;
int *q=mep[3];
*q=1;q[1]=4;q[2]=2;q[3]=2;q[4]=1;q[5]=0;q[6]=0;
q[7]=3;q[8]=5;q[9]=0;q[10]=2;q[11]=1;q[12]=0;q[13]=1;
q[14]=0;q[15]=6;q[16]=3;q[17]=2;q[18]=1;q[19]=0;q[20]=2;
q[21]=2;q[22]=7;q[23]=1;q[24]=2;q[25]=1;q[26]=0;q[27]=3;
q[28]=6;q[29]=0;q[30]=5;q[31]=2;q[32]=1;q[33]=0;q[34]=4;
q[35]=4;q[36]=1;q[37]=7;q[38]=2;q[39]=1;q[40]=0;q[41]=5;
q[42]=7;q[43]=2;q[44]=4;q[45]=2;q[46]=1;q[47]=0;q[48]=6;
q[49]=5;q[50]=3;q[51]=6;q[52]=2;q[53]=1;q[54]=0;q[55]=7;
*ed=q;ed[1]=q+7;ed[2]=q+14;ed[3]=q+21;
ed[4]=q+28;ed[5]=q+35;ed[6]=q+42;ed[7]=q+49;
*nu=nu[1]=nu[2]=nu[3]=nu[4]=nu[5]=nu[6]=nu[7]=3;
}
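// All eight corners above are stored at twice their geometric coordinates
// (note the xmin*=2 etc. at the top), so the bisector test in m_test can
// compare pts.(x,y,z) directly against rsq = x^2+y^2+z^2 without halving.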
/** Starting from a point within the current cutting plane, this routine attempts
* to find an edge to a point outside the cutting plane. This prevents the plane
* routine from incorrectly concluding that the cell is entirely within the cutting plane.
* \param[in] vc a reference to the specialized version of the calling class.
* \param[in,out] up */
template<class vc_class>
inline bool voronoicell_base::search_for_outside_edge(vc_class &vc,int &up) {
int i,lp,lw,*j(ds2),*stackp2(ds2);
double l;
*(stackp2++)=up;
while (j<stackp2) {
up=*(j++);
for (i=0;i<nu[up];i++) {
lp=ed[up][i];
lw=m_test(lp,l);
if (lw==-1) return true;
else if (lw==0) add_to_stack(vc,lp,stackp2);
}
}
return false;
}
/** Adds a point to the auxiliary delete stack if it is not already there.
* \param[in] vc a reference to the specialized version of the calling class.
* \param[in] lp the index of the point to add.
* \param[in,out] stackp2 a pointer to the end of the stack entries. */
template<class vc_class>
inline void voronoicell_base::add_to_stack(vc_class &vc,int lp,int *&stackp2) {
(void)vc;
for (int *k(ds2);k<stackp2;k++) if (*k==lp) return;
if (stackp2==stacke2) add_memory_ds2(stackp2);
*(stackp2++)=lp;
}
/** Cuts the Voronoi cell by a particle whose center is at a separation of
* (x,y,z) from the cell center. The value of rsq should be initially set to
* \f$x^2+y^2+z^2\f$.
* \param[in] vc a reference to the specialized version of the calling class.
* \param[in] (x,y,z) the normal vector to the plane.
* \param[in] rsq the distance along this vector of the plane.
* \param[in] p_id the plane ID (for neighbor tracking only).
* \return False if the plane cut deleted the cell entirely, true otherwise. */
template<class vc_class>
bool voronoicell_base::nplane(vc_class &vc,double x,double y,double z,double rsq,int p_id) {
int count=0,i,j,k,lp=up,cp,qp,rp,*stackp(ds),*stackp2(ds2),*dsp;
int us=0,ls=0,qs,iqs,cs,uw,qw,lw;
int *edp,*edd;
double u,l,r,q;bool complicated_setup=false,new_double_edge=false,double_edge=false;
// Initialize the safe testing routine
n_marg=0;px=x;py=y;pz=z;prsq=rsq;
// Test approximately sqrt(n)/4 points for their proximity to the plane
// and keep the one which is closest
uw=m_test(up,u);
// Starting from an initial guess, we now move from vertex to vertex,
// to try and find an edge which intersects the cutting plane,
// or a vertex which is on the plane
try {
if (uw==1) {
// The test point is inside the cutting plane.
us=0;
do {
lp=ed[up][us];
lw=m_test(lp,l);
if (l<u) break;
us++;
} while (us<nu[up]);
if (us==nu[up]) {
return false;
}
ls=ed[up][nu[up]+us];
while (lw==1) {
if (++count>=p) throw true;
u=l;up=lp;
for (us=0;us<ls;us++) {
lp=ed[up][us];
lw=m_test(lp,l);
if (l<u) break;
}
if (us==ls) {
us++;
while (us<nu[up]) {
lp=ed[up][us];
lw=m_test(lp,l);
if (l<u) break;
us++;
}
if (us==nu[up]) {
return false;
}
}
ls=ed[up][nu[up]+us];
}
// If the last point in the iteration is within the
// plane, we need to do the complicated setup
// routine. Otherwise, we use the regular iteration.
if (lw==0) {
up=lp;
complicated_setup=true;
} else complicated_setup=false;
} else if (uw==-1) {
us=0;
do {
qp=ed[up][us];
qw=m_test(qp,q);
if (u<q) break;
us++;
} while (us<nu[up]);
if (us==nu[up]) return true;
while (qw==-1) {
qs=ed[up][nu[up]+us];
if (++count>=p) throw true;
u=q;up=qp;
for (us=0;us<qs;us++) {
qp=ed[up][us];
qw=m_test(qp,q);
if (u<q) break;
}
if (us==qs) {
us++;
while (us<nu[up]) {
qp=ed[up][us];
qw=m_test(qp,q);
if (u<q) break;
us++;
}
if (us==nu[up]) return true;
}
}
if (qw==1) {
lp=up;ls=us;l=u;
up=qp;us=ed[lp][nu[lp]+ls];u=q;
complicated_setup=false;
} else {
up=qp;
complicated_setup=true;
}
} else {
// Our original test point was on the plane, so we
// automatically head for the complicated setup
// routine
complicated_setup=true;
}
}
catch(bool except) {
// This routine is a fall-back, in case floating point errors
// cause the usual search routine to fail. In the fall-back
// routine, we just test every edge to find one straddling
// the plane.
#if VOROPP_VERBOSE >=1
fputs("Bailed out of convex calculation\n",stderr);
#endif
qw=1;lw=0;
for (qp=0;qp<p;qp++) {
qw=m_test(qp,q);
if (qw==1) {
// The point is inside the cutting space. Now
// see if we can find a neighbor which isn't.
for (us=0;us<nu[qp];us++) {
lp=ed[qp][us];
if (lp<qp) {
lw=m_test(lp,l);
if (lw!=1) break;
}
}
if (us<nu[qp]) {
up=qp;
if (lw==0) {
complicated_setup=true;
} else {
complicated_setup=false;
u=q;
ls=ed[up][nu[up]+us];
}
break;
}
} else if (qw==-1) {
// The point is outside the cutting space. See
// if we can find a neighbor which isn't.
for (ls=0;ls<nu[qp];ls++) {
up=ed[qp][ls];
if (up<qp) {
uw=m_test(up,u);
if (uw!=-1) break;
}
}
if (ls<nu[qp]) {
if (uw==0) {
up=qp;
complicated_setup=true;
} else {
complicated_setup=false;
lp=qp;l=q;
us=ed[lp][nu[lp]+ls];
}
break;
}
} else {
// The point is in the plane, so we just
// proceed with the complicated setup routine
up=qp;
complicated_setup=true;
break;
}
}
if (qp==p) return qw==-1?true:false;
}
// We're about to add the first point of the new facet. In either
// routine, we have to add a point, so first check there's space for
// it.
if (p==current_vertices) add_memory_vertices(vc);
if (complicated_setup) {
// We want to be strict about reaching the conclusion that the
// cell is entirely within the cutting plane. It's not enough
// to find a vertex that has edges which are all inside or on
// the plane. If the vertex has neighbors that are also on the
// plane, we should check those too.
if (!search_for_outside_edge(vc,up)) return false;
// The search algorithm found a point which is on the cutting
// plane. We leave that point in place, and create a new one at
// the same location.
pts[3*p]=pts[3*up];
pts[3*p+1]=pts[3*up+1];
pts[3*p+2]=pts[3*up+2];
// Search for a collection of edges of the test vertex which
// are outside of the cutting space. Begin by testing the
// zeroth edge.
i=0;
lp=*ed[up];
lw=m_test(lp,l);
if (lw!=-1) {
// The first edge is either inside the cutting space,
// or lies within the cutting plane. Test the edges
// sequentially until we find one that is outside.
rp=lw;
do {
i++;
// If we reached the last edge with no luck
// then all of the vertices are inside
// or on the plane, so the cell is completely
// deleted
if (i==nu[up]) return false;
lp=ed[up][i];
lw=m_test(lp,l);
} while (lw!=-1);
j=i+1;
// We found an edge outside the cutting space. Keep
// moving through these edges until we find one that's
// inside or on the plane.
while (j<nu[up]) {
lp=ed[up][j];
lw=m_test(lp,l);
if (lw!=-1) break;
j++;
}
// Compute the number of edges for the new vertex. In
// general it will be the number of outside edges
// found, plus two. But we need to recognize the
// special case when all but one edge is outside, and
// the remaining one is on the plane. For that case we
// have to reduce the edge count by one to prevent
// doubling up.
if (j==nu[up]&&i==1&&rp==0) {
nu[p]=nu[up];
double_edge=true;
} else nu[p]=j-i+2;
k=1;
// Add memory for the new vertex if needed, and
// initialize
while (nu[p]>=current_vertex_order) add_memory_vorder(vc);
if (mec[nu[p]]==mem[nu[p]]) add_memory(vc,nu[p],stackp2);
vc.n_set_pointer(p,nu[p]);
ed[p]=mep[nu[p]]+((nu[p]<<1)+1)*mec[nu[p]]++;
ed[p][nu[p]<<1]=p;
// Copy the edges of the original vertex into the new
// one. Delete the edges of the original vertex, and
// update the relational table.
us=cycle_down(i,up);
while (i<j) {
qp=ed[up][i];
qs=ed[up][nu[up]+i];
vc.n_copy(p,k,up,i);
ed[p][k]=qp;
ed[p][nu[p]+k]=qs;
ed[qp][qs]=p;
ed[qp][nu[qp]+qs]=k;
ed[up][i]=-1;
i++;k++;
}
qs=i==nu[up]?0:i;
} else {
// In this case, the zeroth edge is outside the cutting
// plane. Begin by searching backwards from the last
// edge until we find an edge which isn't outside.
i=nu[up]-1;
lp=ed[up][i];
lw=m_test(lp,l);
while (lw==-1) {
i--;
// If i reaches zero, then we have a point in
// the plane all of whose edges are outside
// the cutting space, so we just exit
if (i==0) return true;
lp=ed[up][i];
lw=m_test(lp,l);
}
// Now search forwards from zero
j=1;
qp=ed[up][j];
qw=m_test(qp,q);
while (qw==-1) {
j++;
qp=ed[up][j];
qw=m_test(qp,l);
}
// Compute the number of edges for the new vertex. In
// general it will be the number of outside edges
// found, plus two. But we need to recognize the
// special case when all but one edge is outside, and
// the remaining one is on the plane. For that case we
// have to reduce the edge count by one to prevent
// doubling up.
if (i==j&&qw==0) {
double_edge=true;
nu[p]=nu[up];
} else {
nu[p]=nu[up]-i+j+1;
}
// Add memory to store the vertex if it doesn't exist
// already
k=1;
while (nu[p]>=current_vertex_order) add_memory_vorder(vc);
if (mec[nu[p]]==mem[nu[p]]) add_memory(vc,nu[p],stackp2);
// Copy the edges of the original vertex into the new
// one. Delete the edges of the original vertex, and
// update the relational table.
vc.n_set_pointer(p,nu[p]);
ed[p]=mep[nu[p]]+((nu[p]<<1)+1)*mec[nu[p]]++;
ed[p][nu[p]<<1]=p;
us=i++;
while (i<nu[up]) {
qp=ed[up][i];
qs=ed[up][nu[up]+i];
vc.n_copy(p,k,up,i);
ed[p][k]=qp;
ed[p][nu[p]+k]=qs;
ed[qp][qs]=p;
ed[qp][nu[qp]+qs]=k;
ed[up][i]=-1;
i++;k++;
}
i=0;
while (i<j) {
qp=ed[up][i];
qs=ed[up][nu[up]+i];
vc.n_copy(p,k,up,i);
ed[p][k]=qp;
ed[p][nu[p]+k]=qs;
ed[qp][qs]=p;
ed[qp][nu[qp]+qs]=k;
ed[up][i]=-1;
i++;k++;
}
qs=j;
}
if (!double_edge) {
vc.n_copy(p,k,up,qs);
vc.n_set(p,0,p_id);
} else vc.n_copy(p,0,up,qs);
// Add this point to the auxiliary delete stack
if (stackp2==stacke2) add_memory_ds2(stackp2);
*(stackp2++)=up;
// Look at the edges on either side of the group that was
// detected. We're going to commence facet computation by
// moving along one of them. We are going to end up coming back
// along the other one.
cs=k;
qp=up;q=u;
i=ed[up][us];
us=ed[up][nu[up]+us];
up=i;
ed[qp][nu[qp]<<1]=-p;
} else {
// The search algorithm found an intersected edge between the
// points lp and up. Create a new vertex between them which
// lies on the cutting plane. Since u and l differ by at least
// the tolerance, this division should never screw up.
if (stackp==stacke) add_memory_ds(stackp);
*(stackp++)=up;
r=u/(u-l);l=1-r;
pts[3*p]=pts[3*lp]*r+pts[3*up]*l;
pts[3*p+1]=pts[3*lp+1]*r+pts[3*up+1]*l;
pts[3*p+2]=pts[3*lp+2]*r+pts[3*up+2]*l;
// This point will always have three edges. Connect one of them
// to lp.
nu[p]=3;
if (mec[3]==mem[3]) add_memory(vc,3,stackp2);
vc.n_set_pointer(p,3);
vc.n_set(p,0,p_id);
vc.n_copy(p,1,up,us);
vc.n_copy(p,2,lp,ls);
ed[p]=mep[3]+7*mec[3]++;
ed[p][6]=p;
ed[up][us]=-1;
ed[lp][ls]=p;
ed[lp][nu[lp]+ls]=1;
ed[p][1]=lp;
ed[p][nu[p]+1]=ls;
cs=2;
// Set the direction to move in
qs=cycle_up(us,up);
qp=up;q=u;
}
// When the code reaches here, we have initialized the first point, and
// we have a direction for moving it to construct the rest of the facet
cp=p;rp=p;p++;
while (qp!=up||qs!=us) {
// We're currently tracing round an intersected facet. Keep
// moving around it until we find a point or edge which
// intersects the plane.
lp=ed[qp][qs];
lw=m_test(lp,l);
if (lw==1) {
// The point is still in the cutting space. Just add it
// to the delete stack and keep moving.
qs=cycle_up(ed[qp][nu[qp]+qs],lp);
qp=lp;
q=l;
if (stackp==stacke) add_memory_ds(stackp);
*(stackp++)=qp;
} else if (lw==-1) {
// The point is outside of the cutting space, so we've
// found an intersected edge. Introduce a regular point
// at the point of intersection. Connect it to the
// point we just tested. Also connect it to the previous
// new point in the facet we're constructing.
if (p==current_vertices) add_memory_vertices(vc);
r=q/(q-l);l=1-r;
pts[3*p]=pts[3*lp]*r+pts[3*qp]*l;
pts[3*p+1]=pts[3*lp+1]*r+pts[3*qp+1]*l;
pts[3*p+2]=pts[3*lp+2]*r+pts[3*qp+2]*l;
nu[p]=3;
if (mec[3]==mem[3]) add_memory(vc,3,stackp2);
ls=ed[qp][qs+nu[qp]];
vc.n_set_pointer(p,3);
vc.n_set(p,0,p_id);
vc.n_copy(p,1,qp,qs);
vc.n_copy(p,2,lp,ls);
ed[p]=mep[3]+7*mec[3]++;
*ed[p]=cp;
ed[p][1]=lp;
ed[p][3]=cs;
ed[p][4]=ls;
ed[p][6]=p;
ed[lp][ls]=p;
ed[lp][nu[lp]+ls]=1;
ed[cp][cs]=p;
ed[cp][nu[cp]+cs]=0;
ed[qp][qs]=-1;
qs=cycle_up(qs,qp);
cp=p++;
cs=2;
} else {
// We've found a point which is on the cutting plane.
// We're going to introduce a new point right here, but
// first we need to figure out the number of edges it
// has.
if (p==current_vertices) add_memory_vertices(vc);
// If the previous vertex detected a double edge, our
// new vertex will have one less edge.
k=double_edge?0:1;
qs=ed[qp][nu[qp]+qs];
qp=lp;
iqs=qs;
// Start testing the edges of the current point until
// we find one which isn't outside the cutting space
do {
k++;
qs=cycle_up(qs,qp);
lp=ed[qp][qs];
lw=m_test(lp,l);
} while (lw==-1);
// Now we need to find out whether this marginal vertex
// we are on has been visited before, because if that's
// the case, we need to add vertices to the existing
// new vertex, rather than creating a fresh one. We also
// need to figure out whether we're in a case where we
// might be creating a duplicate edge.
j=-ed[qp][nu[qp]<<1];
if (qp==up&&qs==us) {
// If we're heading into the final part of the
// new facet, then we never worry about the
// duplicate edge calculation.
new_double_edge=false;
if (j>0) k+=nu[j];
} else {
if (j>0) {
// This vertex was visited before, so
// count those vertices to the ones we
// already have.
k+=nu[j];
// The only time when we might make a
// duplicate edge is if the point we're
// going to move to next is also a
// marginal point, so test for that
// first.
if (lw==0) {
// Now see whether this marginal point
// has been visited before.
i=-ed[lp][nu[lp]<<1];
if (i>0) {
// Now see if the last edge of that other
// marginal point actually ends up here.
if (ed[i][nu[i]-1]==j) {
new_double_edge=true;
k-=1;
} else new_double_edge=false;
} else {
// That marginal point hasn't been visited
// before, so we probably don't have to worry
// about duplicate edges, except in the
// case when that's the way into the end
// of the facet, because that way always creates
// an edge.
if (j==rp&&lp==up&&ed[qp][nu[qp]+qs]==us) {
new_double_edge=true;
k-=1;
} else new_double_edge=false;
}
} else new_double_edge=false;
} else {
// The vertex hasn't been visited
// before, but let's see if it's
// marginal
if (lw==0) {
// If it is, we need to check
// for the case that it's a
// small branch, and that we're
// heading right back to where
// we came from
i=-ed[lp][nu[lp]<<1];
if (i==cp) {
new_double_edge=true;
k-=1;
} else new_double_edge=false;
} else new_double_edge=false;
}
}
// k now holds the number of edges of the new vertex
// we are forming. Add memory for it if it doesn't exist
// already.
while (k>=current_vertex_order) add_memory_vorder(vc);
if (mec[k]==mem[k]) add_memory(vc,k,stackp2);
// Now create a new vertex with order k, or augment
// the existing one
if (j>0) {
// If we're augmenting a vertex but we don't
// actually need any more edges, just skip this
// routine to avoid memory confusion
if (nu[j]!=k) {
// Allocate memory and copy the edges
// of the previous instance into it
vc.n_set_aux1(k);
edp=mep[k]+((k<<1)+1)*mec[k]++;
i=0;
while (i<nu[j]) {
vc.n_copy_aux1(j,i);
edp[i]=ed[j][i];
edp[k+i]=ed[j][nu[j]+i];
i++;
}
edp[k<<1]=j;
// Remove the previous instance with
// fewer vertices from the memory
// structure
edd=mep[nu[j]]+((nu[j]<<1)+1)*--mec[nu[j]];
if (edd!=ed[j]) {
for (lw=0;lw<=(nu[j]<<1);lw++) ed[j][lw]=edd[lw];
vc.n_set_aux2_copy(j,nu[j]);
vc.n_copy_pointer(edd[nu[j]<<1],j);
ed[edd[nu[j]<<1]]=ed[j];
}
vc.n_set_to_aux1(j);
ed[j]=edp;
} else i=nu[j];
} else {
// Allocate a new vertex of order k
vc.n_set_pointer(p,k);
ed[p]=mep[k]+((k<<1)+1)*mec[k]++;
ed[p][k<<1]=p;
if (stackp2==stacke2) add_memory_ds2(stackp2);
*(stackp2++)=qp;
pts[3*p]=pts[3*qp];
pts[3*p+1]=pts[3*qp+1];
pts[3*p+2]=pts[3*qp+2];
ed[qp][nu[qp]<<1]=-p;
j=p++;
i=0;
}
nu[j]=k;
// Unless the previous case was a double edge, connect
// the first available edge of the new vertex to the
// last one in the facet
if (!double_edge) {
ed[j][i]=cp;
ed[j][nu[j]+i]=cs;
vc.n_set(j,i,p_id);
ed[cp][cs]=j;
ed[cp][nu[cp]+cs]=i;
i++;
}
// Copy in the edges of the underlying vertex,
// and do one less if this was a double edge
qs=iqs;
while (i<(new_double_edge?k:k-1)) {
qs=cycle_up(qs,qp);
lp=ed[qp][qs];ls=ed[qp][nu[qp]+qs];
vc.n_copy(j,i,qp,qs);
ed[j][i]=lp;
ed[j][nu[j]+i]=ls;
ed[lp][ls]=j;
ed[lp][nu[lp]+ls]=i;
ed[qp][qs]=-1;
i++;
}
qs=cycle_up(qs,qp);
cs=i;
cp=j;
vc.n_copy(j,new_double_edge?0:cs,qp,qs);
// Update the double_edge flag, to pass it
// to the next instance of this routine
double_edge=new_double_edge;
}
}
// Connect the final created vertex to the initial one
ed[cp][cs]=rp;
*ed[rp]=cp;
ed[cp][nu[cp]+cs]=0;
ed[rp][nu[rp]]=cs;
// Delete points: first, remove any duplicates
dsp=ds;
while (dsp<stackp) {
j=*dsp;
if (ed[j][nu[j]]!=-1) {
ed[j][nu[j]]=-1;
dsp++;
} else *dsp=*(--stackp);
}
// Add the points in the auxiliary delete stack,
// and reset their back pointers
for (dsp=ds2;dsp<stackp2;dsp++) {
j=*dsp;
ed[j][nu[j]<<1]=j;
if (ed[j][nu[j]]!=-1) {
ed[j][nu[j]]=-1;
if (stackp==stacke) add_memory_ds(stackp);
*(stackp++)=j;
}
}
// Scan connections and add in extras
for (dsp=ds;dsp<stackp;dsp++) {
cp=*dsp;
for (edp=ed[cp];edp<ed[cp]+nu[cp];edp++) {
qp=*edp;
if (qp!=-1&&ed[qp][nu[qp]]!=-1) {
if (stackp==stacke) {
int dis=stackp-dsp;
add_memory_ds(stackp);
dsp=ds+dis;
}
*(stackp++)=qp;
ed[qp][nu[qp]]=-1;
}
}
}
up=0;
// Delete them from the array structure
while (stackp>ds) {
--p;
while (ed[p][nu[p]]==-1) {
j=nu[p];
edp=ed[p];edd=(mep[j]+((j<<1)+1)*--mec[j]);
while (edp<ed[p]+(j<<1)+1) *(edp++)=*(edd++);
vc.n_set_aux2_copy(p,j);
vc.n_copy_pointer(ed[p][(j<<1)],p);
ed[ed[p][(j<<1)]]=ed[p];
--p;
}
up=*(--stackp);
if (up<p) {
// Vertex management
pts[3*up]=pts[3*p];
pts[3*up+1]=pts[3*p+1];
pts[3*up+2]=pts[3*p+2];
// Memory management
j=nu[up];
edp=ed[up];edd=(mep[j]+((j<<1)+1)*--mec[j]);
while (edp<ed[up]+(j<<1)+1) *(edp++)=*(edd++);
vc.n_set_aux2_copy(up,j);
vc.n_copy_pointer(ed[up][j<<1],up);
vc.n_copy_pointer(up,p);
ed[ed[up][j<<1]]=ed[up];
// Edge management
ed[up]=ed[p];
nu[up]=nu[p];
for (i=0;i<nu[up];i++) ed[ed[up][i]][ed[up][nu[up]+i]]=up;
ed[up][nu[up]<<1]=up;
} else up=p++;
}
// Check for any vertices of zero order
if (*mec>0) voro_fatal_error("Zero order vertex formed",VOROPP_INTERNAL_ERROR);
// Collapse any order 2 vertices and exit
return collapse_order2(vc);
}
/** During the creation of a new facet in the plane routine, it is possible
* that some order two vertices may arise. This routine removes them.
* Suppose an order two vertex joins c and d. If there's a edge between
* c and d already, then the order two vertex is just removed; otherwise,
* the order two vertex is removed and c and d are joined together directly.
* It is possible this process will create order two or order one vertices,
* and the routine is continually run until all of them are removed.
* \return False if the vertex removal was unsuccessful, indicative of the cell
* reducing to zero volume and disappearing; true if the vertex removal
* was successful. */
template<class vc_class>
inline bool voronoicell_base::collapse_order2(vc_class &vc) {
if (!collapse_order1(vc)) return false;
int a,b,i,j,k,l;
while (mec[2]>0) {
// Pick a order 2 vertex and read in its edges
i=--mec[2];
j=mep[2][5*i];k=mep[2][5*i+1];
if (j==k) {
#if VOROPP_VERBOSE >=1
fputs("Order two vertex joins itself",stderr);
#endif
return false;
}
// Scan the edges of j to see if joins k
for (l=0;l<nu[j];l++) {
if (ed[j][l]==k) break;
}
// If j doesn't already join k, join them together.
// Otherwise delete the connection to the current
// vertex from j and k.
a=mep[2][5*i+2];b=mep[2][5*i+3];i=mep[2][5*i+4];
if (l==nu[j]) {
ed[j][a]=k;
ed[k][b]=j;
ed[j][nu[j]+a]=b;
ed[k][nu[k]+b]=a;
} else {
if (!delete_connection(vc,j,a,false)) return false;
if (!delete_connection(vc,k,b,true)) return false;
}
// Compact the memory
--p;
if (up==i) up=0;
if (p!=i) {
if (up==p) up=i;
pts[3*i]=pts[3*p];
pts[3*i+1]=pts[3*p+1];
pts[3*i+2]=pts[3*p+2];
for (k=0;k<nu[p];k++) ed[ed[p][k]][ed[p][nu[p]+k]]=i;
vc.n_copy_pointer(i,p);
ed[i]=ed[p];
nu[i]=nu[p];
ed[i][nu[i]<<1]=i;
}
// Collapse any order 1 vertices if they were created
if (!collapse_order1(vc)) return false;
}
return true;
}
/** Order one vertices can potentially be created during the order two collapse
* routine. This routine keeps removing them until there are none left.
* \return False if the vertex removal was unsuccessful, indicative of the cell
* having zero volume and disappearing; true if the vertex removal was
* successful. */
template<class vc_class>
inline bool voronoicell_base::collapse_order1(vc_class &vc) {
int i,j,k;
while (mec[1]>0) {
up=0;
#if VOROPP_VERBOSE >=1
fputs("Order one collapse\n",stderr);
#endif
i=--mec[1];
j=mep[1][3*i];k=mep[1][3*i+1];
i=mep[1][3*i+2];
if (!delete_connection(vc,j,k,false)) return false;
--p;
if (up==i) up=0;
if (p!=i) {
if (up==p) up=i;
pts[3*i]=pts[3*p];
pts[3*i+1]=pts[3*p+1];
pts[3*i+2]=pts[3*p+2];
for (k=0;k<nu[p];k++) ed[ed[p][k]][ed[p][nu[p]+k]]=i;
vc.n_copy_pointer(i,p);
ed[i]=ed[p];
nu[i]=nu[p];
ed[i][nu[i]<<1]=i;
}
}
return true;
}
/** This routine deletes the kth edge of vertex j and reorganizes the memory.
* If the neighbor computation is enabled, we also have to supply an handedness
* flag to decide whether to preserve the plane on the left or right of the
* connection.
* \return False if a zero order vertex was formed, indicative of the cell
* disappearing; true if the vertex removal was successful. */
template<class vc_class>
inline bool voronoicell_base::delete_connection(vc_class &vc,int j,int k,bool hand) {
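    // The handedness flag selects which edge slot's information survives: slot
    // k itself, or the next slot in cyclic order around vertex j.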
int q=hand?k:cycle_up(k,j);
int i=nu[j]-1,l,*edp,*edd,m;
#if VOROPP_VERBOSE >=1
if (i<1) {
fputs("Zero order vertex formed\n",stderr);
return false;
}
#endif
if (mec[i]==mem[i]) add_memory(vc,i,ds2);
vc.n_set_aux1(i);
for (l=0;l<q;l++) vc.n_copy_aux1(j,l);
while (l<i) {
vc.n_copy_aux1_shift(j,l);
l++;
}
edp=mep[i]+((i<<1)+1)*mec[i]++;
edp[i<<1]=j;
for (l=0;l<k;l++) {
edp[l]=ed[j][l];
edp[l+i]=ed[j][l+nu[j]];
}
while (l<i) {
m=ed[j][l+1];
edp[l]=m;
k=ed[j][l+nu[j]+1];
edp[l+i]=k;
ed[m][nu[m]+k]--;
l++;
}
edd=mep[nu[j]]+((nu[j]<<1)+1)*--mec[nu[j]];
for (l=0;l<=(nu[j]<<1);l++) ed[j][l]=edd[l];
vc.n_set_aux2_copy(j,nu[j]);
vc.n_set_to_aux2(edd[nu[j]<<1]);
vc.n_set_to_aux1(j);
ed[edd[nu[j]<<1]]=edd;
ed[j]=edp;
nu[j]=i;
return true;
}
/** Calculates the area of each face of the Voronoi cell and stores the
 * results in a vector.
* \param[out] v the vector to store the results in. */
void voronoicell_base::face_areas(std::vector<double> &v) {
double area;
v.clear();
int i,j,k,l,m,n;
double ux,uy,uz,vx,vy,vz,wx,wy,wz;
for (i=1;i<p;i++) for(j=0;j<nu[i];j++) {
k=ed[i][j];
if (k>=0) {
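            // A non-negative edge means this face hasn't been visited yet;
            // edges are flipped to -1-k while walking around the face so that
            // reset_edges() can restore them afterwards.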
area=0;
ed[i][j]=-1-k;
l=cycle_up(ed[i][nu[i]+j],k);
m=ed[k][l];ed[k][l]=-1-m;
while (m!=i) {
n=cycle_up(ed[k][nu[k]+l],m);
ux=pts[3*k]-pts[3*i];
uy=pts[3*k+1]-pts[3*i+1];
uz=pts[3*k+2]-pts[3*i+2];
vx=pts[3*m]-pts[3*i];
vy=pts[3*m+1]-pts[3*i+1];
vz=pts[3*m+2]-pts[3*i+2];
wx=uy*vz-uz*vy;
wy=uz*vx-ux*vz;
wz=ux*vy-uy*vx;
area+=sqrt(wx*wx+wy*wy+wz*wz);
k=m;l=n;
m=ed[k][l];ed[k][l]=-1-m;
}
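            // pts holds doubled relative positions, so each cross product is
            // 4x the true value; halving again for the triangle area gives the
            // overall factor of 1/8.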
v.push_back(0.125*area);
}
}
reset_edges();
}
/** Several routines in the class that gather cell-based statistics internally
* track their progress by flipping edges to negative so that they know what
* parts of the cell have already been tested. This function resets them back
 * to positive. When it is called, it assumes that every edge in the cell
 * should have already been flipped to negative, and it bails out with an
* internal error if it encounters a positive edge. */
inline void voronoicell_base::reset_edges() {
int i,j;
for (i=0;i<p;i++) for(j=0;j<nu[i];j++) {
if (ed[i][j]>=0) voro_fatal_error("Edge reset routine found a previously untested edge",VOROPP_INTERNAL_ERROR);
ed[i][j]=-1-ed[i][j];
}
}
/** Checks to see if a given vertex is inside, outside or within the test
* plane. If the point is far away from the test plane, the routine immediately
 * returns whether it is inside or outside. If the point is close to the
* plane and within the specified tolerance, then the special check_marginal()
* routine is called.
* \param[in] n the vertex to test.
* \param[out] ans the result of the scalar product used in evaluating the
* location of the point.
* \return -1 if the point is inside the plane, 1 if the point is outside the
* plane, or 0 if the point is within the plane. */
inline int voronoicell_base::m_test(int n,double &ans) {
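    // pts stores (x,y,z) triples, so vertex n starts at pts+3*n; n+(n<<1) is
    // just 3*n.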
double *pp=pts+n+(n<<1);
ans=*(pp++)*px;
ans+=*(pp++)*py;
ans+=*pp*pz-prsq;
if (ans<-tolerance2) {
return -1;
} else if (ans>tolerance2) {
return 1;
}
return check_marginal(n,ans);
}
/** Checks to see if a given vertex is inside, outside or within the test
* plane, for the case when the point has been detected to be very close to the
* plane. The routine ensures that the returned results are always consistent
* with previous tests, by keeping a table of any marginal results. The routine
* first sees if the vertex is in the table, and if it finds a previously
* computed result it uses that. Otherwise, it computes a result for this
 * vertex and adds it to the table.
* \param[in] n the vertex to test.
* \param[in] ans the result of the scalar product used in evaluating
* the location of the point.
* \return -1 if the point is inside the plane, 1 if the point is outside the
* plane, or 0 if the point is within the plane. */
int voronoicell_base::check_marginal(int n,double &ans) {
int i;
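    // marg stores flat (vertex,result) pairs, hence the stride of two.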
for (i=0;i<n_marg;i+=2) if (marg[i]==n) return marg[i+1];
if (n_marg==current_marginal) {
current_marginal<<=1;
if (current_marginal>max_marginal)
voro_fatal_error("Marginal case buffer allocation exceeded absolute maximum",VOROPP_MEMORY_ERROR);
#if VOROPP_VERBOSE >=2
    fprintf(stderr,"Marginal cases buffer scaled up to %d\n",current_marginal);
#endif
int *pmarg=new int[current_marginal];
for (int j=0;j<n_marg;j++) pmarg[j]=marg[j];
delete [] marg;
marg=pmarg;
}
marg[n_marg++]=n;
marg[n_marg++]=ans>tolerance?1:(ans<-tolerance?-1:0);
return marg[n_marg-1];
}
/** This initializes the class to be a rectangular box. It calls the base class
* initialization routine to set up the edge and vertex information, and then
* sets up the neighbor information, with initial faces being assigned ID
* numbers from -1 to -6.
* \param[in] (xmin,xmax) the minimum and maximum x coordinates.
* \param[in] (ymin,ymax) the minimum and maximum y coordinates.
* \param[in] (zmin,zmax) the minimum and maximum z coordinates. */
void voronoicell_neighbor::init(double xmin,double xmax,double ymin,double ymax,double zmin,double zmax) {
init_base(xmin,xmax,ymin,ymax,zmin,zmax);
int *q=mne[3];
*q=-5;q[1]=-3;q[2]=-1;
q[3]=-5;q[4]=-2;q[5]=-3;
q[6]=-5;q[7]=-1;q[8]=-4;
q[9]=-5;q[10]=-4;q[11]=-2;
q[12]=-6;q[13]=-1;q[14]=-3;
q[15]=-6;q[16]=-3;q[17]=-2;
q[18]=-6;q[19]=-4;q[20]=-1;
q[21]=-6;q[22]=-2;q[23]=-4;
*ne=q;ne[1]=q+3;ne[2]=q+6;ne[3]=q+9;
ne[4]=q+12;ne[5]=q+15;ne[6]=q+18;ne[7]=q+21;
}
/** This routine checks to make sure the neighbor information of each face is
* consistent. */
void voronoicell_neighbor::check_facets() {
int i,j,k,l,m,q;
for (i=1;i<p;i++) for(j=0;j<nu[i];j++) {
k=ed[i][j];
if (k>=0) {
ed[i][j]=-1-k;
q=ne[i][j];
l=cycle_up(ed[i][nu[i]+j],k);
do {
m=ed[k][l];
ed[k][l]=-1-m;
if (ne[k][l]!=q) fprintf(stderr,"Facet error at (%d,%d)=%d, started from (%d,%d)=%d\n",k,l,ne[k][l],i,j,q);
l=cycle_up(ed[k][nu[k]+l],m);
k=m;
} while (k!=i);
}
}
reset_edges();
}
/** The class constructor allocates memory for storing neighbor information. */
voronoicell_neighbor::voronoicell_neighbor() {
int i;
mne=new int*[current_vertex_order];
ne=new int*[current_vertices];
for (i=0;i<3;i++) mne[i]=new int[init_n_vertices*i];
mne[3]=new int[init_3_vertices*3];
for (i=4;i<current_vertex_order;i++) mne[i]=new int[init_n_vertices*i];
}
/** The class destructor frees the dynamically allocated memory for storing
* neighbor information. */
voronoicell_neighbor::~voronoicell_neighbor() {
for (int i=current_vertex_order-1;i>=0;i--) if (mem[i]>0) delete [] mne[i];
delete [] mne;
delete [] ne;
}
/** Computes a vector list of neighbors. */
void voronoicell_neighbor::neighbors(std::vector<int> &v) {
v.clear();
int i,j,k,l,m;
for (i=1;i<p;i++) for(j=0;j<nu[i];j++) {
k=ed[i][j];
if (k>=0) {
v.push_back(ne[i][j]);
ed[i][j]=-1-k;
l=cycle_up(ed[i][nu[i]+j],k);
do {
m=ed[k][l];
ed[k][l]=-1-m;
l=cycle_up(ed[k][nu[k]+l],m);
k=m;
} while (k!=i);
}
}
reset_edges();
}
/** Returns the number of faces of a computed Voronoi cell.
* \return The number of faces. */
int voronoicell_base::number_of_faces() {
int i,j,k,l,m,s=0;
for (i=1;i<p;i++) for(j=0;j<nu[i];j++) {
k=ed[i][j];
if (k>=0) {
s++;
ed[i][j]=-1-k;
l=cycle_up(ed[i][nu[i]+j],k);
do {
m=ed[k][l];
ed[k][l]=-1-m;
l=cycle_up(ed[k][nu[k]+l],m);
k=m;
} while (k!=i);
}
}
reset_edges();
return s;
}
/** Returns a vector of the vertex vectors in the global coordinate system.
* \param[out] v the vector to store the results in.
* \param[in] (x,y,z) the position vector of the particle in the global
* coordinate system. */
void voronoicell_base::vertices(double x,double y,double z,std::vector<double> &v) {
v.resize(3*p);
double *ptsp=pts;
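    // Stored positions are doubled displacements relative to the particle, so
    // halve them and add the particle position to get global coordinates.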
for (int i=0;i<3*p;i+=3) {
v[i]=x+*(ptsp++)*0.5;
v[i+1]=y+*(ptsp++)*0.5;
v[i+2]=z+*(ptsp++)*0.5;
}
}
/** For each face, this routine outputs a bracketed sequence of numbers
* containing a list of all the vertices that make up that face.
* \param[out] v the vector to store the results in. */
void voronoicell_base::face_vertices(std::vector<int> &v) {
int i,j,k,l,m,vp(0),vn;
v.clear();
for (i=1;i<p;i++) for(j=0;j<nu[i];j++) {
k=ed[i][j];
if (k>=0) {
v.push_back(0);
v.push_back(i);
ed[i][j]=-1-k;
l=cycle_up(ed[i][nu[i]+j],k);
do {
v.push_back(k);
m=ed[k][l];
ed[k][l]=-1-m;
l=cycle_up(ed[k][nu[k]+l],m);
k=m;
} while (k!=i);
vn=v.size();
v[vp]=vn-vp-1;
vp=vn;
}
}
reset_edges();
}
// Explicit instantiation
template bool voronoicell_base::nplane(voronoicell_neighbor&,double,double,double,double,int);
template void voronoicell_base::check_memory_for_copy(voronoicell_neighbor&,voronoicell_base*);
}<|fim▁end|> | |
<|file_name|>test_unix_echo_server.rs<|end_file_name|><|fim▁begin|>use {TryRead, TryWrite};
use mio::*;
use mio::deprecated::{EventLoop, Handler};
use mio::deprecated::unix::*;
use bytes::{Buf, ByteBuf, MutByteBuf, SliceBuf};
use slab;
use std::path::PathBuf;
use std::io;
use tempdir::TempDir;
const SERVER: Token = Token(10_000_000);
const CLIENT: Token = Token(10_000_001);
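// SERVER and CLIENT use deliberately large token values so they can never
// collide with the slab-assigned connection tokens, which start at 0.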
struct EchoConn {
sock: UnixStream,
buf: Option<ByteBuf>,
mut_buf: Option<MutByteBuf>,
token: Option<Token>,
interest: Ready,
}
type Slab<T> = slab::Slab<T, Token>;
impl EchoConn {
fn new(sock: UnixStream) -> EchoConn {
EchoConn {
sock: sock,
buf: None,
mut_buf: Some(ByteBuf::mut_with_capacity(2048)),
token: None,
interest: Ready::hup(),
}
}
fn writable(&mut self, event_loop: &mut EventLoop<Echo>) -> io::Result<()> {
let mut buf = self.buf.take().unwrap();
match self.sock.try_write_buf(&mut buf) {
Ok(None) => {
debug!("client flushing buf; WOULDBLOCK");
self.buf = Some(buf);
self.interest.insert(Ready::writable());
}
Ok(Some(r)) => {
debug!("CONN : we wrote {} bytes!", r);
self.mut_buf = Some(buf.flip());
self.interest.insert(Ready::readable());
self.interest.remove(Ready::writable());
}
Err(e) => debug!("not implemented; client err={:?}", e),
}
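        // With PollOpt::oneshot() each wakeup disarms the registration, so the
        // connection must reregister with its updated interest set every time.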
assert!(self.interest.is_readable() || self.interest.is_writable(), "actual={:?}", self.interest);
event_loop.reregister(&self.sock, self.token.unwrap(), self.interest, PollOpt::edge() | PollOpt::oneshot())
}
fn readable(&mut self, event_loop: &mut EventLoop<Echo>) -> io::Result<()> {
let mut buf = self.mut_buf.take().unwrap();
match self.sock.try_read_buf(&mut buf) {
Ok(None) => {
debug!("CONN : spurious read wakeup");
self.mut_buf = Some(buf);
}
Ok(Some(r)) => {
debug!("CONN : we read {} bytes!", r);
// prepare to provide this to writable
self.buf = Some(buf.flip());
self.interest.remove(Ready::readable());
self.interest.insert(Ready::writable());
}
Err(e) => {
debug!("not implemented; client err={:?}", e);
self.interest.remove(Ready::readable());
}
};
assert!(self.interest.is_readable() || self.interest.is_writable(), "actual={:?}", self.interest);
event_loop.reregister(&self.sock, self.token.unwrap(), self.interest, PollOpt::edge() | PollOpt::oneshot())
}
}
struct EchoServer {
sock: UnixListener,
conns: Slab<EchoConn>
}
impl EchoServer {
fn accept(&mut self, event_loop: &mut EventLoop<Echo>) -> io::Result<()> {
debug!("server accepting socket");
let sock = self.sock.accept().unwrap();
let conn = EchoConn::new(sock);
let tok = self.conns.insert(conn)
.ok().expect("could not add connection to slab");
// Register the connection
self.conns[tok].token = Some(tok);
event_loop.register(&self.conns[tok].sock, tok, Ready::readable(), PollOpt::edge() | PollOpt::oneshot())
.ok().expect("could not register socket with event loop");
Ok(())
}
fn conn_readable(&mut self, event_loop: &mut EventLoop<Echo>, tok: Token) -> io::Result<()> {
debug!("server conn readable; tok={:?}", tok);
self.conn(tok).readable(event_loop)
}
fn conn_writable(&mut self, event_loop: &mut EventLoop<Echo>, tok: Token) -> io::Result<()> {
debug!("server conn writable; tok={:?}", tok);
self.conn(tok).writable(event_loop)
}
fn conn<'a>(&'a mut self, tok: Token) -> &'a mut EchoConn {
&mut self.conns[tok]
}
}
struct EchoClient {
sock: UnixStream,
msgs: Vec<&'static str>,
tx: SliceBuf<'static>,
rx: SliceBuf<'static>,
mut_buf: Option<MutByteBuf>,
token: Token,
interest: Ready,
}
// Sends a message and expects to receive the same exact message, one at a time
impl EchoClient {
fn new(sock: UnixStream, tok: Token, mut msgs: Vec<&'static str>) -> EchoClient {
let curr = msgs.remove(0);
EchoClient {
sock: sock,
msgs: msgs,
tx: SliceBuf::wrap(curr.as_bytes()),
rx: SliceBuf::wrap(curr.as_bytes()),
mut_buf: Some(ByteBuf::mut_with_capacity(2048)),
token: tok,
interest: Ready::none(),
}
}
fn readable(&mut self, event_loop: &mut EventLoop<Echo>) -> io::Result<()> {
debug!("client socket readable");
let mut buf = self.mut_buf.take().unwrap();
match self.sock.try_read_buf(&mut buf) {
Ok(None) => {
debug!("CLIENT : spurious read wakeup");
self.mut_buf = Some(buf);
}
Ok(Some(r)) => {
debug!("CLIENT : We read {} bytes!", r);
// prepare for reading
let mut buf = buf.flip();
debug!("CLIENT : buf = {:?} -- rx = {:?}", buf.bytes(), self.rx.bytes());
while buf.has_remaining() {
let actual = buf.read_byte().unwrap();
let expect = self.rx.read_byte().unwrap();
assert!(actual == expect, "actual={}; expect={}", actual, expect);
}
self.mut_buf = Some(buf.flip());
self.interest.remove(Ready::readable());
if !self.rx.has_remaining() {
self.next_msg(event_loop).unwrap();
}
}
Err(e) => {
panic!("not implemented; client err={:?}", e);
}
};
if !self.interest.is_none() {
assert!(self.interest.is_readable() || self.interest.is_writable(), "actual={:?}", self.interest);
event_loop.reregister(&self.sock, self.token, self.interest, PollOpt::edge() | PollOpt::oneshot())?;
}
Ok(())
}
fn writable(&mut self, event_loop: &mut EventLoop<Echo>) -> io::Result<()> {
debug!("client socket writable");
match self.sock.try_write_buf(&mut self.tx) {
Ok(None) => {
debug!("client flushing buf; WOULDBLOCK");
self.interest.insert(Ready::writable());
}
Ok(Some(r)) => {
debug!("CLIENT : we wrote {} bytes!", r);
self.interest.insert(Ready::readable());
self.interest.remove(Ready::writable());
}
Err(e) => debug!("not implemented; client err={:?}", e)
}
assert!(self.interest.is_readable() || self.interest.is_writable(), "actual={:?}", self.interest);
event_loop.reregister(&self.sock, self.token, self.interest, PollOpt::edge() | PollOpt::oneshot())
}
fn next_msg(&mut self, event_loop: &mut EventLoop<Echo>) -> io::Result<()> {
if self.msgs.is_empty() {
event_loop.shutdown();
return Ok(());
}
let curr = self.msgs.remove(0);
debug!("client prepping next message");
self.tx = SliceBuf::wrap(curr.as_bytes());
self.rx = SliceBuf::wrap(curr.as_bytes());
self.interest.insert(Ready::writable());
assert!(self.interest.is_readable() || self.interest.is_writable(), "actual={:?}", self.interest);
event_loop.reregister(&self.sock, self.token, self.interest, PollOpt::edge() | PollOpt::oneshot())
}
}
struct Echo {
server: EchoServer,
client: EchoClient,
}
impl Echo {
fn new(srv: UnixListener, client: UnixStream, msgs: Vec<&'static str>) -> Echo {
Echo {
server: EchoServer {
sock: srv,
conns: Slab::with_capacity(128)
},
client: EchoClient::new(client, CLIENT, msgs)
}
}
}<|fim▁hole|> type Message = ();
fn ready(&mut self, event_loop: &mut EventLoop<Echo>, token: Token, events: Ready) {
if events.is_readable() {
match token {
SERVER => self.server.accept(event_loop).unwrap(),
CLIENT => self.client.readable(event_loop).unwrap(),
i => self.server.conn_readable(event_loop, i).unwrap()
};
}
if events.is_writable() {
match token {
                SERVER => panic!("received writable for SERVER token"),
CLIENT => self.client.writable(event_loop).unwrap(),
_ => self.server.conn_writable(event_loop, token).unwrap()
};
}
}
}
#[test]
pub fn test_unix_echo_server() {
debug!("Starting TEST_UNIX_ECHO_SERVER");
let mut event_loop = EventLoop::new().unwrap();
let tmp_dir = TempDir::new("mio").unwrap();
let addr = tmp_dir.path().join(&PathBuf::from("sock"));
let srv = UnixListener::bind(&addr).unwrap();
info!("listen for connections");
event_loop.register(&srv, SERVER, Ready::readable(), PollOpt::edge() | PollOpt::oneshot()).unwrap();
let sock = UnixStream::connect(&addr).unwrap();
// Connect to the server
event_loop.register(&sock, CLIENT, Ready::writable(), PollOpt::edge() | PollOpt::oneshot()).unwrap();
// Start the event loop
event_loop.run(&mut Echo::new(srv, sock, vec!["foo", "bar"])).unwrap();
}<|fim▁end|> |
impl Handler for Echo {
type Timeout = usize; |
<|file_name|>app-dispatcher.js<|end_file_name|><|fim▁begin|>var Dispatcher = require('flux').Dispatcher;
var assign = require('object-assign')
var AppDispatcher = assign(new Dispatcher(), {
handleViewAction: function(action) {
this.dispatch({<|fim▁hole|>
handleServerAction: function(action) {
this.dispatch({
actionType: 'SERVER_ACTION',
action: action
});
}
});
module.exports = AppDispatcher;<|fim▁end|> | actionType: 'VIEW_ACTION',
action: action
});
}, |
<|file_name|>amqp.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2011 - 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Shared code between AMQP based openstack.common.rpc implementations.
The code in this module is shared between the rpc implementations based on AMQP.
Specifically, this includes impl_kombu and impl_qpid. impl_carrot also uses
AMQP, but is deprecated and predates this code.
"""
import collections
import inspect
import sys
import uuid
from eventlet import greenpool
from eventlet import pools
from eventlet import queue
from eventlet import semaphore
from oslo.config import cfg
from neutron.openstack.common import excutils
from neutron.openstack.common.gettextutils import _
from neutron.openstack.common import local
from neutron.openstack.common import log as logging
from neutron.openstack.common.rpc import common as rpc_common
amqp_opts = [
cfg.BoolOpt('amqp_durable_queues',
default=False,
deprecated_name='rabbit_durable_queues',
deprecated_group='DEFAULT',
help='Use durable queues in amqp.'),
cfg.BoolOpt('amqp_auto_delete',
default=False,
help='Auto-delete queues in amqp.'),
]
cfg.CONF.register_opts(amqp_opts)
UNIQUE_ID = '_unique_id'
LOG = logging.getLogger(__name__)
class Pool(pools.Pool):
"""Class that implements a Pool of Connections."""
def __init__(self, conf, connection_cls, *args, **kwargs):
self.connection_cls = connection_cls
self.conf = conf
kwargs.setdefault("max_size", self.conf.rpc_conn_pool_size)
kwargs.setdefault("order_as_stack", True)
super(Pool, self).__init__(*args, **kwargs)
self.reply_proxy = None
# TODO(comstud): Timeout connections not used in a while
def create(self):
LOG.debug(_('Pool creating new connection'))
return self.connection_cls(self.conf)
def empty(self):
while self.free_items:
self.get().close()
# Force a new connection pool to be created.
# Note that this was added due to failing unit test cases. The issue
# is the above "while loop" gets all the cached connections from the
# pool and closes them, but never returns them to the pool, a pool
# leak. The unit tests hang waiting for an item to be returned to the
# pool. The unit tests get here via the tearDown() method. In the run
# time code, it gets here via cleanup() and only appears in service.py
# just before doing a sys.exit(), so cleanup() only happens once and
# the leakage is not a problem.
self.connection_cls.pool = None
_pool_create_sem = semaphore.Semaphore()
def get_connection_pool(conf, connection_cls):
with _pool_create_sem:
# Make sure only one thread tries to create the connection pool.
if not connection_cls.pool:
connection_cls.pool = Pool(conf, connection_cls)
return connection_cls.pool
class ConnectionContext(rpc_common.Connection):
"""The class that is actually returned to the create_connection() caller.
This is essentially a wrapper around Connection that supports 'with'.
It can also return a new Connection, or one from a pool.
    The class also catches when an instance is about to be deleted, so that
    Connections can be returned to the pool on exceptions and so forth
    without making the caller responsible for catching them. Whenever
    possible, a connection is returned to the pool.
"""
def __init__(self, conf, connection_pool, pooled=True, server_params=None):
"""Create a new connection, or get one from the pool."""
self.connection = None
self.conf = conf
self.connection_pool = connection_pool
if pooled:
self.connection = connection_pool.get()
else:
self.connection = connection_pool.connection_cls(
conf,
server_params=server_params)
self.pooled = pooled
def __enter__(self):
"""When with ConnectionContext() is used, return self."""
return self
def _done(self):
"""If the connection came from a pool, clean it up and put it back.
If it did not come from a pool, close it.
"""
if self.connection:
if self.pooled:
# Reset the connection so it's ready for the next caller
# to grab from the pool
self.connection.reset()
self.connection_pool.put(self.connection)
else:
try:
self.connection.close()
except Exception:
pass
self.connection = None
def __exit__(self, exc_type, exc_value, tb):
"""End of 'with' statement. We're done here."""
self._done()
def __del__(self):
"""Caller is done with this connection. Make sure we cleaned up."""
self._done()
def close(self):
"""Caller is done with this connection."""
self._done()
def create_consumer(self, topic, proxy, fanout=False):
self.connection.create_consumer(topic, proxy, fanout)
def create_worker(self, topic, proxy, pool_name):
self.connection.create_worker(topic, proxy, pool_name)
def join_consumer_pool(self, callback, pool_name, topic, exchange_name):
self.connection.join_consumer_pool(callback,
pool_name,
topic,
exchange_name)
def consume_in_thread(self):
self.connection.consume_in_thread()
def __getattr__(self, key):
"""Proxy all other calls to the Connection instance."""
if self.connection:
return getattr(self.connection, key)
else:
raise rpc_common.InvalidRPCConnectionReuse()
class ReplyProxy(ConnectionContext):
"""Connection class for RPC replies / callbacks."""
def __init__(self, conf, connection_pool):
self._call_waiters = {}
self._num_call_waiters = 0
self._num_call_waiters_wrn_threshhold = 10
self._reply_q = 'reply_' + uuid.uuid4().hex
super(ReplyProxy, self).__init__(conf, connection_pool, pooled=False)
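        # All replies for this process arrive on one direct queue; _process_data
        # routes each reply to the per-call waiter registered under its msg_id.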
self.declare_direct_consumer(self._reply_q, self._process_data)
self.consume_in_thread()
def _process_data(self, message_data):
msg_id = message_data.pop('_msg_id', None)
waiter = self._call_waiters.get(msg_id)
if not waiter:
LOG.warn(_('No calling threads waiting for msg_id : %(msg_id)s'
', message : %(data)s'), {'msg_id': msg_id,
'data': message_data})
LOG.warn(_('_call_waiters: %s') % str(self._call_waiters))
else:
waiter.put(message_data)
def add_call_waiter(self, waiter, msg_id):
self._num_call_waiters += 1
if self._num_call_waiters > self._num_call_waiters_wrn_threshhold:
LOG.warn(_('Number of call waiters is greater than warning '
                       'threshold: %d. There could be a MulticallProxyWaiter '
'leak.') % self._num_call_waiters_wrn_threshhold)
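            # Double the threshold so the warning fires at 10, 20, 40, ... call
            # waiters rather than on every additional call.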
self._num_call_waiters_wrn_threshhold *= 2
self._call_waiters[msg_id] = waiter
def del_call_waiter(self, msg_id):
self._num_call_waiters -= 1
del self._call_waiters[msg_id]
def get_reply_q(self):
return self._reply_q
def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None,
failure=None, ending=False, log_failure=True):
"""Sends a reply or an error on the channel signified by msg_id.
Failure should be a sys.exc_info() tuple.
"""
with ConnectionContext(conf, connection_pool) as conn:
if failure:
failure = rpc_common.serialize_remote_exception(failure,
log_failure)
try:
msg = {'result': reply, 'failure': failure}
except TypeError:
msg = {'result': dict((k, repr(v))
for k, v in reply.__dict__.iteritems()),
'failure': failure}
if ending:
msg['ending'] = True
_add_unique_id(msg)
# If a reply_q exists, add the msg_id to the reply and pass the
# reply_q to direct_send() to use it as the response queue.
        # Otherwise use the msg_id for backward compatibility.
if reply_q:
msg['_msg_id'] = msg_id
conn.direct_send(reply_q, rpc_common.serialize_msg(msg))
else:
conn.direct_send(msg_id, rpc_common.serialize_msg(msg))
class RpcContext(rpc_common.CommonRpcContext):
"""Context that supports replying to a rpc.call."""
def __init__(self, **kwargs):
self.msg_id = kwargs.pop('msg_id', None)
self.reply_q = kwargs.pop('reply_q', None)
self.conf = kwargs.pop('conf')
super(RpcContext, self).__init__(**kwargs)
def deepcopy(self):
values = self.to_dict()
values['conf'] = self.conf
values['msg_id'] = self.msg_id
values['reply_q'] = self.reply_q
return self.__class__(**values)
def reply(self, reply=None, failure=None, ending=False,
connection_pool=None, log_failure=True):
if self.msg_id:
msg_reply(self.conf, self.msg_id, self.reply_q, connection_pool,
reply, failure, ending, log_failure)
if ending:
self.msg_id = None
def unpack_context(conf, msg):
"""Unpack context from msg."""
context_dict = {}
for key in list(msg.keys()):
# NOTE(vish): Some versions of python don't like unicode keys
# in kwargs.
key = str(key)
if key.startswith('_context_'):
value = msg.pop(key)
context_dict[key[9:]] = value
context_dict['msg_id'] = msg.pop('_msg_id', None)
context_dict['reply_q'] = msg.pop('_reply_q', None)
context_dict['conf'] = conf
ctx = RpcContext.from_dict(context_dict)
rpc_common._safe_log(LOG.debug, _('unpacked context: %s'), ctx.to_dict())
return ctx
def pack_context(msg, context):
"""Pack context into msg.
Values for message keys need to be less than 255 chars, so we pull
context out into a bunch of separate keys. If we want to support
more arguments in rabbit messages, we may want to do the same
for args at some point.
"""
context_d = dict([('_context_%s' % key, value)
for (key, value) in context.to_dict().iteritems()])
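    # Illustrative example: a context of {'user': 'u1', 'project': 'p1'} is
    # merged into the message as {'_context_user': 'u1',
    # '_context_project': 'p1'}.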
msg.update(context_d)
class _MsgIdCache(object):
"""This class checks any duplicate messages."""
    # NOTE: This value could be made a configuration item, but it rarely
    #       needs to be changed in practice, so it is left static for now.
DUP_MSG_CHECK_SIZE = 16
def __init__(self, **kwargs):<|fim▁hole|>
    def check_duplicate_message(self, message_data):
        """AMQP consumers may read the same message twice when an exception
        occurs before the ack is returned. This method prevents that.
        """
if UNIQUE_ID in message_data:
msg_id = message_data[UNIQUE_ID]
if msg_id not in self.prev_msgids:
self.prev_msgids.append(msg_id)
else:
raise rpc_common.DuplicateMessageError(msg_id=msg_id)
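# Illustrative flow: senders stamp each payload via _add_unique_id(msg); if the
# broker redelivers it, the consumer's _MsgIdCache still holds that UNIQUE_ID
# among its last DUP_MSG_CHECK_SIZE entries and raises DuplicateMessageError
# instead of processing the message twice.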
def _add_unique_id(msg):
"""Add unique_id for checking duplicate messages."""
unique_id = uuid.uuid4().hex
msg.update({UNIQUE_ID: unique_id})
LOG.debug(_('UNIQUE_ID is %s.') % (unique_id))
class _ThreadPoolWithWait(object):
"""Base class for a delayed invocation manager.
Used by the Connection class to start up green threads
to handle incoming messages.
"""
def __init__(self, conf, connection_pool):
self.pool = greenpool.GreenPool(conf.rpc_thread_pool_size)
self.connection_pool = connection_pool
self.conf = conf
def wait(self):
"""Wait for all callback threads to exit."""
self.pool.waitall()
class CallbackWrapper(_ThreadPoolWithWait):
"""Wraps a straight callback.
Allows it to be invoked in a green thread.
"""
def __init__(self, conf, callback, connection_pool):
"""Initiates CallbackWrapper object.
:param conf: cfg.CONF instance
:param callback: a callable (probably a function)
:param connection_pool: connection pool as returned by
get_connection_pool()
"""
super(CallbackWrapper, self).__init__(
conf=conf,
connection_pool=connection_pool,
)
self.callback = callback
def __call__(self, message_data):
self.pool.spawn_n(self.callback, message_data)
class ProxyCallback(_ThreadPoolWithWait):
"""Calls methods on a proxy object based on method and args."""
def __init__(self, conf, proxy, connection_pool):
super(ProxyCallback, self).__init__(
conf=conf,
connection_pool=connection_pool,
)
self.proxy = proxy
self.msg_id_cache = _MsgIdCache()
def __call__(self, message_data):
"""Consumer callback to call a method on a proxy object.
Parses the message for validity and fires off a thread to call the
proxy object method.
Message data should be a dictionary with two keys:
method: string representing the method to call
args: dictionary of arg: value
Example: {'method': 'echo', 'args': {'value': 42}}
"""
# It is important to clear the context here, because at this point
# the previous context is stored in local.store.context
if hasattr(local.store, 'context'):
del local.store.context
rpc_common._safe_log(LOG.debug, _('received %s'), message_data)
self.msg_id_cache.check_duplicate_message(message_data)
ctxt = unpack_context(self.conf, message_data)
method = message_data.get('method')
args = message_data.get('args', {})
version = message_data.get('version')
namespace = message_data.get('namespace')
if not method:
LOG.warn(_('no method for message: %s') % message_data)
ctxt.reply(_('No method for message: %s') % message_data,
connection_pool=self.connection_pool)
return
self.pool.spawn_n(self._process_data, ctxt, version, method,
namespace, args)
def _process_data(self, ctxt, version, method, namespace, args):
"""Process a message in a new thread.
If the proxy object we have has a dispatch method
(see rpc.dispatcher.RpcDispatcher), pass it the version,
method, and args and let it dispatch as appropriate. If not, use
the old behavior of magically calling the specified method on the
proxy we have here.
"""
ctxt.update_store()
try:
rval = self.proxy.dispatch(ctxt, version, method, namespace,
**args)
# Check if the result was a generator
if inspect.isgenerator(rval):
for x in rval:
ctxt.reply(x, None, connection_pool=self.connection_pool)
else:
ctxt.reply(rval, None, connection_pool=self.connection_pool)
# This final None tells multicall that it is done.
ctxt.reply(ending=True, connection_pool=self.connection_pool)
except rpc_common.ClientException as e:
LOG.debug(_('Expected exception during message handling (%s)') %
e._exc_info[1])
ctxt.reply(None, e._exc_info,
connection_pool=self.connection_pool,
log_failure=False)
except Exception:
# sys.exc_info() is deleted by LOG.exception().
exc_info = sys.exc_info()
LOG.error(_('Exception during message handling'),
exc_info=exc_info)
ctxt.reply(None, exc_info, connection_pool=self.connection_pool)
class MulticallProxyWaiter(object):
def __init__(self, conf, msg_id, timeout, connection_pool):
self._msg_id = msg_id
self._timeout = timeout or conf.rpc_response_timeout
self._reply_proxy = connection_pool.reply_proxy
self._done = False
self._got_ending = False
self._conf = conf
self._dataqueue = queue.LightQueue()
# Add this caller to the reply proxy's call_waiters
self._reply_proxy.add_call_waiter(self, self._msg_id)
self.msg_id_cache = _MsgIdCache()
def put(self, data):
self._dataqueue.put(data)
def done(self):
if self._done:
return
self._done = True
# Remove this caller from reply proxy's call_waiters
self._reply_proxy.del_call_waiter(self._msg_id)
def _process_data(self, data):
result = None
self.msg_id_cache.check_duplicate_message(data)
if data['failure']:
failure = data['failure']
result = rpc_common.deserialize_remote_exception(self._conf,
failure)
elif data.get('ending', False):
self._got_ending = True
else:
result = data['result']
return result
def __iter__(self):
"""Return a result until we get a reply with an 'ending' flag."""
if self._done:
raise StopIteration
while True:
try:
data = self._dataqueue.get(timeout=self._timeout)
result = self._process_data(data)
except queue.Empty:
self.done()
raise rpc_common.Timeout()
except Exception:
with excutils.save_and_reraise_exception():
self.done()
if self._got_ending:
self.done()
raise StopIteration
if isinstance(result, Exception):
self.done()
raise result
yield result
def create_connection(conf, new, connection_pool):
"""Create a connection."""
return ConnectionContext(conf, connection_pool, pooled=not new)
_reply_proxy_create_sem = semaphore.Semaphore()
def multicall(conf, context, topic, msg, timeout, connection_pool):
"""Make a call that returns multiple times."""
LOG.debug(_('Making synchronous call on %s ...'), topic)
msg_id = uuid.uuid4().hex
msg.update({'_msg_id': msg_id})
LOG.debug(_('MSG_ID is %s') % (msg_id))
_add_unique_id(msg)
pack_context(msg, context)
with _reply_proxy_create_sem:
if not connection_pool.reply_proxy:
connection_pool.reply_proxy = ReplyProxy(conf, connection_pool)
msg.update({'_reply_q': connection_pool.reply_proxy.get_reply_q()})
wait_msg = MulticallProxyWaiter(conf, msg_id, timeout, connection_pool)
with ConnectionContext(conf, connection_pool) as conn:
conn.topic_send(topic, rpc_common.serialize_msg(msg), timeout)
return wait_msg
def call(conf, context, topic, msg, timeout, connection_pool):
"""Sends a message on a topic and wait for a response."""
rv = multicall(conf, context, topic, msg, timeout, connection_pool)
# NOTE(vish): return the last result from the multicall
rv = list(rv)
if not rv:
return
return rv[-1]
def cast(conf, context, topic, msg, connection_pool):
"""Sends a message on a topic without waiting for a response."""
LOG.debug(_('Making asynchronous cast on %s...'), topic)
_add_unique_id(msg)
pack_context(msg, context)
with ConnectionContext(conf, connection_pool) as conn:
conn.topic_send(topic, rpc_common.serialize_msg(msg))
def fanout_cast(conf, context, topic, msg, connection_pool):
"""Sends a message on a fanout exchange without waiting for a response."""
LOG.debug(_('Making asynchronous fanout cast...'))
_add_unique_id(msg)
pack_context(msg, context)
with ConnectionContext(conf, connection_pool) as conn:
conn.fanout_send(topic, rpc_common.serialize_msg(msg))
def cast_to_server(conf, context, server_params, topic, msg, connection_pool):
"""Sends a message on a topic to a specific server."""
_add_unique_id(msg)
pack_context(msg, context)
with ConnectionContext(conf, connection_pool, pooled=False,
server_params=server_params) as conn:
conn.topic_send(topic, rpc_common.serialize_msg(msg))
def fanout_cast_to_server(conf, context, server_params, topic, msg,
connection_pool):
"""Sends a message on a fanout exchange to a specific server."""
_add_unique_id(msg)
pack_context(msg, context)
with ConnectionContext(conf, connection_pool, pooled=False,
server_params=server_params) as conn:
conn.fanout_send(topic, rpc_common.serialize_msg(msg))
def notify(conf, context, topic, msg, connection_pool, envelope):
"""Sends a notification event on a topic."""
LOG.debug(_('Sending %(event_type)s on %(topic)s'),
dict(event_type=msg.get('event_type'),
topic=topic))
_add_unique_id(msg)
pack_context(msg, context)
with ConnectionContext(conf, connection_pool) as conn:
if envelope:
msg = rpc_common.serialize_msg(msg)
conn.notify_send(topic, msg)
def cleanup(connection_pool):
if connection_pool:
connection_pool.empty()
def get_control_exchange(conf):
return conf.control_exchange<|fim▁end|> | self.prev_msgids = collections.deque([],
maxlen=self.DUP_MSG_CHECK_SIZE) |
<|file_name|>qmljstoolsplugin.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of Qt Creator.
**
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3 as published by the Free Software
** Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-3.0.html.
**
****************************************************************************/
#include "qmljstoolsplugin.h"
#include "qmljsmodelmanager.h"
#include "qmljsfunctionfilter.h"
#include "qmljslocatordata.h"
#include "qmljscodestylesettingspage.h"
#include "qmljstoolsconstants.h"
#include "qmljstoolssettings.h"
#include "qmljsbundleprovider.h"
#include <coreplugin/icontext.h>
#include <coreplugin/icore.h>
#include <coreplugin/coreconstants.h>
#include <coreplugin/actionmanager/actionmanager.h>
#include <coreplugin/actionmanager/actioncontainer.h>
#include <coreplugin/progressmanager/progressmanager.h>
#include <QMenu>
using namespace Core;
namespace QmlJSTools {
namespace Internal {
enum { debug = 0 };
class QmlJSToolsPluginPrivate : public QObject
{
public:
QmlJSToolsPluginPrivate();
QmlJSToolsSettings settings;
ModelManager modelManager;
QAction resetCodeModelAction{QmlJSToolsPlugin::tr("Reset Code Model"), nullptr};
LocatorData locatorData;
FunctionFilter functionFilter{&locatorData};
QmlJSCodeStyleSettingsPage codeStyleSettingsPage;
BasicBundleProvider basicBundleProvider;
};
QmlJSToolsPlugin::~QmlJSToolsPlugin()
{
delete d;
}
bool QmlJSToolsPlugin::initialize(const QStringList &arguments, QString *error)
{
Q_UNUSED(arguments)
Q_UNUSED(error)
d = new QmlJSToolsPluginPrivate;
return true;
}
QmlJSToolsPluginPrivate::QmlJSToolsPluginPrivate()
{
// Core::VcsManager *vcsManager = Core::VcsManager::instance();
// Core::DocumentManager *documentManager = Core::DocumentManager::instance();
// connect(vcsManager, &Core::VcsManager::repositoryChanged,
// &d->modelManager, &ModelManager::updateModifiedSourceFiles);
// connect(documentManager, &DocumentManager::filesChangedInternally,
// &d->modelManager, &ModelManager::updateSourceFiles);
// Menus
ActionContainer *mtools = ActionManager::actionContainer(Core::Constants::M_TOOLS);
ActionContainer *mqmljstools = ActionManager::createMenu(Constants::M_TOOLS_QMLJS);
QMenu *menu = mqmljstools->menu();
menu->setTitle(QmlJSToolsPlugin::tr("&QML/JS"));
menu->setEnabled(true);
mtools->addMenu(mqmljstools);
// Update context in global context
Command *cmd = ActionManager::registerAction(<|fim▁hole|> connect(&resetCodeModelAction, &QAction::triggered,
&modelManager, &ModelManager::resetCodeModel);
mqmljstools->addAction(cmd);
// Watch task progress
connect(ProgressManager::instance(), &ProgressManager::taskStarted, this,
[this](Core::Id type) {
if (type == QmlJS::Constants::TASK_INDEX)
resetCodeModelAction.setEnabled(false);
});
connect(ProgressManager::instance(), &ProgressManager::allTasksFinished,
[this](Core::Id type) {
if (type == QmlJS::Constants::TASK_INDEX)
resetCodeModelAction.setEnabled(true);
});
}
void QmlJSToolsPlugin::extensionsInitialized()
{
d->modelManager.delayedInitialization();
}
} // Internal
} // QmlJSTools<|fim▁end|> | &resetCodeModelAction, Constants::RESET_CODEMODEL); |
<|file_name|>migration_cli.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of EUDAT B2Share.
# Copyright (C) 2017 CERN, SurfsSara
#
# B2Share is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# B2Share is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with B2Share; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""B2Share migration command line interface.
These commands were designed only for the migration of data from
the instance hosted by CSC on https://b2share.eudat.eu .
WARNING - Operating these commands on local instances may severely impact data
integrity and/or lead to dysfunctional behaviour."""
import logging
import os
import requests
import traceback
from urllib.parse import urlunsplit, urljoin, urlsplit
import click
from flask_cli import with_appcontext
from flask import current_app
from invenio_db import db
from invenio_indexer.api import RecordIndexer
from invenio_records.api import Record
from .migration import (download_v1_data, process_v1_record, main_diff,
make_v2_index, records_endpoint, directly_list_v2_record_ids)
@click.group()
def migrate():
"""Migration commands. WARNING csc only."""
@migrate.command()
@with_appcontext
@click.option('-v', '--verbose', count=True)
@click.option('-d', '--download', is_flag=True, default=False)
@click.option('-l', '--limit', default=None)
@click.argument('token')
@click.argument('download_directory')
def import_v1_data(verbose, download, token, download_directory,limit):
click.secho("Importing data to the current instance")
logger = logging.getLogger("sqlalchemy.engine")
logger.setLevel(logging.ERROR)
logfile = open(current_app.config.get('MIGRATION_LOGFILE'), 'a')
logfile.write("\n\n\n~~~ Starting import task download={} limit={}"
.format(download, limit))
if os.path.isdir(download_directory):
os.chdir(download_directory)
else:
        raise click.ClickException("%s does not exist or is not a directory. If you want to import "
                                   "records, specify an empty, existing directory."
                                   % download_directory)
if limit and not download:
raise click.ClickException("Limit can only be set with download")
if download:
filelist = os.listdir('.')
if len(filelist) > 0:
click.secho("!!! Downloading data into existing directory, "
"overwriting previous data", fg='red')
click.secho("----------")
click.secho("Downloading data into directory %s" % download_directory)
if limit is not None:
limit = int(limit)
click.secho("Limiting to %d records for debug purposes" % limit)
download_v1_data(token, download_directory, logfile, limit)
indexer = RecordIndexer(record_to_index=lambda record: ('records', 'record'))
dirlist = os.listdir('.')
click.secho("-----------")
click.secho("Processing %d downloaded records" % (len(dirlist)))
base_url = urlunsplit((
current_app.config.get('PREFERRED_URL_SCHEME', 'http'),
# current_app.config['SERVER_NAME'],
current_app.config['JSONSCHEMAS_HOST'],
current_app.config.get('APPLICATION_ROOT') or '', '', ''
))
for d in dirlist:
try:
process_v1_record(d, indexer, base_url, logfile)
except:
logfile.write("\n********************")
logfile.write("\nERROR: exception while processing record /{}/___record.json___\n"
.format(d))
logfile.write(traceback.format_exc())
logfile.write("\n********************")
logfile.close()
@migrate.command()
@with_appcontext
@click.option('-u', '--update', is_flag=True, default=False)
@click.argument('base_url')
def check_pids(update, base_url):
""" Checks and optionally fixes ePIC PIDs from records in the `base_url`.
The ePIC PIDs in the first 1000 records of the `base_url` B2SHARE site
are checked. The PIDs are extracted from the main ePIC_PID field and
the alternative_identifiers fields (based on the type being equal to
'ePIC_PID'). Only the PIDs starting with the configured ePIC prefix are
considered. If the PID does not point to the record it's contained in,
then an error message is generated. When the `-u` argument is used, the
current configuration variables are used to update the PID with the
correct target URL.
"""
epic_base_url = str(current_app.config.get('CFG_EPIC_BASEURL'))
epic_username = str(current_app.config.get('CFG_EPIC_USERNAME'))
epic_password = str(current_app.config.get('CFG_EPIC_PASSWORD'))
epic_prefix = str(current_app.config.get('CFG_EPIC_PREFIX'))
click.secho('Checking epic pids for all records')
record_search = requests.get(urljoin(base_url, "api/records"),
{'size': 1000, 'page': 1},
verify=False)
records = record_search.json()['hits']['hits']
for rec in records:
recid = str(rec['id'])
click.secho('\n--- Checking epic pids for record {}'.format(recid))
rec_url = rec['links']['self'].replace("/api/records/", "/records/")
metadata = rec['metadata']
epic_list = [aid['alternate_identifier']
for aid in metadata.get('alternate_identifiers', [])
if aid['alternate_identifier_type'] == 'ePIC_PID']
if metadata.get('ePIC_PID'):
epic_list.append(metadata.get('ePIC_PID'))
for epic_url in epic_list:
pid = urlsplit(epic_url).path.strip('/')
if not pid.startswith(epic_prefix):
continue # is not one of our pids
click.secho(' {}'.format(pid))
target_request = requests.get(epic_url, allow_redirects=False)
if target_request.status_code < 300 or target_request.status_code >= 400:
click.secho('Record {}: error retrieving epic pid information: {}'
.format(recid, epic_url),
fg='yellow', bold=True)
continue
target_url = target_request.headers.get('Location')
if is_same_url(target_url, rec_url):
continue
click.secho('Record {}: error: bad epic pid: {}'.format(recid, epic_url),
fg='red', bold=True)
if update:
change_req = requests.put(urljoin(epic_base_url, pid),
json=[{'type': 'URL', 'parsed_data': rec_url}],
auth=(epic_username, epic_password),
headers={'Content-Type': 'application/json',
'Accept': 'application/json'})
if change_req.status_code >= 300:
click.secho('Record {}: error setting epic pid target url: {}, error code {}'
.format(recid, epic_url, change_req.status_code),
fg='red', bold=True)
else:
click.secho('Record {}: fixed epic pid target url: {}'
.format(recid, epic_url),
fg='green', bold=True)
def is_same_url(url1, url2):
u1 = urlsplit(url1)
u2 = urlsplit(url2)
return u1.scheme == u2.scheme and u1.netloc == u2.netloc and \
u1.path == u2.path and u1.query == u2.query
@migrate.command()
@with_appcontext
def diff_sites():
main_diff()
@migrate.command()
@with_appcontext
def swap_pids():
""" Fix the invalid creation of new ePIC_PIDs for migrated files. Swaps
with the old b2share v1 PID that we stored in alternate_identifiers and
puts the wrongly created ePIC_PID in alternate_identifiers. Note this
creates a new version of the invenio record (at the time of writing we do
not show the latest version of invenio record objects)
"""
for search_record in directly_list_v2_record_ids():
recid = search_record.get('_id')
inv_record = Record.get_record(recid)
if inv_record.revision_id >= 1:
print ("Skipping record {}: too many revisions ({}), "
"may have been already updated".format(
recid, inv_record.revision_id))
continue
aids = None
if 'alternate_identifiers' in inv_record.keys():
aids = inv_record['alternate_identifiers']
found = False
found_v1_id = False
for aid in aids:
if aid['alternate_identifier_type']=='B2SHARE_V1_ID':
found_v1_id = True
if aid['alternate_identifier_type']=='ePIC_PID':
new_pid = aid['alternate_identifier']
_pid = inv_record['_pid']
for pid in _pid:
if pid['type']=='ePIC_PID':
old_pid = pid['value']
found = True
break
break
found = found and found_v1_id
if not found:
error_msg = """***** INFO - this record does not have ePIC_PID
in _pid or alternate_identifiers or does not have a
B2SHARE_V1_ID in alternate_identifiers"""
print(error_msg)
print(inv_record['titles'])
print(recid)
print("********")
else:
print("SWAPPING %s %s" % (old_pid, new_pid))
for pid in inv_record['_pid']:
if pid['type']=='ePIC_PID':
pid['value']=new_pid
break
for aid in inv_record['alternate_identifiers']:
if aid['alternate_identifier_type']=='ePIC_PID':
aid['alternate_identifier']=old_pid
break
inv_record.commit()
db.session.commit()
@migrate.command()
@with_appcontext
@click.argument('v1_api_url')
@click.argument('v1_access_token')
@click.argument('v2_api_url')
@click.argument('v2_access_token')
def extract_alternate_identifiers(v1_api_url, v1_access_token, v2_api_url, v2_access_token):
"""Extracting alternate identifiers from v1 records"""
v2_index = make_v2_index(v2_api_url, v2_access_token)
click.secho('Extracting alternate identifiers from v1 records')
params = {'access_token': v1_access_token, 'page_size': 100}
for page in range(0, 7):
params['page_offset'] = page
req = requests.get(records_endpoint(v1_api_url), params=params, verify=False)
req.raise_for_status()
recs = req.json().get('records')
for record in recs:
recid = str(record.get('record_id'))
alternate_identifier = str(record.get('alternate_identifier'))
if not alternate_identifier:
continue
click.secho("alternate_identifier: {}".format(alternate_identifier))
click.secho(" domain: {}".format(record.get('domain')))
click.secho(" old record ID: {}".format(recid))
v2 = v2_index.get(recid)
if v2:
click.secho(" new record ID: {}".format(v2.get('id')))
click.secho(" new record URL: {}".format(v2.get('links', {}).get('self')))
click.secho(" new record PID: {}".format(v2.get('metadata', {}).get('ePIC_PID')))
@migrate.command()
@with_appcontext
@click.argument('v1_api_url')
@click.argument('v1_access_token')
# @click.argument('v2_api_url')
def add_missing_alternate_identifiers(v1_api_url, v1_access_token):
"""Add missing alternate identifiers from v1 records to the published v2
records in the current instance"""
v2_index = make_v2_index(None, None) # make index of current site
# v2_index = make_v2_index(v2_api_url, None)
click.secho('Adding missing alternate identifiers from v1 records')
params = {'access_token': v1_access_token, 'page_size': 100}
for page in range(0, 7):
params['page_offset'] = page
req = requests.get(records_endpoint(v1_api_url), params=params, verify=False)
req.raise_for_status()
for v1_record in req.json().get('records'):
v1_recid = str(v1_record.get('record_id'))
alternate_identifier = str(v1_record.get('alternate_identifier', '')).strip()
if not alternate_identifier:
continue
ai_type = guess_alternate_identifier_type(alternate_identifier)
click.secho("alternate_identifier: {}"
"\n\told id: {}\n\taltid type: {}".format(
alternate_identifier, v1_recid, ai_type))
if not v2_index.get(v1_recid):
click.secho("\tcannot find recid {}".format(v1_recid), fg='red')
continue
record_search = v2_index.get(v1_recid)
v2_recid = record_search.get('id') or record_search.get('_id')<|fim▁hole|> record = Record.get_record(v2_recid)
# record = v2_index.get(v1_recid).get('metadata')
exists = [ai for ai in record.get('alternate_identifiers', [])
if ai.get('alternate_identifier') == alternate_identifier]
if exists:
click.secho("\talready present in record: {}".format(v2_recid))
else:
ais = record.get('alternate_identifiers', [])
new_ai = {'alternate_identifier': alternate_identifier,
'alternate_identifier_type': ai_type}
ais.insert(0, new_ai)
record['alternate_identifiers'] = ais
record.commit()
click.secho("\tupdated new record: {}".format(v2_recid))
db.session.commit()
def guess_alternate_identifier_type(aid):
for x in ['http://dx.doi.org/', 'http://doi.org/', 'doi.org', 'dx.doi.org', 'doi.', '10.']:
if aid.startswith(x):
return 'DOI'
if aid.startswith('URN:'):
return 'URN'
if aid.startswith('http://') or aid.startswith('https://'):
return 'URL'
return 'Other'<|fim▁end|> | |
<|file_name|>second_urls_for_apphook_tests.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from django.conf.urls.defaults import handler500, handler404, patterns, include, \
url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^jsi18n/(?P<packages>\S+?)/$', 'django.views.i18n.javascript_catalog'),
url(r'^media/cms/(?P<path>.*)$', 'django.views.static.serve',<|fim▁hole|>)<|fim▁end|> | {'document_root': settings.CMS_MEDIA_ROOT, 'show_indexes': True}),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
url(r'^', include('cms.test_utils.project.second_cms_urls_for_apphook_tests')), |
<|file_name|>qdocumentconstructor.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation ([email protected])
**
** This file is part of the QtXmlPatterns module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial Usage
** Licensees holding valid Qt Commercial licenses may use this file in
** accordance with the Qt Commercial License Agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Nokia.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
** If you have questions regarding the use of this file, please contact
** Nokia at [email protected].
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qcommonsequencetypes_p.h"
#include "qdocumentcontentvalidator_p.h"
#include "qnodebuilder_p.h"
#include "qdocumentconstructor_p.h"
QT_BEGIN_NAMESPACE
using namespace QPatternist;
DocumentConstructor::DocumentConstructor(const Expression::Ptr &op) : SingleContainer(op)
{
}
Item DocumentConstructor::evaluateSingleton(const DynamicContext::Ptr &context) const
{
NodeBuilder::Ptr nodeBuilder(context->nodeBuilder(m_staticBaseURI));
DocumentContentValidator validator(nodeBuilder.data(), context, ConstPtr(this));
const DynamicContext::Ptr receiverContext(context->createReceiverContext(&validator));
validator.startDocument();
m_operand->evaluateToSequenceReceiver(receiverContext);
validator.endDocument();
const QAbstractXmlNodeModel::Ptr nm(nodeBuilder->builtDocument());
context->addNodeModel(nm);
return nm->root(QXmlNodeModelIndex());
}
void DocumentConstructor::evaluateToSequenceReceiver(const DynamicContext::Ptr &context) const
{<|fim▁hole|>
const DynamicContext::Ptr receiverContext(context->createReceiverContext(&validator));
validator.startDocument();
m_operand->evaluateToSequenceReceiver(receiverContext);
validator.endDocument();
}
Expression::Ptr DocumentConstructor::typeCheck(const StaticContext::Ptr &context,
const SequenceType::Ptr &reqType)
{
m_staticBaseURI = context->baseURI();
return SingleContainer::typeCheck(context, reqType);
}
SequenceType::Ptr DocumentConstructor::staticType() const
{
return CommonSequenceTypes::ExactlyOneDocumentNode;
}
SequenceType::List DocumentConstructor::expectedOperandTypes() const
{
SequenceType::List result;
result.append(CommonSequenceTypes::ZeroOrMoreItems);
return result;
}
Expression::Properties DocumentConstructor::properties() const
{
return DisableElimination | IsNodeConstructor;
}
ExpressionVisitorResult::Ptr
DocumentConstructor::accept(const ExpressionVisitor::Ptr &visitor) const
{
return visitor->visit(this);
}
QT_END_NAMESPACE<|fim▁end|> | QAbstractXmlReceiver *const receiver = context->outputReceiver();
DocumentContentValidator validator(receiver, context, ConstPtr(this)); |
<|file_name|>weather.py<|end_file_name|><|fim▁begin|>"""Platform for retrieving meteorological data from Environment Canada."""
import datetime
import re
from env_canada import ECData # pylint: disable=import-error
import voluptuous as vol
from homeassistant.components.weather import (
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TEMP,
ATTR_FORECAST_TEMP_LOW,
ATTR_FORECAST_TIME,
PLATFORM_SCHEMA,
WeatherEntity,
)
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, TEMP_CELSIUS
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt
CONF_FORECAST = "forecast"
CONF_ATTRIBUTION = "Data provided by Environment Canada"
CONF_STATION = "station"
def validate_station(station):
"""Check that the station ID is well-formed."""
if station is None:
return
if not re.fullmatch(r"[A-Z]{2}/s0000\d{3}", station):
raise vol.error.Invalid('Station ID must be of the form "XX/s0000###"')
return station
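    # A well-formed ID looks like "ON/s0000430" (a hypothetical example, not a
    # verified station); anything else is rejected by the check above.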
<|fim▁hole|>PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_STATION): validate_station,
vol.Inclusive(CONF_LATITUDE, "latlon"): cv.latitude,
vol.Inclusive(CONF_LONGITUDE, "latlon"): cv.longitude,
vol.Optional(CONF_FORECAST, default="daily"): vol.In(["daily", "hourly"]),
}
)
# Icon codes from http://dd.weatheroffice.ec.gc.ca/citypage_weather/
# docs/current_conditions_icon_code_descriptions_e.csv
ICON_CONDITION_MAP = {
"sunny": [0, 1],
"clear-night": [30, 31],
"partlycloudy": [2, 3, 4, 5, 22, 32, 33, 34, 35],
"cloudy": [10],
"rainy": [6, 9, 11, 12, 28, 36],
"lightning-rainy": [19, 39, 46, 47],
"pouring": [13],
"snowy-rainy": [7, 14, 15, 27, 37],
"snowy": [8, 16, 17, 18, 25, 26, 38, 40],
"windy": [43],
"fog": [20, 21, 23, 24, 44],
"hail": [26, 27],
}
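# Example: icon code 13 resolves to "pouring" via icon_code_to_condition()
# at the bottom of this module; codes absent from the map resolve to None.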
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the Environment Canada weather."""
if config.get(CONF_STATION):
ec_data = ECData(station_id=config[CONF_STATION])
else:
lat = config.get(CONF_LATITUDE, hass.config.latitude)
lon = config.get(CONF_LONGITUDE, hass.config.longitude)
ec_data = ECData(coordinates=(lat, lon))
add_devices([ECWeather(ec_data, config)])
class ECWeather(WeatherEntity):
"""Representation of a weather condition."""
def __init__(self, ec_data, config):
"""Initialize Environment Canada weather."""
self.ec_data = ec_data
self.platform_name = config.get(CONF_NAME)
self.forecast_type = config[CONF_FORECAST]
@property
def attribution(self):
"""Return the attribution."""
return CONF_ATTRIBUTION
@property
def name(self):
"""Return the name of the weather entity."""
if self.platform_name:
return self.platform_name
return self.ec_data.metadata.get("location")
@property
def temperature(self):
"""Return the temperature."""
if self.ec_data.conditions.get("temperature", {}).get("value"):
return float(self.ec_data.conditions["temperature"]["value"])
if self.ec_data.hourly_forecasts[0].get("temperature"):
return float(self.ec_data.hourly_forecasts[0]["temperature"])
return None
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def humidity(self):
"""Return the humidity."""
if self.ec_data.conditions.get("humidity", {}).get("value"):
return float(self.ec_data.conditions["humidity"]["value"])
return None
@property
def wind_speed(self):
"""Return the wind speed."""
if self.ec_data.conditions.get("wind_speed", {}).get("value"):
return float(self.ec_data.conditions["wind_speed"]["value"])
return None
@property
def wind_bearing(self):
"""Return the wind bearing."""
if self.ec_data.conditions.get("wind_bearing", {}).get("value"):
return float(self.ec_data.conditions["wind_bearing"]["value"])
return None
@property
def pressure(self):
"""Return the pressure."""
if self.ec_data.conditions.get("pressure", {}).get("value"):
return 10 * float(self.ec_data.conditions["pressure"]["value"])
return None
@property
def visibility(self):
"""Return the visibility."""
if self.ec_data.conditions.get("visibility", {}).get("value"):
return float(self.ec_data.conditions["visibility"]["value"])
return None
@property
def condition(self):
"""Return the weather condition."""
icon_code = None
if self.ec_data.conditions.get("icon_code", {}).get("value"):
icon_code = self.ec_data.conditions["icon_code"]["value"]
elif self.ec_data.hourly_forecasts[0].get("icon_code"):
icon_code = self.ec_data.hourly_forecasts[0]["icon_code"]
if icon_code:
return icon_code_to_condition(int(icon_code))
return ""
@property
def forecast(self):
"""Return the forecast array."""
return get_forecast(self.ec_data, self.forecast_type)
def update(self):
"""Get the latest data from Environment Canada."""
self.ec_data.update()
def get_forecast(ec_data, forecast_type):
"""Build the forecast array."""
forecast_array = []
if forecast_type == "daily":
half_days = ec_data.daily_forecasts
if half_days[0]["temperature_class"] == "high":
forecast_array.append(
{
ATTR_FORECAST_TIME: dt.now().isoformat(),
ATTR_FORECAST_TEMP: int(half_days[0]["temperature"]),
ATTR_FORECAST_TEMP_LOW: int(half_days[1]["temperature"]),
ATTR_FORECAST_CONDITION: icon_code_to_condition(
int(half_days[0]["icon_code"])
),
ATTR_FORECAST_PRECIPITATION_PROBABILITY: int(
half_days[0]["precip_probability"]
),
}
)
half_days = half_days[2:]
else:
half_days = half_days[1:]
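        # After trimming, the half-day entries alternate: even indices hold
        # day n's daytime high, odd indices the following night's low.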
for day, high, low in zip(range(1, 6), range(0, 9, 2), range(1, 10, 2)):
forecast_array.append(
{
ATTR_FORECAST_TIME: (
dt.now() + datetime.timedelta(days=day)
).isoformat(),
ATTR_FORECAST_TEMP: int(half_days[high]["temperature"]),
ATTR_FORECAST_TEMP_LOW: int(half_days[low]["temperature"]),
ATTR_FORECAST_CONDITION: icon_code_to_condition(
int(half_days[high]["icon_code"])
),
ATTR_FORECAST_PRECIPITATION_PROBABILITY: int(
half_days[high]["precip_probability"]
),
}
)
elif forecast_type == "hourly":
hours = ec_data.hourly_forecasts
for hour in range(0, 24):
forecast_array.append(
{
ATTR_FORECAST_TIME: dt.as_local(
datetime.datetime.strptime(hours[hour]["period"], "%Y%m%d%H%M")
).isoformat(),
ATTR_FORECAST_TEMP: int(hours[hour]["temperature"]),
ATTR_FORECAST_CONDITION: icon_code_to_condition(
int(hours[hour]["icon_code"])
),
ATTR_FORECAST_PRECIPITATION_PROBABILITY: int(
hours[hour]["precip_probability"]
),
}
)
return forecast_array
def icon_code_to_condition(icon_code):
"""Return the condition corresponding to an icon code."""
for condition, codes in ICON_CONDITION_MAP.items():
if icon_code in codes:
return condition
return None<|fim▁end|> | |
<|file_name|>binary_tree_postorder_traversal_iter.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#coding: utf-8
# Definition for a binary tree node
class TreeNode:<|fim▁hole|> self.right = None
class Solution:
# @param root, a tree node
# @return a list of integers
def postorderTraversal(self, root):
        # Iterative postorder with one stack: visit root-right-left, then
        # reverse the result to obtain left-right-root order.
        result, stack = [], [root] if root else []
        while stack:
            node = stack.pop()
            result.append(node.val)
            if node.left:
                stack.append(node.left)
            if node.right:
                stack.append(node.right)
        return result[::-1]<|fim▁end|> | def __init__(self, x):
self.val = x
self.left = None |
<|file_name|>SessionShiny.cpp<|end_file_name|><|fim▁begin|>/*
* SessionShiny.cpp
*
* Copyright (C) 2009-12 by RStudio, Inc.
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
#include "SessionShiny.hpp"
#include <boost/algorithm/string/predicate.hpp>
#include <core/Error.hpp>
#include <core/Exec.hpp>
#include <r/RExec.hpp>
#include <session/SessionOptions.hpp>
#include <session/SessionModuleContext.hpp><|fim▁hole|>
namespace session {
namespace modules {
namespace shiny {
namespace {
void onPackageLoaded(const std::string& pkgname)
{
// we need an up to date version of shiny when running in server mode
// to get the websocket protocol/path and port randomizing changes
if (session::options().programMode() == kSessionProgramModeServer)
{
if (pkgname == "shiny")
{
if (!module_context::isPackageVersionInstalled("shiny", "0.8"))
{
module_context::consoleWriteError("\nWARNING: To run Shiny "
"applications with RStudio you need to install the "
"latest version of the Shiny package from CRAN (version 0.8 "
"or higher is required).\n\n");
}
}
}
}
bool isShinyAppDir(const FilePath& filePath)
{
bool hasServer = filePath.childPath("server.R").exists() ||
filePath.childPath("server.r").exists();
if (hasServer)
{
bool hasUI = filePath.childPath("ui.R").exists() ||
filePath.childPath("ui.r").exists() ||
filePath.childPath("www").exists();
return hasUI;
}
else
{
return false;
}
}
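// In other words, a Shiny app directory is expected to look like this
// (hypothetical layout):
//
//   app/
//     server.R         (required)
//     ui.R or www/     (at least one of these)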
std::string onDetectShinySourceType(
boost::shared_ptr<source_database::SourceDocument> pDoc)
{
const char * const kShinyType = "shiny";
if (!pDoc->path().empty())
{
FilePath filePath = module_context::resolveAliasedPath(pDoc->path());
std::string filename = filePath.filename();
if (boost::algorithm::iequals(filename, "ui.r") &&
boost::algorithm::icontains(pDoc->contents(), "shinyUI"))
{
return kShinyType;
}
else if (boost::algorithm::iequals(filename, "server.r") &&
boost::algorithm::icontains(pDoc->contents(), "shinyServer"))
{
return kShinyType;
}
else if (filePath.extensionLowerCase() == ".r" &&
isShinyAppDir(filePath.parent()))
{
return kShinyType;
}
}
return std::string();
}
Error getShinyCapabilities(const json::JsonRpcRequest& request,
json::JsonRpcResponse* pResponse)
{
json::Object capsJson;
capsJson["installed"] = module_context::isPackageInstalled("shiny");
pResponse->setResult(capsJson);
return Success();
}
} // anonymous namespace
Error initialize()
{
using namespace module_context;
events().onPackageLoaded.connect(onPackageLoaded);
// run app features require shiny v0.8 (the version where the
// shiny.launch.browser option can be a function)
if (module_context::isPackageVersionInstalled("shiny", "0.8"))
events().onDetectSourceExtendedType.connect(onDetectShinySourceType);
ExecBlock initBlock;
initBlock.addFunctions()
(boost::bind(registerRpcMethod, "get_shiny_capabilities", getShinyCapabilities));
return initBlock.execute();
}
} // namespace shiny
} // namespace modules
} // namespace session<|fim▁end|> |
using namespace core; |
<|file_name|>query_test.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2020 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package main
import (
"context"
"os"
"testing"
"github.com/golang/mock/gomock"
"github.com/stretchr/testify/assert"
"github.com/uber-go/dosa"
"github.com/uber-go/dosa/mocks"
)
func TestQuery_ServiceDefault(t *testing.T) {
tcs := []struct {
serviceName string
expected string
}{
// service = "" -> default
{
expected: _defServiceName,
},
// service = "foo" -> foo
{
serviceName: "foo",
expected: "foo",
},
}
for _, tc := range tcs {
for _, cmd := range []string{"read", "range"} {
os.Args = []string{
"dosa",
"--service", tc.serviceName,
"query",
cmd,
"--namePrefix", "foo",
"--scope", "bar",
"--path", "../../testentity",
"TestEntity",
"StrKey:eq:foo",
}
main()
assert.Equal(t, tc.expected, options.ServiceName)
}
}
}
func TestQuery_Read_Happy(t *testing.T) {
ctrl := gomock.NewController(t)
defer ctrl.Finish()
mc := mocks.NewMockConnector(ctrl)
mc.EXPECT().Read(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).
Do(func(_ context.Context, ei *dosa.EntityInfo, keys map[string]dosa.FieldValue, minimumFields []string) {
assert.NotNil(t, ei)
assert.Equal(t, dosa.FieldValue("foo"), keys["strkey"])
assert.Equal(t, []string{"strkey", "int64key"}, minimumFields)
}).Return(map[string]dosa.FieldValue{}, nil).MinTimes(1)
mc.EXPECT().Shutdown().Return(nil)
table, err := dosa.FindEntityByName("../../testentity", "TestEntity")
assert.NoError(t, err)
reg, err := newSimpleRegistrar(scope, namePrefix, table)
assert.NoError(t, err)
provideClient := func(opts GlobalOptions, scope, prefix, path, structName string) (ShellQueryClient, error) {
return newShellQueryClient(reg, mc), nil
}
queryRead := QueryRead{
QueryCmd: &QueryCmd{
QueryOptions: &QueryOptions{
Fields: "StrKey,Int64Key",
},
Scope: scopeFlag("scope"),
NamePrefix: "foo",
Path: "../../testentity",
provideClient: provideClient,
},
}
queryRead.Args.EntityName = "TestEntity"
queryRead.Args.Queries = []string{"StrKey:eq:foo"}
err = queryRead.Execute([]string{})
assert.NoError(t, err)
}
func TestQuery_Range_Happy(t *testing.T) {
ctrl := gomock.NewController(t)
defer ctrl.Finish()
mc := mocks.NewMockConnector(ctrl)
mc.EXPECT().Range(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).
Do(func(_ context.Context, ei *dosa.EntityInfo, columnConditions map[string][]*dosa.Condition, minimumFields []string, token string, limit int) {
assert.NotNil(t, ei)
assert.Len(t, columnConditions, 1)
assert.Len(t, columnConditions["int64key"], 1)
assert.Equal(t, []string{"strkey", "int64key"}, minimumFields)
}).Return([]map[string]dosa.FieldValue{{"key": "value"}}, "", nil)
mc.EXPECT().Shutdown().Return(nil)
table, err := dosa.FindEntityByName("../../testentity", "TestEntity")
assert.NoError(t, err)
reg, err := newSimpleRegistrar(scope, namePrefix, table)
assert.NoError(t, err)
provideClient := func(opts GlobalOptions, scope, prefix, path, structName string) (ShellQueryClient, error) {
return newShellQueryClient(reg, mc), nil
}
queryRange := QueryRange{
QueryCmd: &QueryCmd{
QueryOptions: &QueryOptions{
Fields: "StrKey,Int64Key",
},
Scope: scopeFlag("scope"),
NamePrefix: "foo",
Path: "../../testentity",
provideClient: provideClient,
},
}
queryRange.Args.EntityName = "TestEntity"
queryRange.Args.Queries = []string{"Int64Key:lt:200"}
err = queryRange.Execute([]string{})
assert.NoError(t, err)
}
func TestQuery_NewQueryObj(t *testing.T) {
qo := newQueryObj("StrKey", "eq", "foo")
assert.NotNil(t, qo)
assert.Equal(t, "StrKey", qo.fieldName)
assert.Equal(t, "eq", qo.op)
assert.Equal(t, "foo", qo.valueStr)
}
func TestQuery_ScopeRequired(t *testing.T) {
for _, cmd := range []string{"read", "range"} {
c := StartCapture()
exit = func(r int) {}
os.Args = []string{
"dosa",
"query",
cmd,
"--namePrefix", "foo",
"--path", "../../testentity",
"TestEntity",
"StrKey:eq:foo",
}
main()
assert.Contains(t, c.stop(true), "-s, --scope' was not specified")
}
}
func TestQuery_PrefixRequired(t *testing.T) {
for _, cmd := range []string{"read", "range"} {
c := StartCapture()
exit = func(r int) {}
os.Args = []string{
"dosa",
"query",
cmd,
"--scope", "foo",
"--path", "../../testentity",
"TestEntity",
"StrKey:eq:foo",
}
main()
assert.Contains(t, c.stop(true), "--namePrefix' was not specified")
}
}
func TestQuery_PathRequired(t *testing.T) {
for _, cmd := range []string{"read", "range"} {
c := StartCapture()
exit = func(r int) {}
os.Args = []string{
"dosa",
"query",
cmd,
"--scope", "foo",<|fim▁hole|> assert.Contains(t, c.stop(true), "--path' was not specified")
}
}
func TestQuery_NoEntityFound(t *testing.T) {
for _, cmd := range []string{"read", "range"} {
c := StartCapture()
exit = func(r int) {}
os.Args = []string{
"dosa",
"query",
cmd,
"--scope", "foo",
"--namePrefix", "foo",
"--path", "../../testentity",
"TestEntity1",
"StrKey:eq:foo",
}
main()
assert.Contains(t, c.stop(true), "no entity named TestEntity1 found")
}
}<|fim▁end|> | "--namePrefix", "foo",
"StrKey:eq:foo",
}
main() |
<|file_name|>test_quantity_ufuncs.py<|end_file_name|><|fim▁begin|># The purpose of these tests are to ensure that calling ufuncs with quantities
# returns quantities with the right units, or raises exceptions.
import warnings
import pytest
import numpy as np
from numpy.testing.utils import assert_allclose
from ... import units as u
from ...tests.helper import raises
from ...extern.six.moves import zip
from ...utils.compat import NUMPY_LT_1_13
class TestUfuncCoverage(object):
"""Test that we cover all ufunc's"""
def test_coverage(self):
all_np_ufuncs = set([ufunc for ufunc in np.core.umath.__dict__.values()
if type(ufunc) == np.ufunc])
from .. import quantity_helper as qh
all_q_ufuncs = (qh.UNSUPPORTED_UFUNCS |
set(qh.UFUNC_HELPERS.keys()))
assert all_np_ufuncs - all_q_ufuncs == set([])
assert all_q_ufuncs - all_np_ufuncs == set([])
class TestQuantityTrigonometricFuncs(object):
"""
Test trigonometric functions
"""
def test_sin_scalar(self):
q = np.sin(30. * u.degree)
assert q.unit == u.dimensionless_unscaled
assert_allclose(q.value, 0.5)
def test_sin_array(self):
q = np.sin(np.array([0., np.pi / 4., np.pi / 2.]) * u.radian)
assert q.unit == u.dimensionless_unscaled
assert_allclose(q.value,
np.array([0., 1. / np.sqrt(2.), 1.]), atol=1.e-15)
def test_arcsin_scalar(self):
q1 = 30. * u.degree
q2 = np.arcsin(np.sin(q1)).to(q1.unit)
assert_allclose(q1.value, q2.value)
def test_arcsin_array(self):
q1 = np.array([0., np.pi / 4., np.pi / 2.]) * u.radian
q2 = np.arcsin(np.sin(q1)).to(q1.unit)
assert_allclose(q1.value, q2.value)
def test_sin_invalid_units(self):
with pytest.raises(TypeError) as exc:
np.sin(3. * u.m)
assert exc.value.args[0] == ("Can only apply 'sin' function "
"to quantities with angle units")
def test_arcsin_invalid_units(self):
with pytest.raises(TypeError) as exc:
np.arcsin(3. * u.m)
assert exc.value.args[0] == ("Can only apply 'arcsin' function to "
"dimensionless quantities")
def test_arcsin_no_warning_on_unscaled_quantity(self):
a = 15 * u.kpc
b = 27 * u.pc
with warnings.catch_warnings():
warnings.filterwarnings('error')
np.arcsin(b/a)
def test_cos_scalar(self):
q = np.cos(np.pi / 3. * u.radian)
assert q.unit == u.dimensionless_unscaled
assert_allclose(q.value, 0.5)
def test_cos_array(self):
q = np.cos(np.array([0., np.pi / 4., np.pi / 2.]) * u.radian)
assert q.unit == u.dimensionless_unscaled
assert_allclose(q.value,
np.array([1., 1. / np.sqrt(2.), 0.]), atol=1.e-15)
def test_arccos_scalar(self):
q1 = np.pi / 3. * u.radian
q2 = np.arccos(np.cos(q1)).to(q1.unit)
assert_allclose(q1.value, q2.value)
def test_arccos_array(self):
q1 = np.array([0., np.pi / 4., np.pi / 2.]) * u.radian
q2 = np.arccos(np.cos(q1)).to(q1.unit)
assert_allclose(q1.value, q2.value)
def test_cos_invalid_units(self):
with pytest.raises(TypeError) as exc:
np.cos(3. * u.s)
assert exc.value.args[0] == ("Can only apply 'cos' function "
"to quantities with angle units")
def test_arccos_invalid_units(self):
with pytest.raises(TypeError) as exc:
np.arccos(3. * u.s)
assert exc.value.args[0] == ("Can only apply 'arccos' function to "
"dimensionless quantities")
def test_tan_scalar(self):
q = np.tan(np.pi / 3. * u.radian)
assert q.unit == u.dimensionless_unscaled
assert_allclose(q.value, np.sqrt(3.))
def test_tan_array(self):
q = np.tan(np.array([0., 45., 135., 180.]) * u.degree)
assert q.unit == u.dimensionless_unscaled
assert_allclose(q.value,
np.array([0., 1., -1., 0.]), atol=1.e-15)
def test_arctan_scalar(self):
q = np.pi / 3. * u.radian
assert np.arctan(np.tan(q))
def test_arctan_array(self):
q = np.array([10., 30., 70., 80.]) * u.degree
assert_allclose(np.arctan(np.tan(q)).to_value(q.unit), q.value)
def test_tan_invalid_units(self):
with pytest.raises(TypeError) as exc:
np.tan(np.array([1, 2, 3]) * u.N)
assert exc.value.args[0] == ("Can only apply 'tan' function "
"to quantities with angle units")
def test_arctan_invalid_units(self):
with pytest.raises(TypeError) as exc:
np.arctan(np.array([1, 2, 3]) * u.N)
assert exc.value.args[0] == ("Can only apply 'arctan' function to "
"dimensionless quantities")
def test_arctan2_valid(self):
q1 = np.array([10., 30., 70., 80.]) * u.m
q2 = 2.0 * u.km
assert np.arctan2(q1, q2).unit == u.radian
assert_allclose(np.arctan2(q1, q2).value,
np.arctan2(q1.value, q2.to_value(q1.unit)))
q3 = q1 / q2
q4 = 1.
at2 = np.arctan2(q3, q4)
assert_allclose(at2.value, np.arctan2(q3.to_value(1), q4))
def test_arctan2_invalid(self):
with pytest.raises(u.UnitsError) as exc:
np.arctan2(np.array([1, 2, 3]) * u.N, 1. * u.s)
assert "compatible dimensions" in exc.value.args[0]
with pytest.raises(u.UnitsError) as exc:
np.arctan2(np.array([1, 2, 3]) * u.N, 1.)
assert "dimensionless quantities when other arg" in exc.value.args[0]
def test_radians(self):
q1 = np.deg2rad(180. * u.degree)
assert_allclose(q1.value, np.pi)
assert q1.unit == u.radian
q2 = np.radians(180. * u.degree)
assert_allclose(q2.value, np.pi)
assert q2.unit == u.radian
# the following doesn't make much sense in terms of the name of the
# routine, but we check it gives the correct result.
q3 = np.deg2rad(3. * u.radian)
assert_allclose(q3.value, 3.)
assert q3.unit == u.radian
q4 = np.radians(3. * u.radian)
assert_allclose(q4.value, 3.)
assert q4.unit == u.radian
with pytest.raises(TypeError):
np.deg2rad(3. * u.m)
with pytest.raises(TypeError):
np.radians(3. * u.m)
def test_degrees(self):
# the following doesn't make much sense in terms of the name of the
# routine, but we check it gives the correct result.
q1 = np.rad2deg(60. * u.degree)
assert_allclose(q1.value, 60.)
assert q1.unit == u.degree
q2 = np.degrees(60. * u.degree)
assert_allclose(q2.value, 60.)
assert q2.unit == u.degree
q3 = np.rad2deg(np.pi * u.radian)
assert_allclose(q3.value, 180.)
assert q3.unit == u.degree
q4 = np.degrees(np.pi * u.radian)
assert_allclose(q4.value, 180.)
assert q4.unit == u.degree
with pytest.raises(TypeError):
np.rad2deg(3. * u.m)
with pytest.raises(TypeError):
np.degrees(3. * u.m)
class TestQuantityMathFuncs(object):
"""
Test other mathematical functions
"""
def test_multiply_scalar(self):
assert np.multiply(4. * u.m, 2. / u.s) == 8. * u.m / u.s
assert np.multiply(4. * u.m, 2.) == 8. * u.m
assert np.multiply(4., 2. / u.s) == 8. / u.s
def test_multiply_array(self):
assert np.all(np.multiply(np.arange(3.) * u.m, 2. / u.s) ==
np.arange(0, 6., 2.) * u.m / u.s)
@pytest.mark.parametrize('function', (np.divide, np.true_divide))
def test_divide_scalar(self, function):
assert function(4. * u.m, 2. * u.s) == function(4., 2.) * u.m / u.s
assert function(4. * u.m, 2.) == function(4., 2.) * u.m
assert function(4., 2. * u.s) == function(4., 2.) / u.s
@pytest.mark.parametrize('function', (np.divide, np.true_divide))
def test_divide_array(self, function):
assert np.all(function(np.arange(3.) * u.m, 2. * u.s) ==
function(np.arange(3.), 2.) * u.m / u.s)
def test_floor_divide_remainder_and_divmod(self):
inch = u.Unit(0.0254 * u.m)
dividend = np.array([1., 2., 3.]) * u.m
divisor = np.array([3., 4., 5.]) * inch
quotient = dividend // divisor
remainder = dividend % divisor
assert_allclose(quotient.value, [13., 19., 23.])
assert quotient.unit == u.dimensionless_unscaled
assert_allclose(remainder.value, [0.0094, 0.0696, 0.079])
assert remainder.unit == dividend.unit
quotient2 = np.floor_divide(dividend, divisor)
remainder2 = np.remainder(dividend, divisor)
assert np.all(quotient2 == quotient)
assert np.all(remainder2 == remainder)
quotient3, remainder3 = divmod(dividend, divisor)
assert np.all(quotient3 == quotient)
assert np.all(remainder3 == remainder)
with pytest.raises(TypeError):
divmod(dividend, u.km)
with pytest.raises(TypeError):
dividend // u.km
with pytest.raises(TypeError):
dividend % u.km
if hasattr(np, 'divmod'): # not NUMPY_LT_1_13
quotient4, remainder4 = np.divmod(dividend, divisor)
assert np.all(quotient4 == quotient)
assert np.all(remainder4 == remainder)
with pytest.raises(TypeError):
np.divmod(dividend, u.km)
def test_sqrt_scalar(self):
assert np.sqrt(4. * u.m) == 2. * u.m ** 0.5
def test_sqrt_array(self):
assert np.all(np.sqrt(np.array([1., 4., 9.]) * u.m)
== np.array([1., 2., 3.]) * u.m ** 0.5)
def test_square_scalar(self):
assert np.square(4. * u.m) == 16. * u.m ** 2
def test_square_array(self):
assert np.all(np.square(np.array([1., 2., 3.]) * u.m)
== np.array([1., 4., 9.]) * u.m ** 2)
def test_reciprocal_scalar(self):
assert np.reciprocal(4. * u.m) == 0.25 / u.m
def test_reciprocal_array(self):
assert np.all(np.reciprocal(np.array([1., 2., 4.]) * u.m)
== np.array([1., 0.5, 0.25]) / u.m)
# cbrt only introduced in numpy 1.10
# heaviside only introduced in numpy 1.13
@pytest.mark.skipif("not hasattr(np, 'heaviside')")
def test_heaviside_scalar(self):
assert np.heaviside(0. * u.m, 0.5) == 0.5 * u.dimensionless_unscaled
assert np.heaviside(0. * u.s,
25 * u.percent) == 0.25 * u.dimensionless_unscaled
assert np.heaviside(2. * u.J, 0.25) == 1. * u.dimensionless_unscaled
@pytest.mark.skipif("not hasattr(np, 'heaviside')")
def test_heaviside_array(self):
values = np.array([-1., 0., 0., +1.])
halfway = np.array([0.75, 0.25, 0.75, 0.25]) * u.dimensionless_unscaled
assert np.all(np.heaviside(values * u.m,
halfway * u.dimensionless_unscaled) ==
[0, 0.25, 0.75, +1.] * u.dimensionless_unscaled)
@pytest.mark.skipif("not hasattr(np, 'cbrt')")
def test_cbrt_scalar(self):
assert np.cbrt(8. * u.m**3) == 2. * u.m
@pytest.mark.skipif("not hasattr(np, 'cbrt')")
def test_cbrt_array(self):
# Calculate cbrt on both sides since on Windows the cube root of 64
        # does not exactly equal 4. See #4388.
values = np.array([1., 8., 64.])
assert np.all(np.cbrt(values * u.m**3) ==
np.cbrt(values) * u.m)
def test_power_scalar(self):
assert np.power(4. * u.m, 2.) == 16. * u.m ** 2
assert np.power(4., 200. * u.cm / u.m) == \
u.Quantity(16., u.dimensionless_unscaled)
# regression check on #1696
assert np.power(4. * u.m, 0.) == 1. * u.dimensionless_unscaled
def test_power_array(self):
assert np.all(np.power(np.array([1., 2., 3.]) * u.m, 3.)
== np.array([1., 8., 27.]) * u.m ** 3)
# regression check on #1696
assert np.all(np.power(np.arange(4.) * u.m, 0.) ==
1. * u.dimensionless_unscaled)
# float_power only introduced in numpy 1.12
@pytest.mark.skipif("not hasattr(np, 'float_power')")
def test_float_power_array(self):
assert np.all(np.float_power(np.array([1., 2., 3.]) * u.m, 3.)
== np.array([1., 8., 27.]) * u.m ** 3)
# regression check on #1696
assert np.all(np.float_power(np.arange(4.) * u.m, 0.) ==
1. * u.dimensionless_unscaled)
@raises(ValueError)
def test_power_array_array(self):
np.power(4. * u.m, [2., 4.])
@raises(ValueError)
def test_power_array_array2(self):
np.power([2., 4.] * u.m, [2., 4.])
def test_power_array_array3(self):
# Identical unit fractions are converted automatically to dimensionless
# and should be allowed as base for np.power: #4764
q = [2., 4.] * u.m / u.m
powers = [2., 4.]
res = np.power(q, powers)
assert np.all(res.value == q.value ** powers)
assert res.unit == u.dimensionless_unscaled
# The same holds for unit fractions that are scaled dimensionless.
q2 = [2., 4.] * u.m / u.cm
# Test also against different types of exponent
for cls in (list, tuple, np.array, np.ma.array, u.Quantity):
res2 = np.power(q2, cls(powers))
assert np.all(res2.value == q2.to_value(1) ** powers)
assert res2.unit == u.dimensionless_unscaled
# Though for single powers, we keep the composite unit.
res3 = q2 ** 2
assert np.all(res3.value == q2.value ** 2)
assert res3.unit == q2.unit ** 2
assert np.all(res3 == q2 ** [2, 2])
def test_power_invalid(self):
with pytest.raises(TypeError) as exc:
np.power(3., 4. * u.m)
assert "raise something to a dimensionless" in exc.value.args[0]
def test_copysign_scalar(self):
assert np.copysign(3 * u.m, 1.) == 3. * u.m
assert np.copysign(3 * u.m, 1. * u.s) == 3. * u.m
assert np.copysign(3 * u.m, -1.) == -3. * u.m
assert np.copysign(3 * u.m, -1. * u.s) == -3. * u.m
def test_copysign_array(self):
assert np.all(np.copysign(np.array([1., 2., 3.]) * u.s, -1.) == -np.array([1., 2., 3.]) * u.s)
assert np.all(np.copysign(np.array([1., 2., 3.]) * u.s, -1. * u.m) == -np.array([1., 2., 3.]) * u.s)
assert np.all(np.copysign(np.array([1., 2., 3.]) * u.s, np.array([-2., 2., -4.]) * u.m) == np.array([-1., 2., -3.]) * u.s)
q = np.copysign(np.array([1., 2., 3.]), -3 * u.m)
assert np.all(q == np.array([-1., -2., -3.]))
assert not isinstance(q, u.Quantity)
def test_ldexp_scalar(self):
assert np.ldexp(4. * u.m, 2) == 16. * u.m
def test_ldexp_array(self):
assert np.all(np.ldexp(np.array([1., 2., 3.]) * u.m, [3, 2, 1])
== np.array([8., 8., 6.]) * u.m)
def test_ldexp_invalid(self):
with pytest.raises(TypeError):
np.ldexp(3. * u.m, 4.)
with pytest.raises(TypeError):
np.ldexp(3., u.Quantity(4, u.m, dtype=int))
@pytest.mark.parametrize('function', (np.exp, np.expm1, np.exp2,
np.log, np.log2, np.log10, np.log1p))
def test_exp_scalar(self, function):
q = function(3. * u.m / (6. * u.m))
assert q.unit == u.dimensionless_unscaled
assert q.value == function(0.5)
@pytest.mark.parametrize('function', (np.exp, np.expm1, np.exp2,
np.log, np.log2, np.log10, np.log1p))
def test_exp_array(self, function):
q = function(np.array([2., 3., 6.]) * u.m / (6. * u.m))
assert q.unit == u.dimensionless_unscaled
assert np.all(q.value
== function(np.array([1. / 3., 1. / 2., 1.])))
# should also work on quantities that can be made dimensionless
q2 = function(np.array([2., 3., 6.]) * u.m / (6. * u.cm))
assert q2.unit == u.dimensionless_unscaled
assert_allclose(q2.value,
function(np.array([100. / 3., 100. / 2., 100.])))
@pytest.mark.parametrize('function', (np.exp, np.expm1, np.exp2,
np.log, np.log2, np.log10, np.log1p))
def test_exp_invalid_units(self, function):
# Can't use exp() with non-dimensionless quantities
with pytest.raises(TypeError) as exc:
function(3. * u.m / u.s)
assert exc.value.args[0] == ("Can only apply '{0}' function to "
"dimensionless quantities"
.format(function.__name__))
def test_modf_scalar(self):
q = np.modf(9. * u.m / (600. * u.cm))
assert q == (0.5 * u.dimensionless_unscaled,
1. * u.dimensionless_unscaled)
def test_modf_array(self):
v = np.arange(10.) * u.m / (500. * u.cm)
q = np.modf(v)
n = np.modf(v.to_value(u.dimensionless_unscaled))
assert q[0].unit == u.dimensionless_unscaled
assert q[1].unit == u.dimensionless_unscaled
assert all(q[0].value == n[0])
assert all(q[1].value == n[1])
def test_frexp_scalar(self):
q = np.frexp(3. * u.m / (6. * u.m))
assert q == (np.array(0.5), np.array(0.0))
def test_frexp_array(self):
q = np.frexp(np.array([2., 3., 6.]) * u.m / (6. * u.m))
assert all((_q0, _q1) == np.frexp(_d) for _q0, _q1, _d
in zip(q[0], q[1], [1. / 3., 1. / 2., 1.]))
def test_frexp_invalid_units(self):
# Can't use prod() with non-dimensionless quantities
with pytest.raises(TypeError) as exc:
np.frexp(3. * u.m / u.s)
assert exc.value.args[0] == ("Can only apply 'frexp' function to "
"unscaled dimensionless quantities")
# also does not work on quantities that can be made dimensionless
with pytest.raises(TypeError) as exc:
np.frexp(np.array([2., 3., 6.]) * u.m / (6. * u.cm))
assert exc.value.args[0] == ("Can only apply 'frexp' function to "
"unscaled dimensionless quantities")
@pytest.mark.parametrize('function', (np.logaddexp, np.logaddexp2))
def test_dimensionless_twoarg_array(self, function):
q = function(np.array([2., 3., 6.]) * u.m / (6. * u.cm), 1.)
assert q.unit == u.dimensionless_unscaled
assert_allclose(q.value,
function(np.array([100. / 3., 100. / 2., 100.]), 1.))
@pytest.mark.parametrize('function', (np.logaddexp, np.logaddexp2))
def test_dimensionless_twoarg_invalid_units(self, function):
with pytest.raises(TypeError) as exc:
function(1. * u.km / u.s, 3. * u.m / u.s)
assert exc.value.args[0] == ("Can only apply '{0}' function to "
"dimensionless quantities"
.format(function.__name__))
class TestInvariantUfuncs(object):
# np.positive was only added in numpy 1.13.
@pytest.mark.parametrize(('ufunc'), [np.absolute, np.fabs,
np.conj, np.conjugate,
np.negative, np.spacing, np.rint,
np.floor, np.ceil] +
                                         ([np.positive] if hasattr(np, 'positive') else []))
def test_invariant_scalar(self, ufunc):
q_i = 4.7 * u.m
q_o = ufunc(q_i)
assert isinstance(q_o, u.Quantity)
assert q_o.unit == q_i.unit
assert q_o.value == ufunc(q_i.value)
@pytest.mark.parametrize(('ufunc'), [np.absolute, np.conjugate,
np.negative, np.rint,
np.floor, np.ceil])
def test_invariant_array(self, ufunc):
q_i = np.array([-3.3, 2.1, 10.2]) * u.kg / u.s
q_o = ufunc(q_i)
assert isinstance(q_o, u.Quantity)
assert q_o.unit == q_i.unit
assert np.all(q_o.value == ufunc(q_i.value))
@pytest.mark.parametrize(('ufunc'), [np.add, np.subtract, np.hypot,
np.maximum, np.minimum, np.nextafter,
np.remainder, np.mod, np.fmod])
def test_invariant_twoarg_scalar(self, ufunc):
q_i1 = 4.7 * u.m
q_i2 = 9.4 * u.km
q_o = ufunc(q_i1, q_i2)
assert isinstance(q_o, u.Quantity)
assert q_o.unit == q_i1.unit
assert_allclose(q_o.value, ufunc(q_i1.value, q_i2.to_value(q_i1.unit)))
@pytest.mark.parametrize(('ufunc'), [np.add, np.subtract, np.hypot,
np.maximum, np.minimum, np.nextafter,
np.remainder, np.mod, np.fmod])
def test_invariant_twoarg_array(self, ufunc):
q_i1 = np.array([-3.3, 2.1, 10.2]) * u.kg / u.s
q_i2 = np.array([10., -5., 1.e6]) * u.g / u.us
q_o = ufunc(q_i1, q_i2)
assert isinstance(q_o, u.Quantity)
assert q_o.unit == q_i1.unit
assert_allclose(q_o.value, ufunc(q_i1.value, q_i2.to_value(q_i1.unit)))
@pytest.mark.parametrize(('ufunc'), [np.add, np.subtract, np.hypot,
np.maximum, np.minimum, np.nextafter,
np.remainder, np.mod, np.fmod])
def test_invariant_twoarg_one_arbitrary(self, ufunc):
q_i1 = np.array([-3.3, 2.1, 10.2]) * u.kg / u.s
arbitrary_unit_value = np.array([0.])
q_o = ufunc(q_i1, arbitrary_unit_value)
assert isinstance(q_o, u.Quantity)
assert q_o.unit == q_i1.unit
assert_allclose(q_o.value, ufunc(q_i1.value, arbitrary_unit_value))
@pytest.mark.parametrize(('ufunc'), [np.add, np.subtract, np.hypot,
np.maximum, np.minimum, np.nextafter,
np.remainder, np.mod, np.fmod])
def test_invariant_twoarg_invalid_units(self, ufunc):
q_i1 = 4.7 * u.m
q_i2 = 9.4 * u.s
with pytest.raises(u.UnitsError) as exc:
ufunc(q_i1, q_i2)
assert "compatible dimensions" in exc.value.args[0]
class TestComparisonUfuncs(object):
@pytest.mark.parametrize(('ufunc'), [np.greater, np.greater_equal,
np.less, np.less_equal,
np.not_equal, np.equal])
def test_comparison_valid_units(self, ufunc):
q_i1 = np.array([-3.3, 2.1, 10.2]) * u.kg / u.s
q_i2 = np.array([10., -5., 1.e6]) * u.g / u.Ms
q_o = ufunc(q_i1, q_i2)
assert not isinstance(q_o, u.Quantity)
assert q_o.dtype == np.bool
assert np.all(q_o == ufunc(q_i1.value, q_i2.to_value(q_i1.unit)))
q_o2 = ufunc(q_i1 / q_i2, 2.)
assert not isinstance(q_o2, u.Quantity)
assert q_o2.dtype == np.bool
assert np.all(q_o2 == ufunc((q_i1 / q_i2)
.to_value(u.dimensionless_unscaled), 2.))<|fim▁hole|> for arbitrary_unit_value in (0., np.inf, np.nan):
ufunc(q_i1, arbitrary_unit_value)
ufunc(q_i1, arbitrary_unit_value*np.ones(len(q_i1)))
# and just for completeness
ufunc(q_i1, np.array([0., np.inf, np.nan]))
@pytest.mark.parametrize(('ufunc'), [np.greater, np.greater_equal,
np.less, np.less_equal,
np.not_equal, np.equal])
def test_comparison_invalid_units(self, ufunc):
q_i1 = 4.7 * u.m
q_i2 = 9.4 * u.s
with pytest.raises(u.UnitsError) as exc:
ufunc(q_i1, q_i2)
assert "compatible dimensions" in exc.value.args[0]
class TestInplaceUfuncs(object):
@pytest.mark.parametrize(('value'), [1., np.arange(10.)])
def test_one_argument_ufunc_inplace(self, value):
# without scaling
s = value * u.rad
check = s
np.sin(s, out=s)
assert check is s
assert check.unit == u.dimensionless_unscaled
# with scaling
s2 = (value * u.rad).to(u.deg)
check2 = s2
np.sin(s2, out=s2)
assert check2 is s2
assert check2.unit == u.dimensionless_unscaled
assert_allclose(s.value, s2.value)
@pytest.mark.parametrize(('value'), [1., np.arange(10.)])
def test_one_argument_ufunc_inplace_2(self, value):
"""Check inplace works with non-quantity input and quantity output"""
s = value * u.m
check = s
np.absolute(value, out=s)
assert check is s
assert np.all(check.value == np.absolute(value))
assert check.unit is u.dimensionless_unscaled
np.sqrt(value, out=s)
assert check is s
assert np.all(check.value == np.sqrt(value))
assert check.unit is u.dimensionless_unscaled
np.exp(value, out=s)
assert check is s
assert np.all(check.value == np.exp(value))
assert check.unit is u.dimensionless_unscaled
np.arcsin(value/10., out=s)
assert check is s
assert np.all(check.value == np.arcsin(value/10.))
assert check.unit is u.radian
@pytest.mark.parametrize(('value'), [1., np.arange(10.)])
def test_one_argument_two_output_ufunc_inplace(self, value):
v = 100. * value * u.cm / u.m
v_copy = v.copy()
tmp = v.copy()
check = v
np.modf(v, tmp, v) # cannot use out1,out2 keywords with numpy 1.7
assert check is v
assert check.unit == u.dimensionless_unscaled
v2 = v_copy.to(u.dimensionless_unscaled)
check2 = v2
np.modf(v2, tmp, v2)
assert check2 is v2
assert check2.unit == u.dimensionless_unscaled
# can also replace in last position if no scaling is needed
v3 = v_copy.to(u.dimensionless_unscaled)
check3 = v3
np.modf(v3, v3, tmp)
assert check3 is v3
assert check3.unit == u.dimensionless_unscaled
# in np<1.13, without __array_ufunc__, one cannot replace input with
# first output when scaling
v4 = v_copy.copy()
if NUMPY_LT_1_13:
with pytest.raises(TypeError):
np.modf(v4, v4, tmp)
else:
check4 = v4
np.modf(v4, v4, tmp)
assert check4 is v4
assert check4.unit == u.dimensionless_unscaled
@pytest.mark.parametrize(('value'), [1., np.arange(10.)])
def test_two_argument_ufunc_inplace_1(self, value):
s = value * u.cycle
check = s
s /= 2.
assert check is s
assert np.all(check.value == value / 2.)
s /= u.s
assert check is s
assert check.unit == u.cycle / u.s
s *= 2. * u.s
assert check is s
assert np.all(check == value * u.cycle)
@pytest.mark.parametrize(('value'), [1., np.arange(10.)])
def test_two_argument_ufunc_inplace_2(self, value):
s = value * u.cycle
check = s
np.arctan2(s, s, out=s)
assert check is s
assert check.unit == u.radian
with pytest.raises(u.UnitsError):
s += 1. * u.m
assert check is s
assert check.unit == u.radian
np.arctan2(1. * u.deg, s, out=s)
assert check is s
assert check.unit == u.radian
np.add(1. * u.deg, s, out=s)
assert check is s
assert check.unit == u.deg
np.multiply(2. / u.s, s, out=s)
assert check is s
assert check.unit == u.deg / u.s
def test_two_argument_ufunc_inplace_3(self):
s = np.array([1., 2., 3.]) * u.dimensionless_unscaled
np.add(np.array([1., 2., 3.]), np.array([1., 2., 3.]) * 2., out=s)
assert np.all(s.value == np.array([3., 6., 9.]))
assert s.unit is u.dimensionless_unscaled
np.arctan2(np.array([1., 2., 3.]), np.array([1., 2., 3.]) * 2., out=s)
assert_allclose(s.value, np.arctan2(1., 2.))
assert s.unit is u.radian
@pytest.mark.skipif(NUMPY_LT_1_13, reason="numpy >=1.13 required.")
@pytest.mark.parametrize(('value'), [1., np.arange(10.)])
def test_two_argument_two_output_ufunc_inplace(self, value):
v = value * u.m
divisor = 70.*u.cm
v1 = v.copy()
tmp = v.copy()
check = np.divmod(v1, divisor, out=(tmp, v1))
assert check[0] is tmp and check[1] is v1
assert tmp.unit == u.dimensionless_unscaled
assert v1.unit == v.unit
v2 = v.copy()
check2 = np.divmod(v2, divisor, out=(v2, tmp))
assert check2[0] is v2 and check2[1] is tmp
assert v2.unit == u.dimensionless_unscaled
assert tmp.unit == v.unit
v3a = v.copy()
v3b = v.copy()
check3 = np.divmod(v3a, divisor, out=(v3a, v3b))
assert check3[0] is v3a and check3[1] is v3b
assert v3a.unit == u.dimensionless_unscaled
assert v3b.unit == v.unit
def test_ufunc_inplace_non_contiguous_data(self):
# ensure inplace works also for non-contiguous data (closes #1834)
s = np.arange(10.) * u.m
s_copy = s.copy()
s2 = s[::2]
s2 += 1. * u.cm
assert np.all(s[::2] > s_copy[::2])
assert np.all(s[1::2] == s_copy[1::2])
def test_ufunc_inplace_non_standard_dtype(self):
"""Check that inplace operations check properly for casting.
First two tests that check that float32 is kept close #3976.
"""
a1 = u.Quantity([1, 2, 3, 4], u.m, dtype=np.float32)
a1 *= np.float32(10)
assert a1.unit is u.m
assert a1.dtype == np.float32
a2 = u.Quantity([1, 2, 3, 4], u.m, dtype=np.float32)
a2 += (20.*u.km)
assert a2.unit is u.m
assert a2.dtype == np.float32
# For integer, in-place only works if no conversion is done.
a3 = u.Quantity([1, 2, 3, 4], u.m, dtype=np.int32)
a3 += u.Quantity(10, u.m, dtype=np.int64)
assert a3.unit is u.m
assert a3.dtype == np.int32
a4 = u.Quantity([1, 2, 3, 4], u.m, dtype=np.int32)
with pytest.raises(TypeError):
a4 += u.Quantity(10, u.mm, dtype=np.int64)
@pytest.mark.xfail("NUMPY_LT_1_13")
class TestUfuncAt(object):
"""Test that 'at' method for ufuncs (calculates in-place at given indices)
For Quantities, since calculations are in-place, it makes sense only
if the result is still a quantity, and if the unit does not have to change
"""
def test_one_argument_ufunc_at(self):
q = np.arange(10.) * u.m
i = np.array([1, 2])
qv = q.value.copy()
np.negative.at(q, i)
np.negative.at(qv, i)
assert np.all(q.value == qv)
assert q.unit is u.m
# cannot change from quantity to bool array
with pytest.raises(TypeError):
np.isfinite.at(q, i)
# for selective in-place, cannot change the unit
with pytest.raises(u.UnitsError):
np.square.at(q, i)
# except if the unit does not change (i.e., dimensionless)
d = np.arange(10.) * u.dimensionless_unscaled
dv = d.value.copy()
np.square.at(d, i)
np.square.at(dv, i)
assert np.all(d.value == dv)
assert d.unit is u.dimensionless_unscaled
d = np.arange(10.) * u.dimensionless_unscaled
dv = d.value.copy()
np.log.at(d, i)
np.log.at(dv, i)
assert np.all(d.value == dv)
assert d.unit is u.dimensionless_unscaled
# also for sine it doesn't work, even if given an angle
a = np.arange(10.) * u.radian
with pytest.raises(u.UnitsError):
np.sin.at(a, i)
# except, for consistency, if we have made radian equivalent to
# dimensionless (though hopefully it will never be needed)
av = a.value.copy()
with u.add_enabled_equivalencies(u.dimensionless_angles()):
np.sin.at(a, i)
np.sin.at(av, i)
assert_allclose(a.value, av)
# but we won't do double conversion
ad = np.arange(10.) * u.degree
with pytest.raises(u.UnitsError):
np.sin.at(ad, i)
def test_two_argument_ufunc_at(self):
s = np.arange(10.) * u.m
i = np.array([1, 2])
check = s.value.copy()
np.add.at(s, i, 1.*u.km)
np.add.at(check, i, 1000.)
assert np.all(s.value == check)
assert s.unit is u.m
with pytest.raises(u.UnitsError):
np.add.at(s, i, 1.*u.s)
# also raise UnitsError if unit would have to be changed
with pytest.raises(u.UnitsError):
np.multiply.at(s, i, 1*u.s)
# but be fine if it does not
s = np.arange(10.) * u.m
check = s.value.copy()
np.multiply.at(s, i, 2.*u.dimensionless_unscaled)
np.multiply.at(check, i, 2)
assert np.all(s.value == check)
s = np.arange(10.) * u.m
np.multiply.at(s, i, 2.)
assert np.all(s.value == check)
# of course cannot change class of data either
with pytest.raises(TypeError):
np.greater.at(s, i, 1.*u.km)
@pytest.mark.xfail("NUMPY_LT_1_13")
class TestUfuncReduceReduceatAccumulate(object):
"""Test 'reduce', 'reduceat' and 'accumulate' methods for ufuncs
For Quantities, it makes sense only if the unit does not have to change
"""
def test_one_argument_ufunc_reduce_accumulate(self):
# one argument cannot be used
s = np.arange(10.) * u.radian
i = np.array([0, 5, 1, 6])
with pytest.raises(ValueError):
np.sin.reduce(s)
with pytest.raises(ValueError):
np.sin.accumulate(s)
with pytest.raises(ValueError):
np.sin.reduceat(s, i)
def test_two_argument_ufunc_reduce_accumulate(self):
s = np.arange(10.) * u.m
i = np.array([0, 5, 1, 6])
check = s.value.copy()
s_add_reduce = np.add.reduce(s)
check_add_reduce = np.add.reduce(check)
assert s_add_reduce.value == check_add_reduce
assert s_add_reduce.unit is u.m
s_add_accumulate = np.add.accumulate(s)
check_add_accumulate = np.add.accumulate(check)
assert np.all(s_add_accumulate.value == check_add_accumulate)
assert s_add_accumulate.unit is u.m
s_add_reduceat = np.add.reduceat(s, i)
check_add_reduceat = np.add.reduceat(check, i)
assert np.all(s_add_reduceat.value == check_add_reduceat)
assert s_add_reduceat.unit is u.m
# reduce(at) or accumulate on comparisons makes no sense,
# as intermediate result is not even a Quantity
with pytest.raises(TypeError):
np.greater.reduce(s)
with pytest.raises(TypeError):
np.greater.accumulate(s)
with pytest.raises(TypeError):
np.greater.reduceat(s, i)
# raise UnitsError if unit would have to be changed
with pytest.raises(u.UnitsError):
np.multiply.reduce(s)
with pytest.raises(u.UnitsError):
np.multiply.accumulate(s)
with pytest.raises(u.UnitsError):
np.multiply.reduceat(s, i)
# but be fine if it does not
s = np.arange(10.) * u.dimensionless_unscaled
check = s.value.copy()
s_multiply_reduce = np.multiply.reduce(s)
check_multiply_reduce = np.multiply.reduce(check)
assert s_multiply_reduce.value == check_multiply_reduce
assert s_multiply_reduce.unit is u.dimensionless_unscaled
s_multiply_accumulate = np.multiply.accumulate(s)
check_multiply_accumulate = np.multiply.accumulate(check)
assert np.all(s_multiply_accumulate.value == check_multiply_accumulate)
assert s_multiply_accumulate.unit is u.dimensionless_unscaled
s_multiply_reduceat = np.multiply.reduceat(s, i)
check_multiply_reduceat = np.multiply.reduceat(check, i)
assert np.all(s_multiply_reduceat.value == check_multiply_reduceat)
assert s_multiply_reduceat.unit is u.dimensionless_unscaled
@pytest.mark.xfail("NUMPY_LT_1_13")
class TestUfuncOuter(object):
"""Test 'outer' methods for ufuncs
Just a few spot checks, since it uses the same code as the regular
ufunc call
"""
def test_one_argument_ufunc_outer(self):
# one argument cannot be used
s = np.arange(10.) * u.radian
with pytest.raises(ValueError):
np.sin.outer(s)
def test_two_argument_ufunc_outer(self):
s1 = np.arange(10.) * u.m
s2 = np.arange(2.) * u.s
check1 = s1.value
check2 = s2.value
s12_multiply_outer = np.multiply.outer(s1, s2)
check12_multiply_outer = np.multiply.outer(check1, check2)
assert np.all(s12_multiply_outer.value == check12_multiply_outer)
assert s12_multiply_outer.unit == s1.unit * s2.unit
# raise UnitsError if appropriate
with pytest.raises(u.UnitsError):
np.add.outer(s1, s2)
# but be fine if it does not
s3 = np.arange(2.) * s1.unit
check3 = s3.value
s13_add_outer = np.add.outer(s1, s3)
check13_add_outer = np.add.outer(check1, check3)
assert np.all(s13_add_outer.value == check13_add_outer)
assert s13_add_outer.unit is s1.unit
s13_greater_outer = np.greater.outer(s1, s3)
check13_greater_outer = np.greater.outer(check1, check3)
assert type(s13_greater_outer) is np.ndarray
assert np.all(s13_greater_outer == check13_greater_outer)<|fim▁end|> | # comparison with 0., inf, nan is OK even for dimensional quantities |
<|file_name|>fr-VU.js<|end_file_name|><|fim▁begin|><|fim▁hole|> * @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
(function(global) {
global.ng = global.ng || {};
global.ng.common = global.ng.common || {};
global.ng.common.locales = global.ng.common.locales || {};
var u = undefined;
function plural(n) {
var i = Math.floor(Math.abs(n));
if (i === 0 || i === 1) return 1;
return 5;
}
global.ng.common.locales['fr-vu'] = [
'fr-VU',
[['AM', 'PM'], u, u],
u,
[
['D', 'L', 'M', 'M', 'J', 'V', 'S'], ['dim.', 'lun.', 'mar.', 'mer.', 'jeu.', 'ven.', 'sam.'],
['dimanche', 'lundi', 'mardi', 'mercredi', 'jeudi', 'vendredi', 'samedi'],
['di', 'lu', 'ma', 'me', 'je', 've', 'sa']
],
u,
[
['J', 'F', 'M', 'A', 'M', 'J', 'J', 'A', 'S', 'O', 'N', 'D'],
[
'janv.', 'févr.', 'mars', 'avr.', 'mai', 'juin', 'juil.', 'août', 'sept.', 'oct.', 'nov.',
'déc.'
],
[
'janvier', 'février', 'mars', 'avril', 'mai', 'juin', 'juillet', 'août', 'septembre',
'octobre', 'novembre', 'décembre'
]
],
u,
[['av. J.-C.', 'ap. J.-C.'], u, ['avant Jésus-Christ', 'après Jésus-Christ']],
1,
[6, 0],
['dd/MM/y', 'd MMM y', 'd MMMM y', 'EEEE d MMMM y'],
['h:mm a', 'h:mm:ss a', 'h:mm:ss a z', 'h:mm:ss a zzzz'],
['{1} {0}', '{1} \'à\' {0}', u, u],
[',', '\u202f', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'],
['#,##0.###', '#,##0 %', '#,##0.00 ¤', '#E0'],
'VUV',
'VT',
'vatu vanuatuan',
{
'ARS': ['$AR', '$'],
'AUD': ['$AU', '$'],
'BEF': ['FB'],
'BMD': ['$BM', '$'],
'BND': ['$BN', '$'],
'BZD': ['$BZ', '$'],
'CAD': ['$CA', '$'],
'CLP': ['$CL', '$'],
'CNY': [u, '¥'],
'COP': ['$CO', '$'],
'CYP': ['£CY'],
'EGP': [u, '£E'],
'FJD': ['$FJ', '$'],
'FKP': ['£FK', '£'],
'FRF': ['F'],
'GBP': ['£GB', '£'],
'GIP': ['£GI', '£'],
'HKD': [u, '$'],
'IEP': ['£IE'],
'ILP': ['£IL'],
'ITL': ['₤IT'],
'JPY': [u, '¥'],
'KMF': [u, 'FC'],
'LBP': ['£LB', '£L'],
'MTP': ['£MT'],
'MXN': ['$MX', '$'],
'NAD': ['$NA', '$'],
'NIO': [u, '$C'],
'NZD': ['$NZ', '$'],
'RHD': ['$RH'],
'RON': [u, 'L'],
'RWF': [u, 'FR'],
'SBD': ['$SB', '$'],
'SGD': ['$SG', '$'],
'SRD': ['$SR', '$'],
'TOP': [u, '$T'],
'TTD': ['$TT', '$'],
'TWD': [u, 'NT$'],
'USD': ['$US', '$'],
'UYU': ['$UY', '$'],
'VUV': ['VT'],
'WST': ['$WS'],
'XCD': [u, '$'],
'XPF': ['FCFP'],
'ZMW': [u, 'Kw']
},
'ltr',
plural,
[
[
['minuit', 'midi', 'mat.', 'ap.m.', 'soir', 'nuit'], u,
['minuit', 'midi', 'du matin', 'de l’après-midi', 'du soir', 'du matin']
],
[
['minuit', 'midi', 'mat.', 'ap.m.', 'soir', 'nuit'], u,
['minuit', 'midi', 'matin', 'après-midi', 'soir', 'nuit']
],
[
'00:00', '12:00', ['04:00', '12:00'], ['12:00', '18:00'], ['18:00', '24:00'],
['00:00', '04:00']
]
]
];
})(typeof globalThis !== 'undefined' && globalThis || typeof global !== 'undefined' && global ||
typeof window !== 'undefined' && window);<|fim▁end|> | /** |
<|file_name|>IMethodCall.java<|end_file_name|><|fim▁begin|>package openperipheral.adapter;
public interface IMethodCall {
public IMethodCall setEnv(String name, Object value);<|fim▁hole|>}<|fim▁end|> |
public Object[] call(Object... args) throws Exception;
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate byteorder;
mod error;
mod mqtt;
mod read;
mod write;
mod topic;
mod msg;
pub use error::{
Error,
Result
};
pub use msg::{
Message
};
pub use mqtt::{
Packet,
Connect,
Connack,
Publish,
Subscribe,
Suback,
Unsubscribe,
SubscribeTopic,
SubscribeReturnCodes
};
pub use topic::{
Topic,
TopicPath,
ToTopicPath
};
pub use read::MqttRead;
pub use write::MqttWrite;
const MULTIPLIER: usize = 0x80 * 0x80 * 0x80 * 0x80;
const MAX_PAYLOAD_SIZE: usize = 268435455;
use std::fmt;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Protocol {
MQIsdp(u8),
MQTT(u8)
}
impl Protocol {
pub fn new(name: &str, level: u8) -> Result<Protocol> {
match name {
"MQIsdp" => match level {
3 => Ok(Protocol::MQIsdp(3)),
_ => Err(Error::UnsupportedProtocolVersion)
},
"MQTT" => match level {
4 => Ok(Protocol::MQTT(4)),
_ => Err(Error::UnsupportedProtocolVersion)
},
_ => Err(Error::UnsupportedProtocolName)
}
}
pub fn name(&self) -> &'static str {
match self {
&Protocol::MQIsdp(_) => "MQIsdp",
&Protocol::MQTT(_) => "MQTT"
}
}
pub fn level(&self) -> u8 {
match self {
&Protocol::MQIsdp(level) => level,
&Protocol::MQTT(level) => level<|fim▁hole|>#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum QoS {
AtMostOnce,
AtLeastOnce,
ExactlyOnce
}
impl QoS {
pub fn from_u8(byte: u8) -> Result<QoS> {
match byte {
0 => Ok(QoS::AtMostOnce),
1 => Ok(QoS::AtLeastOnce),
2 => Ok(QoS::ExactlyOnce),
_ => Err(Error::UnsupportedQualityOfService)
}
}
#[inline]
pub fn from_hd(hd: u8) -> Result<QoS> {
Self::from_u8((hd & 0b110) >> 1)
}
pub fn to_u8(&self) -> u8 {
match *self {
QoS::AtMostOnce => 0,
QoS::AtLeastOnce => 1,
QoS::ExactlyOnce => 2
}
}
pub fn min(&self, other: QoS) -> QoS {
match *self {
QoS::AtMostOnce => QoS::AtMostOnce,
QoS::AtLeastOnce => {
if other == QoS::AtMostOnce {
QoS::AtMostOnce
} else {
QoS::AtLeastOnce
}
},
QoS::ExactlyOnce => other
}
}
}
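// Sketch of the intended use (an assumption, not enforced by this crate):
// a broker delivers at the weaker of the publish and subscription levels,
// e.g. QoS::ExactlyOnce.min(QoS::AtLeastOnce) == QoS::AtLeastOnce.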
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PacketType {
Connect,
Connack,
Publish,
Puback,
Pubrec,
Pubrel,
Pubcomp,
Subscribe,
Suback,
Unsubscribe,
Unsuback,
Pingreq,
Pingresp,
Disconnect
}
impl PacketType {
pub fn to_u8(&self) -> u8 {
match *self {
PacketType::Connect => 1,
PacketType::Connack => 2,
PacketType::Publish => 3,
PacketType::Puback => 4,
PacketType::Pubrec => 5,
PacketType::Pubrel => 6,
PacketType::Pubcomp => 7,
PacketType::Subscribe => 8,
PacketType::Suback => 9,
PacketType::Unsubscribe => 10,
PacketType::Unsuback => 11,
PacketType::Pingreq => 12,
PacketType::Pingresp => 13,
PacketType::Disconnect => 14
}
}
pub fn from_u8(byte: u8) -> Result<PacketType> {
match byte {
1 => Ok(PacketType::Connect),
2 => Ok(PacketType::Connack),
3 => Ok(PacketType::Publish),
4 => Ok(PacketType::Puback),
5 => Ok(PacketType::Pubrec),
6 => Ok(PacketType::Pubrel),
7 => Ok(PacketType::Pubcomp),
8 => Ok(PacketType::Subscribe),
9 => Ok(PacketType::Suback),
10 => Ok(PacketType::Unsubscribe),
11 => Ok(PacketType::Unsuback),
12 => Ok(PacketType::Pingreq),
13 => Ok(PacketType::Pingresp),
14 => Ok(PacketType::Disconnect),
_ => Err(Error::UnsupportedPacketType)
}
}
#[inline]
pub fn from_hd(hd: u8) -> Result<PacketType> {
Self::from_u8(hd >> 4)
}
}
impl fmt::Display for PacketType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let str = format!("{:?}", self);
let first_space = str.find(' ').unwrap_or(str.len());
let (str, _) = str.split_at(first_space);
f.write_str(&str)
}
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ConnectReturnCode {
Accepted,
RefusedProtocolVersion,
RefusedIdentifierRejected,
ServerUnavailable,
BadUsernamePassword,
NotAuthorized
}
impl ConnectReturnCode {
pub fn to_u8(&self) -> u8 {
match *self {
ConnectReturnCode::Accepted => 0,
ConnectReturnCode::RefusedProtocolVersion => 1,
ConnectReturnCode::RefusedIdentifierRejected => 2,
ConnectReturnCode::ServerUnavailable => 3,
ConnectReturnCode::BadUsernamePassword => 4,
ConnectReturnCode::NotAuthorized => 5
}
}
pub fn from_u8(byte: u8) -> Result<ConnectReturnCode> {
match byte {
0 => Ok(ConnectReturnCode::Accepted),
1 => Ok(ConnectReturnCode::RefusedProtocolVersion),
2 => Ok(ConnectReturnCode::RefusedIdentifierRejected),
3 => Ok(ConnectReturnCode::ServerUnavailable),
4 => Ok(ConnectReturnCode::BadUsernamePassword),
5 => Ok(ConnectReturnCode::NotAuthorized),
_ => Err(Error::UnsupportedConnectReturnCode)
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct PacketIdentifier(pub u16);
impl PacketIdentifier {
pub fn zero() -> PacketIdentifier {
PacketIdentifier(0)
}
pub fn next(&self) -> PacketIdentifier {
PacketIdentifier(self.0 + 1)
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct Header {
hd: u8,
pub typ: PacketType,
pub len: usize
}
impl Header {
pub fn new(hd: u8, len: usize) -> Result<Header> {
Ok(Header {
hd: hd,
typ: try!(PacketType::from_hd(hd)),
len: len
})
}
#[inline]
pub fn dup(&self) -> bool {
(self.hd & 0b1000) != 0
}
#[inline]
pub fn qos(&self) -> Result<QoS> {
QoS::from_hd(self.hd)
}
#[inline]
pub fn retain(&self) -> bool {
(self.hd & 1) != 0
}
}
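// MQTT 3.1.1 fixed-header byte layout: bits 7-4 are the packet type,
// bit 3 is the DUP flag, bits 2-1 the QoS, and bit 0 RETAIN; these are
// exactly the masks used by dup(), qos() and retain() above.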
#[derive(Debug, Clone, PartialEq)]
pub struct LastWill {
pub topic: String,
pub message: String,
pub qos: QoS,
pub retain: bool
}
#[cfg(test)]
mod test {
use super::{QoS, Protocol, PacketIdentifier};
#[test]
fn protocol_test() {
assert_eq!(Protocol::new("MQTT", 4).unwrap(), Protocol::MQTT(4));
assert_eq!(Protocol::new("MQIsdp", 3).unwrap(), Protocol::MQIsdp(3));
assert_eq!(Protocol::MQIsdp(3).name(), "MQIsdp");
assert_eq!(Protocol::MQTT(4).name(), "MQTT");
assert_eq!(Protocol::MQTT(3).level(), 3);
assert_eq!(Protocol::MQTT(4).level(), 4);
}
#[test]
fn qos_min_test() {
assert_eq!(QoS::AtMostOnce.min(QoS::AtMostOnce), QoS::AtMostOnce);
assert_eq!(QoS::AtMostOnce.min(QoS::AtLeastOnce), QoS::AtMostOnce);
assert_eq!(QoS::AtLeastOnce.min(QoS::AtMostOnce), QoS::AtMostOnce);
assert_eq!(QoS::AtLeastOnce.min(QoS::ExactlyOnce), QoS::AtLeastOnce);
assert_eq!(QoS::ExactlyOnce.min(QoS::AtMostOnce), QoS::AtMostOnce);
assert_eq!(QoS::ExactlyOnce.min(QoS::ExactlyOnce), QoS::ExactlyOnce);
}
#[test]
fn packet_identifier_test() {
let pid = PacketIdentifier::zero();
assert_eq!(pid, PacketIdentifier(0));
assert_eq!(pid.next(), PacketIdentifier(1));
}
}<|fim▁end|> | }
}
}
|
<|file_name|>generate-tx.py<|end_file_name|><|fim▁begin|># Note - to use this script you need Jeff Garzik's python-bitcoinrpc
# https://github.com/jgarzik/python-bitcoinrpc
import os
import sys;
import json;
from bitcoinrpc.authproxy import AuthServiceProxy;
# SET THESE VALUES
rpc_user = "bitcoinrpc";
rpc_pass = "A7Xr149i7F6GxkhDbxWDTbmXooz1UZGhhyUYvaajA13Z";
rpc_host = "localhost";
rpc_port = 8332;
donation_minimum = 0;
donation_per_input = 3000;
donation_address = "1ForFeesAndDonationsSpendHerdtWbWy";
# http://stackoverflow.com/questions/626796/how-do-i-find-the-windows-common-application-data-folder-using-python
try:
from win32com.shell import shellcon, shell
config_file = shell.SHGetFolderPath(0, shellcon.CSIDL_APPDATA, 0, 0) + "/Bitcoin/bitcoin.conf"
except ImportError: # quick semi-nasty fallback for non-windows/win32com case
config_file = os.path.expanduser("~") + "/.bitcoin/bitcoin.conf"
# thanks ryan-c for this function
def asp_from_config(filename):
rpcport = '8332'
rpcconn = '127.0.0.1'
rpcuser = None
rpcpass = None
with open(filename, 'r') as f:
for line in f:
            try:
                (key, val) = line.rstrip().replace(' ', '').split('=')
            except ValueError:
                # skip lines that are not a single key=value pair
                (key, val) = ("", "")
if key == 'rpcuser':
rpcuser = val
elif key == 'rpcpassword':
rpcpass = val
elif key == 'rpcport':
rpcport = val
elif key == 'rpcconnect':
rpcconn = val
if rpcuser is not None and rpcpass is not None:
rpcurl = 'http://%s:%s@%s:%s' % (rpcuser, rpcpass, rpcconn, rpcport)
print('RPC server: %s' % rpcurl)
return AuthServiceProxy(rpcurl)
def to_satoshi(s):
    # round before truncating: e.g. 4.57 * 1e8 == 456999999.99999994 in floats
    return int (round (100000000 * float (s)));
def from_satoshi(s):
return float (s) / 100000000;
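# e.g. to_satoshi("0.5") == 50000000 and from_satoshi(50000000) == 0.5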
if len(sys.argv) < 3:
print ("Usage: %s <input size> <target output size in BTC>" % sys.argv[0]);<|fim▁hole|>
#service = AuthServiceProxy ("http://%s:%s@%s:%d" % (rpc_user, rpc_pass, rpc_host, rpc_port));
service = asp_from_config (config_file);
balance = to_satoshi (service.getbalance());
unspent = service.listunspent();
target_in = to_satoshi (sys.argv[1]);
target_out = to_satoshi (sys.argv[2]);
if balance < target_in:
print ("Cannot spend %f; only have %f in wallet." % (from_satoshi (target_in), from_satoshi (balance)));
exit (0);
if target_out > target_in:
print ("Please have a smaller target output than input value.");
exit (0);
# FIND INPUTS
# TODO: have a smarter coin selection algo
# For now we just sort the coins by increasing abs(value - target input), then select in order
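# e.g. with target_in = 1.0 BTC and coins worth [0.4, 0.9, 2.0] BTC, the sort
# order is [0.9, 0.4, 2.0]; coins are then taken until their sum exceeds target_in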
inputs = [];
donation = 0;
total_in = 0;
unspent.sort (key=lambda coin: abs(to_satoshi (coin['amount']) - target_in));
for coin in unspent:
total_in += to_satoshi (coin['amount']);
donation += donation_per_input;
inputs.append (dict (txid = coin['txid'], vout = coin['vout']));
if total_in > target_in:
break;
if donation < donation_minimum:
donation = donation_minimum;
# FIND OUTPUTS
outputs = dict ();
outputs[donation_address] = from_satoshi (donation);
total_in -= donation;
while total_in > target_out:
outputs[service.getnewaddress()] = from_satoshi (target_out);
total_in -= target_out;
outputs[service.getnewaddress()] = from_satoshi (total_in);
# Make the transaction
print (service.createrawtransaction (inputs, outputs));<|fim▁end|> | exit (0); |
<|file_name|>datetime.rs<|end_file_name|><|fim▁begin|>// This is a part of Chrono.
// See README.md and LICENSE.txt for details.
//! ISO 8601 date and time with time zone.
use std::{str, fmt, hash};
use std::cmp::Ordering;
use std::ops::{Add, Sub};
use std::time::{SystemTime, UNIX_EPOCH};
use oldtime::Duration as OldDuration;
use {Weekday, Timelike, Datelike};
#[cfg(feature="clock")]
use offset::Local;
use offset::{TimeZone, Offset, Utc, FixedOffset};
use naive::{NaiveTime, NaiveDateTime, IsoWeek};
use Date;
use format::{Item, Numeric, Pad, Fixed};
use format::{parse, Parsed, ParseError, ParseResult, DelayedFormat, StrftimeItems};
/// Specific formatting options for seconds. This may be extended in the
/// future, so exhaustive matching in external code is not recommended.
///
/// See the `DateTime::to_rfc3339_opts` function for usage.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum SecondsFormat {
/// Format whole seconds only, with no decimal point nor subseconds.
Secs,
/// Use fixed 3 subsecond digits. This corresponds to
/// [Fixed::Nanosecond3](format/enum.Fixed.html#variant.Nanosecond3).
Millis,
/// Use fixed 6 subsecond digits. This corresponds to
/// [Fixed::Nanosecond6](format/enum.Fixed.html#variant.Nanosecond6).
Micros,
/// Use fixed 9 subsecond digits. This corresponds to
/// [Fixed::Nanosecond9](format/enum.Fixed.html#variant.Nanosecond9).
Nanos,
/// Automatically select one of `Secs`, `Millis`, `Micros`, or `Nanos` to
/// display all available non-zero sub-second digits. This corresponds to
/// [Fixed::Nanosecond](format/enum.Fixed.html#variant.Nanosecond).
AutoSi,
// Do not match against this.
#[doc(hidden)]
__NonExhaustive,
}
/// ISO 8601 combined date and time with time zone.
///
/// There are some constructors implemented here (the `from_*` methods), but
/// the general-purpose constructors are all via the methods on the
/// [`TimeZone`](./offset/trait.TimeZone.html) implementations.
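///
/// # Example
///
/// ~~~~
/// use chrono::{TimeZone, Utc};
///
/// // A minimal sketch of the usual construction path via a `TimeZone` impl.
/// let dt = Utc.ymd(2014, 7, 8).and_hms(9, 10, 11);
/// assert_eq!(dt.timestamp(), 1_404_810_611);
/// ~~~~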
#[derive(Clone)]
pub struct DateTime<Tz: TimeZone> {
datetime: NaiveDateTime,
offset: Tz::Offset,
}
impl<Tz: TimeZone> DateTime<Tz> {
/// Makes a new `DateTime` with given *UTC* datetime and offset.
/// The local datetime should be constructed via the `TimeZone` trait.
///
/// # Example
///
/// ~~~~
/// use chrono::{DateTime, TimeZone, NaiveDateTime, Utc};
///
/// let dt = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(61, 0), Utc);
/// assert_eq!(Utc.timestamp(61, 0), dt);
/// ~~~~
//
    // note: this constructor is purposely not named `new` to discourage direct usage.
#[inline]
pub fn from_utc(datetime: NaiveDateTime, offset: Tz::Offset) -> DateTime<Tz> {
DateTime { datetime: datetime, offset: offset }
}
/// Retrieves a date component.
#[inline]
pub fn date(&self) -> Date<Tz> {
Date::from_utc(self.naive_local().date(), self.offset.clone())
}
/// Retrieves a time component.
    /// Unlike `date`, this is not associated with the time zone.
#[inline]
pub fn time(&self) -> NaiveTime {
self.datetime.time() + self.offset.fix()
}
/// Returns the number of non-leap seconds since January 1, 1970 0:00:00 UTC
/// (aka "UNIX timestamp").
#[inline]
pub fn timestamp(&self) -> i64 {
self.datetime.timestamp()
}
/// Returns the number of non-leap-milliseconds since January 1, 1970 UTC
///
/// Note that this does reduce the number of years that can be represented
/// from ~584 Billion to ~584 Million. (If this is a problem, please file
/// an issue to let me know what domain needs millisecond precision over
/// billions of years, I'm curious.)
///
/// # Example
///
/// ~~~~
/// use chrono::Utc;
/// use chrono::TimeZone;
///
/// let dt = Utc.ymd(1970, 1, 1).and_hms_milli(0, 0, 1, 444);
/// assert_eq!(dt.timestamp_millis(), 1_444);
///
/// let dt = Utc.ymd(2001, 9, 9).and_hms_milli(1, 46, 40, 555);
/// assert_eq!(dt.timestamp_millis(), 1_000_000_000_555);
/// ~~~~
#[inline]
pub fn timestamp_millis(&self) -> i64 {
self.datetime.timestamp_millis()
}
/// Returns the number of non-leap-nanoseconds since January 1, 1970 UTC
///
/// Note that this does reduce the number of years that can be represented
/// from ~584 Billion to ~584. (If this is a problem, please file
/// an issue to let me know what domain needs nanosecond precision over
    /// millennia, I'm curious.)
///
/// # Example
///
/// ~~~~
/// use chrono::Utc;
/// use chrono::TimeZone;
///
/// let dt = Utc.ymd(1970, 1, 1).and_hms_nano(0, 0, 1, 444);
/// assert_eq!(dt.timestamp_nanos(), 1_000_000_444);
///
/// let dt = Utc.ymd(2001, 9, 9).and_hms_nano(1, 46, 40, 555);
/// assert_eq!(dt.timestamp_nanos(), 1_000_000_000_000_000_555);
/// ~~~~
#[inline]
pub fn timestamp_nanos(&self) -> i64 {
self.datetime.timestamp_nanos()
}
/// Returns the number of milliseconds since the last second boundary
///
/// warning: in event of a leap second, this may exceed 999
///
/// note: this is not the number of milliseconds since January 1, 1970 0:00:00 UTC
#[inline]
pub fn timestamp_subsec_millis(&self) -> u32 {
self.datetime.timestamp_subsec_millis()
}
/// Returns the number of microseconds since the last second boundary
///
/// warning: in event of a leap second, this may exceed 999_999
///
/// note: this is not the number of microseconds since January 1, 1970 0:00:00 UTC
#[inline]
pub fn timestamp_subsec_micros(&self) -> u32 {
self.datetime.timestamp_subsec_micros()
}
/// Returns the number of nanoseconds since the last second boundary
///
/// warning: in event of a leap second, this may exceed 999_999_999
///
/// note: this is not the number of nanoseconds since January 1, 1970 0:00:00 UTC
#[inline]
pub fn timestamp_subsec_nanos(&self) -> u32 {
self.datetime.timestamp_subsec_nanos()
}
/// Retrieves an associated offset from UTC.
#[inline]
pub fn offset(&self) -> &Tz::Offset {
&self.offset
}
/// Retrieves an associated time zone.
#[inline]
pub fn timezone(&self) -> Tz {
TimeZone::from_offset(&self.offset)
}
/// Changes the associated time zone.
/// This does not change the actual `DateTime` (but will change the string representation).
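    ///
    /// # Example
    ///
    /// ~~~~
    /// use chrono::{TimeZone, Utc, FixedOffset};
    ///
    /// // Sketch: changing the zone preserves the underlying instant.
    /// let utc = Utc.ymd(2014, 5, 6).and_hms(7, 8, 9);
    /// let fixed = utc.with_timezone(&FixedOffset::west(4 * 60 * 60));
    /// assert_eq!(utc, fixed);
    /// ~~~~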
#[inline]
pub fn with_timezone<Tz2: TimeZone>(&self, tz: &Tz2) -> DateTime<Tz2> {
tz.from_utc_datetime(&self.datetime)
}
/// Adds given `Duration` to the current date and time.
///
/// Returns `None` when it will result in overflow.
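    ///
    /// # Example
    ///
    /// ~~~~
    /// // Sketch; assumes the crate root re-exports `Duration`, as chrono 0.4 does.
    /// use chrono::{Duration, TimeZone, Utc};
    ///
    /// let dt = Utc.ymd(2014, 5, 6).and_hms(7, 8, 9);
    /// assert!(dt.checked_add_signed(Duration::seconds(1)).is_some());
    /// ~~~~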
#[inline]
pub fn checked_add_signed(self, rhs: OldDuration) -> Option<DateTime<Tz>> {
let datetime = try_opt!(self.datetime.checked_add_signed(rhs));
Some(DateTime { datetime: datetime, offset: self.offset })
}
/// Subtracts given `Duration` from the current date and time.
///
/// Returns `None` when it will result in overflow.
#[inline]
pub fn checked_sub_signed(self, rhs: OldDuration) -> Option<DateTime<Tz>> {
let datetime = try_opt!(self.datetime.checked_sub_signed(rhs));
Some(DateTime { datetime: datetime, offset: self.offset })
}
/// Subtracts another `DateTime` from the current date and time.
/// This does not overflow or underflow at all.
#[cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
#[inline]
pub fn signed_duration_since<Tz2: TimeZone>(self, rhs: DateTime<Tz2>) -> OldDuration {
self.datetime.signed_duration_since(rhs.datetime)
}
/// Returns a view to the naive UTC datetime.
#[inline]
pub fn naive_utc(&self) -> NaiveDateTime {
self.datetime
}
/// Returns a view to the naive local datetime.
#[inline]
pub fn naive_local(&self) -> NaiveDateTime {
self.datetime + self.offset.fix()
}
}
/// Maps the local datetime to other datetime with given conversion function.
fn map_local<Tz: TimeZone, F>(dt: &DateTime<Tz>, mut f: F) -> Option<DateTime<Tz>>
where F: FnMut(NaiveDateTime) -> Option<NaiveDateTime> {
f(dt.naive_local()).and_then(|datetime| dt.timezone().from_local_datetime(&datetime).single())
}
impl DateTime<FixedOffset> {
/// Parses an RFC 2822 date and time string such as `Tue, 1 Jul 2003 10:52:37 +0200`,
/// then returns a new `DateTime` with a parsed `FixedOffset`.
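    ///
    /// # Example
    ///
    /// ```rust
    /// use chrono::DateTime;
    ///
    /// // A minimal sketch using the string quoted above.
    /// let dt = DateTime::parse_from_rfc2822("Tue, 1 Jul 2003 10:52:37 +0200");
    /// assert!(dt.is_ok());
    /// ```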
pub fn parse_from_rfc2822(s: &str) -> ParseResult<DateTime<FixedOffset>> {
const ITEMS: &'static [Item<'static>] = &[Item::Fixed(Fixed::RFC2822)];
let mut parsed = Parsed::new();
try!(parse(&mut parsed, s, ITEMS.iter().cloned()));
parsed.to_datetime()
}
/// Parses an RFC 3339 and ISO 8601 date and time string such as `1996-12-19T16:39:57-08:00`,
/// then returns a new `DateTime` with a parsed `FixedOffset`.
///
/// Why isn't this named `parse_from_iso8601`? That's because ISO 8601 allows some freedom
/// over the syntax and RFC 3339 exercises that freedom to rigidly define a fixed format.
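    ///
    /// # Example
    ///
    /// ```rust
    /// use chrono::DateTime;
    ///
    /// // A minimal sketch using the string quoted above.
    /// let dt = DateTime::parse_from_rfc3339("1996-12-19T16:39:57-08:00");
    /// assert!(dt.is_ok());
    /// ```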
pub fn parse_from_rfc3339(s: &str) -> ParseResult<DateTime<FixedOffset>> {
const ITEMS: &'static [Item<'static>] = &[Item::Fixed(Fixed::RFC3339)];
let mut parsed = Parsed::new();
try!(parse(&mut parsed, s, ITEMS.iter().cloned()));
parsed.to_datetime()
}
/// Parses a string with the specified format string and
/// returns a new `DateTime` with a parsed `FixedOffset`.
/// See the [`format::strftime` module](./format/strftime/index.html)
/// on the supported escape sequences.
///
/// See also `Offset::datetime_from_str` which gives a local `DateTime` on specific time zone.
///
/// Note that this method *requires a timezone* in the string. See
/// [`NaiveDateTime::parse_from_str`](./naive/struct.NaiveDateTime.html#method.parse_from_str)
/// for a version that does not require a timezone in the to-be-parsed str.
///
/// # Example
///
/// ```rust
/// use chrono::{DateTime, FixedOffset, TimeZone};
///
/// let dt = DateTime::parse_from_str(
/// "1983 Apr 13 12:09:14.274 +0000", "%Y %b %d %H:%M:%S%.3f %z");
/// assert_eq!(dt, Ok(FixedOffset::east(0).ymd(1983, 4, 13).and_hms_milli(12, 9, 14, 274)));
/// ```
pub fn parse_from_str(s: &str, fmt: &str) -> ParseResult<DateTime<FixedOffset>> {
let mut parsed = Parsed::new();
try!(parse(&mut parsed, s, StrftimeItems::new(fmt)));
parsed.to_datetime()
}
}
impl<Tz: TimeZone> DateTime<Tz> where Tz::Offset: fmt::Display {
/// Returns an RFC 2822 date and time string such as `Tue, 1 Jul 2003 10:52:37 +0200`.
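    ///
    /// # Example
    ///
    /// ```rust
    /// use chrono::{TimeZone, Utc};
    ///
    /// // Sketch; the expected output mirrors the test suite below.
    /// let dt = Utc.ymd(2015, 2, 18).and_hms(23, 16, 9);
    /// assert_eq!(dt.to_rfc2822(), "Wed, 18 Feb 2015 23:16:09 +0000");
    /// ```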
pub fn to_rfc2822(&self) -> String {
const ITEMS: &'static [Item<'static>] = &[Item::Fixed(Fixed::RFC2822)];
self.format_with_items(ITEMS.iter().cloned()).to_string()
}
/// Returns an RFC 3339 and ISO 8601 date and time string such as `1996-12-19T16:39:57-08:00`.
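    ///
    /// # Example
    ///
    /// ```rust
    /// use chrono::{TimeZone, Utc};
    ///
    /// // Sketch; the expected output mirrors the test suite below.
    /// let dt = Utc.ymd(2015, 2, 18).and_hms(23, 16, 9);
    /// assert_eq!(dt.to_rfc3339(), "2015-02-18T23:16:09+00:00");
    /// ```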
pub fn to_rfc3339(&self) -> String {
const ITEMS: &'static [Item<'static>] = &[Item::Fixed(Fixed::RFC3339)];
self.format_with_items(ITEMS.iter().cloned()).to_string()
}
/// Return an RFC 3339 and ISO 8601 date and time string with subseconds
/// formatted as per a `SecondsFormat`. If passed `use_z` true and the
/// timezone is UTC (offset 0), use 'Z', as per
/// [Fixed::TimezoneOffsetColonZ](format/enum.Fixed.html#variant.TimezoneOffsetColonZ).
/// If passed `use_z` false, use
/// [Fixed::TimezoneOffsetColon](format/enum.Fixed.html#variant.TimezoneOffsetColon).
///
/// # Examples
///
/// ```rust
/// # use chrono::{DateTime, FixedOffset, SecondsFormat, TimeZone, Utc};
/// let dt = Utc.ymd(2018, 1, 26).and_hms_micro(18, 30, 9, 453_829);
/// assert_eq!(dt.to_rfc3339_opts(SecondsFormat::Millis, false),
/// "2018-01-26T18:30:09.453+00:00");
/// assert_eq!(dt.to_rfc3339_opts(SecondsFormat::Millis, true),
/// "2018-01-26T18:30:09.453Z");
/// assert_eq!(dt.to_rfc3339_opts(SecondsFormat::Secs, true),
/// "2018-01-26T18:30:09Z");
///
    /// let plus8 = FixedOffset::east(8 * 60 * 60); // UTC+08:00
    /// let dt = plus8.ymd(2018, 1, 26).and_hms_micro(10, 30, 9, 453_829);
/// assert_eq!(dt.to_rfc3339_opts(SecondsFormat::Secs, true),
/// "2018-01-26T10:30:09+08:00");
/// ```
pub fn to_rfc3339_opts(&self, secform: SecondsFormat, use_z: bool) -> String {
use format::Numeric::*;
use format::Pad::Zero;
use SecondsFormat::*;
debug_assert!(secform != __NonExhaustive, "Do not use __NonExhaustive!");
const PREFIX: &'static [Item<'static>] = &[
Item::Numeric(Year, Zero),
Item::Literal("-"),
Item::Numeric(Month, Zero),
Item::Literal("-"),
Item::Numeric(Day, Zero),
Item::Literal("T"),
Item::Numeric(Hour, Zero),
Item::Literal(":"),
Item::Numeric(Minute, Zero),
Item::Literal(":"),
Item::Numeric(Second, Zero),
];
let ssitem = match secform {
Secs => None,
Millis => Some(Item::Fixed(Fixed::Nanosecond3)),
Micros => Some(Item::Fixed(Fixed::Nanosecond6)),
Nanos => Some(Item::Fixed(Fixed::Nanosecond9)),
AutoSi => Some(Item::Fixed(Fixed::Nanosecond)),
__NonExhaustive => unreachable!(),
};
let tzitem = Item::Fixed(
if use_z {
Fixed::TimezoneOffsetColonZ
} else {
Fixed::TimezoneOffsetColon
}
);
match ssitem {
None =>
self.format_with_items(
PREFIX.iter().chain([tzitem].iter()).cloned()
).to_string(),
Some(s) =>
self.format_with_items(
PREFIX.iter().chain([s, tzitem].iter()).cloned()
).to_string(),
}
}
/// Formats the combined date and time with the specified formatting items.
#[inline]
pub fn format_with_items<'a, I>(&self, items: I) -> DelayedFormat<I>
where I: Iterator<Item=Item<'a>> + Clone {
let local = self.naive_local();
DelayedFormat::new_with_offset(Some(local.date()), Some(local.time()), &self.offset, items)
}
/// Formats the combined date and time with the specified format string.
/// See the [`format::strftime` module](./format/strftime/index.html)
/// on the supported escape sequences.
#[inline]
pub fn format<'a>(&self, fmt: &'a str) -> DelayedFormat<StrftimeItems<'a>> {
self.format_with_items(StrftimeItems::new(fmt))
}
}
impl<Tz: TimeZone> Datelike for DateTime<Tz> {
#[inline] fn year(&self) -> i32 { self.naive_local().year() }
#[inline] fn month(&self) -> u32 { self.naive_local().month() }
#[inline] fn month0(&self) -> u32 { self.naive_local().month0() }
#[inline] fn day(&self) -> u32 { self.naive_local().day() }
#[inline] fn day0(&self) -> u32 { self.naive_local().day0() }
#[inline] fn ordinal(&self) -> u32 { self.naive_local().ordinal() }
#[inline] fn ordinal0(&self) -> u32 { self.naive_local().ordinal0() }
#[inline] fn weekday(&self) -> Weekday { self.naive_local().weekday() }
#[inline] fn iso_week(&self) -> IsoWeek { self.naive_local().iso_week() }
#[inline]
fn with_year(&self, year: i32) -> Option<DateTime<Tz>> {
map_local(self, |datetime| datetime.with_year(year))
}
#[inline]
fn with_month(&self, month: u32) -> Option<DateTime<Tz>> {
map_local(self, |datetime| datetime.with_month(month))
}
#[inline]
fn with_month0(&self, month0: u32) -> Option<DateTime<Tz>> {
map_local(self, |datetime| datetime.with_month0(month0))
}
#[inline]
fn with_day(&self, day: u32) -> Option<DateTime<Tz>> {
map_local(self, |datetime| datetime.with_day(day))
}
#[inline]
fn with_day0(&self, day0: u32) -> Option<DateTime<Tz>> {
map_local(self, |datetime| datetime.with_day0(day0))
}
#[inline]
fn with_ordinal(&self, ordinal: u32) -> Option<DateTime<Tz>> {
map_local(self, |datetime| datetime.with_ordinal(ordinal))
}
#[inline]
fn with_ordinal0(&self, ordinal0: u32) -> Option<DateTime<Tz>> {
map_local(self, |datetime| datetime.with_ordinal0(ordinal0))
}
}
impl<Tz: TimeZone> Timelike for DateTime<Tz> {
#[inline] fn hour(&self) -> u32 { self.naive_local().hour() }
#[inline] fn minute(&self) -> u32 { self.naive_local().minute() }
#[inline] fn second(&self) -> u32 { self.naive_local().second() }
#[inline] fn nanosecond(&self) -> u32 { self.naive_local().nanosecond() }
#[inline]
fn with_hour(&self, hour: u32) -> Option<DateTime<Tz>> {
map_local(self, |datetime| datetime.with_hour(hour))
}
#[inline]
fn with_minute(&self, min: u32) -> Option<DateTime<Tz>> {
map_local(self, |datetime| datetime.with_minute(min))
}
#[inline]
fn with_second(&self, sec: u32) -> Option<DateTime<Tz>> {
map_local(self, |datetime| datetime.with_second(sec))
}
#[inline]
fn with_nanosecond(&self, nano: u32) -> Option<DateTime<Tz>> {
map_local(self, |datetime| datetime.with_nanosecond(nano))
}
}
// we need them as automatic impls cannot handle associated types
impl<Tz: TimeZone> Copy for DateTime<Tz> where <Tz as TimeZone>::Offset: Copy {}
unsafe impl<Tz: TimeZone> Send for DateTime<Tz> where <Tz as TimeZone>::Offset: Send {}
impl<Tz: TimeZone, Tz2: TimeZone> PartialEq<DateTime<Tz2>> for DateTime<Tz> {
fn eq(&self, other: &DateTime<Tz2>) -> bool { self.datetime == other.datetime }
}
impl<Tz: TimeZone> Eq for DateTime<Tz> {
}
impl<Tz: TimeZone> PartialOrd for DateTime<Tz> {
fn partial_cmp(&self, other: &DateTime<Tz>) -> Option<Ordering> {
self.datetime.partial_cmp(&other.datetime)
}
}
impl<Tz: TimeZone> Ord for DateTime<Tz> {
fn cmp(&self, other: &DateTime<Tz>) -> Ordering { self.datetime.cmp(&other.datetime) }
}
impl<Tz: TimeZone> hash::Hash for DateTime<Tz> {
fn hash<H: hash::Hasher>(&self, state: &mut H) { self.datetime.hash(state) }
}
impl<Tz: TimeZone> Add<OldDuration> for DateTime<Tz> {
type Output = DateTime<Tz>;
#[inline]
fn add(self, rhs: OldDuration) -> DateTime<Tz> {
self.checked_add_signed(rhs).expect("`DateTime + Duration` overflowed")
}
}
impl<Tz: TimeZone> Sub<OldDuration> for DateTime<Tz> {
type Output = DateTime<Tz>;
#[inline]
fn sub(self, rhs: OldDuration) -> DateTime<Tz> {
self.checked_sub_signed(rhs).expect("`DateTime - Duration` overflowed")
}
}
impl<Tz: TimeZone> Sub<DateTime<Tz>> for DateTime<Tz> {
type Output = OldDuration;
#[inline]
fn sub(self, rhs: DateTime<Tz>) -> OldDuration {
self.signed_duration_since(rhs)
}
}
impl<Tz: TimeZone> fmt::Debug for DateTime<Tz> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}{:?}", self.naive_local(), self.offset)
}
}
impl<Tz: TimeZone> fmt::Display for DateTime<Tz> where Tz::Offset: fmt::Display {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} {}", self.naive_local(), self.offset)
}
}
impl str::FromStr for DateTime<FixedOffset> {
type Err = ParseError;
fn from_str(s: &str) -> ParseResult<DateTime<FixedOffset>> {
const ITEMS: &'static [Item<'static>] = &[
Item::Space(""), Item::Numeric(Numeric::Year, Pad::Zero),
Item::Space(""), Item::Literal("-"),
Item::Space(""), Item::Numeric(Numeric::Month, Pad::Zero),
Item::Space(""), Item::Literal("-"),
Item::Space(""), Item::Numeric(Numeric::Day, Pad::Zero),
Item::Space(""), Item::Literal("T"), // XXX shouldn't this be case-insensitive?
Item::Space(""), Item::Numeric(Numeric::Hour, Pad::Zero),
Item::Space(""), Item::Literal(":"),
Item::Space(""), Item::Numeric(Numeric::Minute, Pad::Zero),
Item::Space(""), Item::Literal(":"),
Item::Space(""), Item::Numeric(Numeric::Second, Pad::Zero),
Item::Fixed(Fixed::Nanosecond),
Item::Space(""), Item::Fixed(Fixed::TimezoneOffsetZ),
Item::Space(""),
];
let mut parsed = Parsed::new();
try!(parse(&mut parsed, s, ITEMS.iter().cloned()));
parsed.to_datetime()
}
}
impl str::FromStr for DateTime<Utc> {
type Err = ParseError;
fn from_str(s: &str) -> ParseResult<DateTime<Utc>> {
s.parse::<DateTime<FixedOffset>>().map(|dt| dt.with_timezone(&Utc))
}
}
#[cfg(feature="clock")]
impl str::FromStr for DateTime<Local> {
type Err = ParseError;
fn from_str(s: &str) -> ParseResult<DateTime<Local>> {
s.parse::<DateTime<FixedOffset>>().map(|dt| dt.with_timezone(&Local))
}
}
impl From<SystemTime> for DateTime<Utc> {
fn from(t: SystemTime) -> DateTime<Utc> {
let (sec, nsec) = match t.duration_since(UNIX_EPOCH) {
Ok(dur) => (dur.as_secs() as i64, dur.subsec_nanos()),
Err(e) => { // unlikely but should be handled
let dur = e.duration();
let (sec, nsec) = (dur.as_secs() as i64, dur.subsec_nanos());
if nsec == 0 {
(-sec, 0)
} else {
(-sec - 1, 1_000_000_000 - nsec)
}
},
};
Utc.timestamp(sec, nsec)
}
}
#[cfg(feature="clock")]
impl From<SystemTime> for DateTime<Local> {
fn from(t: SystemTime) -> DateTime<Local> {
DateTime::<Utc>::from(t).with_timezone(&Local)
}
}
impl<Tz: TimeZone> From<DateTime<Tz>> for SystemTime {
fn from(dt: DateTime<Tz>) -> SystemTime {
use std::time::Duration;
let sec = dt.timestamp();
let nsec = dt.timestamp_subsec_nanos();
if sec < 0 {
// unlikely but should be handled
UNIX_EPOCH - Duration::new(-sec as u64, 0) + Duration::new(0, nsec)
} else {
UNIX_EPOCH + Duration::new(sec as u64, nsec)
}
}
}
#[cfg(all(test, any(feature = "rustc-serialize", feature = "serde")))]
fn test_encodable_json<FUtc, FFixed, E>(to_string_utc: FUtc, to_string_fixed: FFixed)
where FUtc: Fn(&DateTime<Utc>) -> Result<String, E>,
FFixed: Fn(&DateTime<FixedOffset>) -> Result<String, E>,
E: ::std::fmt::Debug
{
assert_eq!(to_string_utc(&Utc.ymd(2014, 7, 24).and_hms(12, 34, 6)).ok(),
Some(r#""2014-07-24T12:34:06Z""#.into()));
assert_eq!(to_string_fixed(&FixedOffset::east(3660).ymd(2014, 7, 24).and_hms(12, 34, 6)).ok(),
Some(r#""2014-07-24T12:34:06+01:01""#.into()));
assert_eq!(to_string_fixed(&FixedOffset::east(3650).ymd(2014, 7, 24).and_hms(12, 34, 6)).ok(),
Some(r#""2014-07-24T12:34:06+01:00:50""#.into()));
}
#[cfg(all(test, feature="clock", any(feature = "rustc-serialize", feature = "serde")))]
fn test_decodable_json<FUtc, FFixed, FLocal, E>(utc_from_str: FUtc,
fixed_from_str: FFixed,
local_from_str: FLocal)
where FUtc: Fn(&str) -> Result<DateTime<Utc>, E>,
FFixed: Fn(&str) -> Result<DateTime<FixedOffset>, E>,
FLocal: Fn(&str) -> Result<DateTime<Local>, E>,
E: ::std::fmt::Debug
{
// should check against the offset as well (the normal DateTime comparison will ignore them)
fn norm<Tz: TimeZone>(dt: &Option<DateTime<Tz>>) -> Option<(&DateTime<Tz>, &Tz::Offset)> {
dt.as_ref().map(|dt| (dt, dt.offset()))
}
assert_eq!(norm(&utc_from_str(r#""2014-07-24T12:34:06Z""#).ok()),
norm(&Some(Utc.ymd(2014, 7, 24).and_hms(12, 34, 6))));
assert_eq!(norm(&utc_from_str(r#""2014-07-24T13:57:06+01:23""#).ok()),
norm(&Some(Utc.ymd(2014, 7, 24).and_hms(12, 34, 6))));
assert_eq!(norm(&fixed_from_str(r#""2014-07-24T12:34:06Z""#).ok()),
norm(&Some(FixedOffset::east(0).ymd(2014, 7, 24).and_hms(12, 34, 6))));
assert_eq!(norm(&fixed_from_str(r#""2014-07-24T13:57:06+01:23""#).ok()),
norm(&Some(FixedOffset::east(60*60 + 23*60).ymd(2014, 7, 24).and_hms(13, 57, 6))));
// we don't know the exact local offset but we can check that
// the conversion didn't change the instant itself
assert_eq!(local_from_str(r#""2014-07-24T12:34:06Z""#)
.expect("local shouuld parse"),
Utc.ymd(2014, 7, 24).and_hms(12, 34, 6));
assert_eq!(local_from_str(r#""2014-07-24T13:57:06+01:23""#)
.expect("local should parse with offset"),
Utc.ymd(2014, 7, 24).and_hms(12, 34, 6));
assert!(utc_from_str(r#""2014-07-32T12:34:06Z""#).is_err());
assert!(fixed_from_str(r#""2014-07-32T12:34:06Z""#).is_err());
}
#[cfg(all(test, feature="clock", feature = "rustc-serialize"))]
fn test_decodable_json_timestamps<FUtc, FFixed, FLocal, E>(utc_from_str: FUtc,
fixed_from_str: FFixed,
local_from_str: FLocal)
where FUtc: Fn(&str) -> Result<rustc_serialize::TsSeconds<Utc>, E>,
FFixed: Fn(&str) -> Result<rustc_serialize::TsSeconds<FixedOffset>, E>,
FLocal: Fn(&str) -> Result<rustc_serialize::TsSeconds<Local>, E>,
E: ::std::fmt::Debug
{
fn norm<Tz: TimeZone>(dt: &Option<DateTime<Tz>>) -> Option<(&DateTime<Tz>, &Tz::Offset)> {
dt.as_ref().map(|dt| (dt, dt.offset()))
}
assert_eq!(norm(&utc_from_str("0").ok().map(DateTime::from)),
norm(&Some(Utc.ymd(1970, 1, 1).and_hms(0, 0, 0))));
assert_eq!(norm(&utc_from_str("-1").ok().map(DateTime::from)),
norm(&Some(Utc.ymd(1969, 12, 31).and_hms(23, 59, 59))));
assert_eq!(norm(&fixed_from_str("0").ok().map(DateTime::from)),
norm(&Some(FixedOffset::east(0).ymd(1970, 1, 1).and_hms(0, 0, 0))));
assert_eq!(norm(&fixed_from_str("-1").ok().map(DateTime::from)),
norm(&Some(FixedOffset::east(0).ymd(1969, 12, 31).and_hms(23, 59, 59))));
assert_eq!(*fixed_from_str("0").expect("0 timestamp should parse"),
Utc.ymd(1970, 1, 1).and_hms(0, 0, 0));
assert_eq!(*local_from_str("-1").expect("-1 timestamp should parse"),
Utc.ymd(1969, 12, 31).and_hms(23, 59, 59));
}
#[cfg(feature = "rustc-serialize")]
pub mod rustc_serialize {
use std::fmt;
use std::ops::Deref;
use super::DateTime;
#[cfg(feature="clock")]
use offset::Local;
use offset::{TimeZone, LocalResult, Utc, FixedOffset};
use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
impl<Tz: TimeZone> Encodable for DateTime<Tz> {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
format!("{:?}", self).encode(s)
}
}
// try!-like function to convert a LocalResult into a serde-ish Result
fn from<T, D>(me: LocalResult<T>, d: &mut D) -> Result<T, D::Error>
where D: Decoder,
T: fmt::Display,
{
match me {
LocalResult::None => Err(d.error(
"value is not a legal timestamp")),
LocalResult::Ambiguous(..) => Err(d.error(
"value is an ambiguous timestamp")),
LocalResult::Single(val) => Ok(val)
}
}
impl Decodable for DateTime<FixedOffset> {
fn decode<D: Decoder>(d: &mut D) -> Result<DateTime<FixedOffset>, D::Error> {
d.read_str()?.parse::<DateTime<FixedOffset>>()
.map_err(|_| d.error("invalid date and time"))
}
}
#[allow(deprecated)]
impl Decodable for TsSeconds<FixedOffset> {
#[allow(deprecated)]
fn decode<D: Decoder>(d: &mut D) -> Result<TsSeconds<FixedOffset>, D::Error> {
from(FixedOffset::east(0).timestamp_opt(d.read_i64()?, 0), d)
.map(TsSeconds)
}
}
impl Decodable for DateTime<Utc> {
fn decode<D: Decoder>(d: &mut D) -> Result<DateTime<Utc>, D::Error> {
d.read_str()?
.parse::<DateTime<FixedOffset>>()
.map(|dt| dt.with_timezone(&Utc))
.map_err(|_| d.error("invalid date and time"))
}
}
/// A `DateTime` that can be deserialized from a timestamp
///
/// A timestamp here is seconds since the epoch
#[derive(Debug)]
pub struct TsSeconds<Tz: TimeZone>(DateTime<Tz>);
#[allow(deprecated)]
impl<Tz: TimeZone> From<TsSeconds<Tz>> for DateTime<Tz> {
/// Pull the inner DateTime<Tz> out
#[allow(deprecated)]
fn from(obj: TsSeconds<Tz>) -> DateTime<Tz> {
obj.0
}
}
#[allow(deprecated)]
impl<Tz: TimeZone> Deref for TsSeconds<Tz> {
type Target = DateTime<Tz>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[allow(deprecated)]
impl Decodable for TsSeconds<Utc> {
fn decode<D: Decoder>(d: &mut D) -> Result<TsSeconds<Utc>, D::Error> {
from(Utc.timestamp_opt(d.read_i64()?, 0), d)
.map(TsSeconds)
}
}
#[cfg(feature="clock")]
impl Decodable for DateTime<Local> {
fn decode<D: Decoder>(d: &mut D) -> Result<DateTime<Local>, D::Error> {
match d.read_str()?.parse::<DateTime<FixedOffset>>() {
Ok(dt) => Ok(dt.with_timezone(&Local)),
Err(_) => Err(d.error("invalid date and time")),
}
}
}
#[cfg(feature="clock")]
#[allow(deprecated)]
impl Decodable for TsSeconds<Local> {
#[allow(deprecated)]
fn decode<D: Decoder>(d: &mut D) -> Result<TsSeconds<Local>, D::Error> {
from(Utc.timestamp_opt(d.read_i64()?, 0), d)
.map(|dt| TsSeconds(dt.with_timezone(&Local)))
}
}
#[cfg(test)] use rustc_serialize::json;
#[test]
fn test_encodable() {
super::test_encodable_json(json::encode, json::encode);
}
#[cfg(feature="clock")]
#[test]
fn test_decodable() {
super::test_decodable_json(json::decode, json::decode, json::decode);
}
#[cfg(feature="clock")]
#[test]
fn test_decodable_timestamps() {
super::test_decodable_json_timestamps(json::decode, json::decode, json::decode);
}
}
/// documented at re-export site
#[cfg(feature = "serde")]
pub mod serde {
use std::fmt;
use super::DateTime;
#[cfg(feature="clock")]
use offset::Local;
use offset::{LocalResult, TimeZone, Utc, FixedOffset};
use serdelib::{ser, de};
// try!-like function to convert a LocalResult into a serde-ish Result
fn serde_from<T, E, V>(me: LocalResult<T>, ts: &V) -> Result<T, E>
where E: de::Error,
V: fmt::Display,
T: fmt::Display,
{
match me {
LocalResult::None => Err(E::custom(
format!("value is not a legal timestamp: {}", ts))),
LocalResult::Ambiguous(min, max) => Err(E::custom(
format!("value is an ambiguous timestamp: {}, could be either of {}, {}",
ts, min, max))),
LocalResult::Single(val) => Ok(val)
}
}
/// Ser/de to/from timestamps in nanoseconds
///
/// Intended for use with `serde`'s `with` attribute.
///
/// # Example:
///
/// ```rust
/// # // We mark this ignored so that we can test on 1.13 (which does not
/// # // support custom derive), and run tests with --ignored on beta and
/// # // nightly to actually trigger these.
/// #
/// # #[macro_use] extern crate serde_derive;
/// # #[macro_use] extern crate serde_json;
/// # extern crate chrono;
/// # use chrono::{TimeZone, DateTime, Utc};
/// use chrono::serde::ts_nanoseconds;
/// #[derive(Deserialize, Serialize)]
/// struct S {
/// #[serde(with = "ts_nanoseconds")]
/// time: DateTime<Utc>
/// }
///
/// # fn example() -> Result<S, serde_json::Error> {
/// let time = Utc.ymd(2018, 5, 17).and_hms_nano(02, 04, 59, 918355733);
/// let my_s = S {
/// time: time.clone(),
/// };
///
/// let as_string = serde_json::to_string(&my_s)?;
/// assert_eq!(as_string, r#"{"time":1526522699918355733}"#);
/// let my_s: S = serde_json::from_str(&as_string)?;
/// assert_eq!(my_s.time, time);
/// # Ok(my_s)
/// # }
/// # fn main() { example().unwrap(); }
/// ```
pub mod ts_nanoseconds {
use std::fmt;
use serdelib::{ser, de};
use {DateTime, Utc};
use offset::TimeZone;
use super::serde_from;
/// Serialize a UTC datetime into an integer number of nanoseconds since the epoch
///
/// Intended for use with `serde`s `serialize_with` attribute.
///
/// # Example:
///
/// ```rust
/// # // We mark this ignored so that we can test on 1.13 (which does not
/// # // support custom derive), and run tests with --ignored on beta and
/// # // nightly to actually trigger these.
/// #
/// # #[macro_use] extern crate serde_derive;
/// # #[macro_use] extern crate serde_json;
/// # extern crate chrono;
/// # use chrono::{TimeZone, DateTime, Utc};
/// use chrono::serde::ts_nanoseconds::serialize as to_nano_ts;
/// #[derive(Serialize)]
/// struct S {
/// #[serde(serialize_with = "to_nano_ts")]
/// time: DateTime<Utc>
/// }
///
/// # fn example() -> Result<String, serde_json::Error> {
/// let my_s = S {
/// time: Utc.ymd(2018, 5, 17).and_hms_nano(02, 04, 59, 918355733),
/// };
/// let as_string = serde_json::to_string(&my_s)?;
/// assert_eq!(as_string, r#"{"time":1526522699918355733}"#);
/// # Ok(as_string)
/// # }
/// # fn main() { example().unwrap(); }
/// ```
pub fn serialize<S>(dt: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer<|fim▁hole|>
/// Deserialize a `DateTime` from a nanosecond timestamp
///
/// Intended for use with `serde`s `deserialize_with` attribute.
///
/// # Example:
///
/// ```rust
/// # // We mark this ignored so that we can test on 1.13 (which does not
/// # // support custom derive), and run tests with --ignored on beta and
/// # // nightly to actually trigger these.
/// #
/// # #[macro_use] extern crate serde_derive;
/// # #[macro_use] extern crate serde_json;
/// # extern crate chrono;
/// # use chrono::{DateTime, Utc};
/// use chrono::serde::ts_nanoseconds::deserialize as from_nano_ts;
/// #[derive(Deserialize)]
/// struct S {
/// #[serde(deserialize_with = "from_nano_ts")]
/// time: DateTime<Utc>
/// }
///
/// # fn example() -> Result<S, serde_json::Error> {
/// let my_s: S = serde_json::from_str(r#"{ "time": 1526522699918355733 }"#)?;
/// # Ok(my_s)
/// # }
/// # fn main() { example().unwrap(); }
/// ```
pub fn deserialize<'de, D>(d: D) -> Result<DateTime<Utc>, D::Error>
where D: de::Deserializer<'de>
{
Ok(try!(d.deserialize_i64(NanoSecondsTimestampVisitor)))
}
struct NanoSecondsTimestampVisitor;
impl<'de> de::Visitor<'de> for NanoSecondsTimestampVisitor {
type Value = DateTime<Utc>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result
{
write!(formatter, "a unix timestamp in seconds")
}
/// Deserialize a timestamp in nanoseconds since the epoch
fn visit_i64<E>(self, value: i64) -> Result<DateTime<Utc>, E>
where E: de::Error
{
serde_from(Utc.timestamp_opt(value / 1_000_000_000,
(value % 1_000_000_000) as u32),
&value)
}
/// Deserialize a timestamp in nanoseconds since the epoch
fn visit_u64<E>(self, value: u64) -> Result<DateTime<Utc>, E>
where E: de::Error
{
serde_from(Utc.timestamp_opt((value / 1_000_000_000) as i64,
(value % 1_000_000_000) as u32),
&value)
}
}
}
/// Ser/de to/from timestamps in milliseconds
///
/// Intended for use with `serde`s `with` attribute.
///
/// # Example
///
/// ```rust
/// # // We mark this ignored so that we can test on 1.13 (which does not
/// # // support custom derive), and run tests with --ignored on beta and
/// # // nightly to actually trigger these.
/// #
/// # #[macro_use] extern crate serde_derive;
/// # #[macro_use] extern crate serde_json;
/// # extern crate chrono;
/// # use chrono::{TimeZone, DateTime, Utc};
/// use chrono::serde::ts_milliseconds;
/// #[derive(Deserialize, Serialize)]
/// struct S {
/// #[serde(with = "ts_milliseconds")]
/// time: DateTime<Utc>
/// }
///
/// # fn example() -> Result<S, serde_json::Error> {
/// let time = Utc.ymd(2018, 5, 17).and_hms_milli(02, 04, 59, 918);
/// let my_s = S {
/// time: time.clone(),
/// };
///
/// let as_string = serde_json::to_string(&my_s)?;
/// assert_eq!(as_string, r#"{"time":1526522699918}"#);
/// let my_s: S = serde_json::from_str(&as_string)?;
/// assert_eq!(my_s.time, time);
/// # Ok(my_s)
/// # }
/// # fn main() { example().unwrap(); }
/// ```
pub mod ts_milliseconds {
use std::fmt;
use serdelib::{ser, de};
use {DateTime, Utc};
use offset::TimeZone;
use super::serde_from;
/// Serialize a UTC datetime into an integer number of milliseconds since the epoch
///
/// Intended for use with `serde`s `serialize_with` attribute.
///
/// # Example:
///
/// ```rust
/// # // We mark this ignored so that we can test on 1.13 (which does not
/// # // support custom derive), and run tests with --ignored on beta and
/// # // nightly to actually trigger these.
/// #
/// # #[macro_use] extern crate serde_derive;
/// # #[macro_use] extern crate serde_json;
/// # extern crate chrono;
/// # use chrono::{TimeZone, DateTime, Utc};
/// use chrono::serde::ts_milliseconds::serialize as to_milli_ts;
/// #[derive(Serialize)]
/// struct S {
/// #[serde(serialize_with = "to_milli_ts")]
/// time: DateTime<Utc>
/// }
///
/// # fn example() -> Result<String, serde_json::Error> {
/// let my_s = S {
/// time: Utc.ymd(2018, 5, 17).and_hms_milli(02, 04, 59, 918),
/// };
/// let as_string = serde_json::to_string(&my_s)?;
/// assert_eq!(as_string, r#"{"time":1526522699918}"#);
/// # Ok(as_string)
/// # }
/// # fn main() { example().unwrap(); }
/// ```
pub fn serialize<S>(dt: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer
{
serializer.serialize_i64(dt.timestamp_millis())
}
/// Deserialize a `DateTime` from a millisecond timestamp
///
/// Intended for use with `serde`s `deserialize_with` attribute.
///
/// # Example:
///
/// ```rust
/// # // We mark this ignored so that we can test on 1.13 (which does not
/// # // support custom derive), and run tests with --ignored on beta and
/// # // nightly to actually trigger these.
/// #
/// # #[macro_use] extern crate serde_derive;
/// # #[macro_use] extern crate serde_json;
/// # extern crate chrono;
/// # use chrono::{DateTime, Utc};
/// use chrono::serde::ts_milliseconds::deserialize as from_milli_ts;
/// #[derive(Deserialize)]
/// struct S {
/// #[serde(deserialize_with = "from_milli_ts")]
/// time: DateTime<Utc>
/// }
///
/// # fn example() -> Result<S, serde_json::Error> {
/// let my_s: S = serde_json::from_str(r#"{ "time": 1526522699918 }"#)?;
/// # Ok(my_s)
/// # }
/// # fn main() { example().unwrap(); }
/// ```
pub fn deserialize<'de, D>(d: D) -> Result<DateTime<Utc>, D::Error>
where D: de::Deserializer<'de>
{
            Ok(try!(d.deserialize_i64(MilliSecondsTimestampVisitor)))
}
struct MilliSecondsTimestampVisitor;
impl<'de> de::Visitor<'de> for MilliSecondsTimestampVisitor {
type Value = DateTime<Utc>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result
{
formatter.write_str("a unix timestamp in milliseconds")
}
/// Deserialize a timestamp in milliseconds since the epoch
fn visit_i64<E>(self, value: i64) -> Result<DateTime<Utc>, E>
where E: de::Error
{
serde_from(Utc.timestamp_opt(value / 1000,
((value % 1000) * 1_000_000) as u32),
&value)
}
/// Deserialize a timestamp in milliseconds since the epoch
fn visit_u64<E>(self, value: u64) -> Result<DateTime<Utc>, E>
where E: de::Error
{
serde_from(Utc.timestamp_opt((value / 1000) as i64,
((value % 1000) * 1_000_000) as u32),
&value)
}
}
}
/// Ser/de to/from timestamps in seconds
///
/// Intended for use with `serde`'s `with` attribute.
///
/// # Example:
///
/// ```rust
/// # // We mark this ignored so that we can test on 1.13 (which does not
/// # // support custom derive), and run tests with --ignored on beta and
/// # // nightly to actually trigger these.
/// #
/// # #[macro_use] extern crate serde_derive;
/// # #[macro_use] extern crate serde_json;
/// # extern crate chrono;
/// # use chrono::{TimeZone, DateTime, Utc};
/// use chrono::serde::ts_seconds;
/// #[derive(Deserialize, Serialize)]
/// struct S {
/// #[serde(with = "ts_seconds")]
/// time: DateTime<Utc>
/// }
///
/// # fn example() -> Result<S, serde_json::Error> {
/// let time = Utc.ymd(2015, 5, 15).and_hms(10, 0, 0);
/// let my_s = S {
/// time: time.clone(),
/// };
///
/// let as_string = serde_json::to_string(&my_s)?;
/// assert_eq!(as_string, r#"{"time":1431684000}"#);
/// let my_s: S = serde_json::from_str(&as_string)?;
/// assert_eq!(my_s.time, time);
/// # Ok(my_s)
/// # }
/// # fn main() { example().unwrap(); }
/// ```
pub mod ts_seconds {
use std::fmt;
use serdelib::{ser, de};
use {DateTime, Utc};
use offset::TimeZone;
use super::serde_from;
/// Serialize a UTC datetime into an integer number of seconds since the epoch
///
/// Intended for use with `serde`s `serialize_with` attribute.
///
/// # Example:
///
/// ```rust
/// # // We mark this ignored so that we can test on 1.13 (which does not
/// # // support custom derive), and run tests with --ignored on beta and
/// # // nightly to actually trigger these.
/// #
/// # #[macro_use] extern crate serde_derive;
/// # #[macro_use] extern crate serde_json;
/// # extern crate chrono;
/// # use chrono::{TimeZone, DateTime, Utc};
/// use chrono::serde::ts_seconds::serialize as to_ts;
/// #[derive(Serialize)]
/// struct S {
/// #[serde(serialize_with = "to_ts")]
/// time: DateTime<Utc>
/// }
///
/// # fn example() -> Result<String, serde_json::Error> {
/// let my_s = S {
/// time: Utc.ymd(2015, 5, 15).and_hms(10, 0, 0),
/// };
/// let as_string = serde_json::to_string(&my_s)?;
/// assert_eq!(as_string, r#"{"time":1431684000}"#);
/// # Ok(as_string)
/// # }
/// # fn main() { example().unwrap(); }
/// ```
pub fn serialize<S>(dt: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer
{
serializer.serialize_i64(dt.timestamp())
}
/// Deserialize a `DateTime` from a seconds timestamp
///
/// Intended for use with `serde`s `deserialize_with` attribute.
///
/// # Example:
///
/// ```rust
/// # // We mark this ignored so that we can test on 1.13 (which does not
/// # // support custom derive), and run tests with --ignored on beta and
/// # // nightly to actually trigger these.
/// #
/// # #[macro_use] extern crate serde_derive;
/// # #[macro_use] extern crate serde_json;
/// # extern crate chrono;
/// # use chrono::{DateTime, Utc};
/// use chrono::serde::ts_seconds::deserialize as from_ts;
/// #[derive(Deserialize)]
/// struct S {
/// #[serde(deserialize_with = "from_ts")]
/// time: DateTime<Utc>
/// }
///
/// # fn example() -> Result<S, serde_json::Error> {
/// let my_s: S = serde_json::from_str(r#"{ "time": 1431684000 }"#)?;
/// # Ok(my_s)
/// # }
/// # fn main() { example().unwrap(); }
/// ```
pub fn deserialize<'de, D>(d: D) -> Result<DateTime<Utc>, D::Error>
where D: de::Deserializer<'de>
{
Ok(try!(d.deserialize_i64(SecondsTimestampVisitor)))
}
struct SecondsTimestampVisitor;
impl<'de> de::Visitor<'de> for SecondsTimestampVisitor {
type Value = DateTime<Utc>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result
{
formatter.write_str("a unix timestamp in seconds")
}
/// Deserialize a timestamp in seconds since the epoch
fn visit_i64<E>(self, value: i64) -> Result<DateTime<Utc>, E>
where E: de::Error
{
serde_from(Utc.timestamp_opt(value, 0), &value)
}
/// Deserialize a timestamp in seconds since the epoch
fn visit_u64<E>(self, value: u64) -> Result<DateTime<Utc>, E>
where E: de::Error
{
serde_from(Utc.timestamp_opt(value as i64, 0), &value)
}
}
}
impl<Tz: TimeZone> ser::Serialize for DateTime<Tz> {
/// Serialize into a rfc3339 time string
///
/// See [the `serde` module](./serde/index.html) for alternate
/// serializations.
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer
{
struct FormatWrapped<'a, D: 'a> {
inner: &'a D
}
impl<'a, D: fmt::Debug> fmt::Display for FormatWrapped<'a, D> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
// Debug formatting is correct RFC3339, and it allows Zulu.
serializer.collect_str(&FormatWrapped { inner: &self })
}
}
struct DateTimeVisitor;
impl<'de> de::Visitor<'de> for DateTimeVisitor {
type Value = DateTime<FixedOffset>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result
{
write!(formatter, "a formatted date and time string or a unix timestamp")
}
fn visit_str<E>(self, value: &str) -> Result<DateTime<FixedOffset>, E>
where E: de::Error
{
value.parse().map_err(|err| E::custom(format!("{}", err)))
}
}
/// Deserialize a value that optionally includes a timezone offset in its
/// string representation
///
/// The value to be deserialized must be an rfc3339 string.
///
/// See [the `serde` module](./serde/index.html) for alternate
/// deserialization formats.
impl<'de> de::Deserialize<'de> for DateTime<FixedOffset> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: de::Deserializer<'de>
{
deserializer.deserialize_str(DateTimeVisitor)
}
}
/// Deserialize into a UTC value
///
/// The value to be deserialized must be an rfc3339 string.
///
/// See [the `serde` module](./serde/index.html) for alternate
/// deserialization formats.
impl<'de> de::Deserialize<'de> for DateTime<Utc> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: de::Deserializer<'de>
{
deserializer.deserialize_str(DateTimeVisitor).map(|dt| dt.with_timezone(&Utc))
}
}
/// Deserialize a value that includes no timezone in its string
/// representation
///
/// The value to be deserialized must be an rfc3339 string.
///
/// See [the `serde` module](./serde/index.html) for alternate
/// serialization formats.
#[cfg(feature="clock")]
impl<'de> de::Deserialize<'de> for DateTime<Local> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: de::Deserializer<'de>
{
deserializer.deserialize_str(DateTimeVisitor).map(|dt| dt.with_timezone(&Local))
}
}
#[cfg(test)] extern crate serde_json;
#[cfg(test)] extern crate bincode;
#[test]
fn test_serde_serialize() {
super::test_encodable_json(self::serde_json::to_string, self::serde_json::to_string);
}
#[cfg(feature="clock")]
#[test]
fn test_serde_deserialize() {
super::test_decodable_json(|input| self::serde_json::from_str(&input), |input| self::serde_json::from_str(&input),
|input| self::serde_json::from_str(&input));
}
#[test]
fn test_serde_bincode() {
// Bincode is relevant to test separately from JSON because
// it is not self-describing.
use self::bincode::{Infinite, serialize, deserialize};
let dt = Utc.ymd(2014, 7, 24).and_hms(12, 34, 6);
let encoded = serialize(&dt, Infinite).unwrap();
let decoded: DateTime<Utc> = deserialize(&encoded).unwrap();
assert_eq!(dt, decoded);
assert_eq!(dt.offset(), decoded.offset());
}
}
#[cfg(test)]
mod tests {
use super::DateTime;
#[cfg(feature="clock")]
use Datelike;
use naive::{NaiveTime, NaiveDate};
#[cfg(feature="clock")]
use offset::Local;
use offset::{TimeZone, Utc, FixedOffset};
use oldtime::Duration;
use std::time::{SystemTime, UNIX_EPOCH};
#[test]
#[allow(non_snake_case)]
fn test_datetime_offset() {
let Est = FixedOffset::west(5*60*60);
let Edt = FixedOffset::west(4*60*60);
let Kst = FixedOffset::east(9*60*60);
assert_eq!(format!("{}", Utc.ymd(2014, 5, 6).and_hms(7, 8, 9)),
"2014-05-06 07:08:09 UTC");
assert_eq!(format!("{}", Edt.ymd(2014, 5, 6).and_hms(7, 8, 9)),
"2014-05-06 07:08:09 -04:00");
assert_eq!(format!("{}", Kst.ymd(2014, 5, 6).and_hms(7, 8, 9)),
"2014-05-06 07:08:09 +09:00");
assert_eq!(format!("{:?}", Utc.ymd(2014, 5, 6).and_hms(7, 8, 9)),
"2014-05-06T07:08:09Z");
assert_eq!(format!("{:?}", Edt.ymd(2014, 5, 6).and_hms(7, 8, 9)),
"2014-05-06T07:08:09-04:00");
assert_eq!(format!("{:?}", Kst.ymd(2014, 5, 6).and_hms(7, 8, 9)),
"2014-05-06T07:08:09+09:00");
// edge cases
assert_eq!(format!("{:?}", Utc.ymd(2014, 5, 6).and_hms(0, 0, 0)),
"2014-05-06T00:00:00Z");
assert_eq!(format!("{:?}", Edt.ymd(2014, 5, 6).and_hms(0, 0, 0)),
"2014-05-06T00:00:00-04:00");
assert_eq!(format!("{:?}", Kst.ymd(2014, 5, 6).and_hms(0, 0, 0)),
"2014-05-06T00:00:00+09:00");
assert_eq!(format!("{:?}", Utc.ymd(2014, 5, 6).and_hms(23, 59, 59)),
"2014-05-06T23:59:59Z");
assert_eq!(format!("{:?}", Edt.ymd(2014, 5, 6).and_hms(23, 59, 59)),
"2014-05-06T23:59:59-04:00");
assert_eq!(format!("{:?}", Kst.ymd(2014, 5, 6).and_hms(23, 59, 59)),
"2014-05-06T23:59:59+09:00");
let dt = Utc.ymd(2014, 5, 6).and_hms(7, 8, 9);
assert_eq!(dt, Edt.ymd(2014, 5, 6).and_hms(3, 8, 9));
assert_eq!(dt + Duration::seconds(3600 + 60 + 1), Utc.ymd(2014, 5, 6).and_hms(8, 9, 10));
assert_eq!(dt.signed_duration_since(Edt.ymd(2014, 5, 6).and_hms(10, 11, 12)),
Duration::seconds(-7*3600 - 3*60 - 3));
assert_eq!(*Utc.ymd(2014, 5, 6).and_hms(7, 8, 9).offset(), Utc);
assert_eq!(*Edt.ymd(2014, 5, 6).and_hms(7, 8, 9).offset(), Edt);
assert!(*Edt.ymd(2014, 5, 6).and_hms(7, 8, 9).offset() != Est);
}
#[test]
fn test_datetime_date_and_time() {
let tz = FixedOffset::east(5*60*60);
let d = tz.ymd(2014, 5, 6).and_hms(7, 8, 9);
assert_eq!(d.time(), NaiveTime::from_hms(7, 8, 9));
assert_eq!(d.date(), tz.ymd(2014, 5, 6));
assert_eq!(d.date().naive_local(), NaiveDate::from_ymd(2014, 5, 6));
assert_eq!(d.date().and_time(d.time()), Some(d));
let tz = FixedOffset::east(4*60*60);
let d = tz.ymd(2016, 5, 4).and_hms(3, 2, 1);
assert_eq!(d.time(), NaiveTime::from_hms(3, 2, 1));
assert_eq!(d.date(), tz.ymd(2016, 5, 4));
assert_eq!(d.date().naive_local(), NaiveDate::from_ymd(2016, 5, 4));
assert_eq!(d.date().and_time(d.time()), Some(d));
let tz = FixedOffset::west(13*60*60);
let d = tz.ymd(2017, 8, 9).and_hms(12, 34, 56);
assert_eq!(d.time(), NaiveTime::from_hms(12, 34, 56));
assert_eq!(d.date(), tz.ymd(2017, 8, 9));
assert_eq!(d.date().naive_local(), NaiveDate::from_ymd(2017, 8, 9));
assert_eq!(d.date().and_time(d.time()), Some(d));
}
#[test]
#[cfg(feature="clock")]
fn test_datetime_with_timezone() {
let local_now = Local::now();
let utc_now = local_now.with_timezone(&Utc);
let local_now2 = utc_now.with_timezone(&Local);
assert_eq!(local_now, local_now2);
}
#[test]
#[allow(non_snake_case)]
fn test_datetime_rfc2822_and_rfc3339() {
let EDT = FixedOffset::east(5*60*60);
assert_eq!(Utc.ymd(2015, 2, 18).and_hms(23, 16, 9).to_rfc2822(),
"Wed, 18 Feb 2015 23:16:09 +0000");
assert_eq!(Utc.ymd(2015, 2, 18).and_hms(23, 16, 9).to_rfc3339(),
"2015-02-18T23:16:09+00:00");
assert_eq!(EDT.ymd(2015, 2, 18).and_hms_milli(23, 16, 9, 150).to_rfc2822(),
"Wed, 18 Feb 2015 23:16:09 +0500");
assert_eq!(EDT.ymd(2015, 2, 18).and_hms_milli(23, 16, 9, 150).to_rfc3339(),
"2015-02-18T23:16:09.150+05:00");
assert_eq!(EDT.ymd(2015, 2, 18).and_hms_micro(23, 59, 59, 1_234_567).to_rfc2822(),
"Wed, 18 Feb 2015 23:59:60 +0500");
assert_eq!(EDT.ymd(2015, 2, 18).and_hms_micro(23, 59, 59, 1_234_567).to_rfc3339(),
"2015-02-18T23:59:60.234567+05:00");
assert_eq!(DateTime::parse_from_rfc2822("Wed, 18 Feb 2015 23:16:09 +0000"),
Ok(FixedOffset::east(0).ymd(2015, 2, 18).and_hms(23, 16, 9)));
assert_eq!(DateTime::parse_from_rfc3339("2015-02-18T23:16:09Z"),
Ok(FixedOffset::east(0).ymd(2015, 2, 18).and_hms(23, 16, 9)));
assert_eq!(DateTime::parse_from_rfc2822("Wed, 18 Feb 2015 23:59:60 +0500"),
Ok(EDT.ymd(2015, 2, 18).and_hms_milli(23, 59, 59, 1_000)));
assert_eq!(DateTime::parse_from_rfc3339("2015-02-18T23:59:60.234567+05:00"),
Ok(EDT.ymd(2015, 2, 18).and_hms_micro(23, 59, 59, 1_234_567)));
}
#[test]
fn test_rfc3339_opts() {
use SecondsFormat::*;
        let plus8 = FixedOffset::east(8 * 60 * 60); // UTC+08:00
        let dt = plus8.ymd(2018, 1, 11).and_hms_nano(10, 5, 13, 084_660_000);
assert_eq!(dt.to_rfc3339_opts(Secs, false), "2018-01-11T10:05:13+08:00");
assert_eq!(dt.to_rfc3339_opts(Secs, true), "2018-01-11T10:05:13+08:00");
assert_eq!(dt.to_rfc3339_opts(Millis, false), "2018-01-11T10:05:13.084+08:00");
assert_eq!(dt.to_rfc3339_opts(Micros, false), "2018-01-11T10:05:13.084660+08:00");
assert_eq!(dt.to_rfc3339_opts(Nanos, false), "2018-01-11T10:05:13.084660000+08:00");
assert_eq!(dt.to_rfc3339_opts(AutoSi, false), "2018-01-11T10:05:13.084660+08:00");
let ut = DateTime::<Utc>::from_utc(dt.naive_utc(), Utc);
assert_eq!(ut.to_rfc3339_opts(Secs, false), "2018-01-11T02:05:13+00:00");
assert_eq!(ut.to_rfc3339_opts(Secs, true), "2018-01-11T02:05:13Z");
assert_eq!(ut.to_rfc3339_opts(Millis, false), "2018-01-11T02:05:13.084+00:00");
assert_eq!(ut.to_rfc3339_opts(Millis, true), "2018-01-11T02:05:13.084Z");
assert_eq!(ut.to_rfc3339_opts(Micros, true), "2018-01-11T02:05:13.084660Z");
assert_eq!(ut.to_rfc3339_opts(Nanos, true), "2018-01-11T02:05:13.084660000Z");
assert_eq!(ut.to_rfc3339_opts(AutoSi, true), "2018-01-11T02:05:13.084660Z");
}
#[test]
#[should_panic]
fn test_rfc3339_opts_nonexhaustive() {
use SecondsFormat;
let dt = Utc.ymd(1999, 10, 9).and_hms(1, 2, 3);
dt.to_rfc3339_opts(SecondsFormat::__NonExhaustive, true);
}
#[test]
fn test_datetime_from_str() {
assert_eq!("2015-2-18T23:16:9.15Z".parse::<DateTime<FixedOffset>>(),
Ok(FixedOffset::east(0).ymd(2015, 2, 18).and_hms_milli(23, 16, 9, 150)));
assert_eq!("2015-2-18T13:16:9.15-10:00".parse::<DateTime<FixedOffset>>(),
Ok(FixedOffset::west(10 * 3600).ymd(2015, 2, 18).and_hms_milli(13, 16, 9, 150)));
assert!("2015-2-18T23:16:9.15".parse::<DateTime<FixedOffset>>().is_err());
assert_eq!("2015-2-18T23:16:9.15Z".parse::<DateTime<Utc>>(),
Ok(Utc.ymd(2015, 2, 18).and_hms_milli(23, 16, 9, 150)));
assert_eq!("2015-2-18T13:16:9.15-10:00".parse::<DateTime<Utc>>(),
Ok(Utc.ymd(2015, 2, 18).and_hms_milli(23, 16, 9, 150)));
assert!("2015-2-18T23:16:9.15".parse::<DateTime<Utc>>().is_err());
// no test for `DateTime<Local>`, we cannot verify that much.
}
#[test]
fn test_datetime_parse_from_str() {
let ymdhms = |y,m,d,h,n,s,off| FixedOffset::east(off).ymd(y,m,d).and_hms(h,n,s);
assert_eq!(DateTime::parse_from_str("2014-5-7T12:34:56+09:30", "%Y-%m-%dT%H:%M:%S%z"),
                   Ok(ymdhms(2014, 5, 7, 12, 34, 56, 570*60))); // the +09:30 offset is parsed and kept
assert!(DateTime::parse_from_str("20140507000000", "%Y%m%d%H%M%S").is_err()); // no offset
assert!(DateTime::parse_from_str("Fri, 09 Aug 2013 23:54:35 GMT",
"%a, %d %b %Y %H:%M:%S GMT").is_err());
assert_eq!(Utc.datetime_from_str("Fri, 09 Aug 2013 23:54:35 GMT",
"%a, %d %b %Y %H:%M:%S GMT"),
Ok(Utc.ymd(2013, 8, 9).and_hms(23, 54, 35)));
}
#[test]
#[cfg(feature="clock")]
fn test_datetime_format_with_local() {
// if we are not around the year boundary, local and UTC date should have the same year
let dt = Local::now().with_month(5).unwrap();
assert_eq!(dt.format("%Y").to_string(), dt.with_timezone(&Utc).format("%Y").to_string());
}
#[test]
#[cfg(feature="clock")]
fn test_datetime_is_copy() {
// UTC is known to be `Copy`.
let a = Utc::now();
let b = a;
assert_eq!(a, b);
}
#[test]
#[cfg(feature="clock")]
fn test_datetime_is_send() {
use std::thread;
// UTC is known to be `Send`.
let a = Utc::now();
thread::spawn(move || {
let _ = a;
}).join().unwrap();
}
#[test]
fn test_subsecond_part() {
let datetime = Utc.ymd(2014, 7, 8).and_hms_nano(9, 10, 11, 1234567);
assert_eq!(1, datetime.timestamp_subsec_millis());
assert_eq!(1234, datetime.timestamp_subsec_micros());
assert_eq!(1234567, datetime.timestamp_subsec_nanos());
}
#[test]
fn test_from_system_time() {
use std::time::Duration;
let epoch = Utc.ymd(1970, 1, 1).and_hms(0, 0, 0);
// SystemTime -> DateTime<Utc>
assert_eq!(DateTime::<Utc>::from(UNIX_EPOCH), epoch);
assert_eq!(DateTime::<Utc>::from(UNIX_EPOCH + Duration::new(999_999_999, 999_999_999)),
Utc.ymd(2001, 9, 9).and_hms_nano(1, 46, 39, 999_999_999));
assert_eq!(DateTime::<Utc>::from(UNIX_EPOCH - Duration::new(999_999_999, 999_999_999)),
Utc.ymd(1938, 4, 24).and_hms_nano(22, 13, 20, 1));
// DateTime<Utc> -> SystemTime
assert_eq!(SystemTime::from(epoch), UNIX_EPOCH);
assert_eq!(SystemTime::from(Utc.ymd(2001, 9, 9).and_hms_nano(1, 46, 39, 999_999_999)),
UNIX_EPOCH + Duration::new(999_999_999, 999_999_999));
assert_eq!(SystemTime::from(Utc.ymd(1938, 4, 24).and_hms_nano(22, 13, 20, 1)),
UNIX_EPOCH - Duration::new(999_999_999, 999_999_999));
// DateTime<any tz> -> SystemTime (via `with_timezone`)
#[cfg(feature="clock")] {
assert_eq!(SystemTime::from(epoch.with_timezone(&Local)), UNIX_EPOCH);
}
assert_eq!(SystemTime::from(epoch.with_timezone(&FixedOffset::east(32400))), UNIX_EPOCH);
assert_eq!(SystemTime::from(epoch.with_timezone(&FixedOffset::west(28800))), UNIX_EPOCH);
}
}<|fim▁end|> | {
serializer.serialize_i64(dt.timestamp_nanos())
} |
<|file_name|>scheduler_state.rs<|end_file_name|><|fim▁begin|>use time::precise_time_ns;
<|fim▁hole|>data! (
SchedulerState {
current_time : u64 = precise_time_ns()
max_fps : u64 = 60
target_frame_time : u64 = 1000000000 / max_fps
last_frame_time : u64 = precise_time_ns()
last_cycle_time : u64 = precise_time_ns()
}
);<|fim▁end|> | |
<|file_name|>streamexcept.py<|end_file_name|><|fim▁begin|>import functions
import heapq
import vtbase
### Classic stream iterator
registered = True
class StreamExcept(vtbase.VT):
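    """Streaming set-difference (EXCEPT) over the given tables.

    Yields the rows of the first input table that appear in none of the
    remaining tables, merging rows on a key column with heapq.merge; this
    assumes every input is already sorted on that column.
    """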
def BestIndex(self, constraints, orderbys):
return (None, 0, None, True, 1000)
def VTiter(self, *parsedArgs, **envars):
largs, dictargs = self.full_parse(parsedArgs)
if len(largs) < 1:
raise functions.OperatorError(__name__.rsplit('.')[-1], "Not defined union tables ")
streams = str(largs[0]).split(",")
if len(streams) < 2:
raise functions.OperatorError(__name__.rsplit('.')[-1], "Union tables must be more than one ")
cursors = []
execs = []
for stream in streams:
cursors.append(envars['db'].cursor())
execs.append(cursors[-1].execute("select * from " + str(stream) + ";"))
comparedcursor = str(cursors[0].getdescriptionsafe())
# for cursor in cursors:
# if str(cursor.getdescriptionsafe()) != comparedcursor:
# raise functions.OperatorError(__name__.rsplit('.')[-1],"Union tables with different schemas ")
if 'cols' in dictargs:
try:
cols = int(dictargs['cols'])
except ValueError:<|fim▁hole|> try:
cols = [y[0] for y in cursors[0].getdescriptionsafe()].index(dictargs['cols'])
except ValueError:
raise functions.OperatorError(__name__.rsplit('.')[-1], "Column name does not exists ")
else:
cols = 0
if cols >= len(cursors[0].getdescriptionsafe()):
raise functions.OperatorError(__name__.rsplit('.')[-1], "Column position does not exists ")
        # Tag each row with its stream index. The index is bound eagerly via a
        # helper function -- a bare generator expression here would late-bind x --
        # and this also lifts the old hard-coded limit of five input streams.
        def tag(ex, idx):
            return ((v[cols], (idx,) + v) for v in ex)
        for x in range(len(streams)):
            execs[x] = tag(execs[x], x)
try:
yield list(cursors[0].getdescriptionsafe())
except StopIteration:
try:
raise
finally:
try:
for cur in cursors:
cur.close()
except:
pass
currentgroup = None
lists = [[]] * len(streams)
for k, v in heapq.merge(*execs):
if currentgroup is None or currentgroup != k:
unionset = set().union(*lists[1:])
for t in (set(lists[0]) - unionset):
yield t
lists = [[]] * len(streams)
lists[v[0]] = lists[v[0]] + [tuple(v[1:])]
currentgroup = k
unionset = set().union(*lists[1:])
for t in list(set(lists[0]) - unionset):
yield t
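# Editorial note (not in the original source): the heapq.merge loop above
# groups rows from every stream by the chosen key column; a key group from the
# first stream is emitted only when no other stream contains it. For example,
# with A = [(1,), (2,)] and B = [(2,)], only (1,) is yielded -- the streaming
# equivalent of SQL EXCEPT.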
def Source():
return vtbase.VTGenerator(StreamExcept)
if not ('.' in __name__):
"""
This is needed to be able to test the function, put it at the end of every
new function you create
"""
import sys
from functions import *
testfunction()
if __name__ == "__main__":
reload(sys)
sys.setdefaultencoding('utf-8')
import doctest
doctest.testmod()<|fim▁end|> | |
<|file_name|>lint_output_format.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//<|fim▁hole|>// except according to those terms.
#![crate_id="lint_output_format#0.1"]
#![crate_type = "lib"]
#[deprecated]
pub fn foo() -> uint {
20
}
#[experimental]
pub fn bar() -> uint {
40
}
#[unstable]
pub fn baz() -> uint {
30
}<|fim▁end|> | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed |
<|file_name|>test_devstack.py<|end_file_name|><|fim▁begin|># Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import jsonschema
import mock
from rally.deploy.engines import devstack
from tests.unit import test
SAMPLE_CONFIG = {
"type": "DevstackEngine",
"provider": {
"name": "ExistingServers",
"credentials": [{"user": "root", "host": "example.com"}],
},
"localrc": {
"ADMIN_PASSWORD": "secret",
},
}
DEVSTACK_REPO = "https://git.openstack.org/cgit/openstack-dev/devstack.git"
class DevstackEngineTestCase(test.TestCase):
def setUp(self):
super(DevstackEngineTestCase, self).setUp()
self.deployment = {
"uuid": "de641026-dbe3-4abe-844a-ffef930a600a",
"config": SAMPLE_CONFIG,
}
self.engine = devstack.DevstackEngine(self.deployment)
def test_invalid_config(self):
self.deployment = SAMPLE_CONFIG.copy()
self.deployment["config"] = {"type": 42}
engine = devstack.DevstackEngine(self.deployment)
self.assertRaises(jsonschema.ValidationError,
engine.validate)
def test_construct(self):
self.assertEqual(self.engine.localrc["ADMIN_PASSWORD"], "secret")
@mock.patch("rally.deploy.engines.devstack.open", create=True)
def test_prepare_server(self, m_open):
m_open.return_value = "fake_file"
server = mock.Mock()
server.password = "secret"
self.engine.prepare_server(server)
calls = [
mock.call("/bin/sh -e", stdin="fake_file"),
mock.call("chpasswd", stdin="rally:secret"),
]
self.assertEqual(calls, server.ssh.run.mock_calls)
filename = m_open.mock_calls[0][1][0]
self.assertTrue(filename.endswith("rally/deploy/engines/"
"devstack/install.sh"))
self.assertEqual([mock.call(filename, "rb")], m_open.mock_calls)
@mock.patch("rally.deploy.engine.EngineFactory.get_provider")
@mock.patch("rally.deploy.engines.devstack.get_updated_server")
@mock.patch("rally.deploy.engines.devstack.get_script")
@mock.patch("rally.deploy.serverprovider.provider.Server")<|fim▁hole|> server = mock.Mock()
server.host = "host"
m_endpoint.return_value = "fake_endpoint"
m_gus.return_value = ds_server = mock.Mock()
m_gs.return_value = "fake_script"
server.get_credentials.return_value = "fake_credentials"
fake_provider.create_servers.return_value = [server]
with mock.patch.object(self.engine, "deployment") as m_d:
endpoints = self.engine.deploy()
self.assertEqual({"admin": "fake_endpoint"}, endpoints)
m_endpoint.assert_called_once_with("http://host:5000/v2.0/", "admin",
"secret", "admin", "admin")
m_d.add_resource.assert_called_once_with(
info="fake_credentials",
provider_name="DevstackEngine",
type="credentials")
repo = "https://git.openstack.org/cgit/openstack-dev/devstack.git"
cmd = "/bin/sh -e -s %s master" % repo
server.ssh.run.assert_called_once_with(cmd, stdin="fake_script")
ds_calls = [
mock.call.ssh.run("cat > ~/devstack/localrc", stdin=mock.ANY),
mock.call.ssh.run("~/devstack/stack.sh")
]
self.assertEqual(ds_calls, ds_server.mock_calls)
localrc = ds_server.mock_calls[0][2]["stdin"]
self.assertIn("ADMIN_PASSWORD=secret", localrc)<|fim▁end|> | @mock.patch("rally.deploy.engines.devstack.objects.Endpoint")
def test_deploy(self, m_endpoint, m_server, m_gs, m_gus, m_gp):
m_gp.return_value = fake_provider = mock.Mock() |
<|file_name|>envInformationModule.js<|end_file_name|><|fim▁begin|>var exec = require('child_process').exec;
exports.setHostname = function(envSettings){
var command = 'hostname';
exec(command,[], function (error, stdout, stderr) {
if(error){
console.log('error when executing: ' + command);
console.log('output : ' + stderr);
}
console.log('hostname : ' + stdout);
var result = stdout.replace(/(\r\n|\n|\r)/gm,"");<|fim▁hole|><|fim▁end|> | envSettings.serverPath = result;
});
}; |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Liquid Processing Errors.
#![warn(missing_docs)]
#![warn(missing_debug_implementations)]
#![warn(unused_extern_crates)]
mod clone;<|fim▁hole|>pub use clone::*;
pub use error::*;
pub use result_ext::*;
use trace::*;<|fim▁end|> | mod error;
mod result_ext;
mod trace;
|
<|file_name|>extern-stress.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This creates a bunch of yielding tasks that run concurrently
// while holding onto C stacks
mod rustrt {
pub extern {
pub fn rust_dbg_call(cb: *u8, data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
<|fim▁hole|> task::yield();
count(data - 1u) + count(data - 1u)
}
}
fn count(n: uint) -> uint {
unsafe {
rustrt::rust_dbg_call(cb, n)
}
}
pub fn main() {
for old_iter::repeat(100u) {
do task::spawn {
assert!(count(5u) == 16u);
};
}
}<|fim▁end|> | extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1u {
data
} else { |
<|file_name|>migration.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Migration of objects from SRC to DST clouds."""
from cloudferry.lib.utils import utils
CLOUD = 'cloud'
SRC, DST = 'src', 'dst'
class Migration(object):
""" Map SRC objects to corresponding DST objects they migrated to."""
def __init__(self, src_cloud, dst_cloud, resource):
self.cloud = {
SRC: src_cloud,
DST: dst_cloud,
}
self.obj_map = {}
if resource not in utils.RESOURCE_TYPES:
raise NotImplementedError('Unknown resource: %s', resource)
self.default_resource_type = utils.RESOURCE_TYPES[resource]
self.resource = {
SRC: self.cloud[SRC].resources.get(resource),
DST: self.cloud[DST].resources.get(resource),
}
def get_default(self, resource_type):
""" Get default ID by `resource_type` or None.
:return: str
"""
if resource_type in (utils.TENANTS_TYPE, utils.USERS_TYPE):
return self.resource[DST].get_default_id(resource_type)
def map_migrated_objects(self, resource_type=None):
"""Build map SRC -> DST object IDs.
:return: dict
"""
if not resource_type:
resource_type = self.default_resource_type
objs = {
pos: self.read_objects(pos, resource_type)
for pos in (SRC, DST)
}
# objects -> object
body = resource_type[:-1]
obj_map = dict(
[(src[body]['id'], dst[body]['id'])
for src in objs[SRC] for dst in objs[DST]
if self.obj_identical(src[body], dst[body])])
return obj_map
def migrated_id(self, src_object_id, resource_type=None):
""" Get migrated object ID by SRC object ID.
:return: DST object ID
"""
if not resource_type:
resource_type = self.default_resource_type
if resource_type not in self.obj_map:
self.obj_map[resource_type] = \
self.map_migrated_objects(resource_type)
return self.obj_map[resource_type].get(src_object_id,
self.get_default(resource_type))
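# Editorial usage sketch (the clouds, the 'identity' resource name and the
# IDs below are hypothetical, not from the original module):
#   migration = Migration(src_cloud, dst_cloud, 'identity')
#   dst_id = migration.migrated_id(src_tenant_id)
#   assert migration.identical(src_tenant_id, dst_id)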
def identical(self, src_id, dst_id, resource_type=None):<|fim▁hole|> """
if not resource_type:
resource_type = self.default_resource_type
return dst_id == self.migrated_id(src_id, resource_type=resource_type)
def obj_identical(self, src_obj, dst_obj):
"""Compare src and dst objects from resource info.
:return: boolean
"""
dst_res = self.resource[DST]
return dst_res.identical(src_obj, dst_obj)
def read_objects(self, position, resource_type):
"""Read objects info from `position` cloud.
:return: list
"""
res = self.resource[position]
objs = res.read_info()[resource_type]
return objs.values() if isinstance(objs, dict) else objs<|fim▁end|> | """ Check if SRC object with `src_id` === DST object with `dst_id`.
:return: boolean
|
<|file_name|>sticky_spec.js<|end_file_name|><|fim▁begin|>import { isSticky } from '~/lib/utils/sticky';
describe('sticky', () => {
const el = {
offsetTop: 0,
classList: {},
};
beforeEach(() => {
el.offsetTop = 0;
el.classList.add = jasmine.createSpy('spy');
el.classList.remove = jasmine.createSpy('spy');
});
describe('classList.remove', () => {
it('does not call classList.remove when stuck', () => {
isSticky(el, 0, 0);
expect(
el.classList.remove,
).not.toHaveBeenCalled();
});
it('calls classList.remove when not stuck', () => {
el.offsetTop = 10;
isSticky(el, 0, 0);
expect(
el.classList.remove,
).toHaveBeenCalledWith('is-stuck');
});
});
describe('classList.add', () => {
it('calls classList.add when stuck', () => {
isSticky(el, 0, 0);
<|fim▁hole|> expect(
el.classList.add,
).toHaveBeenCalledWith('is-stuck');
});
it('does not call classList.add when not stuck', () => {
el.offsetTop = 10;
isSticky(el, 0, 0);
expect(
el.classList.add,
).not.toHaveBeenCalled();
});
});
});<|fim▁end|> | |
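// Editorial sketch (not part of the original spec): application code usually
// drives isSticky from a scroll listener; `el` and `stickyTop` are
// hypothetical here.
// document.addEventListener('scroll', () => {
//   isSticky(el, window.scrollY, stickyTop);
// });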
<|file_name|>viewport.ts<|end_file_name|><|fim▁begin|>import type { GeneralWindow } from './shared/types';
import getCurrentDocument from './getCurrentDocument';
/**
* Get the current viewport element (scrolling element) of the current document, from a given element
*
* @param doc - Element to find the viewport element from
* @return The viewport element
*
* @example
*
* ```ts
* // Get the viewport of the current document
* viewport();
*
* // Get the viewport of the current window
* viewport(window);
*
* // Get the viewport of a given element
* viewport(someElementInSomeDocument);<|fim▁hole|>export default function viewport(elm?: Element | Document | GeneralWindow): Element | HTMLElement | null {
const doc = getCurrentDocument(elm || document);
return doc && (doc.scrollingElement || doc.documentElement);
}<|fim▁end|> | * ```
*/ |
<|file_name|>HW5_2.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 25 21:46:31 2017
@author: sitibanc
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# =============================================================================
# Read CSV
# =============================================================================
df = pd.read_csv('TXF20112015.csv', sep=',', header = None) # dataframe (time, close, open, high, low, volume)
TAIEX = df.values # ndarray
tradeday = list(set(TAIEX[:, 0] // 10000)) # trading days (YYYYMMDD)
tradeday.sort()
# =============================================================================
# Strategy 2.0: buy one contract at the open, 30-point stop-loss, 30-point take-profit, close out at the market close
# =============================================================================
profit0 = np.zeros((len(tradeday),1))
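# Editorial note (illustrative numbers): with the 30-point band, a long entry
# at p1 = 10000 exits at the close of the first bar whose low reaches 9970
# (stop-loss) or whose high reaches 10030 (take-profit); otherwise it exits at
# the daily close.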
for i in range(len(tradeday)):
date = tradeday[i]
idx = np.nonzero(TAIEX[:, 0] // 10000 == date)[0]
idx.sort()
p1 = TAIEX[idx[0], 2]
# set the stop-loss level<|fim▁hole|> p2 = TAIEX[idx[-1], 1] # sell at the daily closing price
elif len(idx3) == 0: # the stop-loss was hit but the take-profit never was
p2 = TAIEX[idx[idx2[0]], 1] # sell at the close of the bar that hit the stop-loss
elif len(idx2) == 0: # the take-profit was hit but the stop-loss never was
p2 = TAIEX[idx[idx3[0]], 1] # sell at the close of the bar that hit the take-profit
elif idx2[0] > idx3[0]: # the take-profit was hit first that day
p2 = TAIEX[idx[idx3[0]], 1] # sell at the close of the bar that hit the take-profit
else: # the stop-loss was hit first that day
p2 = TAIEX[idx[idx2[0]], 1] # sell at the close of the bar that hit the stop-loss
profit0[i] = p2 - p1
print('Strategy 2.0: buy one contract at the daily open, 30-point stop-loss, 30-point take-profit, close out at the daily close\nCumulative daily P&L line chart')
profit02 = np.cumsum(profit0) # cumulative daily P&L
plt.plot(profit02) # cumulative daily P&L line chart
plt.show()
print('Daily P&L distribution')
plt.hist(profit0, bins = 100) # histogram of daily P&L
plt.show()
# compute the statistics
ans1 = len(profit0) # number of trades
ans2 = profit02[-1] # total P&L in points
ans3 = np.sum(profit0 > 0) / len(profit0) * 100 # win rate
ans4 = np.mean(profit0[profit0 > 0]) # average points gained on winning trades
ans5 = np.mean(profit0[profit0 <= 0]) # average points lost on losing trades
print('Number of trades:', ans1, '\nTotal P&L (points):', ans2, '\nWin rate:', ans3, '%')
print('Average gain per winning trade (points):', ans4, '\nAverage loss per losing trade (points):', ans5, '\n')
# =============================================================================
# Strategy 2.1: sell one contract at the open, 30-point stop-loss, 30-point take-profit, close out at the market close
# =============================================================================
profit1 = np.zeros((len(tradeday),1))
for i in range(len(tradeday)):
date = tradeday[i]
idx = np.nonzero(TAIEX[:, 0] // 10000 == date)[0]
idx.sort()
p1 = TAIEX[idx[0], 2]
# set the stop-loss level
idx2 = np.nonzero(TAIEX[idx, 3] >= p1 + 30)[0] # the high breaks above the stop-loss
# set the take-profit level
idx3 = np.nonzero(TAIEX[idx, 4] <= p1 - 30)[0] # the low breaks below the take-profit
if len(idx2) == 0 and len(idx3) == 0: # neither the stop-loss nor the take-profit was touched that day
p2 = TAIEX[idx[-1], 1] # buy back at the daily closing price
elif len(idx3) == 0: # the stop-loss was hit but the take-profit never was
p2 = TAIEX[idx[idx2[0]], 1] # buy back at the close of the bar that hit the stop-loss
elif len(idx2) == 0: # the take-profit was hit but the stop-loss never was
p2 = TAIEX[idx[idx3[0]], 1] # buy back at the close of the bar that hit the take-profit
elif idx2[0] > idx3[0]: # the take-profit was hit first that day
p2 = TAIEX[idx[idx3[0]], 1] # buy back at the close of the bar that hit the take-profit
else: # the stop-loss was hit first that day
p2 = TAIEX[idx[idx2[0]], 1] # buy back at the close of the bar that hit the stop-loss
print('Strategy 2.1: sell one contract at the daily open, 30-point stop-loss, 30-point take-profit, close out at the daily close\nDaily P&L line chart')
profit12 = np.cumsum(profit1) # cumulative daily P&L
plt.plot(profit12) # cumulative daily P&L line chart
plt.show()
print('Daily P&L distribution')
plt.hist(profit1, bins = 100) # histogram of daily P&L
plt.show()
# compute the statistics
ans1 = len(profit1) # number of trades
ans2 = profit12[-1] # total P&L in points
ans3 = np.sum(profit1 > 0) / len(profit1) * 100 # win rate
ans4 = np.mean(profit1[profit1 > 0]) # average points gained on winning trades
ans5 = np.mean(profit1[profit1 <= 0]) # average points lost on losing trades
print('Number of trades:', ans1, '\nTotal P&L (points):', ans2, '\nWin rate:', ans3, '%')
print('Average gain per winning trade (points):', ans4, '\nAverage loss per losing trade (points):', ans5)<|fim▁end|> | idx2 = np.nonzero(TAIEX[idx, 4] <= p1 - 30)[0] # the low breaks below the stop-loss
# set the take-profit level
idx3 = np.nonzero(TAIEX[idx, 3] >= p1 + 30)[0] # the high breaks above the take-profit
if len(idx2) == 0 and len(idx3) == 0: # neither the stop-loss nor the take-profit was touched that day
<|file_name|>Application.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 S2S Network Consultoria e Tecnologia da Informacao LTDA
#
# Author: Zhongjie Wang <[email protected]>
# Tianwei Liu <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""
Entrance of ICM Desktop Agent
"""
import os
import signal
import sys
import time
import socket
from twisted.internet import reactor
from twisted.internet import task
<|fim▁hole|>from umit.icm.agent.Global import *
from umit.icm.agent.Version import VERSION
from umit.icm.agent.rpc.message import *
from umit.icm.agent.rpc.MessageFactory import MessageFactory
from umit.icm.agent.I18N import _
# Script found at http://www.py2exe.org/index.cgi/HowToDetermineIfRunningFromExe
import imp
frozen = (hasattr(sys, "frozen") or # new py2exe
hasattr(sys, "importers") # old py2exe
or imp.is_frozen("__main__")) # tools/freeze
del(imp)
def main_is_frozen():
return frozen
class Application(object):
def __init__(self):
pass
def _init_components(self, aggregator):
from umit.icm.agent.core.PeerInfo import PeerInfo
self.peer_info = PeerInfo()
from umit.icm.agent.core.PeerManager import PeerManager
self.peer_manager = PeerManager()
from umit.icm.agent.core.EventManager import EventManager
self.event_manager = EventManager()
from umit.icm.agent.core.TaskManager import TaskManager
self.task_manager = TaskManager()
from umit.icm.agent.core.ReportManager import ReportManager
self.report_manager = ReportManager()
from umit.icm.agent.core.ReportUploader import ReportUploader
self.report_uploader = ReportUploader(self.report_manager)
from umit.icm.agent.core.TaskScheduler import TaskScheduler
self.task_scheduler = TaskScheduler(self.task_manager,
self.report_manager)
from umit.icm.agent.core.TaskAssignFetch import TaskAssignFetch
self.task_assign = TaskAssignFetch(self.task_manager)
from umit.icm.agent.core.TestSetsFetcher import TestSetsFetcher
self.test_sets = TestSetsFetcher(self.task_manager,
self.report_manager)
from umit.icm.agent.secure.KeyManager import KeyManager
self.key_manager = KeyManager()
from umit.icm.agent.core.Statistics import Statistics
self.statistics = Statistics()
from umit.icm.agent.rpc.aggregator import AggregatorAPI
self.aggregator = AggregatorAPI(aggregator)
from umit.icm.agent.super.SuperBehaviourByManual import SuperBehaviourByManual
self.speer_by_manual = SuperBehaviourByManual(self)
self.quitting = False
self.is_auto_login = False
self.is_successful_login = False #fix the login failure, save DB problem
def _load_from_db(self):
"""
"""
self.peer_manager.load_from_db()
# restore unsent reports
self.report_manager.load_unsent_reports()
# desktop agent stats saving
self.statistics.load_from_db()
def init_after_running(self, port=None, username=None, password=None,
server_enabled=True, skip_server_check=False):
"""
"""
#####################################################
# Create agent service (port-conflict handling still needs to be added)
if server_enabled:
self.listen_port = port if port is not None else g_config.getint('network', 'listen_port')
try:
from umit.icm.agent.rpc.AgentService import AgentFactory
self.factory = AgentFactory()
g_logger.info("Listening on port %d.", self.listen_port)
reactor.listenTCP(self.listen_port, self.factory)
except Exception,info:
# additional detail could be shown here
self.quit_window_in_wrong(primary_text = _("The Listen Port has been used by other applications"), \
secondary_text = _("Please check the Port") )
#############################
# Create mobile agent service
from umit.icm.agent.rpc.mobile import MobileAgentService
self.ma_service = MobileAgentService()
if self.use_gui:
import gtk
# Init GUI
from umit.icm.agent.gui.GtkMain import GtkMain
self.gtk_main = GtkMain()
self.is_auto_login = g_config.getboolean('application', 'auto_login_swittch')
###################################################################
# debug switch: shows the GTK window without any authentication
if g_config.getboolean('debug','debug_switch') and self.use_gui:
self.login_simulate()
######################################
# first check that the aggregator can be reached
if not skip_server_check:
defer_ = self.aggregator.check_aggregator_website()
defer_.addCallback(self.check_aggregator_success)
defer_.addErrback(self.check_aggregator_failed)
def check_aggregator_success(self,response):
"""
"""
if response == True:
self.login_window_show()
else:
self.speer_by_manual.peer_communication()
def login_window_show(self):
"""
"""
if self.is_auto_login and self.use_gui :
#######################################################
#login with saved username or password, not credentials
self.peer_info.load_from_db()
########################################
# more checks could be added here to validate the login
self.login(self.peer_info.Username,self.peer_info.Password, True)
else:
if self.use_gui:
self.gtk_main.show_login()
else:
self.login_without_gui()
g_logger.info("Auto-login is disabled. You need to manually login.")
def check_aggregator_failed(self,message):
"""
"""
self.aggregator.available = False
self.speer_by_manual.peer_communication()
def login_without_gui(self):
"""
Log in from the console, prompting for any credentials missing from the config.
"""
username = False
password = False
if g_config.has_section("credentials"):
username = g_config.get("credentials", "user")
password = g_config.get("credentials", "password")
if not username:
username = raw_input("User Name:")
if not password:
password = raw_input("Password:")
self.login(username, password, save_login=True)
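# Editorial note: the g_config lookups above imply an INI section of this
# shape (values are illustrative):
# [credentials]
# user = alice
# password = secret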
def check_software_auto(self):
"""
Check for software updates according to the configured update settings.
"""
from umit.icm.agent.core.Updater import auto_check_update
##############################
#Software update automatically
if g_config.getboolean('application','auto_update'):
defer_ = auto_check_update(auto_upgrade=True)
defer_.addErrback(self._handle_errback)
else:
############################
#Detect update automatically
if g_config.getboolean('update', 'update_detect'):
#Here can set some update attributes
defer_ = auto_check_update(auto_upgrade=False)
defer_.addErrback(self._handle_errback)
def register_agent(self, username, password):
"""
"""
defer_ = self.aggregator.register(username, password)
defer_.addCallback(self._handle_register)
defer_.addErrback(self._handle_errback)
return defer_
def _handle_register(self, result):
if result:
self.peer_info.ID = result['id']
self.peer_info.CipheredPublicKeyHash = result['hash']
self.peer_info.is_registered = True
g_logger.debug("Register to Aggregator: %s" % result['id'])
return result
def _handle_errback(self, failure):
"""
"""
failure.printTraceback()
g_logger.error(">>> Failure from Application: %s" % failure)
def login(self, username, password, save_login=False, login_only=False):
"""
"""
if self.use_gui:
self.gtk_main.set_to_logging_in()
if self.is_auto_login and self.use_gui and self.check_username(username,password):
#auto-login, select the credentials username and password from DB
return self._login_after_register_callback(None, username,
password, save_login,
login_only)
else:
#manually login, we should check whether the username and password exists in database
#If *NOT*, we should register the username and password to aggregator
#IF *YES*, we will use credentials in DB
g_config.set('application', 'auto_login_swittch', False)
if self.check_username(username,password):
return self._login_after_register_callback(None, username,
password, save_login,
login_only)
else:
self.peer_info.clear_db()
deferred = self.register_agent(username, password)
deferred.addCallback(self._login_after_register_callback,
username, password, save_login, login_only)
deferred.addErrback(self._handle_errback)
return deferred
def check_username(self,username="",password=""):
"""
Check the username and password against the DB; the values come from the login window.
"""
rs = g_db_helper.select("select * from peer_info where username='%s' and \
password='%s'"%(username,password))
if not rs:
g_logger.info("No matching peer info in db.\
icm-agent will register the username or password")
return False
else:
g_logger.info("Match the username and password, \
we will change the default credentials")
g_logger.debug(rs[0])
self.peer_info.ID = rs[0][0]
self.peer_info.Username = rs[0][1]
self.peer_info.Password = rs[0][2]
self.peer_info.Email = rs[0][3]
self.peer_info.CipheredPublicKeyHash = rs[0][4]
self.peer_info.Type = rs[0][5]
self.peer_info.is_registered = True
return True
def _login_after_register_callback(self, message, username,
password, save_login, login_only):
"""
"""
defer_ = self.aggregator.login(username, password)
defer_.addCallback(self._handle_login, username, password,
save_login, login_only)
defer_.addErrback(self._handle_login_errback)
return defer_
def _handle_login_errback(self,failure):
"""
"""
print "------------------login failed!-------------------"
failure.printTraceback()
g_logger.error(">>> Failure from Application: %s" % failure)
def _handle_login(self, result, username, password, save_login,login_only=False):
"""
"""
#login successfully
if result:
self.peer_info.Username = username if username !="" and username != None else self.peer_info.Username
self.peer_info.Password = password if password !="" and password != None else self.peer_info.Password
#print self.peer_info.Username, self.peer_info.Password
self.peer_info.is_logged_in = True
#self.peer_info.clear_db()
self.peer_info.save_to_db()
g_logger.debug("Login Successfully :%s@%s" % (username,password))
if save_login:
g_config.set('application', 'auto_login_swittch', True)
else:
g_config.set('application', 'auto_login_swittch', False)
if self.use_gui:
self.gtk_main.set_login_status(True)
if login_only:
return result
#Load peers and reports from DB
self._load_from_db()
#check the new software(should appear after login successfully)
self.check_software_auto()
#mark login-successful
self.is_successful_login = True
#Task Looping manager
self.task_loop_manager()
return result
def login_simulate(self):
"""
Only test GTK features
"""
#GTK show
if self.use_gui == True:
self.gtk_main.set_login_status(True)
#Basic Information
self.peer_info.load_from_db()
self._load_from_db()
#mark login-successful
self.is_successful_login = True
#TASK LOOP
self.task_loop_manager()
def task_loop_manager(self):
""""""
# Add looping calls
if not hasattr(self, 'peer_maintain_lc'):
self.peer_maintain_lc = task.LoopingCall(self.peer_manager.maintain)
self.peer_maintain_lc.start(7200)
if not hasattr(self, 'task_run_lc'):
g_logger.info("Starting task scheduler looping ")
self.task_run_lc = task.LoopingCall(self.task_scheduler.schedule)
task_scheduler_text = g_config.get("Timer","task_scheduler_timer")
if task_scheduler_text != "":
indival = float(task_scheduler_text)
else:
indival = 30
self.task_run_lc.start(indival)
if not hasattr(self, 'report_proc_lc'):
g_logger.info("Starting report upload looping ")
self.report_proc_lc = task.LoopingCall(self.report_uploader.process)
report_uploade_text = g_config.get("Timer","send_report_timer")
if report_uploade_text != "":
indival = float(report_uploade_text)
else:
indival = 30
self.report_proc_lc.start(indival)
if not hasattr(self,'task_assign_lc'):
g_logger.info("Starting get assigned task from Aggregator")
self.task_assgin_lc = task.LoopingCall(self.task_assign.fetch_task)
task_assign_text = g_config.get("Timer","task_assign_timer")
if task_assign_text != "":
indival = float(task_assign_text)
else:
indival = 30
self.task_assgin_lc.start(indival)
if not hasattr(self,'test_sets_fetch_lc'):
g_logger.info("Starting get test sets from Aggregator")
self.test_sets_fetch_lc = task.LoopingCall(self.test_sets.fetch_tests)
test_fetch_text = g_config.get("Timer","test_fetch_timer")
if test_fetch_text != "":
indival = float(test_fetch_text)
else:
indival = 30
self.test_sets_fetch_lc.start(indival)
def logout(self):
defer_ = self.aggregator.logout()
defer_.addCallback(self._handle_logout)
return defer_
def _handle_logout(self, result):
if self.use_gui:
self.gtk_main.set_login_status(False)
g_config.set('application', 'auto_login_swittch', False)
return result
def start(self, run_reactor=True, managed_mode=False, aggregator=None):
"""
The Main function
"""
g_logger.info("Starting ICM agent. Version: %s", VERSION)
self._init_components(aggregator)
reactor.addSystemEventTrigger('before', 'shutdown', self.on_quit)
if not managed_mode:
# This is necessary so the bot can take over and control the agent
reactor.callWhenRunning(self.init_after_running)
if run_reactor:
# This is necessary so the bot can take over and control the agent
reactor.run()
def quit_window_in_wrong(self,primary_text = "",secondary_text = ""):
"""
"""
# additional detail could be shown here
from higwidgets.higwindows import HIGAlertDialog
#print 'The exception is %s'%(info)
alter = HIGAlertDialog(primary_text = primary_text,\
secondary_text = secondary_text)
alter.show_all()
result = alter.run()
# do not return here; otherwise the program would not quit and would keep running in the background
self.terminate()
def terminate(self):
#print 'quit'
reactor.callWhenRunning(reactor.stop)
def on_quit(self):
if hasattr(self, 'peer_info') and self.is_successful_login:
g_logger.info("[quit]:save peer_info into DB")
self.peer_info.save_to_db()
if hasattr(self, 'peer_manager') and self.is_successful_login:
g_logger.info("[quit]:save peer_manager into DB")
self.peer_manager.save_to_db()
if hasattr(self, 'statistics') and self.is_successful_login:
g_logger.info("[quit]:save statistics into DB")
self.statistics.save_to_db()
if hasattr(self,'test_sets') and self.is_successful_login \
and os.path.exists(CONFIG_PATH):
#store test_version id
self.test_sets.set_test_version(self.test_sets.current_test_version)
m = os.path.join(ROOT_DIR, 'umit', 'icm', 'agent', 'agent_restart_mark')
if os.path.exists(m):
os.remove(m)
self.quitting = True
g_logger.info("ICM Agent quit.")
theApp = Application()
if __name__ == "__main__":
#theApp.start()
pass<|fim▁end|> | from umit.icm.agent.logger import g_logger
from umit.icm.agent.BasePaths import *
|
<|file_name|>connectivity_source_py3.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ConnectivitySource(Model):
"""Parameters that define the source of the connection.
All required parameters must be populated in order to send to Azure.
:param resource_id: Required. The ID of the resource from which a
connectivity check will be initiated.
:type resource_id: str
:param port: The source port from which a connectivity check will be
performed.
:type port: int
"""
_validation = {
'resource_id': {'required': True},
}<|fim▁hole|> _attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(self, *, resource_id: str, port: int=None, **kwargs) -> None:
super(ConnectivitySource, self).__init__(**kwargs)
self.resource_id = resource_id
self.port = port<|fim▁end|> | |
<|file_name|>dump_source.py<|end_file_name|><|fim▁begin|># Import the JModelica.org Python packages
import pymodelica<|fim▁hole|># Create a compiler and compiler target object
mc = ModelicaCompiler()
# Build trees as if for an FMU or Model Exchange v 1.0
#target = mc.create_target_object("me", "1.0")
source = mc.parse_model("CauerLowPassAnalog.mo")
indent_amount = 2
def dump(src, fid, indent=0):
ind = " " * (indent_amount * indent)
try:
fid.write(ind + src.getNodeName() + "\n")
except:
fid.write(ind + "exception: " + str(src) + "\n")
try:
for idx in range(src.numChild):
dump(src.children[idx], fid, indent+1)
except:
fid.write(ind + "(exception)\n")
# dump the filter instance
with open('out.txt', 'w') as fid:
dump(source, fid, 0)
print "DONE!"<|fim▁end|> | from pymodelica.compiler_wrappers import ModelicaCompiler
|
<|file_name|>HomeDrawerRouter.js<|end_file_name|><|fim▁begin|>import React, { Component } from "react";
import Home from "../components/home/";
import BlankPage2 from "../components/blankPage2";
import { DrawerNavigator } from "react-navigation";
import DrawBar from "../components/DrawBar";
export default (DrawNav = DrawerNavigator(
{
Home: { screen: Home },<|fim▁hole|> {
contentComponent: props => <DrawBar {...props} />
}
));<|fim▁end|> | BlankPage2: { screen: BlankPage2 }
}, |
<|file_name|>aStarNode.hpp<|end_file_name|><|fim▁begin|>/// @file aStarNode.hpp
/// @brief Contains the class of nodes used by the A* pathfinder.
/// @author Enrico Fraccaroli
/// @date Nov 11 2016
/// @copyright
/// Copyright (c) 2016 Enrico Fraccaroli <[email protected]>
/// Permission is hereby granted, free of charge, to any person obtaining a
/// copy of this software and associated documentation files (the "Software"),
/// to deal in the Software without restriction, including without limitation
/// the rights to use, copy, modify, merge, publish, distribute, sublicense,
/// and/or sell copies of the Software, and to permit persons to whom the
/// Software is furnished to do so, subject to the following conditions:
/// The above copyright notice and this permission notice shall be included
/// in all copies or substantial portions of the Software.
/// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
/// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
/// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
/// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
/// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
/// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
/// DEALINGS IN THE SOFTWARE.
#pragma once
#include "pathFinderNode.hpp"
#include <memory>
#include <vector>
/// @brief The states of an AStar node.
using AStarNodeState = enum class AStarNodeState_t
{
Untested, /// <! The node has not been tested yet.
Open, /// <! The node is on the 'open' state.
Closed /// <! The node is on the 'closed' state.
};
/// @brief A supporting node for the AStar algorithm.
template<typename T>
class AStarNode :
public PathFinderNode<T>
{
private:
/// Node state.
AStarNodeState nodeState;
/// The length of the path from the start node to this node.
int g;
/// The straight-line distance from this node to the end node.
int h;
/// The previous node in the path. It is used when reconstructing the path
/// from the end node to the beginning.
std::shared_ptr<AStarNode<T>> parentNode;
/// Identifies the end node.
bool endNodeFlag;
public:
/// @brief Constructor.<|fim▁hole|> AStarNode(T _element) :
PathFinderNode<T>(_element),
nodeState(),
g(),
h(),
parentNode(),
endNodeFlag()
{
// Nothing to do.
}
/// @brief Allows to set the state of the node.
void setNodeState(const AStarNodeState & _nodeState)
{
nodeState = _nodeState;
}
/// @brief Allows to set the 'g' value.
void setG(const int & _g)
{
g = _g;
}
/// @brief Allows to set the 'h' value.
void setH(const int & _h)
{
h = _h;
}
/// @brief Allows to set the parent node.
void setParentNode(std::shared_ptr<AStarNode<T>> _parentNode)
{
parentNode = _parentNode;
}
/// @brief Allows to set if this node is the end node.
void setIsEndNode()
{
endNodeFlag = true;
}
/// @brief Provides the state of the node.
AStarNodeState getNodeState() const
{
return nodeState;
}
/// @brief Provides the 'g' value.
int getG() const
{
return g;
}
/// @brief Provides the 'f' value.
int getF() const
{
return g + h;
}
/// @brief Provides the parent node.
std::shared_ptr<AStarNode<T>> getParentNode()
{
return parentNode;
}
/// @brief Tells whether this node is the end node.
bool isEndNode() const
{
return endNodeFlag;
}
};<|fim▁end|> | |
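// Editorial sketch (not part of the original header): a pathfinder typically
// orders its open list by the f = g + h score exposed above; `Tile` and
// `openList` are hypothetical.
//
// std::vector<std::shared_ptr<AStarNode<Tile>>> openList;
// std::sort(openList.begin(), openList.end(),
//           [](const auto & a, const auto & b) { return a->getF() < b->getF(); });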
<|file_name|>urls.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns # noqa
from django.conf.urls import url # noqa
from openstack_dashboard.dashboards.admin.images import views
urlpatterns = patterns('openstack_dashboard.dashboards.admin.images.views',
url(r'^images/$', views.IndexView.as_view(), name='index'),
url(r'^create/$', views.CreateView.as_view(), name='create'),
url(r'^(?P<image_id>[^/]+)/update/$',
views.UpdateView.as_view(), name='update'),
url(r'^(?P<image_id>[^/]+)/detail/$',
views.DetailView.as_view(), name='detail')
)<|fim▁end|> | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved. |
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
// Enable this lint to catch violations in the generated code.
#![warn(elided_lifetimes_in_paths)]
extern crate core;
extern crate capnp;
pub mod test_capnp {
include!(concat!(env!("OUT_DIR"), "/test_capnp.rs"));
}
pub mod foo {
pub mod bar {
pub mod in_submodule_capnp {
include!(concat!(env!("OUT_DIR"), "/in_submodule_capnp.rs"));
}
}
}
pub mod baz {
pub mod in_other_submodule_capnp {
include!(concat!(env!("OUT_DIR"), "/in_other_submodule_capnp.rs"));
}
}
pub mod test_default_parent_module {
pub mod test_default_parent_module_inner {
// In build.rs we specify this is the default parent module.
pub mod test_default_parent_module_capnp {
include!(concat!(env!("OUT_DIR"), "/test_default_parent_module_capnp.rs"));
}
}
// Put this in somewhere other than the default parent module, to test whether the `parentModule`
// annotation successfully overrides the default.
pub mod test_default_parent_module_override_capnp {
include!(concat!(env!("OUT_DIR"), "/test_default_parent_module_override_capnp.rs"));
}
}
pub mod test_in_dir_capnp {
include!(concat!(env!("OUT_DIR"), "/schema/test_in_dir_capnp.rs"));
}
pub mod test_in_src_prefix_dir_capnp {
// The src_prefix gets stripped away, so the generated code ends up directly in OUT_DIR.
include!(concat!(env!("OUT_DIR"), "/test_in_src_prefix_dir_capnp.rs"));
}
#[cfg(test)]
mod test_util;
#[cfg(test)]
mod tests {
use capnp::message;
use capnp::message::{ReaderOptions};
#[test]
fn test_prim_list () {
use test_capnp::test_prim_list;
// Make the first segment small to force allocation of a second segment.
let mut message = message::Builder::new(message::HeapAllocator::new().first_segment_words(50));
let mut test_prim_list = message.init_root::<test_prim_list::Builder<'_>>();
assert_eq!(test_prim_list.has_bool_list(), false);
assert_eq!(test_prim_list.has_void_list(), false);
{
{
let mut uint8_list = test_prim_list.reborrow().init_uint8_list(100);
for i in 0..uint8_list.len() {
uint8_list.set(i, i as u8);
}
}
{
let mut uint64_list = test_prim_list.reborrow().init_uint64_list(20);
for i in 0..uint64_list.len() {
uint64_list.set(i, i as u64);
}
}
{
let mut bool_list = test_prim_list.reborrow().init_bool_list(65);
bool_list.set(0, true);
bool_list.set(1, true);
bool_list.set(2, true);
bool_list.set(3, true);
bool_list.set(5, true);
bool_list.set(8, true);
bool_list.set(13, true);
bool_list.set(64, true);
assert!(bool_list.get(0));
assert!(!bool_list.get(4));
assert!(!bool_list.get(63));
assert!(bool_list.get(64));
}
let mut void_list = test_prim_list.reborrow().init_void_list(1025);
void_list.set(257, ());
}
assert_eq!(test_prim_list.has_bool_list(), true);
assert_eq!(test_prim_list.has_void_list(), true);
let test_prim_list_reader = test_prim_list.into_reader();
let uint8_list = test_prim_list_reader.get_uint8_list().unwrap();
for i in 0..uint8_list.len() {
assert_eq!(uint8_list.get(i), i as u8);
}
let uint64_list = test_prim_list_reader.get_uint64_list().unwrap();
for i in 0..uint64_list.len() {
assert_eq!(uint64_list.get(i), i as u64);
}
assert_eq!(test_prim_list_reader.has_bool_list(), true);
let bool_list = test_prim_list_reader.get_bool_list().unwrap();
assert!(bool_list.get(0));<|fim▁hole|> assert!(bool_list.get(2));
assert!(bool_list.get(3));
assert!(!bool_list.get(4));
assert!(bool_list.get(5));
assert!(!bool_list.get(6));
assert!(!bool_list.get(7));
assert!(bool_list.get(8));
assert!(!bool_list.get(9));
assert!(!bool_list.get(10));
assert!(!bool_list.get(11));
assert!(!bool_list.get(12));
assert!(bool_list.get(13));
assert!(!bool_list.get(63));
assert!(bool_list.get(64));
assert_eq!(test_prim_list_reader.get_void_list().unwrap().len(), 1025);
}
#[test]
fn test_struct_list () {
use test_capnp::test_struct_list;
let mut message = message::Builder::new(message::HeapAllocator::new());
let mut test_struct_list = message.init_root::<test_struct_list::Builder<'_>>();
test_struct_list.reborrow().init_struct_list(4);
{
let struct_list = test_struct_list.reborrow().get_struct_list().unwrap();
struct_list.get(0).init_uint8_list(1).set(0, 5u8);
}
{
let reader = test_struct_list.into_reader();
assert_eq!(reader.get_struct_list().unwrap().get(0).get_uint8_list().unwrap().get(0), 5u8);
}
}
#[test]
fn test_blob () {
use test_capnp::test_blob;
let mut message = message::Builder::new(message::HeapAllocator::new());
let mut test_blob = message.init_root::<test_blob::Builder<'_>>();
assert_eq!(test_blob.has_text_field(), false);
test_blob.set_text_field("abcdefghi");
assert_eq!(test_blob.has_text_field(), true);
assert_eq!(test_blob.has_data_field(), false);
test_blob.set_data_field(&[0u8, 1u8, 2u8, 3u8, 4u8]);
assert_eq!(test_blob.has_data_field(), true);
{
let test_blob_reader = test_blob.reborrow_as_reader();
assert_eq!(test_blob_reader.has_text_field(), true);
assert_eq!(test_blob_reader.has_data_field(), true);
assert_eq!(test_blob_reader.get_text_field().unwrap(), "abcdefghi");
assert!(test_blob_reader.get_data_field().unwrap() == [0u8, 1u8, 2u8, 3u8, 4u8]);
}
{
let mut text = test_blob.reborrow().init_text_field(10);
assert_eq!(&*text,"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00");
text.push_str("aabbccddee");
}
test_blob.reborrow().init_data_field(7);
assert!(test_blob.reborrow().into_reader().get_data_field().unwrap() ==
[0u8,0u8,0u8,0u8,0u8,0u8,0u8]);
{
let data_builder = test_blob.reborrow().get_data_field().unwrap();
for c in data_builder.iter_mut() {
*c = 5;
}
data_builder[0] = 4u8;
}
assert_eq!(test_blob.reborrow().into_reader().get_text_field().unwrap(), "aabbccddee");
assert!(test_blob.reborrow().into_reader().get_data_field().unwrap() == [4u8,5u8,5u8,5u8,5u8,5u8,5u8]);
{
test_blob.reborrow().get_data_field().unwrap()[2] = 10;
}
assert!(test_blob.into_reader().get_data_field().unwrap() == [4u8,5u8,10u8,5u8,5u8,5u8,5u8]);
}
#[test]
fn test_big_struct() {
use test_capnp::test_big_struct;
// Make the first segment small to force allocation of a second segment.
let mut message = message::Builder::new(message::HeapAllocator::new().first_segment_words(5));
let mut big_struct = message.init_root::<test_big_struct::Builder<'_>>();
big_struct.set_bool_field(false);
big_struct.set_int8_field(-128);
big_struct.set_int16_field(0);
big_struct.set_int32_field(1009);
assert_eq!(big_struct.has_struct_field(), false);
big_struct.reborrow().init_struct_field();
assert_eq!(big_struct.has_struct_field(), true);
{
let mut inner = big_struct.reborrow().get_struct_field().unwrap();
inner.set_float64_field(0.1234567);
inner.set_bool_field_b(true);
}
big_struct.set_bool_field(true);
let big_struct_reader = big_struct.into_reader();
assert_eq!(big_struct_reader.has_struct_field(), true);
assert_eq!(big_struct_reader.get_int8_field(), -128);
assert_eq!(big_struct_reader.get_int32_field(), 1009);
let inner_reader = big_struct_reader.get_struct_field().unwrap();
assert!(!inner_reader.get_bool_field_a());
assert!(inner_reader.get_bool_field_b());
assert_eq!(inner_reader.get_float64_field(), 0.1234567);
}
#[test]
fn test_complex_list () {
use test_capnp::{test_complex_list, AnEnum};
let mut message = message::Builder::new_default();
let mut test_complex_list = message.init_root::<test_complex_list::Builder<'_>>();
{
{
let mut enum_list = test_complex_list.reborrow().init_enum_list(100);
for i in 0..10 {
enum_list.set(i, AnEnum::Qux);
}
for i in 10..20 {
enum_list.set(i, AnEnum::Bar);
}
}
{
let mut text_list = test_complex_list.reborrow().init_text_list(2);
text_list.set(0, "garply");
text_list.set(1, "foo");
}
{
let mut data_list = test_complex_list.reborrow().init_data_list(2);
data_list.set(0, &[0u8, 1u8, 2u8]);
data_list.set(1, &[255u8, 254u8, 253u8]);
}
{
let mut prim_list_list = test_complex_list.reborrow().init_prim_list_list(2);
{
let mut prim_list = prim_list_list.reborrow().init(0, 3);
prim_list.set(0, 5);
prim_list.set(1, 6);
prim_list.set(2, 7);
assert_eq!(prim_list.len(), 3);
}
let mut prim_list = prim_list_list.init(1, 1);
prim_list.set(0,-1);
}
{
let mut prim_list_list_list = test_complex_list.reborrow().init_prim_list_list_list(2);
{
let mut prim_list_list = prim_list_list_list.reborrow().init(0, 2);
{
let mut prim_list = prim_list_list.reborrow().init(0, 2);
prim_list.set(0, 0);
prim_list.set(1, 1);
}
let mut prim_list = prim_list_list.init(1, 1);
prim_list.set(0, 255);
}
let prim_list_list = prim_list_list_list.init(1, 1);
let mut prim_list = prim_list_list.init(0, 3);
prim_list.set(0, 10);
prim_list.set(1, 9);
prim_list.set(2, 8);
}
{
let mut enum_list_list = test_complex_list.reborrow().init_enum_list_list(2);
{
let mut enum_list = enum_list_list.reborrow().init(0, 1);
enum_list.set(0, AnEnum::Bar);
}
let mut enum_list = enum_list_list.init(1, 2);
enum_list.set(0, AnEnum::Foo);
enum_list.set(1, AnEnum::Qux);
}
{
let text_list_list = test_complex_list.reborrow().init_text_list_list(1);
text_list_list.init(0,1).set(0, "abc");
}
{
let data_list_list = test_complex_list.reborrow().init_data_list_list(1);
data_list_list.init(0,1).set(0, &[255, 254, 253]);
}
{
let struct_list_list = test_complex_list.reborrow().init_struct_list_list(1);
struct_list_list.init(0,1).get(0).set_int8_field(-1);
}
}
let complex_list_reader = test_complex_list.into_reader();
let enum_list_reader = complex_list_reader.get_enum_list().unwrap();
for i in 0..10 {
assert!(enum_list_reader.get(i) == Ok(AnEnum::Qux));
}
for i in 10..20 {
assert!(enum_list_reader.get(i) == Ok(AnEnum::Bar));
}
let text_list = complex_list_reader.get_text_list().unwrap();
assert_eq!(text_list.len(), 2);
assert_eq!(text_list.get(0).unwrap(), "garply");
assert_eq!(text_list.get(1).unwrap(), "foo");
let data_list = complex_list_reader.get_data_list().unwrap();
assert_eq!(data_list.len(), 2);
assert!(data_list.get(0).unwrap() == [0u8, 1u8, 2u8]);
assert!(data_list.get(1).unwrap() == [255u8, 254u8, 253u8]);
let prim_list_list = complex_list_reader.get_prim_list_list().unwrap();
assert_eq!(prim_list_list.len(), 2);
assert_eq!(prim_list_list.get(0).unwrap().len(), 3);
assert!(prim_list_list.get(0).unwrap().get(0) == 5);
assert!(prim_list_list.get(0).unwrap().get(1) == 6);
assert!(prim_list_list.get(0).unwrap().get(2) == 7);
assert!(prim_list_list.get(1).unwrap().get(0) == -1);
let prim_list_list_list = complex_list_reader.get_prim_list_list_list().unwrap();
assert!(prim_list_list_list.get(0).unwrap().get(0).unwrap().get(0) == 0);
assert!(prim_list_list_list.get(0).unwrap().get(0).unwrap().get(1) == 1);
assert!(prim_list_list_list.get(0).unwrap().get(1).unwrap().get(0) == 255);
assert!(prim_list_list_list.get(1).unwrap().get(0).unwrap().get(0) == 10);
assert!(prim_list_list_list.get(1).unwrap().get(0).unwrap().get(1) == 9);
assert!(prim_list_list_list.get(1).unwrap().get(0).unwrap().get(2) == 8);
let enum_list_list = complex_list_reader.get_enum_list_list().unwrap();
assert!(enum_list_list.get(0).unwrap().get(0) == Ok(AnEnum::Bar));
assert!(enum_list_list.get(1).unwrap().get(0) == Ok(AnEnum::Foo));
assert!(enum_list_list.get(1).unwrap().get(1) == Ok(AnEnum::Qux));
assert!(complex_list_reader.get_text_list_list().unwrap().get(0).unwrap().get(0).unwrap() == "abc");
assert!(complex_list_reader.get_data_list_list().unwrap().get(0).unwrap().get(0).unwrap() == [255, 254, 253]);
assert!(complex_list_reader.get_struct_list_list().unwrap().get(0).unwrap().get(0).get_int8_field() == -1);
}
#[test]
fn test_list_list_set_elem () {
use test_capnp::{test_complex_list};
let mut message1 = message::Builder::new_default();
let mut message2 = message::Builder::new_default();
let mut test_complex_list1 = message1.init_root::<test_complex_list::Builder<'_>>();
let mut test_complex_list2 = message2.init_root::<test_complex_list::Builder<'_>>();
{
let mut prim_list_list1 = test_complex_list1.reborrow().init_prim_list_list(1);
let prim_list_list2 = test_complex_list2.reborrow().init_prim_list_list(1);
{
let mut prim_list1 = prim_list_list1.reborrow().init(0, 3);
prim_list1.set(0, 7);
prim_list1.set(1, 8);
prim_list1.set(2, 9);
assert_eq!(prim_list1.len(), 3);
prim_list_list2.set(0, prim_list1.reborrow().into_reader()).unwrap();
let prim_list2 = prim_list_list2.get(0).unwrap();
assert_eq!(prim_list2.len(), 3);
assert_eq!(prim_list2.get(0), 7);
assert_eq!(prim_list2.get(1), 8);
assert_eq!(prim_list2.get(2), 9);
}
}
}
#[test]
fn test_defaults() {
use test_capnp::test_defaults;
{
let message = message::Builder::new_default();
let test_defaults = message.get_root_as_reader::<test_defaults::Reader<'_>>()
.expect("get_root_as_reader()");
::test_util::CheckTestMessage::check_test_message(test_defaults);
}
{
let mut message = message::Builder::new_default();
let test_defaults = message.init_root::<test_defaults::Builder<'_>>();
::test_util::CheckTestMessage::check_test_message(test_defaults);
}
{
let mut message = message::Builder::new_default();
let mut test_defaults = message.get_root::<test_defaults::Builder<'_>>()
.expect("get_root()");
test_defaults.set_bool_field(false);
test_defaults.set_int8_field(63);
test_defaults.set_int16_field(-1123);
test_defaults.set_int32_field(445678);
test_defaults.set_int64_field(-990123456789);
test_defaults.set_u_int8_field(234);
test_defaults.set_u_int16_field(56789);
test_defaults.set_u_int32_field(123456789);
test_defaults.set_u_int64_field(123456789012345);
test_defaults.set_float32_field(7890.123);
test_defaults.set_float64_field(5e55);
{
let mut sub_builder = test_defaults.reborrow().get_struct_field().unwrap();
sub_builder.set_text_field("garply");
}
assert_eq!(test_defaults.reborrow().get_bool_field(), false);
assert_eq!(test_defaults.reborrow().get_int8_field(), 63);
assert_eq!(test_defaults.reborrow().get_int16_field(), -1123);
assert_eq!(test_defaults.reborrow().get_int32_field(), 445678);
assert_eq!(test_defaults.reborrow().get_int64_field(), -990123456789);
assert_eq!(test_defaults.reborrow().get_u_int8_field(), 234);
assert_eq!(test_defaults.reborrow().get_u_int16_field(), 56789);
assert_eq!(test_defaults.reborrow().get_u_int32_field(), 123456789);
assert_eq!(test_defaults.reborrow().get_u_int64_field(), 123456789012345);
assert_eq!(test_defaults.reborrow().get_float32_field(), 7890.123);
assert_eq!(test_defaults.reborrow().get_float64_field(), 5e55);
{
let sub_builder = test_defaults.reborrow().get_struct_field().unwrap();
assert_eq!("garply", &*sub_builder.get_text_field().unwrap());
}
}
}
#[test]
fn test_default_initialization_multi_segment() {
use test_capnp::test_defaults;
let builder_options = message::HeapAllocator::new()
.first_segment_words(1).allocation_strategy(::capnp::message::AllocationStrategy::FixedSize);
let mut message = message::Builder::new(builder_options);
let test_defaults = message.init_root::<test_defaults::Builder<'_>>();
::test_util::CheckTestMessage::check_test_message(test_defaults);
}
#[test]
fn test_any_pointer() {
use test_capnp::{test_any_pointer, test_empty_struct, test_big_struct};
let mut message = message::Builder::new_default();
let mut test_any_pointer = message.init_root::<test_any_pointer::Builder<'_>>();
test_any_pointer.reborrow().init_any_pointer_field().set_as("xyzzy").unwrap();
{
let reader = test_any_pointer.reborrow().into_reader();
assert_eq!(reader.get_any_pointer_field().get_as::<::capnp::text::Reader<'_>>().unwrap(), "xyzzy");
}
test_any_pointer.reborrow().get_any_pointer_field().init_as::<test_empty_struct::Builder<'_>>();
test_any_pointer.reborrow().get_any_pointer_field().get_as::<test_empty_struct::Builder<'_>>().unwrap();
{
let reader = test_any_pointer.reborrow().into_reader();
reader.get_any_pointer_field().get_as::<test_empty_struct::Reader<'_>>().unwrap();
}
{
let mut message = message::Builder::new_default();
let mut test_big_struct = message.init_root::<test_big_struct::Builder<'_>>();
test_big_struct.set_int32_field(-12345);
test_any_pointer.get_any_pointer_field().set_as(test_big_struct.reborrow().into_reader()).unwrap();
}
fn _test_lifetimes(body: test_big_struct::Reader<'_>) {
let mut message = message::Builder::new_default();
message.set_root(body).unwrap();
}
}
#[test]
fn test_writable_struct_pointer() {
use test_capnp::test_big_struct;
let mut message = message::Builder::new_default();
let mut big_struct = message.init_root::<test_big_struct::Builder<'_>>();
let neg_seven : u64 = (-7i64) as u64;
{
let mut struct_field = big_struct.reborrow().init_struct_field();
assert_eq!(struct_field.reborrow().get_uint64_field(), 0);
struct_field.set_uint64_field(neg_seven);
assert_eq!(struct_field.get_uint64_field(), neg_seven);
}
assert_eq!(big_struct.reborrow().get_struct_field().unwrap().get_uint64_field(), neg_seven);
{
let mut struct_field = big_struct.reborrow().init_struct_field();
assert_eq!(struct_field.reborrow().get_uint64_field(), 0);
assert_eq!(struct_field.get_uint32_field(), 0);
}
{
// getting before init is the same as init
assert_eq!(big_struct.reborrow().get_another_struct_field().unwrap().get_uint64_field(), 0);
big_struct.reborrow().get_another_struct_field().unwrap().set_uint32_field(4294967265);
// Alas, we need to make a copy to appease the reborrow checker.
let mut other_message = message::Builder::new_default();
other_message.set_root(big_struct.reborrow().get_another_struct_field().unwrap().into_reader()).unwrap();
big_struct.set_struct_field(
other_message.get_root::<test_big_struct::inner::Builder<'_>>().unwrap().into_reader()).unwrap();
}
assert_eq!(big_struct.reborrow().get_struct_field().unwrap().get_uint32_field(), 4294967265);
{
let mut other_struct_field = big_struct.reborrow().get_another_struct_field().unwrap();
assert_eq!(other_struct_field.reborrow().get_uint32_field(), 4294967265);
other_struct_field.set_uint32_field(42);
assert_eq!(other_struct_field.get_uint32_field(), 42);
}
assert_eq!(big_struct.reborrow().get_struct_field().unwrap().get_uint32_field(), 4294967265);
assert_eq!(big_struct.get_another_struct_field().unwrap().get_uint32_field(), 42);
}
#[test]
fn test_generic_one_parameter() {
use test_capnp::brand_once;
let mut message_for_brand = message::Builder::new_default();
let mut branded = message_for_brand.init_root::<brand_once::Builder<'_>>();
{
let branded_field = branded.reborrow().init_branded_field();
let mut foo = branded_field.init_generic_field();
foo.set_text_field("blah");
}
let reader = branded.into_reader();
assert_eq!("blah", reader.get_branded_field().unwrap().get_generic_field().unwrap().get_text_field().unwrap());
}
#[test]
fn test_generic_two_parameter() {
use test_capnp::brand_twice;
let mut message_for_brand = message::Builder::new_default();
let mut branded = message_for_brand.init_root::<brand_twice::Builder<'_>>();
{
let mut baz = branded.reborrow().init_baz_field();
baz.set_foo_field("blah").unwrap();
let mut bar = baz.init_bar_field();
bar.set_text_field("some text");
bar.set_data_field("some data".as_bytes());
}
let reader = branded.into_reader();
assert_eq!("blah", reader.get_baz_field().unwrap().get_foo_field().unwrap());
assert_eq!("some text", reader.get_baz_field().unwrap().get_bar_field().unwrap().get_text_field().unwrap());
assert_eq!("some data".as_bytes(), reader.get_baz_field().unwrap().get_bar_field().unwrap().get_data_field().unwrap());
}
#[test]
fn test_generics() {
use capnp::text;
use test_capnp::{test_generics, test_all_types};
let mut message = message::Builder::new_default();
let mut root: test_generics::Builder<'_, test_all_types::Owned, text::Owned> = message.init_root();
::test_util::init_test_message(root.reborrow().get_foo().unwrap());
root.reborrow().get_dub().unwrap().set_foo("Hello").unwrap();
{
let mut bar: ::capnp::primitive_list::Builder<'_,u8> = root.reborrow().get_dub().unwrap().initn_bar(1);
bar.set(0, 11);
}
{
let mut rev_bar = root.reborrow().get_rev().unwrap().get_bar().unwrap();
rev_bar.set_int8_field(111);
let mut bool_list = rev_bar.init_bool_list(2);
bool_list.set(0, false);
bool_list.set(1, true);
}
::test_util::CheckTestMessage::check_test_message(root.reborrow().get_foo().unwrap());
let root_reader = root.into_reader();
::test_util::CheckTestMessage::check_test_message(root_reader.reborrow().get_foo().unwrap());
let dub_reader = root_reader.get_dub().unwrap();
assert_eq!("Hello", dub_reader.get_foo().unwrap());
let bar_reader = dub_reader.get_bar().unwrap();
assert_eq!(bar_reader.len(), 1);
assert_eq!(bar_reader.get(0), 11);
}
#[test]
fn test_generic_union() {
use capnp::primitive_list;
use test_capnp::{test_generics_union, test_all_types};
let mut message = message::Builder::new_default();
{
let mut root: test_generics_union::Builder<'_, test_all_types::Owned, primitive_list::Owned<u32>>
= message.init_root();
{
let mut bar = root.reborrow().initn_bar1(10);
bar.set(5, 100);
}
assert!(!root.has_foo1());
assert!(root.has_bar1());
assert!(!root.has_foo2());
match root.reborrow().which().unwrap() {
test_generics_union::Bar1(Ok(bar)) => {
assert_eq!(bar.len(), 10);
assert_eq!(bar.get(0), 0);
assert_eq!(bar.get(5), 100);
assert_eq!(bar.get(9), 0);
}
_ => panic!("expected Bar1"),
}
{
let mut foo = root.reborrow().init_foo2();
foo.set_int32_field(37);
}
assert!(!root.has_foo1());
assert!(!root.has_bar1());
assert!(root.has_foo2());
match root.reborrow().which().unwrap() {
test_generics_union::Foo2(Ok(foo)) => {
assert_eq!(foo.get_int32_field(), 37);
}
_ => panic!("expected Foo2"),
}
}
}
#[test]
fn test_union() {
use test_capnp::test_union;
let mut message = message::Builder::new_default();
let mut union_struct = message.init_root::<test_union::Builder<'_>>();
union_struct.reborrow().get_union0().set_u0f0s0(());
match union_struct.reborrow().get_union0().which() {
Ok(test_union::union0::U0f0s0(())) => {}
_ => panic!()
}
union_struct.reborrow().init_union0().set_u0f0s1(true);
match union_struct.reborrow().get_union0().which() {
Ok(test_union::union0::U0f0s1(true)) => {}
_ => panic!()
}
union_struct.reborrow().init_union0().set_u0f0s8(127);
match union_struct.reborrow().get_union0().which() {
Ok(test_union::union0::U0f0s8(127)) => {}
_ => panic!()
}
assert_eq!(union_struct.reborrow().get_union0().has_u0f0sp(), false);
union_struct.reborrow().init_union0().set_u0f0sp("abcdef");
assert_eq!(union_struct.get_union0().has_u0f0sp(), true);
}
#[test]
fn test_union_defaults() {
use test_capnp::{test_union, test_union_defaults};
{
let message = message::Builder::new_default();
let reader = message.get_root_as_reader::<test_union_defaults::Reader<'_>>()
.expect("get_root_as_reader()");
let field = reader.get_s16s8s64s8_set().unwrap();
if let test_union::union0::U0f0s16(_) = field.get_union0().which().unwrap() {} else {
panic!("expected U0f0s16");
}
        if let test_union_defaults::inner1::A(17) = reader.get_inner1().which().unwrap() {} else {
            panic!("expected inner1::A(17)");
        }
        if let test_union_defaults::inner2::C(Ok("grault")) = reader.get_inner2().which().unwrap() {} else {
            panic!("expected inner2::C(\"grault\")");
        }
}
}
#[test]
fn test_constants() {
use test_capnp::{test_constants, TestEnum};
assert_eq!(test_constants::VOID_CONST, ());
assert_eq!(test_constants::BOOL_CONST, true);
assert_eq!(test_constants::INT8_CONST, -123);
assert_eq!(test_constants::INT16_CONST, -12345);
assert_eq!(test_constants::INT32_CONST, -12345678);
assert_eq!(test_constants::INT64_CONST, -123456789012345);
assert_eq!(test_constants::UINT8_CONST, 234);
assert_eq!(test_constants::UINT16_CONST, 45678);
assert_eq!(test_constants::UINT32_CONST, 3456789012);
assert_eq!(test_constants::UINT64_CONST, 12345678901234567890);
assert_eq!(test_constants::FLOAT32_CONST, 1234.5);
assert_eq!(test_constants::FLOAT64_CONST, -123e45);
assert_eq!(test_constants::TEXT_CONST, "foo");
assert_eq!(test_constants::COMPLEX_TEXT_CONST, "foo\"☺\'$$$");
assert_eq!(test_constants::DATA_CONST, b"bar");
{
let struct_const_root = test_constants::STRUCT_CONST.get().unwrap();
assert_eq!(struct_const_root.get_bool_field(), true);
assert_eq!(struct_const_root.get_int8_field(), -12);
assert_eq!(struct_const_root.get_int16_field(), 3456);
assert_eq!(struct_const_root.get_int32_field(), -78901234);
// ...
assert_eq!(struct_const_root.get_text_field().unwrap(), "baz");
assert_eq!(struct_const_root.get_data_field().unwrap(), b"qux");
{
let sub_reader = struct_const_root.get_struct_field().unwrap();
assert_eq!(sub_reader.get_text_field().unwrap(), "nested");
assert_eq!(sub_reader.get_struct_field().unwrap().get_text_field().unwrap(), "really nested");
}
// ...
}
assert!(test_constants::ENUM_CONST == TestEnum::Corge);
let void_list = test_constants::VOID_LIST_CONST;
assert_eq!(void_list.get().unwrap().len(), 6);
let bool_list_const = test_constants::BOOL_LIST_CONST;
let bool_list = bool_list_const.get().unwrap();
assert_eq!(bool_list.len(), 4);
assert_eq!(bool_list.get(0), true);
assert_eq!(bool_list.get(1), false);
assert_eq!(bool_list.get(2), false);
assert_eq!(bool_list.get(3), true);
let int8_list_const = test_constants::INT8_LIST_CONST;
let int8_list = int8_list_const.get().unwrap();
assert_eq!(int8_list.len(), 2);
assert_eq!(int8_list.get(0), 111);
assert_eq!(int8_list.get(1), -111);
// ...
let text_list_const = test_constants::TEXT_LIST_CONST;
let text_list = text_list_const.get().unwrap();
assert_eq!(text_list.len(), 3);
assert_eq!(text_list.get(0).unwrap(), "plugh");
assert_eq!(text_list.get(1).unwrap(), "xyzzy");
assert_eq!(text_list.get(2).unwrap(), "thud");
// TODO: DATA_LIST_CONST
let struct_list_const = test_constants::STRUCT_LIST_CONST;
let struct_list = struct_list_const.get().unwrap();
assert_eq!(struct_list.len(), 3);
assert_eq!(struct_list.get(0).get_text_field().unwrap(), "structlist 1");
assert_eq!(struct_list.get(1).get_text_field().unwrap(), "structlist 2");
assert_eq!(struct_list.get(2).get_text_field().unwrap(), "structlist 3");
}
#[test]
fn test_set_root() {
use test_capnp::test_big_struct;
let mut message1 = message::Builder::new_default();
let mut message2 = message::Builder::new_default();
let mut struct1 = message1.init_root::<test_big_struct::Builder<'_>>();
struct1.set_uint8_field(3);
message2.set_root(struct1.into_reader()).unwrap();
let struct2 = message2.get_root::<test_big_struct::Builder<'_>>().unwrap();
assert_eq!(struct2.get_uint8_field(), 3u8);
}
#[test]
fn upgrade_struct() {
use test_capnp::{test_old_version, test_new_version};
let mut message = message::Builder::new_default();
{
let mut old_version = message.init_root::<test_old_version::Builder<'_>>();
old_version.set_old1(123);
}
{
let mut new_version = message.get_root::<test_new_version::Builder<'_>>().unwrap();
new_version.reborrow().get_new2().unwrap();
assert_eq!(new_version.get_new3().unwrap().get_int8_field(), -123);
}
}
#[test]
fn upgrade_union() {
use test_capnp::{test_old_union_version, test_new_union_version};
// This tests for a specific case that was broken originally.
let mut message = message::Builder::new_default();
{
let mut old_version = message.init_root::<test_old_union_version::Builder<'_>>();
old_version.set_b(123);
}
{
let new_version = message.get_root::<test_new_union_version::Builder<'_>>().unwrap();
match new_version.which().unwrap() {
test_new_union_version::B(n) => assert_eq!(n, 123),
_ => panic!("expected B"),
}
}
}
#[test]
fn upgrade_list() {
use test_capnp::{test_any_pointer, test_lists};
{
let mut builder = message::Builder::new_default();
let mut root = builder.init_root::<test_any_pointer::Builder<'_>>();
{
let mut list = root.reborrow()
.get_any_pointer_field().initn_as::<::capnp::primitive_list::Builder<'_,u8>>(3);
list.set(0, 12);
list.set(1, 34);
list.set(2, 56);
}
{
let mut l = root.get_any_pointer_field()
.get_as::<::capnp::struct_list::Builder<'_,test_lists::struct8::Owned>>().unwrap();
assert_eq!(3, l.len());
assert_eq!(12, l.reborrow().get(0).get_f());
assert_eq!(34, l.reborrow().get(1).get_f());
assert_eq!(56, l.reborrow().get(2).get_f());
}
}
{
let mut builder = message::Builder::new_default();
let mut root = builder.init_root::<test_any_pointer::Builder<'_>>();
{
let mut list = root.reborrow()
.get_any_pointer_field().initn_as::<::capnp::text_list::Builder<'_>>(3);
list.set(0, "foo");
list.set(1, "bar");
list.set(2, "baz");
}
{
let mut l = root.get_any_pointer_field()
.get_as::<::capnp::struct_list::Builder<'_,test_lists::struct_p::Owned>>().unwrap();
assert_eq!(3, l.len());
assert_eq!("foo", &*l.reborrow().get(0).get_f().unwrap());
assert_eq!("bar", &*l.reborrow().get(1).get_f().unwrap());
assert_eq!("baz", &*l.reborrow().get(2).get_f().unwrap());
}
}
}
#[test]
fn upgrade_struct_list() {
use capnp::struct_list;
use test_capnp::{test_old_version, test_new_version};
let segment0: &[capnp::Word] = &[
capnp::word(1,0,0,0,0x1f,0,0,0), // list, inline composite, 3 words
capnp::word(4, 0, 0, 0, 1, 0, 2, 0), // struct tag. 1 element, 1 word data, 2 pointers.
capnp::word(0xab,0,0,0,0,0,0,0),
capnp::word(0x05,0,0,0, 0x42,0,0,0), // list pointer, offset 1, type = BYTE, length 8.
capnp::word(0,0,0,0,0,0,0,0),
capnp::word(0x68,0x65,0x6c,0x6c,0x6f,0x21,0x21,0), // "hello!!"
];
let segment_array = &[capnp::Word::words_to_bytes(segment0)];
let message_reader =
message::Reader::new(message::SegmentArray::new(segment_array), ReaderOptions::new());
let old_version: struct_list::Reader<'_,test_old_version::Owned> = message_reader.get_root().unwrap();
assert_eq!(old_version.len(), 1);
assert_eq!(old_version.get(0).get_old1(), 0xab);
assert_eq!(old_version.get(0).get_old2().unwrap(), "hello!!");
// Make the first segment exactly large enough to fit the original message.
// This leaves no room for a far pointer landing pad in the first segment.
let allocator = message::HeapAllocator::new().first_segment_words(6);
let mut message = message::Builder::new(allocator);
message.set_root(old_version).unwrap();
{
let segments = message.get_segments_for_output();
assert_eq!(segments.len(), 1);
assert_eq!(segments[0].len(), 6 * 8);
}
{
let mut new_version: struct_list::Builder<'_,test_new_version::Owned> = message.get_root().unwrap();
assert_eq!(new_version.len(), 1);
assert_eq!(new_version.reborrow().get(0).get_old1(), 0xab);
assert_eq!(&*new_version.reborrow().get(0).get_old2().unwrap(), "hello!!");
}
{
let segments = message.get_segments_for_output();
// Check the old list, including the tag, was zeroed.
assert_eq!(&segments[0][8..40], &[0; 32][..]);
}
}
#[test]
fn all_types() {
use test_capnp::{test_all_types};
let mut message = message::Builder::new_default();
::test_util::init_test_message(message.init_root());
::test_util::CheckTestMessage::check_test_message(message.get_root::<test_all_types::Builder<'_>>().unwrap());
::test_util::CheckTestMessage::check_test_message(
message.get_root::<test_all_types::Builder<'_>>().unwrap().into_reader());
}
#[test]
fn all_types_multi_segment() {
use test_capnp::{test_all_types};
let builder_options = message::HeapAllocator::new()
.first_segment_words(1).allocation_strategy(::capnp::message::AllocationStrategy::FixedSize);
let mut message = message::Builder::new(builder_options);
::test_util::init_test_message(message.init_root());
::test_util::CheckTestMessage::check_test_message(message.get_root::<test_all_types::Builder<'_>>().unwrap());
::test_util::CheckTestMessage::check_test_message(
message.get_root::<test_all_types::Builder<'_>>().unwrap().into_reader());
}
#[test]
fn setters() {
use test_capnp::{test_all_types};
{
let mut message = message::Builder::new_default();
::test_util::init_test_message(message.init_root::<test_all_types::Builder<'_>>());
let mut message2 = message::Builder::new_default();
let mut all_types2 = message2.init_root::<test_all_types::Builder<'_>>();
all_types2.set_struct_field(message.get_root::<test_all_types::Builder<'_>>().unwrap().into_reader()).unwrap();
::test_util::CheckTestMessage::check_test_message(all_types2.reborrow().get_struct_field().unwrap());
let reader = all_types2.into_reader().get_struct_field().unwrap();
::test_util::CheckTestMessage::check_test_message(reader);
}
{
let builder_options = message::HeapAllocator::new()
.first_segment_words(1)
.allocation_strategy(::capnp::message::AllocationStrategy::FixedSize);
let mut message = message::Builder::new(builder_options);
::test_util::init_test_message(message.init_root::<test_all_types::Builder<'_>>());
let builder_options = message::HeapAllocator::new()
.first_segment_words(1)
.allocation_strategy(::capnp::message::AllocationStrategy::FixedSize);
let mut message2 = message::Builder::new(builder_options);
let mut all_types2 = message2.init_root::<test_all_types::Builder<'_>>();
all_types2.set_struct_field(message.get_root_as_reader().unwrap()).unwrap();
::test_util::CheckTestMessage::check_test_message(all_types2.reborrow().get_struct_field().unwrap());
let reader = all_types2.into_reader().get_struct_field().unwrap();
::test_util::CheckTestMessage::check_test_message(reader);
}
}
#[test]
fn double_far_pointer() {
let segment0: &[capnp::Word] = &[
capnp::word(0,0,0,0,0,0,1,0),
// struct pointer, zero offset, zero data words, one pointer.
capnp::word(6,0,0,0,1,0,0,0),
// far pointer, two-word landing pad, offset 0, segment 1.
];
let segment1: &[capnp::Word] = &[
capnp::word(2,0,0,0,2,0,0,0),
// landing pad start. offset 0, segment 2
capnp::word(0,0,0,0,1,0,1,0),
// landing pad tag. struct pointer. One data word. One pointer.
];
let segment2: &[capnp::Word] = &[
capnp::word(0x1f,0x1f,0x1f,0x1f,0x1f,0x1f,0x1f,0x1f),
// Data word.
capnp::word(1,0,0,0,0x42,0,0,0),
// text pointer. offset zero. 1-byte elements. 8 total elements.
capnp::word('h' as u8, 'e' as u8, 'l' as u8, 'l' as u8, 'o' as u8, '.' as u8, '\n' as u8, 0),
];
let segment_array = &[capnp::Word::words_to_bytes(segment0),
capnp::Word::words_to_bytes(segment1),
capnp::Word::words_to_bytes(segment2)];
let message =
message::Reader::new(message::SegmentArray::new(segment_array), ReaderOptions::new());
let root: ::test_capnp::test_any_pointer::Reader<'_> = message.get_root().unwrap();
let s: ::test_capnp::test_all_types::Reader<'_> = root.get_any_pointer_field().get_as().unwrap();
assert_eq!(s.get_int8_field(), 0x1f);
assert_eq!(s.get_int16_field(), 0x1f1f);
assert_eq!(s.get_text_field().unwrap(), "hello.\n");
}
#[test]
fn double_far_pointer_truncated_pad() {
let segment0: &[capnp::Word] = &[
capnp::word(6,0,0,0,1,0,0,0),
// far pointer, two-word landing pad, offset 0, segment 1.
];
let segment1: &[capnp::Word] = &[
capnp::word(2,0,0,0,2,0,0,0),
// landing pad start. offset 0, segment 2
// For this message to be valid, there would need to be another word here.
];
let segment2: &[capnp::Word] = &[
capnp::word(0,0,0,0,0,0,0,0),
];
let segment_array = &[capnp::Word::words_to_bytes(segment0),
capnp::Word::words_to_bytes(segment1),
capnp::Word::words_to_bytes(segment2)];
let message =
message::Reader::new(message::SegmentArray::new(segment_array), ReaderOptions::new());
match message.get_root::<::test_capnp::test_all_types::Reader<'_>>() {
Ok(_) => panic!("expected out-of-bounds error"),
Err(e) => {
assert_eq!(e.description, "message contained out-of-bounds pointer")
}
}
}
#[test]
fn double_far_pointer_out_of_bounds() {
let segment0: &[capnp::Word] = &[
capnp::word(6,0,0,0,1,0,0,0),
// far pointer, two-word landing pad, offset 0, segment 1.
];
let segment1: &[capnp::Word] = &[
capnp::word(0xa,0,0,0,2,0,0,0),
// landing pad start. offset 1, segment 2
capnp::word(0,0,0,0,1,0,1,0),
// landing pad tag. struct pointer. One data word. One pointer.
];
let segment2: &[capnp::Word] = &[
capnp::word(0,0,0,0,0,0,0,0),
];
let segment_array = &[capnp::Word::words_to_bytes(segment0),
capnp::Word::words_to_bytes(segment1),
capnp::Word::words_to_bytes(segment2)];
let message =
message::Reader::new(message::SegmentArray::new(segment_array), ReaderOptions::new());
match message.get_root::<::test_capnp::test_all_types::Reader<'_>>() {
Ok(_) => panic!("expected out-of-bounds error"),
Err(e) => {
assert_eq!(e.description, "message contained out-of-bounds pointer")
}
}
}
#[test]
fn far_pointer_pointing_at_self() {
use test_capnp::test_all_types;
let words: &[capnp::Word] =
&[capnp::word(0,0,0,0,0,0,1,0), // struct, one pointer
capnp::word(0xa,0,0,0,0,0,0,0)]; // far pointer, points to self
let segment_array = &[capnp::Word::words_to_bytes(words)];
let message_reader =
message::Reader::new(message::SegmentArray::new(segment_array), ReaderOptions::new());
let reader = message_reader.get_root::<test_all_types::Reader<'_>>().unwrap();
assert!(reader.total_size().is_err());
let mut builder = ::capnp::message::Builder::new_default();
assert!(builder.set_root(reader).is_err());
}
#[test]
fn text_builder_int_underflow() {
use test_capnp::{test_any_pointer};
let mut message = message::Builder::new_default();
{
let mut root = message.init_root::<test_any_pointer::Builder<'_>>();
let _: ::capnp::data::Builder<'_> = root.reborrow().get_any_pointer_field().initn_as(0);
// No NUL terminator!
let result = root.get_any_pointer_field().get_as::<::capnp::text::Builder<'_>>();
assert!(result.is_err());
}
}
#[test]
fn inline_composite_list_int_overflow() {
let words: &[capnp::Word] = &[
        capnp::word(0,0,0,0,0,0,1,0),    // root struct pointer: zero data words, one pointer
        capnp::word(1,0,0,0,0x17,0,0,0), // list pointer: inline composite, claiming 2 content words
        capnp::word(0,0,0,128,16,0,0,0), // tag: 2^29 elements of 16 data words each -- overruns the claimed size
capnp::word(0,0,0,0,0,0,0,0),
capnp::word(0,0,0,0,0,0,0,0)];
let segment_array = &[capnp::Word::words_to_bytes(words)];
let message =
message::Reader::new(message::SegmentArray::new(segment_array), ReaderOptions::new());
let root: ::test_capnp::test_any_pointer::Reader<'_> = message.get_root().unwrap();
match root.total_size() {
Err(e) =>
assert_eq!("InlineComposite list's elements overrun its word count.", e.description),
_ => panic!("did not get expected error")
}
{
let result = root.get_any_pointer_field()
.get_as::<::capnp::struct_list::Reader<'_,::test_capnp::test_all_types::Owned>>();
assert!(result.is_err());
}
let mut message_builder = message::Builder::new_default();
let builder_root = message_builder.init_root::<::test_capnp::test_any_pointer::Builder<'_>>();
match builder_root.get_any_pointer_field().set_as(root) {
Err(e) =>
assert_eq!("InlineComposite list's elements overrun its word count.", e.description),
_ => panic!("did not get expected error"),
}
}
#[test]
fn long_u64_list() {
use test_capnp::{test_all_types};
let length: u32 = 1 << 27;
let step_exponent = 18;
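    // Only touch every 2^18-th element; walking all 2^27 entries would dominate the test's runtime.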
let mut message = message::Builder::new_default();
{
let root: test_all_types::Builder<'_> = message.init_root();
let mut list = root.init_u_int64_list(length);
for ii in 0..(length >> step_exponent) {
let jj = ii << step_exponent;
list.set(jj, jj as u64);
}
for ii in 0..(length >> step_exponent) {
let jj = ii << step_exponent;
assert_eq!(list.get(jj), jj as u64);
}
}
let root: test_all_types::Reader<'_> = message.get_root_as_reader().unwrap();
let list = root.get_u_int64_list().unwrap();
for ii in 0..(length >> step_exponent) {
let jj = ii << step_exponent;
assert_eq!(list.get(jj), jj as u64);
}
}
#[test]
fn long_struct_list() {
use test_capnp::{test_lists};
let length: u32 = 1 << 27;
let step_exponent = 18;
let mut message = message::Builder::new_default();
{
let root: test_lists::Builder<'_> = message.init_root();
let mut list = root.init_list64(length);
for ii in 0..(length >> step_exponent) {
let jj = ii << step_exponent;
list.reborrow().get(jj).set_f(jj as u64);
}
for ii in 0..(length >> step_exponent) {
let jj = ii << step_exponent;
assert_eq!(list.reborrow().get(jj).get_f(), jj as u64);
}
}
let root: test_lists::Reader<'_> = message.get_root_as_reader().unwrap();
let list = root.get_list64().unwrap();
for ii in 0..(length >> step_exponent) {
let jj = ii << step_exponent;
assert_eq!(list.get(jj).get_f(), jj as u64);
}
}
#[test]
fn long_list_list() {
use test_capnp::{test_lists};
let length: u32 = 1 << 27;
let step_exponent = 18;
let mut message = message::Builder::new_default();
{
let root: test_lists::Builder<'_> = message.init_root();
let mut list = root.init_int32_list_list(length);
for ii in 0..(length >> step_exponent) {
let jj = ii << step_exponent;
list.reborrow().init(jj, 1).set(0, jj as i32);
}
for ii in 0..(length >> step_exponent) {
let jj = ii << step_exponent;
let elem = list.reborrow().get(jj).unwrap();
assert_eq!(elem.len(), 1);
assert_eq!(elem.get(0), jj as i32);
}
}
let root: test_lists::Reader<'_> = message.get_root_as_reader().unwrap();
let list = root.get_int32_list_list().unwrap();
for ii in 0..(length >> step_exponent) {
let jj = ii << step_exponent;
let elem = list.get(jj).unwrap();
assert_eq!(elem.len(), 1);
assert_eq!(elem.get(0), jj as i32);
}
}
#[test]
fn traversal_limit_exceeded() {
use test_capnp::{test_all_types};
let mut message = message::Builder::new_default();
::test_util::init_test_message(message.init_root());
let segments = message.get_segments_for_output();
let reader = message::Reader::new(message::SegmentArray::new(&segments),
*ReaderOptions::new().traversal_limit_in_words(Some(2)));
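    // A 2-word traversal budget is far smaller than the test message, so reading the root must fail.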
match reader.get_root::<test_all_types::Reader<'_>>() {
Err(e) => assert_eq!(e.description, "read limit exceeded"),
Ok(_) => panic!("expected error"),
}
}
#[test]
fn void_list_amplification() {
use test_capnp::{test_any_pointer, test_all_types};
let mut message = message::Builder::new_default();
{
let root = message.init_root::<test_any_pointer::Builder<'_>>();
let _: ::capnp::primitive_list::Builder<'_,()> =
root.get_any_pointer_field().initn_as((1 << 29) - 1);
}
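    // Void elements occupy no space, so this near-maximal list still encodes in two words;
    // interpreting it as a struct list must fail instead of amplifying into (1 << 29) structs.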
let segments = message.get_segments_for_output();
assert_eq!(segments.len(), 1);
assert_eq!(segments[0].len(), 16); // 2 words
let reader = message::Reader::new(message::SegmentArray::new(&segments),
ReaderOptions::new());
let root = reader.get_root::<test_any_pointer::Reader<'_>>().unwrap();
let result = root.get_any_pointer_field().get_as::<::capnp::struct_list::Reader<'_,test_all_types::Owned>>();
assert!(result.is_err());
}
#[test]
fn empty_struct_list_amplification() {
use test_capnp::{test_any_pointer, test_empty_struct, test_all_types};
let mut message = message::Builder::new_default();
{
let root = message.init_root::<test_any_pointer::Builder<'_>>();
let _ : ::capnp::struct_list::Builder<'_, test_empty_struct::Owned> =
root.get_any_pointer_field().initn_as((1 << 29) - 1);
}
{
let segments = message.get_segments_for_output();
assert_eq!(segments.len(), 1);
assert_eq!(segments[0].len(), 3 * 8); // 3 words
let reader =
message::Reader::new(message::SegmentArray::new(&segments),
ReaderOptions::new());
let root = reader.get_root::<test_any_pointer::Reader<'_>>().unwrap();
let result = root.get_any_pointer_field().get_as::<::capnp::struct_list::Reader<'_, test_all_types::Owned>>();
assert!(result.is_err());
}
// At one point this took a long time because zero_object_helper() would iterate through
// the whole list, even though its elements were void.
message.init_root::<test_any_pointer::Builder<'_>>();
}
#[test]
fn total_size_struct_list_amplification() {
use test_capnp::test_any_pointer;
let words: &[capnp::Word] =
        &[capnp::word(0,0,0,0, 0,0,1,0), // struct, one pointer
capnp::word(1,0,0,0, 0xf,0,0,0), // list, inline composite, one word
capnp::word(0,0x80,0xc2,0xff, 0,0,0,0), // large struct, but zero of them
capnp::word(0,0,0x20,0, 0,0,0x22,0),
];
let segment_array = &[capnp::Word::words_to_bytes(words)];
let message_reader =
message::Reader::new(message::SegmentArray::new(segment_array), ReaderOptions::new());
let reader = message_reader.get_root::<test_any_pointer::Reader<'_>>().unwrap();
reader.total_size().unwrap();
let mut builder = ::capnp::message::Builder::new_default();
assert!(builder.set_root(reader).is_err()); // read limit exceeded
}
#[test]
fn null_struct_fields() {
use test_capnp::{test_all_types};
let mut message = message::Builder::new_default();
{
let mut test = message.init_root::<test_all_types::Builder<'_>>();
test.set_text_field("Hello");
}
let reader = message.get_root::<test_all_types::Builder<'_>>().unwrap().into_reader();
assert_eq!(reader.get_text_field().unwrap(), "Hello");
assert_eq!(reader.has_struct_field(), false);
let nested = reader.get_struct_field().unwrap();
assert_eq!(nested.get_int8_field(), 0);
assert_eq!(nested.get_u_int64_field(), 0);
assert_eq!(nested.get_void_list().unwrap().len(), 0);
assert_eq!(nested.get_float64_list().unwrap().len(), 0);
assert_eq!(nested.get_struct_list().unwrap().len(), 0);
assert_eq!(nested.get_text_field().unwrap(), "");
let empty_slice: &[u8] = &[];
assert_eq!(nested.get_data_field().unwrap(), empty_slice);
}
// At one point this failed to typecheck, giving the error:
// "no method named `get_any_pointer_field` found for type `test_capnp::test_any_pointer::Pipeline`"
#[allow(unused)]
fn pipeline_any_pointer(foo: ::test_capnp::test_any_pointer::Pipeline) {
let _ = foo.get_any_pointer_field();
}
#[test]
fn set_with_caveats() {
use test_capnp::test_all_types;
let mut message = message::Builder::new_default();
let root: test_all_types::Builder<'_> = message.init_root();
let list = root.init_struct_list(2);
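    // set_with_caveats() copies a reader into a preallocated list slot; the main caveat is
    // that anything beyond the slot's fixed element size is silently truncated.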
{
let mut message1 = message::Builder::new_default();
let mut root1: test_all_types::Builder<'_> = message1.init_root();
root1.set_int8_field(11);
list.set_with_caveats(0, root1.into_reader()).unwrap();
}
{
let mut message2 = message::Builder::new_default();
let mut root2: test_all_types::Builder<'_> = message2.init_root();
::test_util::init_test_message(root2.reborrow());
list.set_with_caveats(1, root2.into_reader()).unwrap();
}
let list_reader = list.into_reader();
assert_eq!(11, list_reader.get(0).get_int8_field());
::test_util::CheckTestMessage::check_test_message(list_reader.get(1));
}
#[test]
fn get_raw_struct_data() {
use capnp::traits::HasStructSize;
use test_capnp::test_all_types;
let mut message = message::Builder::new_default();
let mut root: test_all_types::Builder<'_> = message.init_root();
root.set_int8_field(3);
root.set_int16_field(0x0abb);
let struct_size = <test_all_types::Builder<'_> as HasStructSize>::struct_size();
{
let raw_bytes =
::capnp::raw::get_struct_data_section(root.reborrow().into_reader());
assert_eq!(raw_bytes.len(), (struct_size.data * 8) as usize);
assert_eq!(raw_bytes[0], 0); // boolField
assert_eq!(raw_bytes[1], 3); // int8Field
assert_eq!(raw_bytes[2], 0xbb); // int16Field less significant byte
assert_eq!(raw_bytes[3], 0x0a); // int16Field more significant byte
}
}
#[test]
fn get_raw_list_data() {
use test_capnp::test_all_types;
let mut message = message::Builder::new_default();
let mut root: test_all_types::Builder<'_> = message.init_root();
{
let mut uint16_list = root.reborrow().init_u_int16_list(5);
uint16_list.set(0, 10);
uint16_list.set(1, 11);
uint16_list.set(2, 12);
uint16_list.set(3, 13);
uint16_list.set(4, 14);
assert_eq!(
::capnp::raw::get_list_element_size(uint16_list.reborrow().into_reader()),
::capnp::private::layout::ElementSize::TwoBytes);
assert_eq!(
::capnp::raw::get_list_step_size_in_bits(uint16_list.reborrow().into_reader()),
16);
assert_eq!(
::capnp::raw::get_list_bytes(uint16_list.reborrow().into_reader()),
&[10, 0, 11, 0, 12, 0, 13, 0, 14, 0]);
}
}
#[test]
fn get_struct_pointer_section() {
use test_capnp::test_all_types;
let mut message = message::Builder::new_default();
let mut root: test_all_types::Builder<'_> = message.init_root();
::test_util::init_test_message(root.reborrow().init_struct_field());
let pointers = ::capnp::raw::get_struct_pointer_section(root.into_reader());
let substruct: test_all_types::Reader<'_> = pointers.get(2).get_as().unwrap();
::test_util::CheckTestMessage::check_test_message(substruct);
}
#[test]
fn struct_list_iterator() {
use test_capnp::test_all_types;
let mut message = message::Builder::new_default();
{
let root: test_all_types::Builder<'_> = message.init_root();
let mut struct_list = root.init_struct_list(6);
        // Give each element its own index as payload; the iterator checks below rely on this.
        for i in 0..6u32 {
            struct_list.reborrow().get(i).set_u_int32_field(i);
        }
}
let reader = message.get_root_as_reader::<test_all_types::Reader<'_>>().unwrap();
let structs = reader.get_struct_list().unwrap();
let mut iter = structs.iter();
assert_eq!(3, iter.nth(3).unwrap().get_u_int32_field());
assert_eq!(4, iter.nth(0).unwrap().get_u_int32_field());
assert_eq!(5, iter.nth(0).unwrap().get_u_int32_field());
let mut c = 2;
for s in structs.iter().skip(2) {
assert_eq!(c, s.get_u_int32_field());
c += 1;
}
{
let mut overflow_iter = structs.iter();
assert!(overflow_iter.nth(4).is_some());
        // nth(4) above consumed the first five elements (indices 0 through 4), so skipping four more runs past the end.
assert!(overflow_iter.nth(4).is_none());
// The previous call pushed us to the end, even though it returned None.
assert!(overflow_iter.next().is_none());
}
}
#[test]
fn name_annotation() {
use test_capnp::renamed_struct;
let mut message = message::Builder::new_default();
{
let mut root: renamed_struct::Builder<'_> = message.init_root();
root.set_good_field_name(true);
root.set_another_good_field_name(renamed_struct::RenamedEnum::Bar);
let renamed_union = root.get_renamed_union();
renamed_union.init_qux();
}
{
let root: renamed_struct::Reader<'_> = message.get_root_as_reader().unwrap();
match root.which().unwrap() {
renamed_struct::GoodFieldName(true) => (),
_ => panic!("expected GoodFieldName(true)"),
}
assert!(renamed_struct::RenamedEnum::Bar == root.get_another_good_field_name().unwrap());
match root.get_renamed_union().which().unwrap() {
renamed_struct::renamed_union::Qux(_) => (),
_ => panic!("expected Qux"),
}
}
}
}<|fim▁end|> | assert!(bool_list.get(1)); |
<|file_name|>donations-inline.js<|end_file_name|><|fim▁begin|>var InlineDonation = function(){
this.clientToken = document.getElementById('client_token').value;
this.setupEvents();
}
InlineDonation.prototype.setupEvents = function(){
jQuery(document.body).on('keyup', '#donation-form input', this.clearInvalidEntries);
this.setup();
}
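// Mount the Braintree Drop-in UI inside the donation form and register its lifecycle callbacks.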
InlineDonation.prototype.setup = function(){
braintree.setup(this.clientToken, 'dropin',{
container: 'dropin-container',
form: 'donation-form',
onReady: function(integration){
inlineDonation.integration = integration;
},
onPaymentMethodReceived: function(response){
inlineDonation.onPaymentMethodReceived(response);
}
})
}
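// Braintree invokes this with a tokenized payment method; stash the nonce in a hidden
// form field (creating the field on first use) before validating the rest of the form.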
InlineDonation.prototype.onPaymentMethodReceived = function(response){
inlineDonation.paymentMethodReceived = true;
var element = document.getElementById('payment_method_nonce');
if(element != null){
element.value = response.nonce;
}
else{
element = document.createElement('input');
element.type = 'hidden';
element.name = 'payment_method_nonce';
element.id = 'payment_method_nonce';
element.value = response.nonce;
jQuery('#donation-form').append(element);
}
inlineDonation.validateInputFields();
}
InlineDonation.prototype.validateInputFields = function(){
var hasFailures = false;
jQuery('#donation-form input').each(function(){
if(jQuery(this).val() === ""){
jQuery(this).parent().find('div.invalid-input-field').show().addClass('active');
hasFailures = true;
}
});
if(! hasFailures){
inlineDonation.submitPayment();
}
}
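// Serialize the form and POST it via AJAX; redirect on success, otherwise surface the error message.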
InlineDonation.prototype.submitPayment = function(){
var data = jQuery('#donation-form').serialize();
var url = jQuery('#ajax_url').val();
jQuery('.overlay-payment-processing').addClass('active');
jQuery.ajax({
type:'POST',
url: url,
dataType: 'json',
data: data
}).done(function(response){
jQuery('.overlay-payment-processing').removeClass('active');
if(response.result === 'success'){
inlineDonation.redirect(response.url);
}
else{
<|fim▁hole|> }).fail(function(response){
jQuery('.overlay-payment-processing').removeClass('active');
inlineDonation.showErrorMessage(response.message);
});
}
InlineDonation.prototype.redirect = function(url){
window.location.replace(url);
}
InlineDonation.prototype.showErrorMessage = function(message){
jQuery('#error_messages').html(message);
}
InlineDonation.prototype.clearInvalidEntries = function(){
jQuery(this).parent().find('div.invalid-input-field').hide().removeClass('active');
}
InlineDonation.prototype.clearErrorMessages = function(){
jQuery('#error_messages').empty();
}
InlineDonation.prototype.displayOverlay = function(callback){
jQuery('#donation_overlay').fadeIn(400, callback);
}
InlineDonation.prototype.hideOverlay = function(callback){
jQuery('#donation_overlay').fadeOut(400, callback);
}
var inlineDonation = new InlineDonation();<|fim▁end|> | inlineDonation.showErrorMessage(response.message);
}
|
<|file_name|>TemplateString.jest.ts<|end_file_name|><|fim▁begin|>/*
* Copyright 2019 Simon Edwards <[email protected]>
*
* This source code is licensed under the MIT license which is detailed in the LICENSE.txt file.
*/
import "jest";<|fim▁hole|>describe.each([
["foo", [{ type: "text", text: "foo", startColumn: 0, endColumn: 3 }]],
["foo:bar", [{ type: "text", text: "foo:bar", startColumn: 0, endColumn: 7 }]],
["foo\\$bar", [{ type: "text", text: "foo$bar", startColumn: 0, endColumn: 8 }]],
["$foo: ", [{ type: "text", text: "$foo: " }]],
["foo${TERM:TITLE}", [
{ type: "text", text: "foo", startColumn: 0, endColumn: 3 },
{ type: "field", namespace: "TERM", key: "TITLE", startColumn: 3, endColumn: 16 }
]],
["foo ${TERM:TITLE} bar", [
{ type: "text", text: "foo " },
{ type: "field", namespace: "TERM", key: "TITLE", startColumn: 4, endColumn: 17},
{ type: "text", text: " bar" },
]],
["${TERM:TITLE}", [
{ type: "field", namespace: "TERM", key: "TITLE"}
]],
["${TERM:ROWS}x${TERM:COLUMNS}", [
{ type: "field", namespace: "TERM", key: "ROWS"},
{ type: "text", text: "x" },
{ type: "field", namespace: "TERM", key: "COLUMNS"},
]],
["${awe:fa far linux} ${TERM:TITLE}", [
{ type: "field", namespace: "awe", key: "fa far linux"},
{ type: "text", text: " " },
{ type: "field", namespace: "TERM", key: "TITLE"},
]],
["foo ${TERMTITLE} bar", [
{ type: "text", text: "foo " },
{ type: "error", text: "TERMTITLE"},
{ type: "text", text: " bar" },
]],
["foo ${TERM:} bar", [
{ type: "text", text: "foo " },
{ type: "error", text: "TERM:"},
{ type: "text", text: " bar" },
]],
["foo ${TER", [
{ type: "text", text: "foo " },
{ type: "error", text: "TER"},
]],
])("Test", (input: string, output: Segment[]) => {
test(`parse ${input}`, () => {
const ts = new TemplateString();
ts.setTemplateString(input);
expect(ts._segments.length).toBe(output.length);
for (let i=0; i<ts._segments.length; i++) {
const seg = ts._segments[i];
const outSeg = output[i];
expect(seg.type).toBe(outSeg.type);
if (outSeg.startColumn !== undefined) {
expect(seg.startColumn).toBe(outSeg.startColumn);
}
if (outSeg.endColumn !== undefined) {
expect(seg.endColumn).toBe(outSeg.endColumn);
}
if (seg.type === "text") {
expect((<TextSegment> seg).text).toBe((<TextSegment> outSeg).text);
} else if(seg.type === "field") {
expect((<FieldSegment> seg).namespace).toBe((<FieldSegment> outSeg).namespace);
expect((<FieldSegment> seg).key).toBe((<FieldSegment> outSeg).key);
}
}
});
});<|fim▁end|> |
import { Segment, TemplateString, TextSegment, FieldSegment } from "../TemplateString";
|
<|file_name|>Heading2.py<|end_file_name|><|fim▁begin|># encoding: utf-8
from yast import import_module
import_module('UI')
from yast import *
class Heading2Client:
def main(self):
UI.OpenDialog(
VBox(
Heading("This Is a Heading."),
Label("This is a Label."),
PushButton("&OK")
)
)
UI.UserInput()
UI.CloseDialog()
<|fim▁hole|>Heading2Client().main()<|fim▁end|> | |
<|file_name|>path_tool.py<|end_file_name|><|fim▁begin|>__author__ = 'Dongwoo Kim'
import itertools
from collections import defaultdict
import numpy as np
def num_neighbor(T, idx, link_val=1):
"""
    count the number of in-links and out-links of the given node idx
    """
    # np.nonzero returns a tuple of per-dimension index arrays, so len() of it
    # would always be 2 here; count_nonzero counts the matching links instead.
    outlink = np.count_nonzero(T[idx, :, :] == link_val)
    inlink = np.count_nonzero(T[:, idx, :] == link_val)
    return outlink + inlink
def sample_broken_tri(T, link_val=1):
"""
find three nodes which do not have triangular path (i->j->k<-i) and corresponding relations a, b, c
@param T: graph tensor matrix
@return: tuple (x, y, z) where a, b, c is an index of link (i->j), (j->k), (i->k)
"""
find = False
<|fim▁hole|>
if not (T[i, j, a] == link_val and T[j, k, b] == link_val and T[i, k, c] == link_val):
find = True
return ((i, j, a), (j, k, b), (i, k, c))
def tri_index(T, link_val=1):
"""
extract indices of every possible triangular path in the graph
especially for the following path structure
i -> j
j -> k
i -> k
@param T: [E x E x K] tensor graph where T[i,j,k] = 1 when there is type k link between node i and j
@return: list of tuples consist of (x, y, z) where a, b, c is an index of link (i->j), (j->k), (i->k)
"""
T = T.copy()
T[T!=link_val] = 0
e, k = T.shape[0], T.shape[2]
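    # T_squeeze[i, j] > 0 iff at least one typed edge from node i to node j exists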
T_squeeze = np.sum(T, 2)
indices = list()
link_types = defaultdict(list)
for i, j in itertools.permutations(range(e), 2):
_tmp = np.nonzero(T[i, j, :])[0]
if len(_tmp) != 0:
link_types[(i, j)] = np.nonzero(T[i, j, :])[0]
for i in range(e):
out_links = np.setdiff1d(np.nonzero(T_squeeze[i, :])[0], i)
for j, k in itertools.permutations(out_links, 2):
if T_squeeze[j, k] != 0: # at least one edge from j to k exists
type_1, type_2, type_3 = link_types[(i, j)], link_types[(j, k)], link_types[(i, k)]
for types in itertools.product(type_1, type_2, type_3):
a = (i, j, types[0])
b = (j, k, types[1])
c = (i, k, types[2])
indices.append((a, b, c))
return indices
def test():
from scipy.io.matlab import loadmat
mat = loadmat('../data/alyawarradata.mat')
T = np.array(mat['Rs'], np.float32)
indices = tri_index(T)
print(len(indices))
for ix in range(10):
a, b, c = indices[ix]
i, j, t1 = a
j, k, t2 = b
i, k, t3 = c
print('a path %d->%d->%d by type %d/%d and a link %d->%d by type %d' % (i, j, k, t1, t2, i, k, t3))
    for a, b, c in indices:
        i, j, t1 = a
        j, k, t2 = b
        i, k, t3 = c
        assert T[i, j, t1] and T[j, k, t2] and T[i, k, t3]
if __name__ == '__main__':
test()<|fim▁end|> | while not find:
i, j, k = np.random.permutation(range(T.shape[0]))[:3]
a, b, c = np.random.randint(T.shape[2], size=3) |
<|file_name|>forums.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# **************************************************************************
# Copyright © 2016 jianglin
# File Name: forums.py
# Author: jianglin
# Email: [email protected]
# Created: 2016-12-17 13:12:23 (CST)
# Last Update: Friday 2017-11-10 11:04:16 (CST)
# By:<|fim▁hole|># **************************************************************************
from .views import BaseView
from forums.extension import db
from forums.api.forums.models import Board
from forums.api.tag.models import Tags
class BoardView(BaseView):
form_excluded_columns = ('topics')
class TagView(BaseView):
column_searchable_list = ['name']
form_excluded_columns = ('topics', 'followers')
def init_admin(admin):
admin.add_view(
BoardView(
Board,
db.session,
name='管理版块',
endpoint='admin_board',
category='管理社区'))
admin.add_view(
TagView(
Tags,
db.session,
name='管理节点',
endpoint='admin_tag',
category='管理社区'))<|fim▁end|> | # Description: |
<|file_name|>NwEvent.js<|end_file_name|><|fim▁begin|>OJ.extendClass(
'NwEvent', [OjEvent],
{
'_get_props_' : {
'data' : null
},
'_constructor' : function(type/*, bubbles = false, cancelable = false, data = null*/){
var ln = arguments.length;
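            // Forward at most (type, bubbles, cancelable) to OjEvent; an optional 4th argument is event data.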
this._super(OjEvent, '_constructor', ln > 3 ? [].slice.call(arguments, 0, 3) : arguments);
if(ln > 3){
this._data = arguments[3];<|fim▁hole|> }
);<|fim▁end|> | }
} |
<|file_name|>gce.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package gce
import (
"context"
"fmt"
"io"
"net/http"
"runtime"
"strconv"
"strings"
"sync"
"time"
gcfg "gopkg.in/gcfg.v1"
"cloud.google.com/go/compute/metadata"
"golang.org/x/oauth2"
"golang.org/x/oauth2/google"
computealpha "google.golang.org/api/compute/v0.alpha"
computebeta "google.golang.org/api/compute/v0.beta"
compute "google.golang.org/api/compute/v1"
container "google.golang.org/api/container/v1"
"k8s.io/klog"
"k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/client-go/informers"
clientset "k8s.io/client-go/kubernetes"
"k8s.io/client-go/kubernetes/scheme"
v1core "k8s.io/client-go/kubernetes/typed/core/v1"
"k8s.io/client-go/tools/cache"
"k8s.io/client-go/tools/record"
"k8s.io/client-go/util/flowcontrol"
cloudprovider "k8s.io/cloud-provider"
"k8s.io/kubernetes/pkg/cloudprovider/providers/gce/cloud"
"k8s.io/kubernetes/pkg/controller"
kubeletapis "k8s.io/kubernetes/pkg/kubelet/apis"
"k8s.io/kubernetes/pkg/version"
)
const (
// ProviderName is the official const representation of the Google Cloud Provider
ProviderName = "gce"
k8sNodeRouteTag = "k8s-node-route"
// AffinityTypeNone - no session affinity.
gceAffinityTypeNone = "NONE"
// AffinityTypeClientIP - affinity based on Client IP.
gceAffinityTypeClientIP = "CLIENT_IP"
// AffinityTypeClientIPProto - affinity based on Client IP and port.
gceAffinityTypeClientIPProto = "CLIENT_IP_PROTO"
operationPollInterval = time.Second
// Creating Route in very large clusters, may take more than half an hour.
operationPollTimeoutDuration = time.Hour
// Each page can have 500 results, but we cap how many pages
// are iterated through to prevent infinite loops if the API
// were to continuously return a nextPageToken.
maxPages = 25
maxTargetPoolCreateInstances = 200
// HTTP Load Balancer parameters
// Configure 8 second period for external health checks.
gceHcCheckIntervalSeconds = int64(8)
gceHcTimeoutSeconds = int64(1)
// Start sending requests as soon as a pod is found on the node.
gceHcHealthyThreshold = int64(1)
// Defaults to 3 * 8 = 24 seconds before the LB will steer traffic away.
gceHcUnhealthyThreshold = int64(3)
gceComputeAPIEndpoint = "https://www.googleapis.com/compute/v1/"
gceComputeAPIEndpointBeta = "https://www.googleapis.com/compute/beta/"
)
// gceObject is an abstraction of all GCE API object in go client
type gceObject interface {
MarshalJSON() ([]byte, error)
}
var _ cloudprovider.Interface = (*Cloud)(nil)
var _ cloudprovider.Instances = (*Cloud)(nil)
var _ cloudprovider.LoadBalancer = (*Cloud)(nil)
var _ cloudprovider.Routes = (*Cloud)(nil)
var _ cloudprovider.Zones = (*Cloud)(nil)
var _ cloudprovider.PVLabeler = (*Cloud)(nil)
var _ cloudprovider.Clusters = (*Cloud)(nil)
// Cloud is an implementation of Interface, LoadBalancer and Instances for Google Compute Engine.
type Cloud struct {
// ClusterID contains functionality for getting (and initializing) the ingress-uid. Call Cloud.Initialize()
// for the cloudprovider to start watching the configmap.
ClusterID ClusterID
service *compute.Service
serviceBeta *computebeta.Service
serviceAlpha *computealpha.Service
containerService *container.Service
tpuService *tpuService
client clientset.Interface
clientBuilder controller.ControllerClientBuilder
eventBroadcaster record.EventBroadcaster
eventRecorder record.EventRecorder
projectID string
region string
regional bool
localZone string // The zone in which we are running
// managedZones will be set to the 1 zone if running a single zone cluster
// it will be set to ALL zones in region for any multi-zone cluster
// Use GetAllCurrentZones to get only zones that contain nodes
managedZones []string
networkURL string
isLegacyNetwork bool
subnetworkURL string
secondaryRangeName string
networkProjectID string
onXPN bool
nodeTags []string // List of tags to use on firewall rules for load balancers
lastComputedNodeTags []string // List of node tags calculated in GetHostTags()
lastKnownNodeNames sets.String // List of hostnames used to calculate lastComputedHostTags in GetHostTags(names)
computeNodeTagLock sync.Mutex // Lock for computing and setting node tags
nodeInstancePrefix string // If non-"", an advisory prefix for all nodes in the cluster
useMetadataServer bool
operationPollRateLimiter flowcontrol.RateLimiter
manager diskServiceManager
// Lock for access to nodeZones
nodeZonesLock sync.Mutex
// nodeZones is a mapping from Zone to a sets.String of Node's names in the Zone
// it is updated by the nodeInformer
nodeZones map[string]sets.String
nodeInformerSynced cache.InformerSynced
// sharedResourceLock is used to serialize GCE operations that may mutate shared state to
// prevent inconsistencies. For example, load balancers manipulation methods will take the
// lock to prevent shared resources from being prematurely deleted while the operation is
// in progress.
sharedResourceLock sync.Mutex
// AlphaFeatureGate gates gce alpha features in Cloud instance.
// Related wrapper functions that interacts with gce alpha api should examine whether
// the corresponding api is enabled.
// If not enabled, it should return error.
AlphaFeatureGate *AlphaFeatureGate
// New code generated interface to the GCE compute library.
c cloud.Cloud
// Keep a reference of this around so we can inject a new cloud.RateLimiter implementation.
s *cloud.Service
}
// ConfigGlobal is the in memory representation of the gce.conf config data
// TODO: replace gcfg with json
type ConfigGlobal struct {
TokenURL string `gcfg:"token-url"`
TokenBody string `gcfg:"token-body"`
// ProjectID and NetworkProjectID can either be the numeric or string-based
// unique identifier that starts with [a-z].
ProjectID string `gcfg:"project-id"`
// NetworkProjectID refers to the project which owns the network being used.
NetworkProjectID string `gcfg:"network-project-id"`
NetworkName string `gcfg:"network-name"`
SubnetworkName string `gcfg:"subnetwork-name"`
// SecondaryRangeName is the name of the secondary range to allocate IP
// aliases. The secondary range must be present on the subnetwork the
// cluster is attached to.
SecondaryRangeName string `gcfg:"secondary-range-name"`
NodeTags []string `gcfg:"node-tags"`
NodeInstancePrefix string `gcfg:"node-instance-prefix"`
Regional bool `gcfg:"regional"`
Multizone bool `gcfg:"multizone"`
// APIEndpoint is the GCE compute API endpoint to use. If this is blank,
// then the default endpoint is used.
APIEndpoint string `gcfg:"api-endpoint"`
// ContainerAPIEndpoint is the GCE container API endpoint to use. If this is blank,
// then the default endpoint is used.
ContainerAPIEndpoint string `gcfg:"container-api-endpoint"`
// LocalZone specifies the GCE zone that gce cloud client instance is
// located in (i.e. where the controller will be running). If this is
// blank, then the local zone will be discovered via the metadata server.
LocalZone string `gcfg:"local-zone"`
// Default to none.
// For example: MyFeatureFlag
AlphaFeatures []string `gcfg:"alpha-features"`
}
// ConfigFile is the struct used to parse the /etc/gce.conf configuration file.
type ConfigFile struct {
Global ConfigGlobal `gcfg:"global"`
}
// CloudConfig includes all the necessary configuration for creating Cloud
type CloudConfig struct {
APIEndpoint string
ContainerAPIEndpoint string
ProjectID string
NetworkProjectID string
Region string
Regional bool
Zone string
ManagedZones []string
NetworkName string
NetworkURL string
SubnetworkName string
SubnetworkURL string
SecondaryRangeName string
NodeTags []string
NodeInstancePrefix string
TokenSource oauth2.TokenSource
UseMetadataServer bool
AlphaFeatureGate *AlphaFeatureGate
}
func init() {
cloudprovider.RegisterCloudProvider(
ProviderName,
func(config io.Reader) (cloudprovider.Interface, error) {
return newGCECloud(config)
})
}
// Services is the set of all versions of the compute service.
type Services struct {
// GA, Alpha, Beta versions of the compute API.
GA *compute.Service
Alpha *computealpha.Service
Beta *computebeta.Service
}
// ComputeServices returns access to the internal compute services.
func (g *Cloud) ComputeServices() *Services {
return &Services{g.service, g.serviceAlpha, g.serviceBeta}
}
// Compute returns the generated stubs for the compute API.
func (g *Cloud) Compute() cloud.Cloud {
return g.c
}
// ContainerService returns the container service.
func (g *Cloud) ContainerService() *container.Service {
return g.containerService
}
// newGCECloud creates a new instance of Cloud.
func newGCECloud(config io.Reader) (gceCloud *Cloud, err error) {
var cloudConfig *CloudConfig
var configFile *ConfigFile
if config != nil {
configFile, err = readConfig(config)
if err != nil {
return nil, err
}
klog.Infof("Using GCE provider config %+v", configFile)
}
cloudConfig, err = generateCloudConfig(configFile)
if err != nil {
return nil, err
}
return CreateGCECloud(cloudConfig)
}
func readConfig(reader io.Reader) (*ConfigFile, error) {
cfg := &ConfigFile{}
if err := gcfg.FatalOnly(gcfg.ReadInto(cfg, reader)); err != nil {
klog.Errorf("Couldn't read config: %v", err)
return nil, err
}
return cfg, nil
}
func generateCloudConfig(configFile *ConfigFile) (cloudConfig *CloudConfig, err error) {
cloudConfig = &CloudConfig{}
// By default, fetch token from GCE metadata server
cloudConfig.TokenSource = google.ComputeTokenSource("")
cloudConfig.UseMetadataServer = true
cloudConfig.AlphaFeatureGate = NewAlphaFeatureGate([]string{})
if configFile != nil {
if configFile.Global.APIEndpoint != "" {
cloudConfig.APIEndpoint = configFile.Global.APIEndpoint
}
if configFile.Global.ContainerAPIEndpoint != "" {
cloudConfig.ContainerAPIEndpoint = configFile.Global.ContainerAPIEndpoint
}
if configFile.Global.TokenURL != "" {
// if tokenURL is nil, set tokenSource to nil. This will force the OAuth client to fall
// back to use DefaultTokenSource. This allows running gceCloud remotely.
if configFile.Global.TokenURL == "nil" {
cloudConfig.TokenSource = nil
} else {
cloudConfig.TokenSource = NewAltTokenSource(configFile.Global.TokenURL, configFile.Global.TokenBody)
}
}
cloudConfig.NodeTags = configFile.Global.NodeTags
cloudConfig.NodeInstancePrefix = configFile.Global.NodeInstancePrefix
cloudConfig.AlphaFeatureGate = NewAlphaFeatureGate(configFile.Global.AlphaFeatures)
}
// retrieve projectID and zone
if configFile == nil || configFile.Global.ProjectID == "" || configFile.Global.LocalZone == "" {
cloudConfig.ProjectID, cloudConfig.Zone, err = getProjectAndZone()
if err != nil {
return nil, err
}
}
if configFile != nil {
if configFile.Global.ProjectID != "" {
cloudConfig.ProjectID = configFile.Global.ProjectID
}
if configFile.Global.LocalZone != "" {
cloudConfig.Zone = configFile.Global.LocalZone<|fim▁hole|> }
// retrieve region
cloudConfig.Region, err = GetGCERegion(cloudConfig.Zone)
if err != nil {
return nil, err
}
// Determine if its a regional cluster
if configFile != nil && configFile.Global.Regional {
cloudConfig.Regional = true
}
// generate managedZones
cloudConfig.ManagedZones = []string{cloudConfig.Zone}
if configFile != nil && (configFile.Global.Multizone || configFile.Global.Regional) {
cloudConfig.ManagedZones = nil // Use all zones in region
}
// Determine if network parameter is URL or Name
if configFile != nil && configFile.Global.NetworkName != "" {
if strings.Contains(configFile.Global.NetworkName, "/") {
cloudConfig.NetworkURL = configFile.Global.NetworkName
} else {
cloudConfig.NetworkName = configFile.Global.NetworkName
}
} else {
cloudConfig.NetworkName, err = getNetworkNameViaMetadata()
if err != nil {
return nil, err
}
}
// Determine if subnetwork parameter is URL or Name
// If cluster is on a GCP network of mode=custom, then `SubnetName` must be specified in config file.
if configFile != nil && configFile.Global.SubnetworkName != "" {
if strings.Contains(configFile.Global.SubnetworkName, "/") {
cloudConfig.SubnetworkURL = configFile.Global.SubnetworkName
} else {
cloudConfig.SubnetworkName = configFile.Global.SubnetworkName
}
}
if configFile != nil {
cloudConfig.SecondaryRangeName = configFile.Global.SecondaryRangeName
}
return cloudConfig, err
}
// CreateGCECloud creates a Cloud object using the specified parameters.
// If no networkUrl is specified, loads networkName via rest call.
// If no tokenSource is specified, uses oauth2.DefaultTokenSource.
// If managedZones is nil / empty all zones in the region will be managed.
func CreateGCECloud(config *CloudConfig) (*Cloud, error) {
// Remove any pre-release version and build metadata from the semver,
// leaving only the MAJOR.MINOR.PATCH portion. See http://semver.org/.
version := strings.TrimLeft(strings.Split(strings.Split(version.Get().GitVersion, "-")[0], "+")[0], "v")
// Create a user-agent header append string to supply to the Google API
// clients, to identify Kubernetes as the origin of the GCP API calls.
userAgent := fmt.Sprintf("Kubernetes/%s (%s %s)", version, runtime.GOOS, runtime.GOARCH)
// Use ProjectID for NetworkProjectID, if it wasn't explicitly set.
if config.NetworkProjectID == "" {
config.NetworkProjectID = config.ProjectID
}
client, err := newOauthClient(config.TokenSource)
if err != nil {
return nil, err
}
service, err := compute.New(client)
if err != nil {
return nil, err
}
service.UserAgent = userAgent
client, err = newOauthClient(config.TokenSource)
if err != nil {
return nil, err
}
serviceBeta, err := computebeta.New(client)
if err != nil {
return nil, err
}
serviceBeta.UserAgent = userAgent
client, err = newOauthClient(config.TokenSource)
if err != nil {
return nil, err
}
serviceAlpha, err := computealpha.New(client)
if err != nil {
return nil, err
}
serviceAlpha.UserAgent = userAgent
// Expect override api endpoint to always be v1 api and follows the same pattern as prod.
// Generate alpha and beta api endpoints based on override v1 api endpoint.
// For example,
// staging API endpoint: https://www.googleapis.com/compute/staging_v1/
if config.APIEndpoint != "" {
service.BasePath = fmt.Sprintf("%sprojects/", config.APIEndpoint)
serviceBeta.BasePath = fmt.Sprintf("%sprojects/", strings.Replace(config.APIEndpoint, "v1", "beta", -1))
serviceAlpha.BasePath = fmt.Sprintf("%sprojects/", strings.Replace(config.APIEndpoint, "v1", "alpha", -1))
}
containerService, err := container.New(client)
if err != nil {
return nil, err
}
containerService.UserAgent = userAgent
if config.ContainerAPIEndpoint != "" {
containerService.BasePath = config.ContainerAPIEndpoint
}
tpuService, err := newTPUService(client)
if err != nil {
return nil, err
}
	// ProjectID and NetworkProjectID may be either a project number or a project name.
projID, netProjID := tryConvertToProjectNames(config.ProjectID, config.NetworkProjectID, service)
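	// Differing project and network-project IDs indicate a Shared VPC (XPN) setup.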
onXPN := projID != netProjID
var networkURL string
var subnetURL string
var isLegacyNetwork bool
if config.NetworkURL != "" {
networkURL = config.NetworkURL
} else if config.NetworkName != "" {
networkURL = gceNetworkURL(config.APIEndpoint, netProjID, config.NetworkName)
} else {
// Other consumers may use the cloudprovider without utilizing the wrapped GCE API functions
// or functions requiring network/subnetwork URLs (e.g. Kubelet).
klog.Warningf("No network name or URL specified.")
}
if config.SubnetworkURL != "" {
subnetURL = config.SubnetworkURL
} else if config.SubnetworkName != "" {
subnetURL = gceSubnetworkURL(config.APIEndpoint, netProjID, config.Region, config.SubnetworkName)
} else {
// Determine the type of network and attempt to discover the correct subnet for AUTO mode.
// Gracefully fail because kubelet calls CreateGCECloud without any config, and minions
// lack the proper credentials for API calls.
if networkName := lastComponent(networkURL); networkName != "" {
var n *compute.Network
if n, err = getNetwork(service, netProjID, networkName); err != nil {
klog.Warningf("Could not retrieve network %q; err: %v", networkName, err)
} else {
switch typeOfNetwork(n) {
case netTypeLegacy:
klog.Infof("Network %q is type legacy - no subnetwork", networkName)
isLegacyNetwork = true
case netTypeCustom:
klog.Warningf("Network %q is type custom - cannot auto select a subnetwork", networkName)
case netTypeAuto:
subnetURL, err = determineSubnetURL(service, netProjID, networkName, config.Region)
if err != nil {
klog.Warningf("Could not determine subnetwork for network %q and region %v; err: %v", networkName, config.Region, err)
} else {
klog.Infof("Auto selecting subnetwork %q", subnetURL)
}
}
}
}
}
if len(config.ManagedZones) == 0 {
config.ManagedZones, err = getZonesForRegion(service, config.ProjectID, config.Region)
if err != nil {
return nil, err
}
}
if len(config.ManagedZones) > 1 {
klog.Infof("managing multiple zones: %v", config.ManagedZones)
}
operationPollRateLimiter := flowcontrol.NewTokenBucketRateLimiter(5, 5) // 5 qps, 5 burst.
gce := &Cloud{
service: service,
serviceAlpha: serviceAlpha,
serviceBeta: serviceBeta,
containerService: containerService,
tpuService: tpuService,
projectID: projID,
networkProjectID: netProjID,
onXPN: onXPN,
region: config.Region,
regional: config.Regional,
localZone: config.Zone,
managedZones: config.ManagedZones,
networkURL: networkURL,
isLegacyNetwork: isLegacyNetwork,
subnetworkURL: subnetURL,
secondaryRangeName: config.SecondaryRangeName,
nodeTags: config.NodeTags,
nodeInstancePrefix: config.NodeInstancePrefix,
useMetadataServer: config.UseMetadataServer,
operationPollRateLimiter: operationPollRateLimiter,
AlphaFeatureGate: config.AlphaFeatureGate,
nodeZones: map[string]sets.String{},
}
gce.manager = &gceServiceManager{gce}
gce.s = &cloud.Service{
GA: service,
Alpha: serviceAlpha,
Beta: serviceBeta,
ProjectRouter: &gceProjectRouter{gce},
RateLimiter: &gceRateLimiter{gce},
}
gce.c = cloud.NewGCE(gce.s)
return gce, nil
}
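// An illustrative construction (hypothetical values; the remaining CloudConfig
// fields are optional and are discovered or defaulted as described above):
//
//	cloud, err := CreateGCECloud(&CloudConfig{
//		ProjectID: "my-project",
//		Region:    "us-central1",
//		Zone:      "us-central1-a",
//	})
//	if err != nil {
//		klog.Fatalf("failed to create GCE cloud: %v", err)
//	}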
// SetRateLimiter adds a custom cloud.RateLimiter implementation.
// WARNING: Calling this could have unexpected behavior if you have in-flight
// requests. It is best to use this immediately after creating a Cloud.
func (g *Cloud) SetRateLimiter(rl cloud.RateLimiter) {
if rl != nil {
g.s.RateLimiter = rl
}
}
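// Illustrative only (myLimiter stands in for any cloud.RateLimiter
// implementation):
//
//	gce, _ := CreateGCECloud(config)
//	gce.SetRateLimiter(myLimiter) // install before any requests are issued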
// determineSubnetURL queries for all subnetworks in a region for a given network and returns
// the URL of the subnetwork which exists in the auto-subnet range.
func determineSubnetURL(service *compute.Service, networkProjectID, networkName, region string) (string, error) {
subnets, err := listSubnetworksOfNetwork(service, networkProjectID, networkName, region)
if err != nil {
return "", err
}
autoSubnets, err := subnetsInCIDR(subnets, autoSubnetIPRange)
if err != nil {
return "", err
}
if len(autoSubnets) == 0 {
return "", fmt.Errorf("no subnet exists in auto CIDR")
}
if len(autoSubnets) > 1 {
return "", fmt.Errorf("multiple subnetworks in the same region exist in auto CIDR")
}
return autoSubnets[0].SelfLink, nil
}
func tryConvertToProjectNames(configProject, configNetworkProject string, service *compute.Service) (projID, netProjID string) {
projID = configProject
if isProjectNumber(projID) {
projName, err := getProjectID(service, projID)
if err != nil {
klog.Warningf("Failed to retrieve project %v while trying to retrieve its name. err %v", projID, err)
} else {
projID = projName
}
}
netProjID = projID
if configNetworkProject != configProject {
netProjID = configNetworkProject
}
if isProjectNumber(netProjID) {
netProjName, err := getProjectID(service, netProjID)
if err != nil {
klog.Warningf("Failed to retrieve network project %v while trying to retrieve its name. err %v", netProjID, err)
} else {
netProjID = netProjName
}
}
return projID, netProjID
}
// Initialize takes in a clientBuilder and spawns a goroutine for watching the clusterid configmap.
// This must be called before utilizing the funcs of gce.ClusterID
func (g *Cloud) Initialize(clientBuilder cloudprovider.ControllerClientBuilder, stop <-chan struct{}) {
g.clientBuilder = clientBuilder
g.client = clientBuilder.ClientOrDie("cloud-provider")
if g.OnXPN() {
g.eventBroadcaster = record.NewBroadcaster()
g.eventBroadcaster.StartRecordingToSink(&v1core.EventSinkImpl{Interface: g.client.CoreV1().Events("")})
g.eventRecorder = g.eventBroadcaster.NewRecorder(scheme.Scheme, v1.EventSource{Component: "g-cloudprovider"})
}
go g.watchClusterID(stop)
}
// LoadBalancer returns an implementation of LoadBalancer for Google Compute Engine.
func (g *Cloud) LoadBalancer() (cloudprovider.LoadBalancer, bool) {
return g, true
}
// Instances returns an implementation of Instances for Google Compute Engine.
func (g *Cloud) Instances() (cloudprovider.Instances, bool) {
return g, true
}
// Zones returns an implementation of Zones for Google Compute Engine.
func (g *Cloud) Zones() (cloudprovider.Zones, bool) {
return g, true
}
// Clusters returns an implementation of Clusters for Google Compute Engine.
func (g *Cloud) Clusters() (cloudprovider.Clusters, bool) {
return g, true
}
// Routes returns an implementation of Routes for Google Compute Engine.
func (g *Cloud) Routes() (cloudprovider.Routes, bool) {
return g, true
}
// ProviderName returns the cloud provider ID.
func (g *Cloud) ProviderName() string {
return ProviderName
}
// ProjectID returns the ProjectID corresponding to the project this cloud is in.
func (g *Cloud) ProjectID() string {
return g.projectID
}
// NetworkProjectID returns the ProjectID corresponding to the project this cluster's network is in.
func (g *Cloud) NetworkProjectID() string {
return g.networkProjectID
}
// Region returns the region
func (g *Cloud) Region() string {
return g.region
}
// OnXPN returns true if the cluster is running on a cross project network (XPN)
func (g *Cloud) OnXPN() bool {
return g.onXPN
}
// NetworkURL returns the network url
func (g *Cloud) NetworkURL() string {
return g.networkURL
}
// SubnetworkURL returns the subnetwork url
func (g *Cloud) SubnetworkURL() string {
return g.subnetworkURL
}
// IsLegacyNetwork returns true if the cluster is still running a legacy network configuration.
func (g *Cloud) IsLegacyNetwork() bool {
return g.isLegacyNetwork
}
// SetInformers sets up the zone handlers we need watching for node changes.
func (g *Cloud) SetInformers(informerFactory informers.SharedInformerFactory) {
klog.Infof("Setting up informers for Cloud")
nodeInformer := informerFactory.Core().V1().Nodes().Informer()
nodeInformer.AddEventHandler(cache.ResourceEventHandlerFuncs{
AddFunc: func(obj interface{}) {
node := obj.(*v1.Node)
g.updateNodeZones(nil, node)
},
UpdateFunc: func(prev, obj interface{}) {
prevNode := prev.(*v1.Node)
newNode := obj.(*v1.Node)
if newNode.Labels[kubeletapis.LabelZoneFailureDomain] ==
prevNode.Labels[kubeletapis.LabelZoneFailureDomain] {
return
}
g.updateNodeZones(prevNode, newNode)
},
DeleteFunc: func(obj interface{}) {
node, isNode := obj.(*v1.Node)
// We can get DeletedFinalStateUnknown instead of *v1.Node here
// and we need to handle that correctly.
if !isNode {
deletedState, ok := obj.(cache.DeletedFinalStateUnknown)
if !ok {
klog.Errorf("Received unexpected object: %v", obj)
return
}
node, ok = deletedState.Obj.(*v1.Node)
if !ok {
klog.Errorf("DeletedFinalStateUnknown contained non-Node object: %v", deletedState.Obj)
return
}
}
g.updateNodeZones(node, nil)
},
})
g.nodeInformerSynced = nodeInformer.HasSynced
}
func (g *Cloud) updateNodeZones(prevNode, newNode *v1.Node) {
g.nodeZonesLock.Lock()
defer g.nodeZonesLock.Unlock()
if prevNode != nil {
prevZone, ok := prevNode.ObjectMeta.Labels[kubeletapis.LabelZoneFailureDomain]
if ok {
g.nodeZones[prevZone].Delete(prevNode.ObjectMeta.Name)
if g.nodeZones[prevZone].Len() == 0 {
g.nodeZones[prevZone] = nil
}
}
}
if newNode != nil {
newZone, ok := newNode.ObjectMeta.Labels[kubeletapis.LabelZoneFailureDomain]
if ok {
if g.nodeZones[newZone] == nil {
g.nodeZones[newZone] = sets.NewString()
}
g.nodeZones[newZone].Insert(newNode.ObjectMeta.Name)
}
}
}
// HasClusterID returns true if the cluster has a clusterID
func (g *Cloud) HasClusterID() bool {
return true
}
// Project IDs cannot start with a digit, whereas project numbers are entirely
// numeric. If the value parses as an unsigned integer, it must be a project number.
func isProjectNumber(idOrNumber string) bool {
_, err := strconv.ParseUint(idOrNumber, 10, 64)
return err == nil
}
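// For example, isProjectNumber("123456789012") returns true, while
// isProjectNumber("my-project-id") returns false.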
func gceNetworkURL(apiEndpoint, project, network string) string {
if apiEndpoint == "" {
apiEndpoint = gceComputeAPIEndpoint
}
return apiEndpoint + strings.Join([]string{"projects", project, "global", "networks", network}, "/")
}
func gceSubnetworkURL(apiEndpoint, project, region, subnetwork string) string {
if apiEndpoint == "" {
apiEndpoint = gceComputeAPIEndpoint
}
return apiEndpoint + strings.Join([]string{"projects", project, "regions", region, "subnetworks", subnetwork}, "/")
}
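// For example, assuming gceComputeAPIEndpoint is the public v1 endpoint
// https://www.googleapis.com/compute/v1/, gceSubnetworkURL("", "my-project",
// "us-central1", "my-subnet") returns
// https://www.googleapis.com/compute/v1/projects/my-project/regions/us-central1/subnetworks/my-subnet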
// getRegionInURL parses full resource URLs and shorter URLs
// https://www.googleapis.com/compute/v1/projects/myproject/regions/us-central1/subnetworks/a
// projects/myproject/regions/us-central1/subnetworks/a
// Both return "us-central1"
func getRegionInURL(urlStr string) string {
fields := strings.Split(urlStr, "/")
for i, v := range fields {
if v == "regions" && i < len(fields)-1 {
return fields[i+1]
}
}
return ""
}
func getNetworkNameViaMetadata() (string, error) {
result, err := metadata.Get("instance/network-interfaces/0/network")
if err != nil {
return "", err
}
parts := strings.Split(result, "/")
if len(parts) != 4 {
return "", fmt.Errorf("unexpected response: %s", result)
}
return parts[3], nil
}
// getNetwork returns a GCP network
func getNetwork(svc *compute.Service, networkProjectID, networkID string) (*compute.Network, error) {
return svc.Networks.Get(networkProjectID, networkID).Do()
}
// listSubnetworksOfNetwork returns a list of subnetworks for a particular region of a network.
func listSubnetworksOfNetwork(svc *compute.Service, networkProjectID, networkID, region string) ([]*compute.Subnetwork, error) {
var subnets []*compute.Subnetwork
err := svc.Subnetworks.List(networkProjectID, region).Filter(fmt.Sprintf("network eq .*/%v$", networkID)).Pages(context.Background(), func(res *compute.SubnetworkList) error {
subnets = append(subnets, res.Items...)
return nil
})
return subnets, err
}
// getProjectID returns the project's string ID given a project number or string
func getProjectID(svc *compute.Service, projectNumberOrID string) (string, error) {
proj, err := svc.Projects.Get(projectNumberOrID).Do()
if err != nil {
return "", err
}
return proj.Name, nil
}
func getZonesForRegion(svc *compute.Service, projectID, region string) ([]string, error) {
// TODO: use PageToken to list all not just the first 500
listCall := svc.Zones.List(projectID)
// Filtering by region doesn't seem to work
// (tested in https://cloud.google.com/compute/docs/reference/latest/zones/list)
// listCall = listCall.Filter("region eq " + region)
res, err := listCall.Do()
if err != nil {
return nil, fmt.Errorf("unexpected response listing zones: %v", err)
}
zones := []string{}
for _, zone := range res.Items {
regionName := lastComponent(zone.Region)
if regionName == region {
zones = append(zones, zone.Name)
}
}
return zones, nil
}
func findSubnetForRegion(subnetURLs []string, region string) string {
for _, url := range subnetURLs {
if thisRegion := getRegionInURL(url); thisRegion == region {
return url
}
}
return ""
}
func newOauthClient(tokenSource oauth2.TokenSource) (*http.Client, error) {
if tokenSource == nil {
var err error
tokenSource, err = google.DefaultTokenSource(
oauth2.NoContext,
compute.CloudPlatformScope,
compute.ComputeScope)
klog.Infof("Using DefaultTokenSource %#v", tokenSource)
if err != nil {
return nil, err
}
} else {
klog.Infof("Using existing Token Source %#v", tokenSource)
}
backoff := wait.Backoff{
// These values will add up to about a minute. See #56293 for background.
Duration: time.Second,
Factor: 1.4,
Steps: 10,
}
if err := wait.ExponentialBackoff(backoff, func() (bool, error) {
if _, err := tokenSource.Token(); err != nil {
klog.Errorf("error fetching initial token: %v", err)
return false, nil
}
return true, nil
}); err != nil {
return nil, err
}
return oauth2.NewClient(oauth2.NoContext, tokenSource), nil
}
func (manager *gceServiceManager) getProjectsAPIEndpoint() string {
projectsAPIEndpoint := gceComputeAPIEndpoint + "projects/"
if manager.gce.service != nil {
projectsAPIEndpoint = manager.gce.service.BasePath
}
return projectsAPIEndpoint
}
func (manager *gceServiceManager) getProjectsAPIEndpointBeta() string {
projectsAPIEndpoint := gceComputeAPIEndpointBeta + "projects/"
if manager.gce.service != nil {
projectsAPIEndpoint = manager.gce.serviceBeta.BasePath
}
return projectsAPIEndpoint
}<|fim▁end|> | }
if configFile.Global.NetworkProjectID != "" {
cloudConfig.NetworkProjectID = configFile.Global.NetworkProjectID
} |
<|file_name|>codeReference.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
""" create rst files for documentation of DIRAC """
import os
import shutil
import socket
import sys
import logging
import glob
from diracdoctools.Utilities import writeLinesToFile, mkdir, makeLogger
from diracdoctools.Config import Configuration, CLParser as clparser
LOG = makeLogger('CodeReference')
# global used inside the CustomizedDocs modules
CUSTOMIZED_DOCSTRINGS = {}
class CLParser(clparser):
"""Extension to CLParser to also parse buildType."""
def __init__(self):
super(CLParser, self).__init__()
self.log = LOG.getChild('CLParser')
self.clean = False
self.parser.add_argument('--buildType', action='store', default='full',
choices=['full', 'limited'],
help='Build full or limited code reference',
)
self.parser.add_argument('--clean', action='store_true',
help='Remove rst files and exit',
)
def parse(self):
super(CLParser, self).parse()
self.log.info('Parsing options')
self.buildType = self.parsed.buildType
self.clean = self.parsed.clean
def optionDict(self):
oDict = super(CLParser, self).optionDict()
oDict['buildType'] = self.buildType
oDict['clean'] = self.clean
return oDict
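# Typical invocation sketch (mirrors the __main__ block at the end of this
# file); the option names come from optionDict above:
#
#   run(**(CLParser().optionDict()))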
class CodeReference(object):
"""Module to create rst files containing autodoc for sphinx."""
def __init__(self, configFile='docs.conf'):
self.config = Configuration(configFile, sections=['Code'])
self.orgWorkingDir = os.getcwd()
def end(self):
"""Make sure we are back in the original working directory."""
LOG.info('Done with creating code reference')
os.chdir(self.orgWorkingDir)
def getCustomDocs(self):
"""Import the dynamically created docstrings from the files in CustomizedDocs.
    Use 'exec' to avoid a lot of relative imports, pylint errors, etc.
"""
customizedPath = os.path.join(self.config.code_customDocsPath, '*.py')
LOG.info('Looking for custom strings in %s', customizedPath)
for filename in glob.glob(customizedPath):
LOG.info('Found customization: %s', filename)
exec(open(filename).read(), globals()) # pylint: disable=exec-used
  def mkPackageRst(self, filename, modulename, fullmodulename, subpackages=None, modules=None):
    """Make an rst file for a module that contains other modules."""
if modulename == 'scripts':
return
else:<|fim▁hole|> lines = []
lines.append('%s' % modulefinal)
lines.append('=' * len(modulefinal))
lines.append('.. module:: %s ' % fullmodulename)
lines.append('')
if subpackages or modules:
lines.append('.. toctree::')
lines.append(' :maxdepth: 1')
lines.append('')
subpackages = [s for s in subpackages if not s.endswith(('scripts', ))]
if subpackages:
LOG.info('Module %r with subpackages: %r', fullmodulename, ', '.join(subpackages))
lines.append('SubPackages')
lines.append('...........')
lines.append('')
lines.append('.. toctree::')
lines.append(' :maxdepth: 1')
lines.append('')
for package in sorted(subpackages):
lines.append(' %s/%s_Module.rst' % (package, package.split('/')[-1]))
lines.append('')
# remove CLI etc. because we drop them earlier
modules = [m for m in modules if not m.endswith('CLI') and '-' not in m]
if modules:
lines.append('Modules')
lines.append('.......')
lines.append('')
lines.append('.. toctree::')
lines.append(' :maxdepth: 1')
lines.append('')
for module in sorted(modules):
lines.append(' %s.rst' % (module.split('/')[-1],))
lines.append('')
writeLinesToFile(filename, lines)
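  # Illustrative shape of the generated rst (hypothetical package name; the
  # exact content depends on the subpackages and modules passed in):
  #
  #   MyPackage
  #   =========
  #   .. module:: DIRAC.MyPackage
  #
  #   SubPackages
  #   ...........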
def mkDummyRest(self, classname, _fullclassname):
"""Create a dummy rst file for files that behave badly."""
filename = classname + '.rst'
lines = []
lines.append('%s' % classname)
lines.append('=' * len(classname))
lines.append('')
    lines.append('    This is an empty file, because we cannot parse this file correctly or it causes problems,')
    lines.append('    please look at the source code directly')
writeLinesToFile(filename, lines)
def mkModuleRst(self, classname, fullclassname, buildtype='full'):
"""Create rst file for module."""
LOG.info('Creating rst file for %r, aka %r', classname, fullclassname)
filename = classname + '.rst'
lines = []
lines.append('%s' % classname)
lines.append('=' * len(classname))
lines.append('.. automodule:: %s' % fullclassname)
if buildtype == 'full':
lines.append(' :members:')
if classname not in self.config.code_noInherited:
lines.append(' :inherited-members:')
lines.append(' :undoc-members:')
lines.append(' :show-inheritance:')
if classname in self.config.code_privateMembers:
lines.append(' :special-members:')
lines.append(' :private-members:')
else:
lines.append(' :special-members: __init__')
if classname.startswith('_'):
lines.append(' :private-members:')
if fullclassname in CUSTOMIZED_DOCSTRINGS:
ds = CUSTOMIZED_DOCSTRINGS[fullclassname]
if ds.replace:
lines = ds.doc_string
else:
lines.append(ds.doc_string)
writeLinesToFile(filename, lines)
def getsubpackages(self, abspath, direc):
"""return list of subpackages with full path"""
packages = []
for dire in direc:
if dire.lower() == 'test' or dire.lower() == 'tests' or '/test' in dire.lower():
LOG.debug('Skipping test directory: %s/%s', abspath, dire)
continue
if dire.lower() == 'docs' or '/docs' in dire.lower():
LOG.debug('Skipping docs directory: %s/%s', abspath, dire)
continue
if os.path.exists(os.path.join(self.config.sourcePath, abspath, dire, '__init__.py')):
packages.append(os.path.join(dire))
return packages
def getmodules(self, abspath, _direc, files):
"""Return list of subpackages with full path."""
packages = []
for filename in files:
if filename.lower().startswith('test') or filename.lower().endswith('test') or \
any(f.lower() in filename.lower() for f in self.config.code_ignoreFiles):
LOG.debug('Skipping file: %s/%s', abspath, filename)
continue
if 'test' in filename.lower():
LOG.warn("File contains 'test', but is kept: %s/%s", abspath, filename)
if filename != '__init__.py':
packages.append(filename.split('.py')[0])
return packages
def cleanDoc(self):
"""Remove the code output folder."""
LOG.info('Removing existing code documentation: %r', self.config.code_targetPath)
if os.path.exists(self.config.code_targetPath):
shutil.rmtree(self.config.code_targetPath)
def createDoc(self, buildtype="full"):
"""create the rst files for all the things we want them for"""
LOG.info('self.config.sourcePath: %s', self.config.sourcePath)
LOG.info('self.config.targetPath: %s', self.config.code_targetPath)
LOG.info('Host: %s', socket.gethostname())
# we need to replace existing rst files so we can decide how much code-doc to create
if os.path.exists(self.config.code_targetPath) and os.environ.get('READTHEDOCS', 'False') == 'True':
self.cleanDoc()
mkdir(self.config.code_targetPath)
os.chdir(self.config.code_targetPath)
self.getCustomDocs()
LOG.info('Now creating rst files: starting in %r', self.config.sourcePath)
firstModule = True
for root, direc, files in os.walk(self.config.sourcePath):
configTemplate = [os.path.join(root, _) for _ in files if _ == 'ConfigTemplate.cfg']
files = [_ for _ in files if _.endswith('.py')]
if '__init__.py' not in files:
continue
elif any(f.lower() in root.lower() for f in self.config.code_ignoreFolders):
LOG.debug('Skipping folder: %s', root)
continue
modulename = root.split('/')[-1].strip('.')
codePath = root.split(self.config.sourcePath)[1].strip('/.')
docPath = codePath
if docPath.startswith(self.config.moduleName):
docPath = docPath[len(self.config.moduleName) + 1:]
fullmodulename = '.'.join(codePath.split('/')).strip('.')
if not fullmodulename.startswith(self.config.moduleName):
fullmodulename = ('.'.join([self.config.moduleName, fullmodulename])).strip('.')
packages = self.getsubpackages(codePath, direc)
if docPath:
LOG.debug('Trying to create folder: %s', docPath)
mkdir(docPath)
os.chdir(docPath)
if firstModule:
firstModule = False
self.createCodeDocIndex(
subpackages=packages,
modules=self.getmodules(
codePath,
direc,
files),
buildtype=buildtype)
elif buildtype == 'limited':
os.chdir(self.config.code_targetPath)
return 0
else:
self.mkPackageRst(
modulename + '_Module.rst',
modulename,
fullmodulename,
subpackages=packages,
modules=self.getmodules(
docPath,
direc,
files))
for filename in files:
        # Skip files that call parseCommandLine or cause similar issues
fullclassname = '.'.join(docPath.split('/') + [filename])
if any(f in filename for f in self.config.code_dummyFiles):
LOG.debug('Creating dummy for file %r', filename)
self.mkDummyRest(filename.split('.py')[0], fullclassname.split('.py')[0])
continue
elif not filename.endswith('.py') or \
filename.endswith('CLI.py') or \
filename.lower().startswith('test') or \
filename == '__init__.py' or \
any(f in filename for f in self.config.code_ignoreFiles) or \
'-' in filename: # not valid python identifier, e.g. dirac-pilot
LOG.debug('Ignoring file %r', filename)
continue
if not fullclassname.startswith(self.config.moduleName):
fullclassname = '.'.join([self.config.moduleName, fullclassname])
self.mkModuleRst(filename.split('.py')[0], fullclassname.split('.py')[0], buildtype)
# copy configTemplate files to code doc so we can import them in the agent docstrings
if configTemplate:
shutil.copy(configTemplate[0], os.path.join(self.config.code_targetPath, docPath))
os.chdir(self.config.code_targetPath)
return 0
def createCodeDocIndex(self, subpackages, modules, buildtype="full"):
"""create the main index file"""
LOG.info('Creating base index file')
filename = 'index.rst'
lines = []
lines.append('.. _code_documentation:')
lines.append('')
lines.append('Code Documentation (|release|)')
lines.append('------------------------------')
# for limited builds we only create the most basic code documentation so
# we let users know there is more elsewhere
if buildtype == 'limited':
lines.append('')
lines.append('.. warning::')
      lines.append(
        '    This is a limited build of the code documentation; for the full code documentation '
        'please look at the website')
lines.append('')
else:
if subpackages or modules:
lines.append('.. toctree::')
lines.append(' :maxdepth: 1')
lines.append('')
if subpackages:
systemPackages = sorted([pck for pck in subpackages if pck.endswith('System')])
otherPackages = sorted([pck for pck in subpackages if not pck.endswith('System')])
lines.append('=======')
lines.append('Systems')
lines.append('=======')
lines.append('')
lines.append('.. toctree::')
lines.append(' :maxdepth: 1')
lines.append('')
for package in systemPackages:
lines.append(' %s/%s_Module.rst' % (package, package.split('/')[-1]))
lines.append('')
lines.append('=====')
lines.append('Other')
lines.append('=====')
lines.append('')
lines.append('.. toctree::')
lines.append(' :maxdepth: 1')
lines.append('')
for package in otherPackages:
lines.append(' %s/%s_Module.rst' % (package, package.split('/')[-1]))
if modules:
for module in sorted(modules):
lines.append(' %s.rst' % (module.split('/')[-1],))
writeLinesToFile(filename, lines)
def checkBuildTypeAndRun(self, buildType='full'):
"""Check for input argument and then create the doc rst files."""
buildTypes = ('full', 'limited')
if buildType not in buildTypes:
LOG.error('Unknown build type: %s use %s ', buildType, ' '.join(buildTypes))
return 1
LOG.info('Buildtype: %s', buildType)
return self.createDoc(buildType)
def run(configFile='docs.conf', logLevel=logging.INFO, debug=False, buildType='full', clean=False):
"""Create the code reference.
:param str configFile: path to the configFile
:param logLevel: logging level to use
:param bool debug: if true even more debug information is printed
:param str buildType: 'full' or 'limited', use limited only when memory is limited
:param bool clean: Remove rst files and exit
:returns: return value 1 or 0
"""
logging.getLogger().setLevel(logLevel)
code = CodeReference(configFile=configFile)
if clean:
code.cleanDoc()
return 0
retVal = code.checkBuildTypeAndRun(buildType=buildType)
code.end()
return retVal
if __name__ == '__main__':
sys.exit(run(**(CLParser().optionDict())))<|fim▁end|> | modulefinal = modulename
|
<|file_name|>expedia.py<|end_file_name|><|fim▁begin|>from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.select import Select
# Configure the baseURL
baseUrl = "https://www.expedia.es"
# Create a WebDriver instance and maximize the window
driver = webdriver.Firefox()
driver.maximize_window()
# Navigate to the URL and set a 10-second implicit wait
driver.get(baseUrl)
driver.implicitly_wait(10)
# Find and click on element "Flights"
# Find departure textbox and type "Barcelona"
# Find destination textbox and type "Madrid"
# Find departure time and type "23/11/2017"<|fim▁hole|># Close Calendar
# Find the "Find" button and click on it
# Quit driver<|fim▁end|> | |
<|file_name|>filter_transcript_counts.py<|end_file_name|><|fim▁begin|><|fim▁hole|>print "Gene\tTranscript\tExpression"
for l in sys.stdin:
t = l.strip().split('\t')
if float(t[2]) > 1.1:
print '\t'.join(t[0:3])<|fim▁end|> | # python filter_transcript_counts.py < transcript_counts.txt > active_transcripts.txt
import sys
|
<|file_name|>identity.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name,unused-argument
"""Tensor Expression for identity"""
from tvm import te
from .dma import read_compute, write_compute
def identity_compute(
ifm: te.Tensor,
lut: te.Tensor,
ifm_scale: float,
ifm_zero_point: int,
ofm_scale: float,
ofm_zero_point: int,
activation: str,
) -> te.Tensor:
"""A compute operator for the NPU identity operator.
Parameters
----------
ifm : te.Tensor
The Input Feature Map tensor (IFM).
lut : te.Tensor
The look-up table values to use if activation is "LUT", "TANH" or "SIGMOID".
ifm_scale : float
The quantization scale for the Input Feature Map tensor.
ifm_zero_point : int
The quantization zero point for the Input Feature Map tensor.
ofm_scale : float
The quantization scale for the Output Feature Map tensor.
ofm_zero_point : int
The quantization zero point for the Output Feature Map tensor.
activation : str
The activation function to use.<|fim▁hole|> "LUT" - use a look-up table to perform the activation function.
Returns
-------
te.Tensor
The Output Feature Map tensor.
"""
dmaed_ifm = read_compute(ifm, ifm_zero_point, ifm_scale)
id_attrs = {"op": "ethosu_identity", "activation": activation}
has_lut = activation in ("TANH", "LUT", "SIGMOID")
# This is a trick to insert the LUT tensor into the TE graph if LUT is present
lut_expr = (lut[0] + lut[255]).astype(ifm.dtype) if has_lut else 0
# Add the LUT tensor to the attributes to be able to later tell which tensor is the LUT
if has_lut:
id_attrs["lut"] = lut
identity = te.compute(
ifm.shape,
lambda *i: (dmaed_ifm(*i) + lut_expr).astype(ifm.dtype),
name="ethosu_identity",
attrs=id_attrs,
)
dmaed_ofm = write_compute(identity, ofm_zero_point, ofm_scale)
return dmaed_ofm<|fim▁end|> | "NONE" - no activation function.
"TANH" - tanh activation function.
"SIGMOID" - sigmoid activation function. |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015-2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################<|fim▁hole|><|fim▁end|> | from . import account_analytic_attribution
from . import account_analytic_distribution_line |
<|file_name|>reflect.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""<|fim▁hole|>"""
import operator
import string
from optparse import OptionParser
from sqlalchemy import create_engine, MetaData
from tsadisplay import describe, render, __version__
def run():
    """Command for reflecting database objects."""
parser = OptionParser(
version=__version__, description=__doc__,
)
parser.add_option(
'-u', '--url', dest='url',
help='Database URL (connection string)',
)
parser.add_option(
'-r', '--render', dest='render', default='dot',
choices=['plantuml', 'dot'],
help='Output format - plantuml or dot',
)
parser.add_option(
'-l', '--list', dest='list', action='store_true',
help='Output database list of tables and exit',
)
parser.add_option(
'-i', '--include', dest='include',
        help='Comma-separated list of tables to include',
)
parser.add_option(
'-e', '--exclude', dest='exclude',
        help='Comma-separated list of tables to exclude',
)
(options, args) = parser.parse_args()
if not options.url:
print('-u/--url option required')
exit(1)
engine = create_engine(options.url)
meta = MetaData()
meta.reflect(bind=engine)
if options.list:
print('Database tables:')
tables = sorted(meta.tables.keys())
def _g(l, i):
try:
return tables[i]
except IndexError:
return ''
for i in range(0, len(tables), 2):
print(' {0}{1}{2}'.format(
_g(tables, i),
' ' * (38 - len(_g(tables, i))),
_g(tables, i + 1),
))
exit(0)
tables = set(meta.tables.keys())
if options.include:
tables &= set(map(string.strip, options.include.split(',')))
if options.exclude:
tables -= set(map(string.strip, options.exclude.split(',')))
desc = describe(map(lambda x: operator.getitem(meta.tables, x), tables))
print(getattr(render, options.render)(desc))<|fim▁end|> | Program for generating plantuml or dot format
of database tables from a connection string
\n\nDatabase connection string - http://goo.gl/3GpnE |
<|file_name|>verification.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Block and transaction verification functions
//!
//! Block verification is done in 3 steps
//! 1. Quick verification upon adding to the block queue
//! 2. Signatures verification done in the queue.
//! 3. Final verification against the blockchain done before enactment.
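//!
//! A sketch of the intended call order (hypothetical caller, simplified):
//!
//! - phase 1: `verify_block_basic(&header, &bytes, engine)` when queueing
//! - phase 2: `verify_block_unordered(header, bytes, engine, check_seal)` in the queue
//! - phase 3: `verify_block_family(&header, &bytes, engine, &blockchain)` before enactment
//! - phase 4: `verify_block_final(&expected, &got)` against enactment results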
use util::*;
use engines::Engine;
use error::{BlockError, Error};
use blockchain::*;
use header::{BlockNumber, Header};
use rlp::{UntrustedRlp, View};
use transaction::SignedTransaction;
use views::BlockView;
use time::get_time;
/// Preprocessed block data gathered in `verify_block_unordered` call
pub struct PreverifiedBlock {
/// Populated block header
pub header: Header,
/// Populated block transactions
pub transactions: Vec<SignedTransaction>,
/// Block bytes
pub bytes: Bytes,
}
impl HeapSizeOf for PreverifiedBlock {
fn heap_size_of_children(&self) -> usize {
self.header.heap_size_of_children()
+ self.transactions.heap_size_of_children()
+ self.bytes.heap_size_of_children()
}
}
/// Phase 1 quick block verification. Only does checks that are cheap. Operates on a single block
pub fn verify_block_basic(header: &Header, bytes: &[u8], engine: &Engine) -> Result<(), Error> {
try!(verify_header_params(&header, engine));
try!(verify_block_integrity(bytes, &header.transactions_root(), &header.uncles_hash()));
try!(engine.verify_block_basic(&header, Some(bytes)));
for u in try!(UntrustedRlp::new(bytes).at(2)).iter().map(|rlp| rlp.as_val::<Header>()) {
let u = try!(u);
try!(verify_header_params(&u, engine));
try!(engine.verify_block_basic(&u, None));
}
// Verify transactions.
// TODO: either use transaction views or cache the decoded transactions.
let v = BlockView::new(bytes);
for t in v.transactions() {
try!(engine.verify_transaction_basic(&t, &header));
}
Ok(())
}
/// Phase 2 verification. Perform costly checks such as transaction signatures and block nonce for ethash.
/// Still operates on a individual block
/// Returns a `PreverifiedBlock` structure populated with transactions
pub fn verify_block_unordered(header: Header, bytes: Bytes, engine: &Engine, check_seal: bool) -> Result<PreverifiedBlock, Error> {
if check_seal {
try!(engine.verify_block_unordered(&header, Some(&bytes)));
for u in try!(UntrustedRlp::new(&bytes).at(2)).iter().map(|rlp| rlp.as_val::<Header>()) {
try!(engine.verify_block_unordered(&try!(u), None));
}
}
// Verify transactions.
let mut transactions = Vec::new();
{
let v = BlockView::new(&bytes);
for t in v.transactions() {
try!(engine.verify_transaction(&t, &header));
transactions.push(t);
}
}
Ok(PreverifiedBlock {
header: header,
transactions: transactions,
bytes: bytes,
})
}
/// Phase 3 verification. Check block information against parent and uncles.
pub fn verify_block_family(header: &Header, bytes: &[u8], engine: &Engine, bc: &BlockProvider) -> Result<(), Error> {
// TODO: verify timestamp
let parent = try!(bc.block_header(&header.parent_hash()).ok_or_else(|| Error::from(BlockError::UnknownParent(header.parent_hash().clone()))));
try!(verify_parent(&header, &parent));
try!(engine.verify_block_family(&header, &parent, Some(bytes)));
let num_uncles = try!(UntrustedRlp::new(bytes).at(2)).item_count();
if num_uncles != 0 {
if num_uncles > engine.maximum_uncle_count() {
return Err(From::from(BlockError::TooManyUncles(OutOfBounds { min: None, max: Some(engine.maximum_uncle_count()), found: num_uncles })));
}
let mut excluded = HashSet::new();
excluded.insert(header.hash());
let mut hash = header.parent_hash().clone();
excluded.insert(hash.clone());
for _ in 0..engine.maximum_uncle_age() {
match bc.block_details(&hash) {
Some(details) => {
excluded.insert(details.parent.clone());
let b = bc.block(&hash)
.expect("parent already known to be stored; qed");
excluded.extend(BlockView::new(&b).uncle_hashes());
hash = details.parent;
}
None => break
}
}
for uncle in try!(UntrustedRlp::new(bytes).at(2)).iter().map(|rlp| rlp.as_val::<Header>()) {
let uncle = try!(uncle);
if excluded.contains(&uncle.hash()) {
return Err(From::from(BlockError::UncleInChain(uncle.hash())))
}
			// m_currentBlock.number() - uncle.number()    m_cB.n - uP.n()
			//   1                                            2
			//   2
			//   3
			//   4
			//   5
			//   6                                            7
			//   (8 Invalid)
let depth = if header.number() > uncle.number() { header.number() - uncle.number() } else { 0 };
if depth > engine.maximum_uncle_age() as u64 {
return Err(From::from(BlockError::UncleTooOld(OutOfBounds { min: Some(header.number() - depth), max: Some(header.number() - 1), found: uncle.number() })));
}
else if depth < 1 {
return Err(From::from(BlockError::UncleIsBrother(OutOfBounds { min: Some(header.number() - depth), max: Some(header.number() - 1), found: uncle.number() })));
}
// cB
// cB.p^1 1 depth, valid uncle
// cB.p^2 ---/ 2
// cB.p^3 -----/ 3
// cB.p^4 -------/ 4
// cB.p^5 ---------/ 5
// cB.p^6 -----------/ 6
// cB.p^7 -------------/
// cB.p^8
let mut expected_uncle_parent = header.parent_hash().clone();
let uncle_parent = try!(bc.block_header(&uncle.parent_hash()).ok_or_else(|| Error::from(BlockError::UnknownUncleParent(uncle.parent_hash().clone()))));
for _ in 0..depth {
match bc.block_details(&expected_uncle_parent) {
Some(details) => {
expected_uncle_parent = details.parent;
},
None => break
}
}
if expected_uncle_parent != uncle_parent.hash() {
return Err(From::from(BlockError::UncleParentNotInChain(uncle_parent.hash())));
}
try!(verify_parent(&uncle, &uncle_parent));
try!(engine.verify_block_family(&uncle, &uncle_parent, Some(bytes)));
}
}
Ok(())
}
/// Phase 4 verification. Check block information against transaction enactment results.
pub fn verify_block_final(expected: &Header, got: &Header) -> Result<(), Error> {
if expected.gas_used() != got.gas_used() {
return Err(From::from(BlockError::InvalidGasUsed(Mismatch { expected: expected.gas_used().clone(), found: got.gas_used().clone() })))
}
if expected.log_bloom() != got.log_bloom() {
return Err(From::from(BlockError::InvalidLogBloom(Mismatch { expected: expected.log_bloom().clone(), found: got.log_bloom().clone() })))
}
if expected.state_root() != got.state_root() {
return Err(From::from(BlockError::InvalidStateRoot(Mismatch { expected: expected.state_root().clone(), found: got.state_root().clone() })))
}
if expected.receipts_root() != got.receipts_root() {
return Err(From::from(BlockError::InvalidReceiptsRoot(Mismatch { expected: expected.receipts_root().clone(), found: got.receipts_root().clone() })))
}
Ok(())
}
/// Check basic header parameters.
pub fn verify_header_params(header: &Header, engine: &Engine) -> Result<(), Error> {
if header.number() >= From::from(BlockNumber::max_value()) {
return Err(From::from(BlockError::RidiculousNumber(OutOfBounds { max: Some(From::from(BlockNumber::max_value())), min: None, found: header.number() })))
}
if header.gas_used() > header.gas_limit() {
return Err(From::from(BlockError::TooMuchGasUsed(OutOfBounds { max: Some(header.gas_limit().clone()), min: None, found: header.gas_used().clone() })));
}
let min_gas_limit = engine.params().min_gas_limit;
if header.gas_limit() < &min_gas_limit {
return Err(From::from(BlockError::InvalidGasLimit(OutOfBounds { min: Some(min_gas_limit), max: None, found: header.gas_limit().clone() })));
}
let maximum_extra_data_size = engine.maximum_extra_data_size();
if header.number() != 0 && header.extra_data().len() > maximum_extra_data_size {
return Err(From::from(BlockError::ExtraDataOutOfBounds(OutOfBounds { min: None, max: Some(maximum_extra_data_size), found: header.extra_data().len() })));
}
let max_time = get_time().sec as u64 + 30;
if header.timestamp() > max_time {
return Err(From::from(BlockError::InvalidTimestamp(OutOfBounds { max: Some(max_time), min: None, found: header.timestamp() })))
}
Ok(())
}
/// Check header parameters against the parent header.
fn verify_parent(header: &Header, parent: &Header) -> Result<(), Error> {
if !header.parent_hash().is_zero() && &parent.hash() != header.parent_hash() {
return Err(From::from(BlockError::InvalidParentHash(Mismatch { expected: parent.hash(), found: header.parent_hash().clone() })))
}
if header.timestamp() <= parent.timestamp() {
return Err(From::from(BlockError::InvalidTimestamp(OutOfBounds { max: None, min: Some(parent.timestamp() + 1), found: header.timestamp() })))
}
if header.number() != parent.number() + 1 {
return Err(From::from(BlockError::InvalidNumber(Mismatch { expected: parent.number() + 1, found: header.number() })));
}
Ok(())
}
/// Verify block data against header: transactions root and uncles hash.
fn verify_block_integrity(block: &[u8], transactions_root: &H256, uncles_hash: &H256) -> Result<(), Error> {
let block = UntrustedRlp::new(block);
let tx = try!(block.at(1));
let expected_root = &ordered_trie_root(tx.iter().map(|r| r.as_raw().to_vec())); //TODO: get rid of vectors here
if expected_root != transactions_root {
return Err(From::from(BlockError::InvalidTransactionsRoot(Mismatch { expected: expected_root.clone(), found: transactions_root.clone() })))
}
let expected_uncles = &try!(block.at(2)).as_raw().sha3();
if expected_uncles != uncles_hash {
return Err(From::from(BlockError::InvalidUnclesHash(Mismatch { expected: expected_uncles.clone(), found: uncles_hash.clone() })))
}
Ok(())
}
#[cfg(test)]
mod tests {
use util::*;
use ethkey::{Random, Generator};
use header::*;
use verification::*;
use blockchain::extras::*;
use error::*;
use error::BlockError::*;
use views::*;
use blockchain::*;
use engines::Engine;
use spec::*;
use transaction::*;
use tests::helpers::*;
use types::log_entry::{LogEntry, LocalizedLogEntry};
use rlp::View;
use time::get_time;
fn check_ok(result: Result<(), Error>) {
result.unwrap_or_else(|e| panic!("Block verification failed: {:?}", e));
}
fn check_fail(result: Result<(), Error>, e: BlockError) {
match result {
Err(Error::Block(ref error)) if *error == e => (),
Err(other) => panic!("Block verification failed.\nExpected: {:?}\nGot: {:?}", e, other),
Ok(_) => panic!("Block verification failed.\nExpected: {:?}\nGot: Ok", e),
}
}
fn check_fail_timestamp(result: Result<(), Error>) {
match result {
Err(Error::Block(BlockError::InvalidTimestamp(_))) => (),
Err(other) => panic!("Block verification failed.\nExpected: InvalidTimestamp\nGot: {:?}", other),
Ok(_) => panic!("Block verification failed.\nExpected: InvalidTimestamp\nGot: Ok"),
}
}
struct TestBlockChain {
blocks: HashMap<H256, Bytes>,
numbers: HashMap<BlockNumber, H256>,
}
impl Default for TestBlockChain {
fn default() -> Self {
TestBlockChain::new()
}
}
impl TestBlockChain {
pub fn new() -> Self {
TestBlockChain {
blocks: HashMap::new(),
numbers: HashMap::new(),
}
}
pub fn insert(&mut self, bytes: Bytes) {
let number = BlockView::new(&bytes).header_view().number();
let hash = BlockView::new(&bytes).header_view().sha3();
self.blocks.insert(hash.clone(), bytes);
self.numbers.insert(number, hash.clone());
}
}
impl BlockProvider for TestBlockChain {
fn is_known(&self, hash: &H256) -> bool {
self.blocks.contains_key(hash)
}
fn first_block(&self) -> Option<H256> {
unimplemented!()
}
/// Get raw block data
fn block(&self, hash: &H256) -> Option<Bytes> {
self.blocks.get(hash).cloned()
}
fn block_header_data(&self, hash: &H256) -> Option<Bytes> {
self.block(hash).map(|b| BlockView::new(&b).header_rlp().as_raw().to_vec())
}
fn block_body(&self, hash: &H256) -> Option<Bytes> {
self.block(hash).map(|b| BlockChain::block_to_body(&b))
}
fn best_ancient_block(&self) -> Option<H256> {
None
}
/// Get the familial details concerning a block.
fn block_details(&self, hash: &H256) -> Option<BlockDetails> {
self.blocks.get(hash).map(|bytes| {
let header = BlockView::new(bytes).header();
BlockDetails {
number: header.number(),
total_difficulty: header.difficulty().clone(),
parent: header.parent_hash().clone(),
children: Vec::new(),
}
})
}
fn transaction_address(&self, _hash: &H256) -> Option<TransactionAddress> {
unimplemented!()
}
/// Get the hash of given block's number.
fn block_hash(&self, index: BlockNumber) -> Option<H256> {
self.numbers.get(&index).cloned()
}
fn blocks_with_bloom(&self, _bloom: &H2048, _from_block: BlockNumber, _to_block: BlockNumber) -> Vec<BlockNumber> {
unimplemented!()
}
fn block_receipts(&self, _hash: &H256) -> Option<BlockReceipts> {
unimplemented!()
}
fn logs<F>(&self, _blocks: Vec<BlockNumber>, _matches: F, _limit: Option<usize>) -> Vec<LocalizedLogEntry>
where F: Fn(&LogEntry) -> bool, Self: Sized {
unimplemented!()
}
}
fn basic_test(bytes: &[u8], engine: &Engine) -> Result<(), Error> {
let header = BlockView::new(bytes).header();
verify_block_basic(&header, bytes, engine)
}
fn family_test<BC>(bytes: &[u8], engine: &Engine, bc: &BC) -> Result<(), Error> where BC: BlockProvider {
let header = BlockView::new(bytes).header();
verify_block_family(&header, bytes, engine, bc)
}
#[test]
#[cfg_attr(feature="dev", allow(similar_names))]
fn test_verify_block() {
use rlp::{RlpStream, Stream};
// Test against morden
let mut good = Header::new();
let spec = Spec::new_test();
let engine = &*spec.engine;
let min_gas_limit = engine.params().min_gas_limit;
good.set_gas_limit(min_gas_limit);
good.set_timestamp(40);
good.set_number(10);
let keypair = Random.generate().unwrap();
let tr1 = Transaction {
action: Action::Create,
value: U256::from(0),
data: Bytes::new(),
gas: U256::from(30_000),
gas_price: U256::from(40_000),
nonce: U256::one()
}.sign(keypair.secret(), None);
let tr2 = Transaction {
action: Action::Create,
value: U256::from(0),
data: Bytes::new(),
gas: U256::from(30_000),
gas_price: U256::from(40_000),
nonce: U256::from(2)
}.sign(keypair.secret(), None);
let good_transactions = [ tr1.clone(), tr2.clone() ];
let diff_inc = U256::from(0x40);
let mut parent6 = good.clone();
parent6.set_number(6);
let mut parent7 = good.clone();
parent7.set_number(7);
parent7.set_parent_hash(parent6.hash());
parent7.set_difficulty(parent6.difficulty().clone() + diff_inc);
parent7.set_timestamp(parent6.timestamp() + 10);
let mut parent8 = good.clone();
parent8.set_number(8);
parent8.set_parent_hash(parent7.hash());
parent8.set_difficulty(parent7.difficulty().clone() + diff_inc);
parent8.set_timestamp(parent7.timestamp() + 10);
let mut good_uncle1 = good.clone();
good_uncle1.set_number(9);
good_uncle1.set_parent_hash(parent8.hash());
good_uncle1.set_difficulty(parent8.difficulty().clone() + diff_inc);
good_uncle1.set_timestamp(parent8.timestamp() + 10);
good_uncle1.extra_data_mut().push(1u8);
let mut good_uncle2 = good.clone();
good_uncle2.set_number(8);
good_uncle2.set_parent_hash(parent7.hash());
good_uncle2.set_difficulty(parent7.difficulty().clone() + diff_inc);
good_uncle2.set_timestamp(parent7.timestamp() + 10);
good_uncle2.extra_data_mut().push(2u8);
let good_uncles = vec![ good_uncle1.clone(), good_uncle2.clone() ];
let mut uncles_rlp = RlpStream::new();
uncles_rlp.append(&good_uncles);
let good_uncles_hash = uncles_rlp.as_raw().sha3();
let good_transactions_root = ordered_trie_root(good_transactions.iter().map(|t| ::rlp::encode::<SignedTransaction>(t).to_vec()));
let mut parent = good.clone();
parent.set_number(9);
parent.set_timestamp(parent8.timestamp() + 10);
parent.set_parent_hash(parent8.hash());
parent.set_difficulty(parent8.difficulty().clone() + diff_inc);
good.set_parent_hash(parent.hash());
good.set_difficulty(parent.difficulty().clone() + diff_inc);
good.set_timestamp(parent.timestamp() + 10);
let mut bc = TestBlockChain::new();
bc.insert(create_test_block(&good));
bc.insert(create_test_block(&parent));
bc.insert(create_test_block(&parent6));
bc.insert(create_test_block(&parent7));
bc.insert(create_test_block(&parent8));
check_ok(basic_test(&create_test_block(&good), engine));
let mut header = good.clone();
header.set_transactions_root(good_transactions_root.clone());
header.set_uncles_hash(good_uncles_hash.clone());<|fim▁hole|> InvalidGasLimit(OutOfBounds { min: Some(min_gas_limit), max: None, found: header.gas_limit().clone() }));
header = good.clone();
header.set_number(BlockNumber::max_value());
check_fail(basic_test(&create_test_block(&header), engine),
RidiculousNumber(OutOfBounds { max: Some(BlockNumber::max_value()), min: None, found: header.number() }));
header = good.clone();
let gas_used = header.gas_limit().clone() + 1.into();
header.set_gas_used(gas_used);
check_fail(basic_test(&create_test_block(&header), engine),
TooMuchGasUsed(OutOfBounds { max: Some(header.gas_limit().clone()), min: None, found: header.gas_used().clone() }));
header = good.clone();
header.extra_data_mut().resize(engine.maximum_extra_data_size() + 1, 0u8);
check_fail(basic_test(&create_test_block(&header), engine),
ExtraDataOutOfBounds(OutOfBounds { max: Some(engine.maximum_extra_data_size()), min: None, found: header.extra_data().len() }));
header = good.clone();
header.extra_data_mut().resize(engine.maximum_extra_data_size() + 1, 0u8);
check_fail(basic_test(&create_test_block(&header), engine),
ExtraDataOutOfBounds(OutOfBounds { max: Some(engine.maximum_extra_data_size()), min: None, found: header.extra_data().len() }));
header = good.clone();
header.set_uncles_hash(good_uncles_hash.clone());
check_fail(basic_test(&create_test_block_with_data(&header, &good_transactions, &good_uncles), engine),
InvalidTransactionsRoot(Mismatch { expected: good_transactions_root.clone(), found: header.transactions_root().clone() }));
header = good.clone();
header.set_transactions_root(good_transactions_root.clone());
check_fail(basic_test(&create_test_block_with_data(&header, &good_transactions, &good_uncles), engine),
InvalidUnclesHash(Mismatch { expected: good_uncles_hash.clone(), found: header.uncles_hash().clone() }));
check_ok(family_test(&create_test_block(&good), engine, &bc));
check_ok(family_test(&create_test_block_with_data(&good, &good_transactions, &good_uncles), engine, &bc));
header = good.clone();
header.set_parent_hash(H256::random());
check_fail(family_test(&create_test_block_with_data(&header, &good_transactions, &good_uncles), engine, &bc),
UnknownParent(header.parent_hash().clone()));
header = good.clone();
header.set_timestamp(10);
check_fail(family_test(&create_test_block_with_data(&header, &good_transactions, &good_uncles), engine, &bc),
InvalidTimestamp(OutOfBounds { max: None, min: Some(parent.timestamp() + 1), found: header.timestamp() }));
header = good.clone();
header.set_timestamp(2450000000);
check_fail_timestamp(basic_test(&create_test_block_with_data(&header, &good_transactions, &good_uncles), engine));
header = good.clone();
header.set_timestamp(get_time().sec as u64 + 40);
check_fail_timestamp(basic_test(&create_test_block_with_data(&header, &good_transactions, &good_uncles), engine));
header = good.clone();
header.set_number(9);
check_fail(family_test(&create_test_block_with_data(&header, &good_transactions, &good_uncles), engine, &bc),
InvalidNumber(Mismatch { expected: parent.number() + 1, found: header.number() }));
header = good.clone();
let mut bad_uncles = good_uncles.clone();
bad_uncles.push(good_uncle1.clone());
check_fail(family_test(&create_test_block_with_data(&header, &good_transactions, &bad_uncles), engine, &bc),
TooManyUncles(OutOfBounds { max: Some(engine.maximum_uncle_count()), min: None, found: bad_uncles.len() }));
// TODO: some additional uncle checks
}
}<|fim▁end|> | check_ok(basic_test(&create_test_block_with_data(&header, &good_transactions, &good_uncles), engine));
header.set_gas_limit(min_gas_limit - From::from(1));
check_fail(basic_test(&create_test_block(&header), engine), |
<|file_name|>xbox-remote-power.py<|end_file_name|><|fim▁begin|>import sys, socket, select, time
from optparse import OptionParser
<|fim▁hole|>
help_text = "xbox-remote-power.py -a <ip address> -i <live id>"
py3 = sys.version_info[0] > 2
def main():
parser = OptionParser()
parser.add_option('-a', '--address', dest='ip_addr', help="IP Address of Xbox One", default='')
parser.add_option('-i', '--id', dest='live_id', help="Live ID of Xbox One", default='')
(opts, args) = parser.parse_args()
if not opts.ip_addr:
opts.ip_addr = user_input("Enter the IP address: ")
ping = False
if not opts.live_id:
print("No Live ID given, do you want to attempt to ping the Xbox for it?")
result = ""
while result not in ("y", "n"):
result = user_input("(y/n): ").lower()
if result == "y":
ping = True
elif result == "n":
opts.live_id = user_input("Enter the Live ID: ")
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setblocking(0)
s.bind(("", 0))
s.connect((opts.ip_addr, XBOX_PORT))
if ping:
print("Attempting to ping Xbox for Live ID...")
s.send(bytearray.fromhex(XBOX_PING))
ready = select.select([s], [], [], 5)
if ready[0]:
data = s.recv(1024)
opts.live_id = data[199:215]
else:
print("Failed to ping Xbox, please enter Live ID manually")
opts.live_id = user_input("Enter the Live ID: ")
if isinstance(opts.live_id, str):
live_id = opts.live_id.encode()
else:
live_id = opts.live_id
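    # Power-on payload layout (as observed on the wire, not an official
    # spec): XBOX_POWER header bytes + ASCII Live ID + a NUL terminator.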
power_packet = bytearray.fromhex(XBOX_POWER) + live_id + b'\x00'
print("Sending power on packets to " + opts.ip_addr)
for i in range(0, 5):
s.send(power_packet)
time.sleep(1)
print("Xbox should turn on now")
s.send(bytearray.fromhex(XBOX_PING))
ready = select.select([s], [], [], 5)
if ready[0]:
data = s.recv(1024)
opts.live_id = data[199:215]
print("Ping successful!")
print("Live ID = " + live_id.decode("utf-8"))
print("")
print("******************************************")
print("* Xbox running - Streaming now possible! *")
print("******************************************")
print("")
else:
print("Failed to ping Xbox - please try again! :(")
print("")
s.close()
def user_input(text):
response = ""
while response == "":
if py3:
response = input(text)
else:
response = raw_input(text)
return response
if __name__ == "__main__":
main()<|fim▁end|> | XBOX_PORT = 5050
XBOX_PING = "dd00000a000000000000000400000002"
XBOX_POWER = "dd02001300000010" |
<|file_name|>decorators.py<|end_file_name|><|fim▁begin|># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Function/method decorators that provide timeout and retry logic.<|fim▁hole|>import itertools
import sys
from devil.android import device_errors
from devil.utils import cmd_helper
from devil.utils import reraiser_thread
from devil.utils import timeout_retry
DEFAULT_TIMEOUT_ATTR = '_default_timeout'
DEFAULT_RETRIES_ATTR = '_default_retries'
def _TimeoutRetryWrapper(f, timeout_func, retries_func, pass_values=False):
""" Wraps a funcion with timeout and retry handling logic.
Args:
f: The function to wrap.
timeout_func: A callable that returns the timeout value.
retries_func: A callable that returns the retries value.
pass_values: If True, passes the values returned by |timeout_func| and
|retries_func| to the wrapped function as 'timeout' and
'retries' kwargs, respectively.
Returns:
The wrapped function.
"""
@functools.wraps(f)
def timeout_retry_wrapper(*args, **kwargs):
timeout = timeout_func(*args, **kwargs)
retries = retries_func(*args, **kwargs)
if pass_values:
kwargs['timeout'] = timeout
kwargs['retries'] = retries
@functools.wraps(f)
def impl():
return f(*args, **kwargs)
try:
if timeout_retry.CurrentTimeoutThreadGroup():
# Don't wrap if there's already an outer timeout thread.
return impl()
else:
desc = '%s(%s)' % (f.__name__, ', '.join(itertools.chain(
(str(a) for a in args),
('%s=%s' % (k, str(v)) for k, v in kwargs.iteritems()))))
return timeout_retry.Run(impl, timeout, retries, desc=desc)
except reraiser_thread.TimeoutError as e:
raise device_errors.CommandTimeoutError(str(e)), None, (
sys.exc_info()[2])
except cmd_helper.TimeoutError as e:
raise device_errors.CommandTimeoutError(str(e)), None, (
sys.exc_info()[2])
return timeout_retry_wrapper
def WithTimeoutAndRetries(f):
"""A decorator that handles timeouts and retries.
'timeout' and 'retries' kwargs must be passed to the function.
Args:
f: The function to decorate.
Returns:
The decorated function.
"""
get_timeout = lambda *a, **kw: kw['timeout']
get_retries = lambda *a, **kw: kw['retries']
return _TimeoutRetryWrapper(f, get_timeout, get_retries)
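# Minimal usage sketch (hypothetical function, not part of this module). The
# kwargs are required at the call site and are also forwarded to the wrapped
# function:
#
#   @WithTimeoutAndRetries
#   def pull_file(device_path, timeout=None, retries=None):
#     ...
#
#   pull_file('/sdcard/log.txt', timeout=60, retries=3)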
def WithExplicitTimeoutAndRetries(timeout, retries):
"""Returns a decorator that handles timeouts and retries.
The provided |timeout| and |retries| values are always used.
Args:
timeout: The number of seconds to wait for the decorated function to
return. Always used.
retries: The number of times the decorated function should be retried on
failure. Always used.
Returns:
The actual decorator.
"""
def decorator(f):
get_timeout = lambda *a, **kw: timeout
get_retries = lambda *a, **kw: retries
return _TimeoutRetryWrapper(f, get_timeout, get_retries)
return decorator
def WithTimeoutAndRetriesDefaults(default_timeout, default_retries):
"""Returns a decorator that handles timeouts and retries.
The provided |default_timeout| and |default_retries| values are used only
if timeout and retries values are not provided.
Args:
default_timeout: The number of seconds to wait for the decorated function
to return. Only used if a 'timeout' kwarg is not passed
to the decorated function.
default_retries: The number of times the decorated function should be
retried on failure. Only used if a 'retries' kwarg is not
passed to the decorated function.
Returns:
The actual decorator.
"""
def decorator(f):
get_timeout = lambda *a, **kw: kw.get('timeout', default_timeout)
get_retries = lambda *a, **kw: kw.get('retries', default_retries)
return _TimeoutRetryWrapper(f, get_timeout, get_retries, pass_values=True)
return decorator
def WithTimeoutAndRetriesFromInstance(
default_timeout_name=DEFAULT_TIMEOUT_ATTR,
default_retries_name=DEFAULT_RETRIES_ATTR,
min_default_timeout=None):
"""Returns a decorator that handles timeouts and retries.
The provided |default_timeout_name| and |default_retries_name| are used to
get the default timeout value and the default retries value from the object
instance if timeout and retries values are not provided.
Note that this should only be used to decorate methods, not functions.
Args:
default_timeout_name: The name of the default timeout attribute of the
instance.
default_retries_name: The name of the default retries attribute of the
instance.
    min_default_timeout: Minimum timeout to be used when using the instance
                         timeout.
Returns:
The actual decorator.
"""
def decorator(f):
def get_timeout(inst, *_args, **kwargs):
ret = getattr(inst, default_timeout_name)
if min_default_timeout is not None:
ret = max(min_default_timeout, ret)
return kwargs.get('timeout', ret)
def get_retries(inst, *_args, **kwargs):
return kwargs.get('retries', getattr(inst, default_retries_name))
return _TimeoutRetryWrapper(f, get_timeout, get_retries, pass_values=True)
return decorator<|fim▁end|> | """
import functools |
<|file_name|>TwitchCore.java<|end_file_name|><|fim▁begin|>package gavilan.irc;
import java.util.Set;
import javax.annotation.PreDestroy;
import org.pircbotx.PircBotX;
public interface TwitchCore {
void doGreet(ChatMessage cm);
Set<String> getPendings();
void message(String channel, String message);<|fim▁hole|>
void onMessage(ChatMessage cm);
@PreDestroy
void shutdown();
void startAll(PircBotX pircBotX);
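  // Illustrative usage sketch; the construction of the implementation and of
  // the bot is hypothetical and outside this contract:
  //
  //   TwitchCore core = ...;  // some implementation
  //   PircBotX bot = ...;     // configured elsewhere
  //   core.startAll(bot);
  //   core.message("#somechannel", "hello");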
}<|fim▁end|> | |
<|file_name|>astencode.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)]
// FIXME: remove this after snapshot, and Results are handled
#![allow(unused_must_use)]
use metadata::common as c;
use metadata::cstore as cstore;
use session::Session;
use metadata::decoder;
use middle::def;
use metadata::encoder as e;
use middle::region;
use metadata::tydecode;
use metadata::tydecode::{DefIdSource, NominalType, TypeWithId, TypeParameter};
use metadata::tydecode::{RegionParameter, ClosureSource};
use metadata::tyencode;
use middle::check_const::ConstQualif;
use middle::mem_categorization::Typer;
use middle::privacy::{AllPublic, LastMod};
use middle::subst;
use middle::subst::VecPerParamSpace;
use middle::ty::{self, Ty, MethodCall, MethodCallee, MethodOrigin};
use util::ppaux::ty_to_string;
use syntax::{ast, ast_map, ast_util, codemap, fold};
use syntax::codemap::Span;
use syntax::fold::Folder;
use syntax::parse::token;
use syntax::ptr::P;
use syntax;
use std::cell::Cell;
use std::io::SeekFrom;
use std::io::prelude::*;
use std::fmt::Debug;
use rbml::reader;
use rbml::writer::Encoder;
use rbml;
use serialize;
use serialize::{Decodable, Decoder, DecoderHelpers, Encodable};
use serialize::EncoderHelpers;
#[cfg(test)] use std::io::Cursor;
#[cfg(test)] use syntax::parse;
#[cfg(test)] use syntax::print::pprust;
struct DecodeContext<'a, 'b, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
cdata: &'b cstore::crate_metadata,
from_id_range: ast_util::IdRange,
to_id_range: ast_util::IdRange,
// Cache the last used filemap for translating spans as an optimization.
last_filemap_index: Cell<usize>,
}
trait tr {
fn tr(&self, dcx: &DecodeContext) -> Self;
}
trait tr_intern {
fn tr_intern(&self, dcx: &DecodeContext) -> ast::DefId;
}
// ______________________________________________________________________
// Top-level methods.
pub fn encode_inlined_item(ecx: &e::EncodeContext,
rbml_w: &mut Encoder,
ii: e::InlinedItemRef) {
let id = match ii {
e::IIItemRef(i) => i.id,
e::IIForeignRef(i) => i.id,
e::IITraitItemRef(_, ti) => ti.id,
e::IIImplItemRef(_, ii) => ii.id,
};
debug!("> Encoding inlined item: {} ({:?})",
ecx.tcx.map.path_to_string(id),
rbml_w.writer.seek(SeekFrom::Current(0)));
// Folding could be avoided with a smarter encoder.
let ii = simplify_ast(ii);
let id_range = ast_util::compute_id_range_for_inlined_item(&ii);
rbml_w.start_tag(c::tag_ast as usize);
id_range.encode(rbml_w);
encode_ast(rbml_w, &ii);
encode_side_tables_for_ii(ecx, rbml_w, &ii);
rbml_w.end_tag();
debug!("< Encoded inlined fn: {} ({:?})",
ecx.tcx.map.path_to_string(id),
rbml_w.writer.seek(SeekFrom::Current(0)));
}
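// Rough sketch of the layout produced above (tag names rather than literal
// bytes):
//
//     tag_ast {
//         id_range,
//         tag_tree { ... the encoded InlinedItem ... },
//         tag_table { ... one entry per side-table datum ... },
//     }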
impl<'a, 'b, 'c, 'tcx> ast_map::FoldOps for &'a DecodeContext<'b, 'c, 'tcx> {
fn new_id(&self, id: ast::NodeId) -> ast::NodeId {
if id == ast::DUMMY_NODE_ID {
// Used by ast_map to map the NodeInlinedParent.
self.tcx.sess.next_node_id()
} else {
self.tr_id(id)
}
}
fn new_def_id(&self, def_id: ast::DefId) -> ast::DefId {
self.tr_def_id(def_id)
}
fn new_span(&self, span: Span) -> Span {
self.tr_span(span)
}
}
/// Decodes an item from its AST in the cdata's metadata and adds it to the
/// ast-map.
pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata,
tcx: &ty::ctxt<'tcx>,
path: Vec<ast_map::PathElem>,
par_doc: rbml::Doc)
-> Result<&'tcx ast::InlinedItem, Vec<ast_map::PathElem>> {
match par_doc.opt_child(c::tag_ast) {
None => Err(path),
Some(ast_doc) => {
let mut path_as_str = None;
debug!("> Decoding inlined fn: {:?}::?",
{
// Do an Option dance to use the path after it is moved below.
let s = ast_map::path_to_string(path.iter().cloned());
path_as_str = Some(s);
path_as_str.as_ref().map(|x| &x[..])
});
let mut ast_dsr = reader::Decoder::new(ast_doc);
let from_id_range = Decodable::decode(&mut ast_dsr).unwrap();
let to_id_range = reserve_id_range(&tcx.sess, from_id_range);
let dcx = &DecodeContext {
cdata: cdata,
tcx: tcx,
from_id_range: from_id_range,
to_id_range: to_id_range,
last_filemap_index: Cell::new(0)
};
let raw_ii = decode_ast(ast_doc);
let ii = ast_map::map_decoded_item(&dcx.tcx.map, path, raw_ii, dcx);
let ident = match *ii {
ast::IIItem(ref i) => i.ident,
ast::IIForeign(ref i) => i.ident,
ast::IITraitItem(_, ref ti) => ti.ident,
ast::IIImplItem(_, ref ii) => ii.ident
};
debug!("Fn named: {}", token::get_ident(ident));
debug!("< Decoded inlined fn: {}::{}",
path_as_str.unwrap(),
token::get_ident(ident));
region::resolve_inlined_item(&tcx.sess, &tcx.region_maps, ii);
decode_side_tables(dcx, ast_doc);
match *ii {
ast::IIItem(ref i) => {
debug!(">>> DECODED ITEM >>>\n{}\n<<< DECODED ITEM <<<",
syntax::print::pprust::item_to_string(&**i));
}
_ => { }
}
Ok(ii)
}
}
}
// ______________________________________________________________________
// Enumerating the IDs which appear in an AST
fn reserve_id_range(sess: &Session,
from_id_range: ast_util::IdRange) -> ast_util::IdRange {
// Handle the case of an empty range:
if from_id_range.empty() { return from_id_range; }
let cnt = from_id_range.max - from_id_range.min;
let to_id_min = sess.reserve_node_ids(cnt);
let to_id_max = to_id_min + cnt;
ast_util::IdRange { min: to_id_min, max: to_id_max }
}
impl<'a, 'b, 'tcx> DecodeContext<'a, 'b, 'tcx> {
/// Translates an internal id, meaning a node id that is known to refer to some part of the
/// item currently being inlined, such as a local variable or argument. All naked node-ids
/// that appear in types have this property, since if something might refer to an external item
/// we would use a def-id to allow for the possibility that the item resides in another crate.
pub fn tr_id(&self, id: ast::NodeId) -> ast::NodeId {
// from_id_range should be non-empty
assert!(!self.from_id_range.empty());
// Use wrapping arithmetic because otherwise it introduces control flow.
// Maybe we should just have the control flow? -- aatch
(id.wrapping_sub(self.from_id_range.min).wrapping_add(self.to_id_range.min))
}
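    // Worked example with hypothetical numbers: if the encoded item used node
    // ids in [100, 105) and reserve_id_range handed back [900, 905) locally,
    // then tr_id(103) == 103 - 100 + 900 == 903.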
/// Translates an EXTERNAL def-id, converting the crate number from the one used in the encoded
    /// data to the current crate numbers. By external, I mean that it should be translated to a
/// reference to the item in its original crate, as opposed to being translated to a reference
/// to the inlined version of the item. This is typically, but not always, what you want,
/// because most def-ids refer to external things like types or other fns that may or may not
/// be inlined. Note that even when the inlined function is referencing itself recursively, we
    /// would want `tr_def_id` for that reference: conceptually the function calls the original,
/// non-inlined version, and trans deals with linking that recursive call to the inlined copy.
///
/// However, there are a *few* cases where def-ids are used but we know that the thing being
/// referenced is in fact *internal* to the item being inlined. In those cases, you should use
/// `tr_intern_def_id()` below.
pub fn tr_def_id(&self, did: ast::DefId) -> ast::DefId {
decoder::translate_def_id(self.cdata, did)
}
/// Translates an INTERNAL def-id, meaning a def-id that is
/// known to refer to some part of the item currently being
/// inlined. In that case, we want to convert the def-id to
/// refer to the current crate and to the new, inlined node-id.
pub fn tr_intern_def_id(&self, did: ast::DefId) -> ast::DefId {
assert_eq!(did.krate, ast::LOCAL_CRATE);
ast::DefId { krate: ast::LOCAL_CRATE, node: self.tr_id(did.node) }
}
/// Translates a `Span` from an extern crate to the corresponding `Span`
/// within the local crate's codemap. `creader::import_codemap()` will
/// already have allocated any additionally needed FileMaps in the local
/// codemap as a side-effect of creating the crate_metadata's
/// `codemap_import_info`.
pub fn tr_span(&self, span: Span) -> Span {
let imported_filemaps = &self.cdata.codemap_import_info[..];
let span = if span.lo > span.hi {
// Currently macro expansion sometimes produces invalid Span values
// where lo > hi. In order not to crash the compiler when trying to
// translate these values, let's transform them into something we
// can handle (and which will produce useful debug locations at
// least some of the time).
// This workaround is only necessary as long as macro expansion is
// not fixed. FIXME(#23480)
codemap::mk_sp(span.lo, span.lo)
} else {
span
};
let filemap_index = {
// Optimize for the case that most spans within a translated item
// originate from the same filemap.
let last_filemap_index = self.last_filemap_index.get();
if span.lo >= imported_filemaps[last_filemap_index].original_start_pos &&
span.lo <= imported_filemaps[last_filemap_index].original_end_pos &&
span.hi >= imported_filemaps[last_filemap_index].original_start_pos &&
span.hi <= imported_filemaps[last_filemap_index].original_end_pos {
last_filemap_index
} else {
let mut a = 0;
let mut b = imported_filemaps.len();
while b - a > 1 {
let m = (a + b) / 2;
if imported_filemaps[m].original_start_pos > span.lo {
b = m;
} else {
a = m;
}
}
self.last_filemap_index.set(a);
a
}
};
let lo = (span.lo - imported_filemaps[filemap_index].original_start_pos) +
imported_filemaps[filemap_index].translated_filemap.start_pos;
let hi = (span.hi - imported_filemaps[filemap_index].original_start_pos) +
imported_filemaps[filemap_index].translated_filemap.start_pos;
codemap::mk_sp(lo, hi)
}
}
impl tr_intern for ast::DefId {
fn tr_intern(&self, dcx: &DecodeContext) -> ast::DefId {
dcx.tr_intern_def_id(*self)
}
}
impl tr for ast::DefId {
fn tr(&self, dcx: &DecodeContext) -> ast::DefId {
dcx.tr_def_id(*self)
}
}
impl tr for Option<ast::DefId> {
fn tr(&self, dcx: &DecodeContext) -> Option<ast::DefId> {
self.map(|d| dcx.tr_def_id(d))
}
}
impl tr for Span {
fn tr(&self, dcx: &DecodeContext) -> Span {
dcx.tr_span(*self)
}
}
trait def_id_encoder_helpers {
fn emit_def_id(&mut self, did: ast::DefId);
}
impl<S:serialize::Encoder> def_id_encoder_helpers for S
where <S as serialize::serialize::Encoder>::Error: Debug
{
fn emit_def_id(&mut self, did: ast::DefId) {
did.encode(self).unwrap()
}
}
trait def_id_decoder_helpers {
fn read_def_id(&mut self, dcx: &DecodeContext) -> ast::DefId;
fn read_def_id_nodcx(&mut self,
cdata: &cstore::crate_metadata) -> ast::DefId;
}
impl<D:serialize::Decoder> def_id_decoder_helpers for D
where <D as serialize::serialize::Decoder>::Error: Debug
{
fn read_def_id(&mut self, dcx: &DecodeContext) -> ast::DefId {
let did: ast::DefId = Decodable::decode(self).unwrap();
did.tr(dcx)
}
fn read_def_id_nodcx(&mut self,
cdata: &cstore::crate_metadata)
-> ast::DefId {
let did: ast::DefId = Decodable::decode(self).unwrap();
decoder::translate_def_id(cdata, did)
}
}
// ______________________________________________________________________
// Encoding and decoding the AST itself
//
// The hard work is done by an autogenerated module astencode_gen. To
// regenerate astencode_gen, run src/etc/gen-astencode. It will
// replace astencode_gen with a dummy file and regenerate its
// contents. If you get compile errors, the dummy file
// remains---resolve the errors and then rerun astencode_gen.
// Annoying, I know, but hopefully only temporary.
//
// When decoding, we have to renumber the AST so that the node ids that
// appear within are disjoint from the node ids in our existing ASTs.
// We also have to adjust the spans: for now we just insert a dummy span,
// but eventually we should add entries to the local codemap as required.
fn encode_ast(rbml_w: &mut Encoder, item: &ast::InlinedItem) {
rbml_w.start_tag(c::tag_tree as usize);
item.encode(rbml_w);
rbml_w.end_tag();
}
struct NestedItemsDropper;
impl Folder for NestedItemsDropper {
fn fold_block(&mut self, blk: P<ast::Block>) -> P<ast::Block> {
blk.and_then(|ast::Block {id, stmts, expr, rules, span, ..}| {
let stmts_sans_items = stmts.into_iter().filter_map(|stmt| {
let use_stmt = match stmt.node {
ast::StmtExpr(_, _) | ast::StmtSemi(_, _) => true,
ast::StmtDecl(ref decl, _) => {
match decl.node {
ast::DeclLocal(_) => true,
ast::DeclItem(_) => false,
}
}
ast::StmtMac(..) => panic!("unexpanded macro in astencode")
};
if use_stmt {
Some(stmt)
} else {
None
}
}).collect();
let blk_sans_items = P(ast::Block {
stmts: stmts_sans_items,
expr: expr,
id: id,
rules: rules,
span: span,
});
fold::noop_fold_block(blk_sans_items, self)
})
}
}
// Produces a simplified copy of the AST which does not include things
// that we do not need to or do not want to export. For example, we
// do not include any nested items: if these nested items are to be
// inlined, their AST will be exported separately (this only makes
// sense because, in Rust, nested items are independent except for
// their visibility).
//
// As it happens, trans relies on the fact that we do not export
// nested items, as otherwise it would get confused when translating
// inlined items.
fn simplify_ast(ii: e::InlinedItemRef) -> ast::InlinedItem {
let mut fld = NestedItemsDropper;
match ii {
// HACK we're not dropping items.
e::IIItemRef(i) => {
ast::IIItem(fold::noop_fold_item(P(i.clone()), &mut fld)
.expect_one("expected one item"))
}
e::IITraitItemRef(d, ti) => {
ast::IITraitItem(d,
fold::noop_fold_trait_item(P(ti.clone()), &mut fld)
.expect_one("noop_fold_trait_item must produce \
exactly one trait item"))
}
e::IIImplItemRef(d, ii) => {
ast::IIImplItem(d,
fold::noop_fold_impl_item(P(ii.clone()), &mut fld)
.expect_one("noop_fold_impl_item must produce \
exactly one impl item"))
}
e::IIForeignRef(i) => {
ast::IIForeign(fold::noop_fold_foreign_item(P(i.clone()), &mut fld))
}
}
}
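// (See `test_simplification` near the bottom of this file for a concrete
// before/after example of this nested-item dropping.)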
fn decode_ast(par_doc: rbml::Doc) -> ast::InlinedItem {
let chi_doc = par_doc.get(c::tag_tree as usize);
let mut d = reader::Decoder::new(chi_doc);
Decodable::decode(&mut d).unwrap()
}
// ______________________________________________________________________
// Encoding and decoding of ast::def
fn decode_def(dcx: &DecodeContext, dsr: &mut reader::Decoder) -> def::Def {
let def: def::Def = Decodable::decode(dsr).unwrap();
def.tr(dcx)
}
impl tr for def::Def {
fn tr(&self, dcx: &DecodeContext) -> def::Def {
match *self {
def::DefFn(did, is_ctor) => def::DefFn(did.tr(dcx), is_ctor),
def::DefMethod(did, p) => {
def::DefMethod(did.tr(dcx), p.map(|did2| did2.tr(dcx)))
}
def::DefSelfTy(opt_did, impl_ids) => { def::DefSelfTy(opt_did.map(|did| did.tr(dcx)),
impl_ids.map(|(nid1, nid2)| {
(dcx.tr_id(nid1),
dcx.tr_id(nid2))
})) }
def::DefMod(did) => { def::DefMod(did.tr(dcx)) }
def::DefForeignMod(did) => { def::DefForeignMod(did.tr(dcx)) }
def::DefStatic(did, m) => { def::DefStatic(did.tr(dcx), m) }
def::DefConst(did) => { def::DefConst(did.tr(dcx)) }
def::DefAssociatedConst(did, p) => {
def::DefAssociatedConst(did.tr(dcx), p.map(|did2| did2.tr(dcx)))
}
def::DefLocal(nid) => { def::DefLocal(dcx.tr_id(nid)) }
def::DefVariant(e_did, v_did, is_s) => {
def::DefVariant(e_did.tr(dcx), v_did.tr(dcx), is_s)
},
def::DefTrait(did) => def::DefTrait(did.tr(dcx)),
def::DefTy(did, is_enum) => def::DefTy(did.tr(dcx), is_enum),
def::DefAssociatedTy(trait_did, did) =>
def::DefAssociatedTy(trait_did.tr(dcx), did.tr(dcx)),
def::DefPrimTy(p) => def::DefPrimTy(p),
def::DefTyParam(s, index, def_id, n) => def::DefTyParam(s, index, def_id.tr(dcx), n),
def::DefUse(did) => def::DefUse(did.tr(dcx)),
def::DefUpvar(nid1, nid2) => {
def::DefUpvar(dcx.tr_id(nid1), dcx.tr_id(nid2))
}
def::DefStruct(did) => def::DefStruct(did.tr(dcx)),
def::DefRegion(nid) => def::DefRegion(dcx.tr_id(nid)),
def::DefLabel(nid) => def::DefLabel(dcx.tr_id(nid))
}
}
}
// ______________________________________________________________________
// Encoding and decoding of ancillary information
impl tr for ty::Region {
fn tr(&self, dcx: &DecodeContext) -> ty::Region {
match *self {
ty::ReLateBound(debruijn, br) => {
ty::ReLateBound(debruijn, br.tr(dcx))
}
ty::ReEarlyBound(data) => {
ty::ReEarlyBound(ty::EarlyBoundRegion {
param_id: dcx.tr_id(data.param_id),
space: data.space,
index: data.index,
name: data.name,
})
}
ty::ReScope(scope) => {
ty::ReScope(scope.tr(dcx))
}
ty::ReEmpty | ty::ReStatic | ty::ReInfer(..) => {
*self
}
ty::ReFree(ref fr) => {
ty::ReFree(fr.tr(dcx))
}
}
}
}
impl tr for ty::FreeRegion {
fn tr(&self, dcx: &DecodeContext) -> ty::FreeRegion {
ty::FreeRegion { scope: self.scope.tr(dcx),
bound_region: self.bound_region.tr(dcx) }
}
}
impl tr for region::CodeExtent {
fn tr(&self, dcx: &DecodeContext) -> region::CodeExtent {
self.map_id(|id| dcx.tr_id(id))
}
}
impl tr for region::DestructionScopeData {
fn tr(&self, dcx: &DecodeContext) -> region::DestructionScopeData {
region::DestructionScopeData { node_id: dcx.tr_id(self.node_id) }
}
}
impl tr for ty::BoundRegion {
fn tr(&self, dcx: &DecodeContext) -> ty::BoundRegion {
match *self {
ty::BrAnon(_) |
ty::BrFresh(_) |
ty::BrEnv => *self,
ty::BrNamed(id, ident) => ty::BrNamed(dcx.tr_def_id(id),
ident),
}
}
}
// ______________________________________________________________________
// Encoding and decoding of freevar information
fn encode_freevar_entry(rbml_w: &mut Encoder, fv: &ty::Freevar) {
(*fv).encode(rbml_w).unwrap();
}
trait rbml_decoder_helper {
fn read_freevar_entry(&mut self, dcx: &DecodeContext)
-> ty::Freevar;
fn read_capture_mode(&mut self) -> ast::CaptureClause;
}
impl<'a> rbml_decoder_helper for reader::Decoder<'a> {
fn read_freevar_entry(&mut self, dcx: &DecodeContext)
-> ty::Freevar {
let fv: ty::Freevar = Decodable::decode(self).unwrap();
fv.tr(dcx)
}
fn read_capture_mode(&mut self) -> ast::CaptureClause {
let cm: ast::CaptureClause = Decodable::decode(self).unwrap();
cm
}
}
impl tr for ty::Freevar {
fn tr(&self, dcx: &DecodeContext) -> ty::Freevar {
ty::Freevar {
def: self.def.tr(dcx),
span: self.span.tr(dcx),
}
}
}
impl tr for ty::UpvarBorrow {
fn tr(&self, dcx: &DecodeContext) -> ty::UpvarBorrow {
ty::UpvarBorrow {
kind: self.kind,
region: self.region.tr(dcx)
}
}
}
impl tr for ty::UpvarCapture {
fn tr(&self, dcx: &DecodeContext) -> ty::UpvarCapture {
match *self {
ty::UpvarCapture::ByValue => ty::UpvarCapture::ByValue,
ty::UpvarCapture::ByRef(ref data) => ty::UpvarCapture::ByRef(data.tr(dcx)),
}
}
}
// ______________________________________________________________________
// Encoding and decoding of MethodCallee
trait read_method_callee_helper<'tcx> {
fn read_method_callee<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> (u32, MethodCallee<'tcx>);
}
fn encode_method_callee<'a, 'tcx>(ecx: &e::EncodeContext<'a, 'tcx>,
rbml_w: &mut Encoder,
autoderef: u32,
method: &MethodCallee<'tcx>) {
use serialize::Encoder;
rbml_w.emit_struct("MethodCallee", 4, |rbml_w| {
rbml_w.emit_struct_field("autoderef", 0, |rbml_w| {
autoderef.encode(rbml_w)
});
rbml_w.emit_struct_field("origin", 1, |rbml_w| {
Ok(rbml_w.emit_method_origin(ecx, &method.origin))
});
rbml_w.emit_struct_field("ty", 2, |rbml_w| {
Ok(rbml_w.emit_ty(ecx, method.ty))
});
rbml_w.emit_struct_field("substs", 3, |rbml_w| {
Ok(rbml_w.emit_substs(ecx, &method.substs))
})
}).unwrap();
}
impl<'a, 'tcx> read_method_callee_helper<'tcx> for reader::Decoder<'a> {
fn read_method_callee<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> (u32, MethodCallee<'tcx>) {
self.read_struct("MethodCallee", 4, |this| {
let autoderef = this.read_struct_field("autoderef", 0, |this| {
Decodable::decode(this)
}).unwrap();
Ok((autoderef, MethodCallee {
origin: this.read_struct_field("origin", 1, |this| {
Ok(this.read_method_origin(dcx))
}).unwrap(),
ty: this.read_struct_field("ty", 2, |this| {
Ok(this.read_ty(dcx))
}).unwrap(),
substs: this.read_struct_field("substs", 3, |this| {
Ok(this.read_substs(dcx))
}).unwrap()
}))
}).unwrap()
}
}
impl<'tcx> tr for MethodOrigin<'tcx> {
fn tr(&self, dcx: &DecodeContext) -> MethodOrigin<'tcx> {
match *self {
ty::MethodStatic(did) => ty::MethodStatic(did.tr(dcx)),
ty::MethodStaticClosure(did) => {
ty::MethodStaticClosure(did.tr(dcx))
}
ty::MethodTypeParam(ref mp) => {
ty::MethodTypeParam(
ty::MethodParam {
// def-id is already translated when we read it out
trait_ref: mp.trait_ref.clone(),
method_num: mp.method_num,
impl_def_id: mp.impl_def_id.tr(dcx),
}
)
}
ty::MethodTraitObject(ref mo) => {
ty::MethodTraitObject(
ty::MethodObject {
trait_ref: mo.trait_ref.clone(),
.. *mo
}
)
}
}
}
}
pub fn encode_closure_kind(ebml_w: &mut Encoder, kind: ty::ClosureKind) {
kind.encode(ebml_w).unwrap();
}
pub trait vtable_decoder_helpers<'tcx> {
fn read_vec_per_param_space<T, F>(&mut self, f: F) -> VecPerParamSpace<T> where
F: FnMut(&mut Self) -> T;
fn read_vtable_res_with_key(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> (u32, ty::vtable_res<'tcx>);
fn read_vtable_res(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> ty::vtable_res<'tcx>;
fn read_vtable_param_res(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> ty::vtable_param_res<'tcx>;
fn read_vtable_origin(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> ty::vtable_origin<'tcx>;
}
impl<'tcx, 'a> vtable_decoder_helpers<'tcx> for reader::Decoder<'a> {
fn read_vec_per_param_space<T, F>(&mut self, mut f: F) -> VecPerParamSpace<T> where
F: FnMut(&mut reader::Decoder<'a>) -> T,
{
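        // These three reads must mirror `encode_vec_per_param_space`, which
        // writes one vector per space in the order of
        // `subst::ParamSpace::all()`: types, self, fns.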
let types = self.read_to_vec(|this| Ok(f(this))).unwrap();
let selfs = self.read_to_vec(|this| Ok(f(this))).unwrap();
let fns = self.read_to_vec(|this| Ok(f(this))).unwrap();
VecPerParamSpace::new(types, selfs, fns)
}
fn read_vtable_res_with_key(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> (u32, ty::vtable_res<'tcx>) {
self.read_struct("VtableWithKey", 2, |this| {
let autoderef = this.read_struct_field("autoderef", 0, |this| {
Decodable::decode(this)
}).unwrap();
Ok((autoderef, this.read_struct_field("vtable_res", 1, |this| {
Ok(this.read_vtable_res(tcx, cdata))
}).unwrap()))
}).unwrap()
}
fn read_vtable_res(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> ty::vtable_res<'tcx>
{
self.read_vec_per_param_space(
|this| this.read_vtable_param_res(tcx, cdata))
}
fn read_vtable_param_res(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> ty::vtable_param_res<'tcx> {
self.read_to_vec(|this| Ok(this.read_vtable_origin(tcx, cdata)))
.unwrap().into_iter().collect()
}
fn read_vtable_origin(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> ty::vtable_origin<'tcx> {
self.read_enum("vtable_origin", |this| {
this.read_enum_variant(&["vtable_static",
"vtable_param",
"vtable_error",
"vtable_closure"],
|this, i| {
Ok(match i {
0 => {
ty::vtable_static(
this.read_enum_variant_arg(0, |this| {
Ok(this.read_def_id_nodcx(cdata))
}).unwrap(),
this.read_enum_variant_arg(1, |this| {
Ok(this.read_substs_nodcx(tcx, cdata))
}).unwrap(),
this.read_enum_variant_arg(2, |this| {
Ok(this.read_vtable_res(tcx, cdata))
}).unwrap()
)
}
1 => {
ty::vtable_param(
this.read_enum_variant_arg(0, |this| {
Decodable::decode(this)
}).unwrap(),
this.read_enum_variant_arg(1, |this| {
this.read_uint()
}).unwrap()
)
}
2 => {
ty::vtable_closure(
this.read_enum_variant_arg(0, |this| {
Ok(this.read_def_id_nodcx(cdata))
}).unwrap()
)
}
3 => {
ty::vtable_error
}
_ => panic!("bad enum variant")
})
})
}).unwrap()
}
}
// ___________________________________________________________________________
//
fn encode_vec_per_param_space<T, F>(rbml_w: &mut Encoder,
v: &subst::VecPerParamSpace<T>,
mut f: F) where
F: FnMut(&mut Encoder, &T),
{
for &space in &subst::ParamSpace::all() {
rbml_w.emit_from_vec(v.get_slice(space),
|rbml_w, n| Ok(f(rbml_w, n))).unwrap();
}
}
// ______________________________________________________________________
// Encoding and decoding the side tables
trait get_ty_str_ctxt<'tcx> {
fn ty_str_ctxt<'a>(&'a self) -> tyencode::ctxt<'a, 'tcx>;
}
impl<'a, 'tcx> get_ty_str_ctxt<'tcx> for e::EncodeContext<'a, 'tcx> {
fn ty_str_ctxt<'b>(&'b self) -> tyencode::ctxt<'b, 'tcx> {
tyencode::ctxt {
diag: self.tcx.sess.diagnostic(),
ds: e::def_to_string,
tcx: self.tcx,
abbrevs: &self.type_abbrevs
}
}
}
trait rbml_writer_helpers<'tcx> {
fn emit_closure_type<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
closure_type: &ty::ClosureTy<'tcx>);
fn emit_method_origin<'a>(&mut self,
ecx: &e::EncodeContext<'a, 'tcx>,
method_origin: &ty::MethodOrigin<'tcx>);
fn emit_ty<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, ty: Ty<'tcx>);
fn emit_tys<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, tys: &[Ty<'tcx>]);
fn emit_type_param_def<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
type_param_def: &ty::TypeParameterDef<'tcx>);
fn emit_predicate<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
predicate: &ty::Predicate<'tcx>);
fn emit_trait_ref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
ty: &ty::TraitRef<'tcx>);
fn emit_type_scheme<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
type_scheme: ty::TypeScheme<'tcx>);
fn emit_substs<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
substs: &subst::Substs<'tcx>);
fn emit_existential_bounds<'b>(&mut self, ecx: &e::EncodeContext<'b,'tcx>,
bounds: &ty::ExistentialBounds<'tcx>);
fn emit_builtin_bounds(&mut self, ecx: &e::EncodeContext, bounds: &ty::BuiltinBounds);
fn emit_auto_adjustment<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
adj: &ty::AutoAdjustment<'tcx>);
fn emit_autoref<'a>(&mut self, autoref: &ty::AutoRef<'tcx>);
fn emit_auto_deref_ref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
auto_deref_ref: &ty::AutoDerefRef<'tcx>);
}
impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> {
fn emit_closure_type<'b>(&mut self,
ecx: &e::EncodeContext<'b, 'tcx>,
closure_type: &ty::ClosureTy<'tcx>) {
self.emit_opaque(|this| {
Ok(e::write_closure_type(ecx, this, closure_type))
});
}
fn emit_method_origin<'b>(&mut self,
ecx: &e::EncodeContext<'b, 'tcx>,
method_origin: &ty::MethodOrigin<'tcx>)
{
use serialize::Encoder;
self.emit_enum("MethodOrigin", |this| {
match *method_origin {
ty::MethodStatic(def_id) => {
this.emit_enum_variant("MethodStatic", 0, 1, |this| {
Ok(this.emit_def_id(def_id))
})
}
ty::MethodStaticClosure(def_id) => {
this.emit_enum_variant("MethodStaticClosure", 1, 1, |this| {
Ok(this.emit_def_id(def_id))
})
}
ty::MethodTypeParam(ref p) => {
this.emit_enum_variant("MethodTypeParam", 2, 1, |this| {
this.emit_struct("MethodParam", 2, |this| {
try!(this.emit_struct_field("trait_ref", 0, |this| {
Ok(this.emit_trait_ref(ecx, &p.trait_ref))
}));
try!(this.emit_struct_field("method_num", 0, |this| {
this.emit_uint(p.method_num)
}));
try!(this.emit_struct_field("impl_def_id", 0, |this| {
this.emit_option(|this| {
match p.impl_def_id {
None => this.emit_option_none(),
Some(did) => this.emit_option_some(|this| {
Ok(this.emit_def_id(did))
})
}
})
}));
Ok(())
})
})
}
ty::MethodTraitObject(ref o) => {
this.emit_enum_variant("MethodTraitObject", 3, 1, |this| {
this.emit_struct("MethodObject", 2, |this| {
try!(this.emit_struct_field("trait_ref", 0, |this| {
Ok(this.emit_trait_ref(ecx, &o.trait_ref))
}));
try!(this.emit_struct_field("object_trait_id", 0, |this| {
Ok(this.emit_def_id(o.object_trait_id))
}));
try!(this.emit_struct_field("method_num", 0, |this| {
this.emit_uint(o.method_num)
}));
try!(this.emit_struct_field("vtable_index", 0, |this| {
this.emit_uint(o.vtable_index)
}));
Ok(())
})
})
}
}
});
}
fn emit_ty<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, ty: Ty<'tcx>) {
self.emit_opaque(|this| Ok(e::write_type(ecx, this, ty)));
}
fn emit_tys<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, tys: &[Ty<'tcx>]) {
self.emit_from_vec(tys, |this, ty| Ok(this.emit_ty(ecx, *ty)));
}
fn emit_trait_ref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
trait_ref: &ty::TraitRef<'tcx>) {
self.emit_opaque(|this| Ok(e::write_trait_ref(ecx, this, trait_ref)));
}
fn emit_type_param_def<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
type_param_def: &ty::TypeParameterDef<'tcx>) {
self.emit_opaque(|this| {
Ok(tyencode::enc_type_param_def(this,
&ecx.ty_str_ctxt(),
type_param_def))
});
}
fn emit_predicate<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
predicate: &ty::Predicate<'tcx>) {
self.emit_opaque(|this| {
Ok(tyencode::enc_predicate(this,
&ecx.ty_str_ctxt(),
predicate))
});
}
fn emit_type_scheme<'b>(&mut self,
ecx: &e::EncodeContext<'b, 'tcx>,
type_scheme: ty::TypeScheme<'tcx>) {
use serialize::Encoder;
self.emit_struct("TypeScheme", 2, |this| {
this.emit_struct_field("generics", 0, |this| {
this.emit_struct("Generics", 2, |this| {
this.emit_struct_field("types", 0, |this| {
Ok(encode_vec_per_param_space(
this, &type_scheme.generics.types,
|this, def| this.emit_type_param_def(ecx, def)))
});
this.emit_struct_field("regions", 1, |this| {
Ok(encode_vec_per_param_space(
this, &type_scheme.generics.regions,
|this, def| def.encode(this).unwrap()))
})
})
});
this.emit_struct_field("ty", 1, |this| {
Ok(this.emit_ty(ecx, type_scheme.ty))
})
});
}
fn emit_existential_bounds<'b>(&mut self, ecx: &e::EncodeContext<'b,'tcx>,
bounds: &ty::ExistentialBounds<'tcx>) {
self.emit_opaque(|this| Ok(tyencode::enc_existential_bounds(this,
&ecx.ty_str_ctxt(),
bounds)));
}
fn emit_builtin_bounds(&mut self, ecx: &e::EncodeContext, bounds: &ty::BuiltinBounds) {
self.emit_opaque(|this| Ok(tyencode::enc_builtin_bounds(this,
&ecx.ty_str_ctxt(),
bounds)));
}
fn emit_substs<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
substs: &subst::Substs<'tcx>) {
self.emit_opaque(|this| Ok(tyencode::enc_substs(this,
&ecx.ty_str_ctxt(),
substs)));
}
fn emit_auto_adjustment<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
adj: &ty::AutoAdjustment<'tcx>) {
use serialize::Encoder;
self.emit_enum("AutoAdjustment", |this| {
match *adj {
ty::AdjustReifyFnPointer=> {
this.emit_enum_variant("AdjustReifyFnPointer", 1, 0, |_| Ok(()))
}
ty::AdjustUnsafeFnPointer => {
this.emit_enum_variant("AdjustUnsafeFnPointer", 2, 0, |_| {
Ok(())
})
}
ty::AdjustDerefRef(ref auto_deref_ref) => {
this.emit_enum_variant("AdjustDerefRef", 3, 2, |this| {
this.emit_enum_variant_arg(0,
|this| Ok(this.emit_auto_deref_ref(ecx, auto_deref_ref)))
})
}
}
});
}
fn emit_autoref<'b>(&mut self, autoref: &ty::AutoRef<'tcx>) {
use serialize::Encoder;
self.emit_enum("AutoRef", |this| {
match autoref {
&ty::AutoPtr(r, m) => {
this.emit_enum_variant("AutoPtr", 0, 2, |this| {
this.emit_enum_variant_arg(0, |this| r.encode(this));
this.emit_enum_variant_arg(1, |this| m.encode(this))
})
}
&ty::AutoUnsafe(m) => {
this.emit_enum_variant("AutoUnsafe", 1, 1, |this| {
this.emit_enum_variant_arg(0, |this| m.encode(this))
})
}
}
});
}
fn emit_auto_deref_ref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
auto_deref_ref: &ty::AutoDerefRef<'tcx>) {
use serialize::Encoder;
self.emit_struct("AutoDerefRef", 2, |this| {
this.emit_struct_field("autoderefs", 0, |this| auto_deref_ref.autoderefs.encode(this));
this.emit_struct_field("autoref", 1, |this| {
this.emit_option(|this| {
match auto_deref_ref.autoref {
None => this.emit_option_none(),
Some(ref a) => this.emit_option_some(|this| Ok(this.emit_autoref(a))),
}
})
});
this.emit_struct_field("unsize", 2, |this| {
this.emit_option(|this| {
match auto_deref_ref.unsize {
None => this.emit_option_none(),
Some(target) => this.emit_option_some(|this| {
Ok(this.emit_ty(ecx, target))
})
}
})
})
});
}
}
trait write_tag_and_id {
fn tag<F>(&mut self, tag_id: c::astencode_tag, f: F) where F: FnOnce(&mut Self);
fn id(&mut self, id: ast::NodeId);
}
impl<'a> write_tag_and_id for Encoder<'a> {
fn tag<F>(&mut self,
tag_id: c::astencode_tag,
f: F) where
F: FnOnce(&mut Encoder<'a>),
{
self.start_tag(tag_id as usize);
f(self);
self.end_tag();
}
fn id(&mut self, id: ast::NodeId) {
id.encode(self).unwrap();
}
}
struct SideTableEncodingIdVisitor<'a, 'b:'a, 'c:'a, 'tcx:'c> {
ecx: &'a e::EncodeContext<'c, 'tcx>,
rbml_w: &'a mut Encoder<'b>,
}
impl<'a, 'b, 'c, 'tcx> ast_util::IdVisitingOperation for
SideTableEncodingIdVisitor<'a, 'b, 'c, 'tcx> {
fn visit_id(&mut self, id: ast::NodeId) {
encode_side_tables_for_id(self.ecx, self.rbml_w, id)
}
}
fn encode_side_tables_for_ii(ecx: &e::EncodeContext,
rbml_w: &mut Encoder,
ii: &ast::InlinedItem) {
rbml_w.start_tag(c::tag_table as usize);
ast_util::visit_ids_for_inlined_item(ii, &mut SideTableEncodingIdVisitor {
ecx: ecx,
rbml_w: rbml_w
});
rbml_w.end_tag();
}
fn encode_side_tables_for_id(ecx: &e::EncodeContext,
rbml_w: &mut Encoder,
id: ast::NodeId) {
let tcx = ecx.tcx;
debug!("Encoding side tables for id {}", id);
if let Some(def) = tcx.def_map.borrow().get(&id).map(|d| d.full_def()) {
rbml_w.tag(c::tag_table_def, |rbml_w| {
rbml_w.id(id);
def.encode(rbml_w).unwrap();
})
}
if let Some(ty) = tcx.node_types().get(&id) {
rbml_w.tag(c::tag_table_node_type, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_ty(ecx, *ty);
})
}
if let Some(item_substs) = tcx.item_substs.borrow().get(&id) {
rbml_w.tag(c::tag_table_item_subst, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_substs(ecx, &item_substs.substs);
})
}
if let Some(fv) = tcx.freevars.borrow().get(&id) {
rbml_w.tag(c::tag_table_freevars, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_from_vec(fv, |rbml_w, fv_entry| {
Ok(encode_freevar_entry(rbml_w, fv_entry))
});
});
for freevar in fv {
rbml_w.tag(c::tag_table_upvar_capture_map, |rbml_w| {
rbml_w.id(id);
let var_id = freevar.def.def_id().node;
let upvar_id = ty::UpvarId {
var_id: var_id,
closure_expr_id: id
};
let upvar_capture = tcx.upvar_capture_map.borrow().get(&upvar_id).unwrap().clone();
var_id.encode(rbml_w);
upvar_capture.encode(rbml_w);
})
}
}
let lid = ast::DefId { krate: ast::LOCAL_CRATE, node: id };
if let Some(type_scheme) = tcx.tcache.borrow().get(&lid) {
rbml_w.tag(c::tag_table_tcache, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_type_scheme(ecx, type_scheme.clone());
})
}
if let Some(type_param_def) = tcx.ty_param_defs.borrow().get(&id) {
rbml_w.tag(c::tag_table_param_defs, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_type_param_def(ecx, type_param_def)
})
}
let method_call = MethodCall::expr(id);
if let Some(method) = tcx.method_map.borrow().get(&method_call) {
rbml_w.tag(c::tag_table_method_map, |rbml_w| {
rbml_w.id(id);
encode_method_callee(ecx, rbml_w, method_call.autoderef, method)
})
}
if let Some(trait_ref) = tcx.object_cast_map.borrow().get(&id) {
rbml_w.tag(c::tag_table_object_cast_map, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_trait_ref(ecx, &trait_ref.0);
})
}
if let Some(adjustment) = tcx.adjustments.borrow().get(&id) {
match *adjustment {
ty::AdjustDerefRef(ref adj) => {
for autoderef in 0..adj.autoderefs {
let method_call = MethodCall::autoderef(id, autoderef as u32);
if let Some(method) = tcx.method_map.borrow().get(&method_call) {
rbml_w.tag(c::tag_table_method_map, |rbml_w| {
rbml_w.id(id);
encode_method_callee(ecx, rbml_w,
method_call.autoderef, method)
})
}
}
}
_ => {}
}
rbml_w.tag(c::tag_table_adjustments, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_auto_adjustment(ecx, adjustment);
})
}
if let Some(closure_type) = tcx.closure_tys.borrow().get(&ast_util::local_def(id)) {
rbml_w.tag(c::tag_table_closure_tys, |rbml_w| {
rbml_w.id(id);
rbml_w.emit_closure_type(ecx, closure_type);
})
}
if let Some(closure_kind) = tcx.closure_kinds.borrow().get(&ast_util::local_def(id)) {
rbml_w.tag(c::tag_table_closure_kinds, |rbml_w| {
rbml_w.id(id);
encode_closure_kind(rbml_w, *closure_kind)
})
}
for &qualif in tcx.const_qualif_map.borrow().get(&id).iter() {
rbml_w.tag(c::tag_table_const_qualif, |rbml_w| {
rbml_w.id(id);
qualif.encode(rbml_w).unwrap()
})
}
}
trait doc_decoder_helpers {
fn as_int(&self) -> isize;
fn opt_child(&self, tag: c::astencode_tag) -> Option<Self>;
}
impl<'a> doc_decoder_helpers for rbml::Doc<'a> {
fn as_int(&self) -> isize { reader::doc_as_u64(*self) as isize }
fn opt_child(&self, tag: c::astencode_tag) -> Option<rbml::Doc<'a>> {
reader::maybe_get_doc(*self, tag as usize)
}
}
trait rbml_decoder_decoder_helpers<'tcx> {
fn read_method_origin<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::MethodOrigin<'tcx>;
fn read_ty<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> Ty<'tcx>;
fn read_tys<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> Vec<Ty<'tcx>>;
fn read_trait_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::TraitRef<'tcx>;
fn read_poly_trait_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::PolyTraitRef<'tcx>;
fn read_type_param_def<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::TypeParameterDef<'tcx>;
fn read_predicate<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::Predicate<'tcx>;
fn read_type_scheme<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::TypeScheme<'tcx>;
fn read_existential_bounds<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::ExistentialBounds<'tcx>;
fn read_substs<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> subst::Substs<'tcx>;
fn read_auto_adjustment<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::AutoAdjustment<'tcx>;
fn read_closure_kind<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::ClosureKind;
fn read_closure_ty<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::ClosureTy<'tcx>;
fn read_auto_deref_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::AutoDerefRef<'tcx>;
fn read_autoref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::AutoRef<'tcx>;
fn convert_def_id(&mut self,
dcx: &DecodeContext,
source: DefIdSource,
did: ast::DefId)
-> ast::DefId;
// Versions of the type reading functions that don't need the full
// DecodeContext.
fn read_ty_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata) -> Ty<'tcx>;
fn read_tys_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata) -> Vec<Ty<'tcx>>;
fn read_substs_nodcx(&mut self, tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> subst::Substs<'tcx>;
}
impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> {
fn read_ty_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata) -> Ty<'tcx> {
self.read_opaque(|_, doc| {
Ok(tydecode::parse_ty_data(
doc.data,
cdata.cnum,
doc.start,
tcx,
|_, id| decoder::translate_def_id(cdata, id)))
}).unwrap()
}
fn read_tys_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata) -> Vec<Ty<'tcx>> {
self.read_to_vec(|this| Ok(this.read_ty_nodcx(tcx, cdata)) )
.unwrap()
.into_iter()
.collect()
}
fn read_substs_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> subst::Substs<'tcx>
{
self.read_opaque(|_, doc| {
Ok(tydecode::parse_substs_data(
doc.data,
cdata.cnum,
doc.start,
tcx,
|_, id| decoder::translate_def_id(cdata, id)))
}).unwrap()
}
fn read_method_origin<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::MethodOrigin<'tcx>
{
self.read_enum("MethodOrigin", |this| {
let variants = &["MethodStatic", "MethodStaticClosure",
"MethodTypeParam", "MethodTraitObject"];
this.read_enum_variant(variants, |this, i| {
Ok(match i {
0 => {
let def_id = this.read_def_id(dcx);
ty::MethodStatic(def_id)
}
1 => {
let def_id = this.read_def_id(dcx);
ty::MethodStaticClosure(def_id)
}
2 => {
this.read_struct("MethodTypeParam", 2, |this| {
Ok(ty::MethodTypeParam(
ty::MethodParam {
trait_ref: {
this.read_struct_field("trait_ref", 0, |this| {
Ok(this.read_trait_ref(dcx))
}).unwrap()
},
method_num: {
this.read_struct_field("method_num", 1, |this| {
this.read_uint()
}).unwrap()
},
impl_def_id: {
this.read_struct_field("impl_def_id", 2, |this| {
this.read_option(|this, b| {
if b {
Ok(Some(this.read_def_id(dcx)))
} else {
Ok(None)
}
})
}).unwrap()
}
}))
}).unwrap()
}
3 => {
this.read_struct("MethodTraitObject", 2, |this| {
Ok(ty::MethodTraitObject(
ty::MethodObject {
trait_ref: {
this.read_struct_field("trait_ref", 0, |this| {
Ok(this.read_trait_ref(dcx))
}).unwrap()
},
object_trait_id: {
this.read_struct_field("object_trait_id", 1, |this| {
Ok(this.read_def_id(dcx))
}).unwrap()
},
method_num: {
this.read_struct_field("method_num", 2, |this| {
this.read_uint()
}).unwrap()
},
vtable_index: {
this.read_struct_field("vtable_index", 3, |this| {
this.read_uint()
}).unwrap()
},
}))
}).unwrap()
}
_ => panic!("..")
})
})
}).unwrap()
}
fn read_ty<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) -> Ty<'tcx> {
        // Note: region types embed local node ids. In principle, we
// should translate these node ids into the new decode
// context. However, we do not bother, because region types
// are not used during trans.
return self.read_opaque(|this, doc| {
debug!("read_ty({})", type_string(doc));
let ty = tydecode::parse_ty_data(
doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a));<|fim▁hole|>
fn type_string(doc: rbml::Doc) -> String {
let mut str = String::new();
for i in doc.start..doc.end {
str.push(doc.data[i] as char);
}
str
}
}
fn read_tys<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> Vec<Ty<'tcx>> {
self.read_to_vec(|this| Ok(this.read_ty(dcx))).unwrap().into_iter().collect()
}
fn read_trait_ref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::TraitRef<'tcx> {
self.read_opaque(|this, doc| {
let ty = tydecode::parse_trait_ref_data(
doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a));
Ok(ty)
}).unwrap()
}
fn read_poly_trait_ref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::PolyTraitRef<'tcx> {
ty::Binder(self.read_opaque(|this, doc| {
let ty = tydecode::parse_trait_ref_data(
doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a));
Ok(ty)
}).unwrap())
}
fn read_type_param_def<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::TypeParameterDef<'tcx> {
self.read_opaque(|this, doc| {
Ok(tydecode::parse_type_param_def_data(
doc.data,
doc.start,
dcx.cdata.cnum,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap()
}
fn read_predicate<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::Predicate<'tcx>
{
self.read_opaque(|this, doc| {
Ok(tydecode::parse_predicate_data(doc.data, doc.start, dcx.cdata.cnum, dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap()
}
fn read_type_scheme<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::TypeScheme<'tcx> {
self.read_struct("TypeScheme", 3, |this| {
Ok(ty::TypeScheme {
generics: this.read_struct_field("generics", 0, |this| {
this.read_struct("Generics", 2, |this| {
Ok(ty::Generics {
types:
this.read_struct_field("types", 0, |this| {
Ok(this.read_vec_per_param_space(
|this| this.read_type_param_def(dcx)))
}).unwrap(),
regions:
this.read_struct_field("regions", 1, |this| {
Ok(this.read_vec_per_param_space(
|this| Decodable::decode(this).unwrap()))
}).unwrap(),
})
})
}).unwrap(),
ty: this.read_struct_field("ty", 1, |this| {
Ok(this.read_ty(dcx))
}).unwrap()
})
}).unwrap()
}
fn read_existential_bounds<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::ExistentialBounds<'tcx>
{
self.read_opaque(|this, doc| {
Ok(tydecode::parse_existential_bounds_data(doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap()
}
fn read_substs<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> subst::Substs<'tcx> {
self.read_opaque(|this, doc| {
Ok(tydecode::parse_substs_data(doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap()
}
fn read_auto_adjustment<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::AutoAdjustment<'tcx> {
self.read_enum("AutoAdjustment", |this| {
let variants = ["AdjustReifyFnPointer", "AdjustUnsafeFnPointer", "AdjustDerefRef"];
this.read_enum_variant(&variants, |this, i| {
Ok(match i {
1 => ty::AdjustReifyFnPointer,
2 => ty::AdjustUnsafeFnPointer,
3 => {
let auto_deref_ref: ty::AutoDerefRef =
this.read_enum_variant_arg(0,
|this| Ok(this.read_auto_deref_ref(dcx))).unwrap();
ty::AdjustDerefRef(auto_deref_ref)
}
_ => panic!("bad enum variant for ty::AutoAdjustment")
})
})
}).unwrap()
}
fn read_auto_deref_ref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::AutoDerefRef<'tcx> {
self.read_struct("AutoDerefRef", 2, |this| {
Ok(ty::AutoDerefRef {
autoderefs: this.read_struct_field("autoderefs", 0, |this| {
Decodable::decode(this)
}).unwrap(),
autoref: this.read_struct_field("autoref", 1, |this| {
this.read_option(|this, b| {
if b {
Ok(Some(this.read_autoref(dcx)))
} else {
Ok(None)
}
})
}).unwrap(),
unsize: this.read_struct_field("unsize", 2, |this| {
this.read_option(|this, b| {
if b {
Ok(Some(this.read_ty(dcx)))
} else {
Ok(None)
}
})
}).unwrap(),
})
}).unwrap()
}
fn read_autoref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::AutoRef<'tcx> {
self.read_enum("AutoRef", |this| {
let variants = ["AutoPtr", "AutoUnsafe"];
this.read_enum_variant(&variants, |this, i| {
Ok(match i {
0 => {
let r: ty::Region =
this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap();
let m: ast::Mutability =
this.read_enum_variant_arg(1, |this| Decodable::decode(this)).unwrap();
ty::AutoPtr(dcx.tcx.mk_region(r.tr(dcx)), m)
}
1 => {
let m: ast::Mutability =
this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap();
ty::AutoUnsafe(m)
}
_ => panic!("bad enum variant for ty::AutoRef")
})
})
}).unwrap()
}
fn read_closure_kind<'b, 'c>(&mut self, _dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::ClosureKind
{
Decodable::decode(self).unwrap()
}
fn read_closure_ty<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::ClosureTy<'tcx>
{
self.read_opaque(|this, doc| {
Ok(tydecode::parse_ty_closure_data(
doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap()
}
/// Converts a def-id that appears in a type. The correct
/// translation will depend on what kind of def-id this is.
/// This is a subtle point: type definitions are not
/// inlined into the current crate, so if the def-id names
/// a nominal type or type alias, then it should be
/// translated to refer to the source crate.
///
/// However, *type parameters* are cloned along with the function
/// they are attached to. So we should translate those def-ids
/// to refer to the new, cloned copy of the type parameter.
/// We only see references to free type parameters in the body of
/// an inlined function. In such cases, we need the def-id to
    /// be a local id so that the TypeContents code is able to look up
/// the relevant info in the ty_param_defs table.
///
/// *Region parameters*, unfortunately, are another kettle of fish.
    /// In such cases, def-ids can appear in types to distinguish
/// shadowed bound regions and so forth. It doesn't actually
/// matter so much what we do to these, since regions are erased
/// at trans time, but it's good to keep them consistent just in
/// case. We translate them with `tr_def_id()` which will map
/// the crate numbers back to the original source crate.
///
/// Unboxed closures are cloned along with the function being
/// inlined, and all side tables use interned node IDs, so we
/// translate their def IDs accordingly.
///
/// It'd be really nice to refactor the type repr to not include
/// def-ids so that all these distinctions were unnecessary.
fn convert_def_id(&mut self,
dcx: &DecodeContext,
source: tydecode::DefIdSource,
did: ast::DefId)
-> ast::DefId {
let r = match source {
NominalType | TypeWithId | RegionParameter => dcx.tr_def_id(did),
TypeParameter | ClosureSource => dcx.tr_intern_def_id(did)
};
debug!("convert_def_id(source={:?}, did={:?})={:?}", source, did, r);
return r;
}
}
fn decode_side_tables(dcx: &DecodeContext,
ast_doc: rbml::Doc) {
let tbl_doc = ast_doc.get(c::tag_table as usize);
reader::docs(tbl_doc, |tag, entry_doc| {
let mut entry_dsr = reader::Decoder::new(entry_doc);
let id0: ast::NodeId = Decodable::decode(&mut entry_dsr).unwrap();
let id = dcx.tr_id(id0);
debug!(">> Side table document with tag 0x{:x} \
found for id {} (orig {})",
tag, id, id0);
let tag = tag as u32;
let decoded_tag: Option<c::astencode_tag> = c::astencode_tag::from_u32(tag);
match decoded_tag {
None => {
dcx.tcx.sess.bug(
&format!("unknown tag found in side tables: {:x}",
tag));
}
Some(value) => {
let val_dsr = &mut entry_dsr;
match value {
c::tag_table_def => {
let def = decode_def(dcx, val_dsr);
dcx.tcx.def_map.borrow_mut().insert(id, def::PathResolution {
base_def: def,
// This doesn't matter cross-crate.
last_private: LastMod(AllPublic),
depth: 0
});
}
c::tag_table_node_type => {
let ty = val_dsr.read_ty(dcx);
debug!("inserting ty for node {}: {}",
id, ty_to_string(dcx.tcx, ty));
dcx.tcx.node_type_insert(id, ty);
}
c::tag_table_item_subst => {
let item_substs = ty::ItemSubsts {
substs: val_dsr.read_substs(dcx)
};
dcx.tcx.item_substs.borrow_mut().insert(
id, item_substs);
}
c::tag_table_freevars => {
let fv_info = val_dsr.read_to_vec(|val_dsr| {
Ok(val_dsr.read_freevar_entry(dcx))
}).unwrap().into_iter().collect();
dcx.tcx.freevars.borrow_mut().insert(id, fv_info);
}
c::tag_table_upvar_capture_map => {
let var_id: ast::NodeId = Decodable::decode(val_dsr).unwrap();
let upvar_id = ty::UpvarId {
var_id: dcx.tr_id(var_id),
closure_expr_id: id
};
let ub: ty::UpvarCapture = Decodable::decode(val_dsr).unwrap();
dcx.tcx.upvar_capture_map.borrow_mut().insert(upvar_id, ub.tr(dcx));
}
c::tag_table_tcache => {
let type_scheme = val_dsr.read_type_scheme(dcx);
let lid = ast::DefId { krate: ast::LOCAL_CRATE, node: id };
dcx.tcx.tcache.borrow_mut().insert(lid, type_scheme);
}
c::tag_table_param_defs => {
let bounds = val_dsr.read_type_param_def(dcx);
dcx.tcx.ty_param_defs.borrow_mut().insert(id, bounds);
}
c::tag_table_method_map => {
let (autoderef, method) = val_dsr.read_method_callee(dcx);
let method_call = MethodCall {
expr_id: id,
autoderef: autoderef
};
dcx.tcx.method_map.borrow_mut().insert(method_call, method);
}
c::tag_table_object_cast_map => {
let trait_ref = val_dsr.read_poly_trait_ref(dcx);
dcx.tcx.object_cast_map.borrow_mut()
.insert(id, trait_ref);
}
c::tag_table_adjustments => {
let adj: ty::AutoAdjustment = val_dsr.read_auto_adjustment(dcx);
dcx.tcx.adjustments.borrow_mut().insert(id, adj);
}
c::tag_table_closure_tys => {
let closure_ty =
val_dsr.read_closure_ty(dcx);
dcx.tcx.closure_tys.borrow_mut().insert(ast_util::local_def(id),
closure_ty);
}
c::tag_table_closure_kinds => {
let closure_kind =
val_dsr.read_closure_kind(dcx);
dcx.tcx.closure_kinds.borrow_mut().insert(ast_util::local_def(id),
closure_kind);
}
c::tag_table_const_qualif => {
let qualif: ConstQualif = Decodable::decode(val_dsr).unwrap();
dcx.tcx.const_qualif_map.borrow_mut().insert(id, qualif);
}
_ => {
dcx.tcx.sess.bug(
&format!("unknown tag found in side tables: {:x}",
tag));
}
}
}
}
debug!(">< Side table doc loaded");
true
});
}
// ______________________________________________________________________
// Testing of astencode_gen
#[cfg(test)]
fn encode_item_ast(rbml_w: &mut Encoder, item: &ast::Item) {
rbml_w.start_tag(c::tag_tree as usize);
(*item).encode(rbml_w);
rbml_w.end_tag();
}
#[cfg(test)]
fn decode_item_ast(par_doc: rbml::Doc) -> ast::Item {
let chi_doc = par_doc.get(c::tag_tree as usize);
let mut d = reader::Decoder::new(chi_doc);
Decodable::decode(&mut d).unwrap()
}
#[cfg(test)]
trait FakeExtCtxt {
fn call_site(&self) -> codemap::Span;
fn cfg(&self) -> ast::CrateConfig;
fn ident_of(&self, st: &str) -> ast::Ident;
fn name_of(&self, st: &str) -> ast::Name;
fn parse_sess(&self) -> &parse::ParseSess;
}
#[cfg(test)]
impl FakeExtCtxt for parse::ParseSess {
fn call_site(&self) -> codemap::Span {
codemap::Span {
lo: codemap::BytePos(0),
hi: codemap::BytePos(0),
expn_id: codemap::NO_EXPANSION,
}
}
fn cfg(&self) -> ast::CrateConfig { Vec::new() }
fn ident_of(&self, st: &str) -> ast::Ident {
parse::token::str_to_ident(st)
}
fn name_of(&self, st: &str) -> ast::Name {
parse::token::intern(st)
}
fn parse_sess(&self) -> &parse::ParseSess { self }
}
#[cfg(test)]
fn mk_ctxt() -> parse::ParseSess {
parse::new_parse_sess()
}
#[cfg(test)]
fn roundtrip(in_item: Option<P<ast::Item>>) {
let in_item = in_item.unwrap();
let mut wr = Cursor::new(Vec::new());
encode_item_ast(&mut Encoder::new(&mut wr), &*in_item);
let rbml_doc = rbml::Doc::new(wr.get_ref());
let out_item = decode_item_ast(rbml_doc);
assert!(*in_item == out_item);
}
#[test]
fn test_basic() {
let cx = mk_ctxt();
roundtrip(quote_item!(&cx,
fn foo() {}
));
}
#[test]
fn test_smalltalk() {
let cx = mk_ctxt();
roundtrip(quote_item!(&cx,
fn foo() -> isize { 3 + 4 } // first smalltalk program ever executed.
));
}
#[test]
fn test_more() {
let cx = mk_ctxt();
roundtrip(quote_item!(&cx,
fn foo(x: usize, y: usize) -> usize {
let z = x + y;
return z;
}
));
}
#[test]
fn test_simplification() {
let cx = mk_ctxt();
let item = quote_item!(&cx,
fn new_int_alist<B>() -> alist<isize, B> {
fn eq_int(a: isize, b: isize) -> bool { a == b }
return alist {eq_fn: eq_int, data: Vec::new()};
}
).unwrap();
let item_in = e::IIItemRef(&*item);
let item_out = simplify_ast(item_in);
let item_exp = ast::IIItem(quote_item!(&cx,
fn new_int_alist<B>() -> alist<isize, B> {
return alist {eq_fn: eq_int, data: Vec::new()};
}
).unwrap());
match (item_out, item_exp) {
(ast::IIItem(item_out), ast::IIItem(item_exp)) => {
assert!(pprust::item_to_string(&*item_out) ==
pprust::item_to_string(&*item_exp));
}
_ => panic!()
}
}<|fim▁end|> |
Ok(ty)
}).unwrap(); |
<|file_name|>popover.js<|end_file_name|><|fim▁begin|>$(function () {
'use strict';
QUnit.module('popover plugin')
QUnit.test('should be defined on jquery object', function (assert) {
assert.expect(1)
assert.ok($(document.body).popover, 'popover method is defined')
})
QUnit.module('popover', {
beforeEach: function () {
// Run all tests in noConflict mode -- it's the only way to ensure that the plugin works in noConflict mode
$.fn.bootstrapPopover = $.fn.popover.noConflict()
},
afterEach: function () {
$.fn.popover = $.fn.bootstrapPopover
delete $.fn.bootstrapPopover
}
})
QUnit.test('should provide no conflict', function (assert) {
assert.expect(1)
assert.strictEqual($.fn.popover, undefined, 'popover was set back to undefined (org value)')
})
QUnit.test('should return jquery collection containing the element', function (assert) {
assert.expect(2)
var $el = $('<div/>')
var $popover = $el.bootstrapPopover()
assert.ok($popover instanceof $, 'returns jquery collection')
assert.strictEqual($popover[0], $el[0], 'collection contains element')
})
QUnit.test('should render popover element', function (assert) {
assert.expect(2)
var $popover = $('<a href="#" title="mdo" data-content="https://twitter.com/mdo">@mdo</a>')
.appendTo('#qunit-fixture')
.bootstrapPopover('show')
assert.notEqual($('.popover').length, 0, 'popover was inserted')
$popover.bootstrapPopover('hide')
assert.strictEqual($('.popover').length, 0, 'popover removed')
})
QUnit.test('should store popover instance in popover data object', function (assert) {
assert.expect(1)
var $popover = $('<a href="#" title="mdo" data-content="https://twitter.com/mdo">@mdo</a>').bootstrapPopover()
assert.ok($popover.data('bs.popover'), 'popover instance exists')
})
QUnit.test('should store popover trigger in popover instance data object', function (assert) {
assert.expect(1)
var $popover = $('<a href="#" title="ResentedHook">@ResentedHook</a>')
.appendTo('#qunit-fixture')
.bootstrapPopover()
$popover.bootstrapPopover('show')
assert.ok($('.popover').data('bs.popover'), 'popover trigger stored in instance data')
})
QUnit.test('should get title and content from options', function (assert) {
assert.expect(4)
var $popover = $('<a href="#">@fat</a>')
.appendTo('#qunit-fixture')
.bootstrapPopover({
title: function () {
return '@fat'
},
content: function () {
return 'loves writing tests (╯°□°)╯︵ ┻━┻'
}
})
$popover.bootstrapPopover('show')
assert.notEqual($('.popover').length, 0, 'popover was inserted')
assert.strictEqual($('.popover .popover-title').text(), '@fat', 'title correctly inserted')
assert.strictEqual($('.popover .popover-content').text(), 'loves writing tests (╯°□°)╯︵ ┻━┻', 'content correctly inserted')
$popover.bootstrapPopover('hide')
assert.strictEqual($('.popover').length, 0, 'popover was removed')
})
QUnit.test('should not duplicate HTML object', function (assert) {
assert.expect(6)
var $div = $('<div/>').html('loves writing tests (╯°□°)╯︵ ┻━┻')
var $popover = $('<a href="#">@fat</a>')
.appendTo('#qunit-fixture')
.bootstrapPopover({
content: function () {
return $div
}
})
$popover.bootstrapPopover('show')
assert.notEqual($('.popover').length, 0, 'popover was inserted')
assert.equal($('.popover .popover-content').html(), $div, 'content correctly inserted')<|fim▁hole|>
$popover.bootstrapPopover('show')
assert.notEqual($('.popover').length, 0, 'popover was inserted')
assert.equal($('.popover .popover-content').html(), $div, 'content correctly inserted')
$popover.bootstrapPopover('hide')
assert.strictEqual($('.popover').length, 0, 'popover was removed')
})
QUnit.test('should get title and content from attributes', function (assert) {
assert.expect(4)
var $popover = $('<a href="#" title="@mdo" data-content="loves data attributes (づ。◕‿‿◕。)づ ︵ ┻━┻" >@mdo</a>')
.appendTo('#qunit-fixture')
.bootstrapPopover()
.bootstrapPopover('show')
assert.notEqual($('.popover').length, 0, 'popover was inserted')
assert.strictEqual($('.popover .popover-title').text(), '@mdo', 'title correctly inserted')
assert.strictEqual($('.popover .popover-content').text(), 'loves data attributes (づ。◕‿‿◕。)づ ︵ ┻━┻', 'content correctly inserted')
$popover.bootstrapPopover('hide')
assert.strictEqual($('.popover').length, 0, 'popover was removed')
})
QUnit.test('should get title and content from attributes ignoring options passed via js', function (assert) {
assert.expect(4)
var $popover = $('<a href="#" title="@mdo" data-content="loves data attributes (づ。◕‿‿◕。)づ ︵ ┻━┻" >@mdo</a>')
.appendTo('#qunit-fixture')
.bootstrapPopover({
title: 'ignored title option',
content: 'ignored content option'
})
.bootstrapPopover('show')
assert.notEqual($('.popover').length, 0, 'popover was inserted')
assert.strictEqual($('.popover .popover-title').text(), '@mdo', 'title correctly inserted')
assert.strictEqual($('.popover .popover-content').text(), 'loves data attributes (づ。◕‿‿◕。)づ ︵ ┻━┻', 'content correctly inserted')
$popover.bootstrapPopover('hide')
assert.strictEqual($('.popover').length, 0, 'popover was removed')
})
QUnit.test('should respect custom template', function (assert) {
assert.expect(3)
var $popover = $('<a href="#">@fat</a>')
.appendTo('#qunit-fixture')
.bootstrapPopover({
title: 'Test',
content: 'Test',
template: '<div class="popover foobar"><div class="arrow"></div><div class="inner"><h3 class="title"/><div class="content"><p/></div></div></div>'
})
$popover.bootstrapPopover('show')
assert.notEqual($('.popover').length, 0, 'popover was inserted')
assert.ok($('.popover').hasClass('foobar'), 'custom class is present')
$popover.bootstrapPopover('hide')
assert.strictEqual($('.popover').length, 0, 'popover was removed')
})
QUnit.test('should destroy popover', function (assert) {
assert.expect(7)
var $popover = $('<div/>')
.bootstrapPopover({
trigger: 'hover'
})
.on('click.foo', $.noop)
assert.ok($popover.data('bs.popover'), 'popover has data')
assert.ok($._data($popover[0], 'events').mouseover && $._data($popover[0], 'events').mouseout, 'popover has hover event')
assert.strictEqual($._data($popover[0], 'events').click[0].namespace, 'foo', 'popover has extra click.foo event')
$popover.bootstrapPopover('show')
$popover.bootstrapPopover('destroy')
assert.ok(!$popover.hasClass('in'), 'popover is hidden')
assert.ok(!$popover.data('popover'), 'popover does not have data')
assert.strictEqual($._data($popover[0], 'events').click[0].namespace, 'foo', 'popover still has click.foo')
assert.ok(!$._data($popover[0], 'events').mouseover && !$._data($popover[0], 'events').mouseout, 'popover does not have any events')
})
QUnit.test('should render popover element using delegated selector', function (assert) {
assert.expect(2)
var $div = $('<div><a href="#" title="mdo" data-content="https://twitter.com/mdo">@mdo</a></div>')
.appendTo('#qunit-fixture')
.bootstrapPopover({
selector: 'a',
trigger: 'click'
})
$div.find('a').trigger('click')
assert.notEqual($('.popover').length, 0, 'popover was inserted')
$div.find('a').trigger('click')
assert.strictEqual($('.popover').length, 0, 'popover was removed')
})
QUnit.test('should detach popover content rather than removing it so that event handlers are left intact', function (assert) {
assert.expect(1)
var $content = $('<div class="content-with-handler"><a class="btn btn-warning">Button with event handler</a></div>').appendTo('#qunit-fixture')
var handlerCalled = false
$('.content-with-handler .btn').on('click', function () {
handlerCalled = true
})
var $div = $('<div><a href="#">Show popover</a></div>')
.appendTo('#qunit-fixture')
.bootstrapPopover({
html: true,
trigger: 'manual',
container: 'body',
content: function () {
return $content
}
})
var done = assert.async()
$div
.one('shown.bs.popover', function () {
$div
.one('hidden.bs.popover', function () {
$div
.one('shown.bs.popover', function () {
$('.content-with-handler .btn').trigger('click')
$div.bootstrapPopover('destroy')
assert.ok(handlerCalled, 'content\'s event handler still present')
done()
})
.bootstrapPopover('show')
})
.bootstrapPopover('hide')
})
.bootstrapPopover('show')
})
QUnit.test('should throw an error when initializing popover on the document object without specifying a delegation selector', function (assert) {
assert.expect(1)
assert.throws(function () {
$(document).bootstrapPopover({title: 'What am I on?', content: 'My selector is missing'})
}, new Error('`selector` option must be specified when initializing popover on the window.document object!'))
})
QUnit.test('should do nothing when an attempt is made to hide an uninitialized popover', function (assert) {
assert.expect(1)
var $popover = $('<span data-toggle="popover" data-title="some title" data-content="some content">some text</span>')
.appendTo('#qunit-fixture')
.on('hidden.bs.popover shown.bs.popover', function () {
assert.ok(false, 'should not fire any popover events')
})
.bootstrapPopover('hide')
assert.strictEqual($popover.data('bs.popover'), undefined, 'should not initialize the popover')
})
QUnit.test('should throw an error when template contains multiple top-level elements', function (assert) {
assert.expect(1)
assert.throws(function () {
$('<span data-toggle="popover" data-title="some title" data-content="some content">some text</span>')
.appendTo('#qunit-fixture')
.bootstrapPopover({template: '<div>Foo</div><div>Bar</div>'})
.bootstrapPopover('show')
}, new Error('popover `template` option must consist of exactly 1 top-level element!'))
})
QUnit.test('should fire inserted event', function (assert) {
assert.expect(2)
var done = assert.async()
$('<a href="#">@Johann-S</a>')
.appendTo('#qunit-fixture')
.on('inserted.bs.popover', function () {
assert.notEqual($('.popover').length, 0, 'popover was inserted')
assert.ok(true, 'inserted event fired')
done()
})
.bootstrapPopover({
title: 'Test',
content: 'Test'
})
.bootstrapPopover('show')
})
})<|fim▁end|> |
$popover.bootstrapPopover('hide')
assert.strictEqual($('.popover').length, 0, 'popover was removed') |
<|file_name|>minwinbase.rs<|end_file_name|><|fim▁begin|>// Copyright © 2016-2017 winapi-rs developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// All files in the project carrying such notice may not be copied, modified, or distributed
// except according to those terms
//! This module defines the 32-Bit Windows Base APIs
use shared::basetsd::ULONG_PTR;
use shared::minwindef::{BOOL, BYTE, DWORD, FILETIME, HMODULE, LPVOID, MAX_PATH, UINT, ULONG, WORD};
use shared::ntstatus::{
STATUS_ACCESS_VIOLATION, STATUS_ARRAY_BOUNDS_EXCEEDED, STATUS_BREAKPOINT,
STATUS_CONTROL_C_EXIT, STATUS_DATATYPE_MISALIGNMENT, STATUS_FLOAT_DENORMAL_OPERAND,
STATUS_FLOAT_DIVIDE_BY_ZERO, STATUS_FLOAT_INEXACT_RESULT, STATUS_FLOAT_INVALID_OPERATION,
STATUS_FLOAT_OVERFLOW, STATUS_FLOAT_STACK_CHECK, STATUS_FLOAT_UNDERFLOW,
STATUS_GUARD_PAGE_VIOLATION, STATUS_ILLEGAL_INSTRUCTION, STATUS_INTEGER_DIVIDE_BY_ZERO,
STATUS_INTEGER_OVERFLOW, STATUS_INVALID_DISPOSITION, STATUS_INVALID_HANDLE,
STATUS_IN_PAGE_ERROR, STATUS_NONCONTINUABLE_EXCEPTION, STATUS_PENDING,
STATUS_POSSIBLE_DEADLOCK, STATUS_PRIVILEGED_INSTRUCTION, STATUS_SINGLE_STEP,
STATUS_STACK_OVERFLOW,
};
use um::winnt::{
CHAR, EXCEPTION_RECORD, HANDLE, LPSTR, LPWSTR, PCONTEXT, PRTL_CRITICAL_SECTION,
PRTL_CRITICAL_SECTION_DEBUG, PVOID, RTL_CRITICAL_SECTION, RTL_CRITICAL_SECTION_DEBUG, WCHAR,
};
//MoveMemory
//CopyMemory
//FillMemory
//ZeroMemory
STRUCT!{struct SECURITY_ATTRIBUTES {
nLength: DWORD,
lpSecurityDescriptor: LPVOID,
bInheritHandle: BOOL,
}}
pub type PSECURITY_ATTRIBUTES = *mut SECURITY_ATTRIBUTES;
pub type LPSECURITY_ATTRIBUTES = *mut SECURITY_ATTRIBUTES;
STRUCT!{struct OVERLAPPED_u_s {
Offset: DWORD,
OffsetHigh: DWORD,
}}
UNION!{union OVERLAPPED_u {
[u32; 2] [u64; 1],
s s_mut: OVERLAPPED_u_s,
Pointer Pointer_mut: PVOID,
}}
STRUCT!{struct OVERLAPPED {
Internal: ULONG_PTR,
InternalHigh: ULONG_PTR,
u: OVERLAPPED_u,
hEvent: HANDLE,
}}
pub type LPOVERLAPPED = *mut OVERLAPPED;
STRUCT!{struct OVERLAPPED_ENTRY {
lpCompletionKey: ULONG_PTR,
lpOverlapped: LPOVERLAPPED,
Internal: ULONG_PTR,
dwNumberOfBytesTransferred: DWORD,
}}
pub type LPOVERLAPPED_ENTRY = *mut OVERLAPPED_ENTRY;
STRUCT!{struct SYSTEMTIME {
wYear: WORD,
wMonth: WORD,
wDayOfWeek: WORD,
wDay: WORD,
wHour: WORD,
wMinute: WORD,
wSecond: WORD,
wMilliseconds: WORD,
}}
pub type PSYSTEMTIME = *mut SYSTEMTIME;
pub type LPSYSTEMTIME = *mut SYSTEMTIME;
STRUCT!{struct WIN32_FIND_DATAA {
dwFileAttributes: DWORD,
ftCreationTime: FILETIME,
ftLastAccessTime: FILETIME,
ftLastWriteTime: FILETIME,
nFileSizeHigh: DWORD,
nFileSizeLow: DWORD,
dwReserved0: DWORD,
dwReserved1: DWORD,
cFileName: [CHAR; MAX_PATH],
cAlternateFileName: [CHAR; 14],
}}
pub type PWIN32_FIND_DATAA = *mut WIN32_FIND_DATAA;
pub type LPWIN32_FIND_DATAA = *mut WIN32_FIND_DATAA;
STRUCT!{struct WIN32_FIND_DATAW {
dwFileAttributes: DWORD,
ftCreationTime: FILETIME,
ftLastAccessTime: FILETIME,
ftLastWriteTime: FILETIME,
nFileSizeHigh: DWORD,
nFileSizeLow: DWORD,
dwReserved0: DWORD,
dwReserved1: DWORD,
cFileName: [WCHAR; MAX_PATH],
cAlternateFileName: [WCHAR; 14],
}}
pub type PWIN32_FIND_DATAW = *mut WIN32_FIND_DATAW;
pub type LPWIN32_FIND_DATAW = *mut WIN32_FIND_DATAW;
ENUM!{enum FINDEX_INFO_LEVELS {
FindExInfoStandard,
FindExInfoBasic,
FindExInfoMaxInfoLevel,
}}
pub const FIND_FIRST_EX_CASE_SENSITIVE: DWORD = 0x00000001;
pub const FIND_FIRST_EX_LARGE_FETCH: DWORD = 0x00000002;
ENUM!{enum FINDEX_SEARCH_OPS {
FindExSearchNameMatch,
FindExSearchLimitToDirectories,
FindExSearchLimitToDevices,
FindExSearchMaxSearchOp,
}}
ENUM!{enum GET_FILEEX_INFO_LEVELS {
GetFileExInfoStandard,
GetFileExMaxInfoLevel,
}}
ENUM!{enum FILE_INFO_BY_HANDLE_CLASS {
FileBasicInfo,
FileStandardInfo,
FileNameInfo,
FileRenameInfo,
FileDispositionInfo,
FileAllocationInfo,
FileEndOfFileInfo,
FileStreamInfo,
FileCompressionInfo,
FileAttributeTagInfo,
FileIdBothDirectoryInfo,
FileIdBothDirectoryRestartInfo,
FileIoPriorityHintInfo,
FileRemoteProtocolInfo,
FileFullDirectoryInfo,
FileFullDirectoryRestartInfo,
FileStorageInfo,
FileAlignmentInfo,
FileIdInfo,
FileIdExtdDirectoryInfo,
FileIdExtdDirectoryRestartInfo,
FileDispositionInfoEx,
FileRenameInfoEx,
MaximumFileInfoByHandleClass,
}}
pub type PFILE_INFO_BY_HANDLE_CLASS = *mut FILE_INFO_BY_HANDLE_CLASS;
pub type CRITICAL_SECTION = RTL_CRITICAL_SECTION;
pub type PCRITICAL_SECTION = PRTL_CRITICAL_SECTION;
pub type LPCRITICAL_SECTION = PRTL_CRITICAL_SECTION;
pub type CRITICAL_SECTION_DEBUG = RTL_CRITICAL_SECTION_DEBUG;
pub type PCRITICAL_SECTION_DEBUG = PRTL_CRITICAL_SECTION_DEBUG;
pub type LPCRITICAL_SECTION_DEBUG = PRTL_CRITICAL_SECTION_DEBUG;
FN!{stdcall LPOVERLAPPED_COMPLETION_ROUTINE(
dwErrorCode: DWORD,
dwNumberOfBytesTransfered: DWORD,
lpOverlapped: LPOVERLAPPED,
) -> ()}
pub const LOCKFILE_FAIL_IMMEDIATELY: DWORD = 0x00000001;
pub const LOCKFILE_EXCLUSIVE_LOCK: DWORD = 0x00000002;
STRUCT!{struct PROCESS_HEAP_ENTRY_Block {
hMem: HANDLE,
dwReserved: [DWORD; 3],
}}
STRUCT!{struct PROCESS_HEAP_ENTRY_Region {
dwCommittedSize: DWORD,
dwUnCommittedSize: DWORD,
lpFirstBlock: LPVOID,
lpLastBlock: LPVOID,
}}
UNION!{union PROCESS_HEAP_ENTRY_u {
[u32; 4] [u64; 3],
Block Block_mut: PROCESS_HEAP_ENTRY_Block,
Region Region_mut: PROCESS_HEAP_ENTRY_Region,
}}
STRUCT!{struct PROCESS_HEAP_ENTRY {
lpData: PVOID,
cbData: DWORD,
cbOverhead: BYTE,
iRegionIndex: BYTE,
wFlags: WORD,
u: PROCESS_HEAP_ENTRY_u,
}}
pub type LPPROCESS_HEAP_ENTRY = *mut PROCESS_HEAP_ENTRY;
pub type PPROCESS_HEAP_ENTRY = *mut PROCESS_HEAP_ENTRY;
pub const PROCESS_HEAP_REGION: WORD = 0x0001;
pub const PROCESS_HEAP_UNCOMMITTED_RANGE: WORD = 0x0002;
pub const PROCESS_HEAP_ENTRY_BUSY: WORD = 0x0004;
pub const PROCESS_HEAP_SEG_ALLOC: WORD = 0x0008;
pub const PROCESS_HEAP_ENTRY_MOVEABLE: WORD = 0x0010;
pub const PROCESS_HEAP_ENTRY_DDESHARE: WORD = 0x0020;
STRUCT!{struct REASON_CONTEXT_Detailed {
LocalizedReasonModule: HMODULE,
LocalizedReasonId: ULONG,
ReasonStringCount: ULONG,
ReasonStrings: *mut LPWSTR,
}}
UNION!{union REASON_CONTEXT_Reason {
[u32; 4] [u64; 3],
Detailed Detailed_mut: REASON_CONTEXT_Detailed,
SimpleReasonString SimpleReasonString_mut: LPWSTR,
}}
STRUCT!{struct REASON_CONTEXT {
Version: ULONG,
Flags: DWORD,
Reason: REASON_CONTEXT_Reason,
}}
pub type PREASON_CONTEXT = *mut REASON_CONTEXT;
pub const EXCEPTION_DEBUG_EVENT: DWORD = 1;
pub const CREATE_THREAD_DEBUG_EVENT: DWORD = 2;
pub const CREATE_PROCESS_DEBUG_EVENT: DWORD = 3;
pub const EXIT_THREAD_DEBUG_EVENT: DWORD = 4;
pub const EXIT_PROCESS_DEBUG_EVENT: DWORD = 5;
pub const LOAD_DLL_DEBUG_EVENT: DWORD = 6;
pub const UNLOAD_DLL_DEBUG_EVENT: DWORD = 7;
pub const OUTPUT_DEBUG_STRING_EVENT: DWORD = 8;
pub const RIP_EVENT: DWORD = 9;
FN!{stdcall PTHREAD_START_ROUTINE(
lpThreadParameter: LPVOID,
) -> DWORD}
pub type LPTHREAD_START_ROUTINE = PTHREAD_START_ROUTINE;
STRUCT!{struct EXCEPTION_DEBUG_INFO {
ExceptionRecord: EXCEPTION_RECORD,
dwFirstChance: DWORD,
}}
pub type LPEXCEPTION_DEBUG_INFO = *mut EXCEPTION_DEBUG_INFO;
STRUCT!{struct CREATE_THREAD_DEBUG_INFO {
hThread: HANDLE,
lpThreadLocalBase: LPVOID,
lpStartAddress: LPTHREAD_START_ROUTINE,
}}
pub type LPCREATE_THREAD_DEBUG_INFO = *mut CREATE_THREAD_DEBUG_INFO;
STRUCT!{struct CREATE_PROCESS_DEBUG_INFO {
hFile: HANDLE,
hProcess: HANDLE,
hThread: HANDLE,
lpBaseOfImage: LPVOID,
dwDebugInfoFileOffset: DWORD,
nDebugInfoSize: DWORD,
lpThreadLocalBase: LPVOID,
lpStartAddress: LPTHREAD_START_ROUTINE,
lpImageName: LPVOID,
fUnicode: WORD,
}}
pub type LPCREATE_PROCESS_DEBUG_INFO = *mut CREATE_PROCESS_DEBUG_INFO;
STRUCT!{struct EXIT_THREAD_DEBUG_INFO {
dwExitCode: DWORD,
}}
pub type LPEXIT_THREAD_DEBUG_INFO = *mut EXIT_THREAD_DEBUG_INFO;
STRUCT!{struct EXIT_PROCESS_DEBUG_INFO {
dwExitCode: DWORD,
}}
pub type LPEXIT_PROCESS_DEBUG_INFO = *mut EXIT_PROCESS_DEBUG_INFO;
STRUCT!{struct LOAD_DLL_DEBUG_INFO {
hFile: HANDLE,
lpBaseOfDll: LPVOID,
dwDebugInfoFileOffset: DWORD,
nDebugInfoSize: DWORD,
lpImageName: LPVOID,
fUnicode: WORD,
}}
pub type LPLOAD_DLL_DEBUG_INFO = *mut LOAD_DLL_DEBUG_INFO;
STRUCT!{struct UNLOAD_DLL_DEBUG_INFO {
lpBaseOfDll: LPVOID,
}}
pub type LPUNLOAD_DLL_DEBUG_INFO = *mut UNLOAD_DLL_DEBUG_INFO;
STRUCT!{struct OUTPUT_DEBUG_STRING_INFO {
lpDebugStringData: LPSTR,
fUnicode: WORD,
nDebugStringLength: WORD,
}}
pub type LPOUTPUT_DEBUG_STRING_INFO = *mut OUTPUT_DEBUG_STRING_INFO;
STRUCT!{struct RIP_INFO {
dwError: DWORD,
dwType: DWORD,
}}
pub type LPRIP_INFO = *mut RIP_INFO;
UNION!{union DEBUG_EVENT_u {
[u32; 21] [u64; 20],
Exception Exception_mut: EXCEPTION_DEBUG_INFO,
CreateThread CreateThread_mut: CREATE_THREAD_DEBUG_INFO,
CreateProcessInfo CreateProcessInfo_mut: CREATE_PROCESS_DEBUG_INFO,
ExitThread ExitThread_mut: EXIT_THREAD_DEBUG_INFO,
ExitProcess ExitProcess_mut: EXIT_PROCESS_DEBUG_INFO,
LoadDll LoadDll_mut: LOAD_DLL_DEBUG_INFO,
UnloadDll UnloadDll_mut: UNLOAD_DLL_DEBUG_INFO,
DebugString DebugString_mut: OUTPUT_DEBUG_STRING_INFO,
RipInfo RipInfo_mut: RIP_INFO,
}}
STRUCT!{struct DEBUG_EVENT {
dwDebugEventCode: DWORD,
dwProcessId: DWORD,
dwThreadId: DWORD,
u: DEBUG_EVENT_u,
}}
pub type LPDEBUG_EVENT = *mut DEBUG_EVENT;
pub type LPCONTEXT = PCONTEXT;
pub const STILL_ACTIVE: DWORD = STATUS_PENDING as u32;
pub const EXCEPTION_ACCESS_VIOLATION: DWORD = STATUS_ACCESS_VIOLATION as u32;
pub const EXCEPTION_DATATYPE_MISALIGNMENT: DWORD = STATUS_DATATYPE_MISALIGNMENT as u32;
pub const EXCEPTION_BREAKPOINT: DWORD = STATUS_BREAKPOINT as u32;
pub const EXCEPTION_SINGLE_STEP: DWORD = STATUS_SINGLE_STEP as u32;
pub const EXCEPTION_ARRAY_BOUNDS_EXCEEDED: DWORD = STATUS_ARRAY_BOUNDS_EXCEEDED as u32;
pub const EXCEPTION_FLT_DENORMAL_OPERAND: DWORD = STATUS_FLOAT_DENORMAL_OPERAND as u32;
pub const EXCEPTION_FLT_DIVIDE_BY_ZERO: DWORD = STATUS_FLOAT_DIVIDE_BY_ZERO as u32;
pub const EXCEPTION_FLT_INEXACT_RESULT: DWORD = STATUS_FLOAT_INEXACT_RESULT as u32;
pub const EXCEPTION_FLT_INVALID_OPERATION: DWORD = STATUS_FLOAT_INVALID_OPERATION as u32;
pub const EXCEPTION_FLT_OVERFLOW: DWORD = STATUS_FLOAT_OVERFLOW as u32;
pub const EXCEPTION_FLT_STACK_CHECK: DWORD = STATUS_FLOAT_STACK_CHECK as u32;
pub const EXCEPTION_FLT_UNDERFLOW: DWORD = STATUS_FLOAT_UNDERFLOW as u32;
pub const EXCEPTION_INT_DIVIDE_BY_ZERO: DWORD = STATUS_INTEGER_DIVIDE_BY_ZERO as u32;
pub const EXCEPTION_INT_OVERFLOW: DWORD = STATUS_INTEGER_OVERFLOW as u32;
pub const EXCEPTION_PRIV_INSTRUCTION: DWORD = STATUS_PRIVILEGED_INSTRUCTION as u32;
pub const EXCEPTION_IN_PAGE_ERROR: DWORD = STATUS_IN_PAGE_ERROR as u32;
pub const EXCEPTION_ILLEGAL_INSTRUCTION: DWORD = STATUS_ILLEGAL_INSTRUCTION as u32;
pub const EXCEPTION_NONCONTINUABLE_EXCEPTION: DWORD = STATUS_NONCONTINUABLE_EXCEPTION as u32;
pub const EXCEPTION_STACK_OVERFLOW: DWORD = STATUS_STACK_OVERFLOW as u32;
pub const EXCEPTION_INVALID_DISPOSITION: DWORD = STATUS_INVALID_DISPOSITION as u32;
pub const EXCEPTION_GUARD_PAGE: DWORD = STATUS_GUARD_PAGE_VIOLATION as u32;
pub const EXCEPTION_INVALID_HANDLE: DWORD = STATUS_INVALID_HANDLE as u32;
pub const EXCEPTION_POSSIBLE_DEADLOCK: DWORD = STATUS_POSSIBLE_DEADLOCK as u32;
pub const CONTROL_C_EXIT: DWORD = STATUS_CONTROL_C_EXIT as u32;
pub const LMEM_FIXED: UINT = 0x0000;
pub const LMEM_MOVEABLE: UINT = 0x0002;
pub const LMEM_NOCOMPACT: UINT = 0x0010;
pub const LMEM_NODISCARD: UINT = 0x0020;
pub const LMEM_ZEROINIT: UINT = 0x0040;
pub const LMEM_MODIFY: UINT = 0x0080;
pub const LMEM_DISCARDABLE: UINT = 0x0F00;<|fim▁hole|>pub const LHND: UINT = LMEM_MOVEABLE | LMEM_ZEROINIT;
pub const LPTR: UINT = LMEM_FIXED | LMEM_ZEROINIT;
pub const NONZEROLHND: UINT = LMEM_MOVEABLE;
pub const NONZEROLPTR: UINT = LMEM_FIXED;
//LocalDiscard
pub const LMEM_DISCARDED: UINT = 0x4000;
pub const LMEM_LOCKCOUNT: UINT = 0x00FF;
pub const NUMA_NO_PREFERRED_NODE: DWORD = -1i32 as DWORD;<|fim▁end|> | pub const LMEM_VALID_FLAGS: UINT = 0x0F72;
pub const LMEM_INVALID_HANDLE: UINT = 0x8000; |
<|file_name|>ScriptBinding.py<|end_file_name|><|fim▁begin|>"""Extension to execute code outside the Python shell window.
This adds the following commands:
- Check module does a full syntax check of the current module.
It also runs the tabnanny to catch any inconsistent tabs.
- Run module executes the module's code in the __main__ namespace. The window
must have been saved previously. The module is added to sys.modules, and is
also added to the __main__ namespace.
XXX GvR Redesign this interface (yet again) as follows:
- Present a dialog box for ``Run Module''
<|fim▁hole|>
- Allow specifying command line arguments in the dialog box
"""
import os
import re
import string
import tabnanny
import tokenize
import tkMessageBox
from idlelib import PyShell
from idlelib.configHandler import idleConf
IDENTCHARS = string.ascii_letters + string.digits + "_"
indent_message = """Error: Inconsistent indentation detected!
1) Your indentation is outright incorrect (easy to fix), OR
2) Your indentation mixes tabs and spaces.
To fix case 2, change all tabs to spaces by using Edit->Select All followed \
by Format->Untabify Region and specify the number of columns used by each tab.
"""
class ScriptBinding:
menudefs = [
('run', [None,
('Check Module', '<<check-module>>'),
('Run Module', '<<run-module>>'), ]), ]
def __init__(self, editwin):
self.editwin = editwin
# Provide instance variables referenced by Debugger
# XXX This should be done differently
self.flist = self.editwin.flist
self.root = self.editwin.root
def check_module_event(self, event):
filename = self.getfilename()
if not filename:
return 'break'
if not self.checksyntax(filename):
return 'break'
if not self.tabnanny(filename):
return 'break'
def tabnanny(self, filename):
f = open(filename, 'r')
try:
tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
except tokenize.TokenError, msg:
msgtxt, (lineno, start) = msg
self.editwin.gotoline(lineno)
self.errorbox("Tabnanny Tokenizing Error",
"Token Error: %s" % msgtxt)
return False
except tabnanny.NannyNag, nag:
# The error messages from tabnanny are too confusing...
self.editwin.gotoline(nag.get_lineno())
self.errorbox("Tab/space error", indent_message)
return False
return True
def checksyntax(self, filename):
self.shell = shell = self.flist.open_shell()
saved_stream = shell.get_warning_stream()
shell.set_warning_stream(shell.stderr)
f = open(filename, 'r')
source = f.read()
f.close()
if '\r' in source:
source = re.sub(r"\r\n", "\n", source)
source = re.sub(r"\r", "\n", source)
if source and source[-1] != '\n':
source = source + '\n'
text = self.editwin.text
text.tag_remove("ERROR", "1.0", "end")
try:
try:
# If successful, return the compiled code
return compile(source, filename, "exec")
except (SyntaxError, OverflowError), err:
try:
msg, (errorfilename, lineno, offset, line) = err
if not errorfilename:
err.args = msg, (filename, lineno, offset, line)
err.filename = filename
self.colorize_syntax_error(msg, lineno, offset)
except:
msg = "*** " + str(err)
self.errorbox("Syntax error",
"There's an error in your program:\n" + msg)
return False
finally:
shell.set_warning_stream(saved_stream)
def colorize_syntax_error(self, msg, lineno, offset):
text = self.editwin.text
pos = "0.0 + %d lines + %d chars" % (lineno-1, offset-1)
text.tag_add("ERROR", pos)
char = text.get(pos)
if char and char in IDENTCHARS:
text.tag_add("ERROR", pos + " wordstart", pos)
if '\n' == text.get(pos): # error at line end
text.mark_set("insert", pos)
else:
text.mark_set("insert", pos + "+1c")
text.see(pos)
def run_module_event(self, event):
"""Run the module after setting up the environment.
First check the syntax. If OK, make sure the shell is active and
then transfer the arguments, set the run environment's working
directory to the directory of the module being executed and also
add that directory to its sys.path if not already included.
"""
filename = self.getfilename()
if not filename:
return 'break'
code = self.checksyntax(filename)
if not code:
return 'break'
if not self.tabnanny(filename):
return 'break'
shell = self.shell
interp = shell.interp
if PyShell.use_subprocess:
shell.restart_shell()
dirname = os.path.dirname(filename)
# XXX Too often this discards arguments the user just set...
interp.runcommand("""if 1:
_filename = %r
import sys as _sys
from os.path import basename as _basename
if (not _sys.argv or
_basename(_sys.argv[0]) != _basename(_filename)):
_sys.argv = [_filename]
import os as _os
_os.chdir(%r)
del _filename, _sys, _basename, _os
\n""" % (filename, dirname))
interp.prepend_syspath(filename)
# XXX KBK 03Jul04 When run w/o subprocess, runtime warnings still
# go to __stderr__. With subprocess, they go to the shell.
# Need to change streams in PyShell.ModifiedInterpreter.
interp.runcode(code)
return 'break'
def getfilename(self):
"""Get source filename. If not saved, offer to save (or create) file
The debugger requires a source file. Make sure there is one, and that
the current version of the source buffer has been saved. If the user
declines to save or cancels the Save As dialog, return None.
If the user has configured IDLE for Autosave, the file will be
silently saved if it already exists and is dirty.
"""
filename = self.editwin.io.filename
if not self.editwin.get_saved():
autosave = idleConf.GetOption('main', 'General',
'autosave', type='bool')
if autosave and filename:
self.editwin.io.save(None)
else:
reply = self.ask_save_dialog()
self.editwin.text.focus_set()
if reply == "ok":
self.editwin.io.save(None)
filename = self.editwin.io.filename
else:
filename = None
return filename
def ask_save_dialog(self):
msg = "Source Must Be Saved\n" + 5*' ' + "OK to Save?"
mb = tkMessageBox.Message(title="Save Before Run or Check",
message=msg,
icon=tkMessageBox.QUESTION,
type=tkMessageBox.OKCANCEL,
default=tkMessageBox.OK,
master=self.editwin.text)
return mb.show()
def errorbox(self, title, message):
# XXX This should really be a function of EditorWindow...
tkMessageBox.showerror(title, message, master=self.editwin.text)
self.editwin.text.focus_set()<|fim▁end|> | |
<|file_name|>ProgressBarTest.py<|end_file_name|><|fim▁begin|>##########################################################################
#
# Copyright (c) 2011-2012, Image Engine Design Inc. All rights reserved.<|fim▁hole|># Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import GafferUI
import GafferUITest
class ProgressBarTest( GafferUITest.TestCase ) :
def testConstructor( self ) :
b = GafferUI.ProgressBar()
self.assertEqual( b.getRange(), ( 0, 100 ) )
self.assertEqual( b.getProgress(), 0 )
self.assertEqual( b.getText(), "%p%" )
b = GafferUI.ProgressBar( 10, ( 5, 15 ), "doing something %p%" )
self.assertEqual( b.getRange(), ( 5, 15 ) )
self.assertEqual( b.getProgress(), 10 )
self.assertEqual( b.getText(), "doing something %p%" )
def testAccessors( self ) :
b = GafferUI.ProgressBar()
b.setRange( ( 0, 20 ) )
self.assertEqual( b.getRange(), ( 0, 20 ) )
b.setProgress( 10 )
self.assertEqual( b.getProgress(), 10 )
b.setText( "woteva" )
self.assertEqual( b.getText(), "woteva" )
if __name__ == "__main__":
unittest.main()<|fim▁end|> | # |
<|file_name|>qsdateutil.py<|end_file_name|><|fim▁begin|>'''
(c) 2011, 2012 Georgia Tech Research Corporation
This source code is released under the New BSD license. Please see
http://wiki.quantsoftware.org/index.php?title=QSTK_License
for license details.
Created on Jan 1, 2011
@author:Drew Bratcher
@contact: [email protected]
@summary: Contains tutorial for backtester and report.
'''
import datetime as dt
from datetime import timedelta
import time as t
import numpy as np
import os
import pandas as pd
def _cache_dates():
''' Caches dates '''
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure you have NYSE_dates.txt in the qstkutil directory"
datestxt = np.loadtxt(filename, dtype=str)
dates = []
for i in datestxt:
dates.append(dt.datetime.strptime(i, "%m/%d/%Y"))
return pd.TimeSeries(index=dates, data=dates)
GTS_DATES = _cache_dates()
def getMonthNames():
return(['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'])
def getYears(funds):
years=[]
for date in funds.index:
if(not(date.year in years)):
years.append(date.year)
return(years)
def getMonths(funds,year):
months=[]
for date in funds.index:
if((date.year==year) and not(date.month in months)):
months.append(date.month)
return(months)
def getDays(funds,year,month):
days=[]
for date in funds.index:
if((date.year==year) and (date.month==month)):
days.append(date)
return(days)
def getDaysBetween(ts_start, ts_end):
days=[]
for i in range(0,(ts_end-ts_start).days):
days.append(ts_start+timedelta(days=1)*i)
return(days)
def getFirstDay(funds,year,month):
for date in funds.index:
if((date.year==year) and (date.month==month)):
return(date)
return('ERROR')
def getLastDay(funds,year,month):
return_date = 'ERROR'
for date in funds.index:
if((date.year==year) and (date.month==month)):
return_date = date
return(return_date)
def getNextOptionClose(day, trade_days, offset=0):
#get third friday in month of day
#get first of month
year_off=0
if day.month+offset > 12:
year_off = 1
offset = offset - 12
first = dt.datetime(day.year+year_off, day.month+offset, 1, hour=16)
#get weekday
day_num = first.weekday()
#get first friday (friday - weekday) add 7 if less than 1
dif = 5 - day_num
if dif < 1:
dif = dif+7
#move to third friday
dif = dif + 14
friday = first+dt.timedelta(days=(dif-1))
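    # Worked example (hypothetical month): if the 1st is a Wednesday, weekday()
    # returns 2, so dif = 3 and the first Friday is the 3rd; after dif += 14,
    # friday = first + 16 days = the 17th, i.e. the third Friday of the month.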
#if friday is a holiday, options expire then
if friday in trade_days:
month_close = first + dt.timedelta(days=dif)
else:
month_close = friday
#if day is past the day after that
if month_close < day:
return_date = getNextOptionClose(day, trade_days, offset=1)
else:
return_date = month_close
return(return_date)
def getLastOptionClose(day, trade_days):
start = day
while getNextOptionClose(day, trade_days)>=start:
day= day - dt.timedelta(days=1)
return(getNextOptionClose(day, trade_days))
def getNYSEoffset(mark, offset):
''' Returns NYSE date offset by number of days '''
mark = mark.replace(hour=0, minute=0, second=0, microsecond=0)
i = GTS_DATES.index.searchsorted(mark, side='right')
# If there is no exact match, take first date in past
if GTS_DATES[i] != mark:
i -= 1
ret = GTS_DATES[i + offset]
ret = ret.replace(hour=16)
return ret
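# Hypothetical usage: getNYSEoffset(mark, -1) gives the NYSE trading day
# preceding mark (snapping back first if mark itself is not a trading day),
# stamped at 16:00.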
def getNYSEdays(startday = dt.datetime(1964,7,5), endday = dt.datetime(2020,12,31),
timeofday = dt.timedelta(0)):
"""
@summary: Create a list of timestamps between startday and endday (inclusive)
    that correspond to the days on which there was trading at the NYSE. This function
    depends on a separately created file that lists all days since July 4,
1962 that the NYSE has been open, going forward to 2020 (based
on the holidays that NYSE recognizes).
@param startday: First timestamp to consider (inclusive)
    @param endday: Last day to consider (inclusive)
    @param timeofday: Time offset (a timedelta) added to each returned timestamp
@return list: of timestamps between startday and endday on which NYSE traded
@rtype datetime<|fim▁hole|> """
start = startday - timeofday
end = endday - timeofday
dates = GTS_DATES[start:end]
ret = [x + timeofday for x in dates]
return(ret)
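# Example usage (hypothetical dates): NYSE trading timestamps for January 2010,
# stamped at the 16:00 close:
#   ldt_timestamps = getNYSEdays(dt.datetime(2010, 1, 1),
#                                dt.datetime(2010, 1, 31),
#                                dt.timedelta(hours=16))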
def getNextNNYSEdays(startday, days, timeofday):
"""
    @summary: Create a list of timestamps starting at startday that is 'days'
    entries long and corresponds to days on which there was trading at NYSE. This function
depends on the file used in getNYSEdays and assumes the dates within are
in order.
@param startday: First timestamp to consider (inclusive)
@param days: Number of timestamps to return
@return list: List of timestamps starting at startday on which NYSE traded
@rtype datetime
"""
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure to set the value for QS in config.sh or\n"
print "in local.sh and then \'source local.sh\'.\n"
datestxt = np.loadtxt(filename,dtype=str)
dates=[]
for i in datestxt:
if(len(dates)<days):
if((dt.datetime.strptime(i,"%m/%d/%Y")+timeofday)>=startday):
dates.append(dt.datetime.strptime(i,"%m/%d/%Y")+timeofday)
return(dates)
def getPrevNNYSEday(startday, timeofday):
"""
@summary: This function returns the last valid trading day before the start
day, or returns the start day if it is a valid trading day. This function
depends on the file used in getNYSEdays and assumes the dates within are
in order.
@param startday: First timestamp to consider (inclusive)
    @param timeofday: Time offset (a timedelta) added to the returned timestamp
    @return datetime: The last timestamp at or before startday on which NYSE traded
@rtype datetime
"""
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure to set the value for QS in config.sh or\n"
print "in local.sh and then \'source local.sh\'.\n"
datestxt = np.loadtxt(filename,dtype=str)
#''' Set return to first day '''
dtReturn = dt.datetime.strptime( datestxt[0],"%m/%d/%Y")+timeofday
#''' Loop through all but first '''
for i in datestxt[1:]:
dtNext = dt.datetime.strptime(i,"%m/%d/%Y")
#''' If we are > startday, then use previous valid day '''
if( dtNext > startday ):
break
dtReturn = dtNext + timeofday
return(dtReturn)
def ymd2epoch(year, month, day):
"""
@summary: Convert YMD info into a unix epoch value.
@param year: The year
@param month: The month
@param day: The day
@return epoch: number of seconds since epoch
"""
return(t.mktime(dt.date(year,month,day).timetuple()))
def epoch2date(ts):
"""
    @summary: Convert seconds since epoch into date
@param ts: Seconds since epoch
@return thedate: A date object
"""
tm = t.gmtime(ts)
return(dt.date(tm.tm_year,tm.tm_mon,tm.tm_mday))
def _trade_dates(dt_start, dt_end, s_period):
'''
@summary: Generate dates on which we need to trade
@param c_strat: Strategy config class
@param dt_start: Start date
@param dt_end: End date
'''
ldt_timestamps = getNYSEdays(dt_start,
dt_end, dt.timedelta(hours=16) )
# Use pandas reindex method instead
# Note, dates are index as well as values, we select based on index
# but return values since it is a numpy array of datetimes instead of
# pandas specific.
ts_dates = pd.TimeSeries(index=ldt_timestamps, data=ldt_timestamps)
# These are the dates we want
if s_period[:2] == 'BW':
# special case for biweekly
dr_range = pd.DateRange(dt_start, dt_end,
timeRule=s_period[1:])
dr_range = np.asarray(dr_range)
li_even = np.array(range(len(dr_range)))
dr_range = dr_range[li_even[li_even % 2 == 0]]
else:
dr_range = pd.DateRange(dt_start, dt_end,
timeRule=s_period)
dr_range = np.asarray(dr_range)
# Warning, we MUST copy the date range, if we modify it it will be returned
# in it's modified form the next time we use it.
dr_range = np.copy(dr_range)
dr_range += pd.DateOffset(hours=16)
ts_dates = ts_dates.reindex( dr_range, method='bfill' )
ldt_dates = ts_dates[ts_dates.notnull()].values
#Make unique
sdt_unique = set()
ldt_dates = [x for x in ldt_dates
if x not in sdt_unique and not sdt_unique.add(x)]
return ldt_dates<|fim▁end|> | |
<|file_name|>bot.py<|end_file_name|><|fim▁begin|>class Bot:
def setup(self, initial_data):
pass
<|fim▁hole|><|fim▁end|> | def update(self, state, response):
pass |
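# A minimal concrete bot might look like this (hypothetical sketch; the actual
# shapes of initial_data, state and response depend on the engine driving Bot):
#
#   class EchoBot(Bot):
#       def setup(self, initial_data):
#           self.name = initial_data.get("name", "echo")
#
#       def update(self, state, response):
#           response["say"] = state.get("last_message", "")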
<|file_name|>slack.py<|end_file_name|><|fim▁begin|>import json
import requests
class SlackNotification(object):<|fim▁hole|> self.slack_url = slack_url
self.channel = channel
if not self.channel.startswith("#"):
self.channel = "#%s" % (self.channel,)
def notify(self, data):
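        # Nothing to send unless both the webhook URL and a channel are configured.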
if not all([self.slack_url, self.channel]):
return
payload = {
"channel": self.channel,
"username": "PacktPub Free Learning",
"icon_url": self.icon_url,
"attachments": [
{
"fallback": "Today's Free eBook: %s" % data["title"],
"pretext": "Today's Free eBook:",
"title": data["title"],
"title_link": data["book_url"],
"color": "#ff7f00",
"text": "%s\n%s" % (data["description"], data.get("url", "")),
"thumb_url": data["image_url"].replace(" ", "%20"),
}
],
}
requests.post(self.slack_url, data={"payload": json.dumps(payload)})<|fim▁end|> | icon_url = "https://github-bogdal.s3.amazonaws.com/freepacktbook/icon.png"
def __init__(self, slack_url, channel): |
<|file_name|>get-rnd-thumbnail.js<|end_file_name|><|fim▁begin|>'use babel';
//import reddit from './api/reddit';
import giphy from './api/giphy';
import ohMaGif from './api/oh-ma-gif';
import reactionGifs from './api/reaction-gifs';
const apis = [
//reddit,
giphy,
ohMaGif,
reactionGifs
];
export default function getRndThumbnail() {
const apiFn = apis[Math.floor(Math.random() * apis.length)];
return apiFn().catch(err => {
// Show error notification<|fim▁hole|> dismissable: true
});
});
}<|fim▁end|> | atom.notifications.addError(err, { |
<|file_name|>polyfills.ts<|end_file_name|><|fim▁begin|>/**
* This file includes polyfills needed by Angular and is loaded before the app.
* You can add your own extra polyfills to this file.
*
* This file is divided into 2 sections:
* 1. Browser polyfills. These are applied before loading ZoneJS and are sorted by browsers.
* 2. Application imports. Files imported after ZoneJS that should be loaded before your main
* file.
*
* The current setup is for so-called "evergreen" browsers; the last versions of browsers that
* automatically update themselves. This includes Safari >= 10, Chrome >= 55 (including Opera),
* Edge >= 13 on the desktop, and iOS 10 and Chrome on mobile.
*
* Learn more in https://angular.io/docs/ts/latest/guide/browser-support.html
*/
/***************************************************************************************************
* BROWSER POLYFILLS
*/
/** IE9, IE10 and IE11 requires all of the following polyfills. **/
// import 'core-js/es6/symbol';
// import 'core-js/es6/object';
// import 'core-js/es6/function';
// import 'core-js/es6/parse-int';
// import 'core-js/es6/parse-float';
// import 'core-js/es6/number';
// import 'core-js/es6/math';
// import 'core-js/es6/string';
// import 'core-js/es6/date';
// import 'core-js/es6/array';
// import 'core-js/es6/regexp';
// import 'core-js/es6/map';
// import 'core-js/es6/weak-map';
// import 'core-js/es6/set';
/** IE10 and IE11 requires the following for NgClass support on SVG elements */
// import 'classlist.js'; // Run `npm install --save classlist.js`.
/** IE10 and IE11 requires the following for the Reflect API. */
// import 'core-js/es6/reflect';
/** Evergreen browsers require these. **/
// Used for reflect-metadata in JIT. If you use AOT (and only Angular decorators), you can remove.<|fim▁hole|> * Required to support Web Animations `@angular/platform-browser/animations`.
* Needed for: All but Chrome, Firefox and Opera. http://caniuse.com/#feat=web-animation
**/
// import 'web-animations-js'; // Run `npm install --save web-animations-js`.
/***************************************************************************************************
* Zone JS is required by default for Angular itself.
*/
import 'zone.js/dist/zone'; // Included with Angular CLI.
/***************************************************************************************************
* APPLICATION IMPORTS
*/<|fim▁end|> | import 'core-js/es7/reflect';
/** |
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) Open Solutions Finland 2013.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "Exporting and Intrastat",
"version" : "1.0",
"author" : "Open Solutions Finland",
"description" : """
OpenERP module for exporting goods. Intrastat additions to invoices. Adds country of origin and customs code fields to products.
""",
"website" : "www.opensolutions.fi",
"depends" : ["base","product","sale","stock"],
"category" : "Generic Modules",
"init_xml" : [],
"demo_xml" : [],
"data" : [<|fim▁hole|> 'test': [
],
'installable': True,
'active': False,
'certificate': '',
}<|fim▁end|> | 'product_extension_view.xml',
'invoice_extension_view.xml'
], |
<|file_name|>gmm.py<|end_file_name|><|fim▁begin|>""" synrcat
gaussian mixture model
"""
import sys
import os
import numpy as np
import logging
from collections import OrderedDict
from astropy.table import Table
from pypeline import pype, add_param, depends_on
from syn import Syn
from syncat.errors import NoPoints
import syncat.misc as misc
import syncat.fileio as fileio
import time
@add_param('cat_model', metavar='filename', default='out/syn.pickle', type=str,
help='file with catalogue model to load')
@add_param('hints_file', metavar='filename', default='in/syn_hints.txt', type=str,
help='give hints about parameter distributions')
@depends_on(Syn)
class GaussianMixtureModel(pype):
""" SynCat mode to generate random catalogue by sampling from a gaussian mixture model.
Parameters
----------
mask : minimask.Mask instance
mask describing survey geometry to sample from. If None, sample from full-sky.
cat_model : str
path to file with catalogue model to load
hints_file : str
path to file with hints about parameter distributions
"""
def __init__(self, config={}, mask=None, **kwargs):
""" """
self._parse_config(config, **kwargs)
self._setup_logging()
self.load_hints()
self.mask = mask
self.syn = None
def sample_sky(self, zone=None, nside=None, order=None):
""" Sample sky coordinates.
Parameters
----------
zone : int, list
optional healpix zone index or list of indices from which to sample. Otherwise sample from all zones.
nside : int
healpix nside for zone pixelization
order : str
healpix ordering for zone pixelization
"""
return np.transpose(self.mask.draw_random_position(density=self.config['density'], n=self.config['count'],<|fim▁hole|> cell=zone, nside=nside))
def load_hints(self):
""" Load the hints file.
The hints file contains information about the parameter distributions.
"""
self.hints = {}
if os.path.exists(self.config['hints_file']):
for line in file(self.config['hints_file']):
line = line.strip()
if line == "":
continue
if line.startswith("#"):
continue
words = line.split()
instruction = None
low = None
high = None
name = words.pop(0)
if len(words) > 0:
instruction = words.pop(0)
if len(words) > 0:
low = float(words.pop(0))
if len(words) > 0:
high = float(words.pop(0))
if instruction not in self.hints:
self.hints[instruction] = []
self.hints[instruction].append((name, low, high))
self.logger.info("got hint for '%s': instruction is %s with range: %s, %s", name, instruction, low, high)
return self.hints
def fit(self, filename=None, add_columns=True):
""" Fit a Gaussian mixture model to the input catalogue.
Parameters
----------
filename : str
path to input catalogue.
"""
if filename is None:
filename = self.config['in_cat']
if os.path.exists(self.config['cat_model']) and not self.config['overwrite']:
self.logger.info("reading %s", self.config['cat_model'])
self.syn = Syn(self.config['cat_model'])
self.labels = self.syn.labels
return
hints = self.load_hints()
self.logger.info("loading %s", filename)
table = fileio.read_catalogue(filename, format=self.config['input_format'], columns=self.config['input_columns'], quick=self.config['quick'])
table_dtype = table.dtype
table = misc.remove_columns(table, self.config['skip'])
properties = list(table.dtype.names)
if self.logger.isEnabledFor(logging.INFO):
mesg = ""
for i, p in enumerate(properties):
mesg += "\n{:>3} {}".format(1 + i, p)
self.logger.info("got these %i columns:%s", len(properties), mesg)
self.syn = Syn(labels=properties, hints=hints, config=self.config)
dtype = table.dtype
if add_columns:
dtype = misc.append_dtypes(dtype, self.config['add_columns'], table_dtype)
if self.config['sample_sky'] and self.config['skycoord_name'] not in dtype.names:
skycoord_name = self.config['skycoord_name']
alpha, delta = skycoord_name
skycoord_dtype = np.dtype([(alpha, np.float64), (delta, np.float64)])
dtype = misc.concatenate_dtypes([dtype, skycoord_dtype])
self.syn.fit(table, dtype=dtype)
# store column names
self.labels = properties
# save catalogue model
self.syn.save(self.config['cat_model'])
def sample(self):
""" Sample from the Gaussian mixture model.
Returns
-------
numpy strucarray : random catalogue
"""
if self.syn is None:
if not os.path.exists(self.config['cat_model']):
raise Exception("Cannot load catalogue model. Files does not exist: %s"%self.config['cat_model'])
self.syn = Syn(self.config['cat_model'])
if self.config['sample_sky']:
skycoord = self.sample_sky()
count = len(skycoord)
else:
count = self.config['count']
if count == 0:
raise NoPoints
randoms = self.syn.sample(n=count)
if self.config['sample_sky']:
skycoord_name = self.config['skycoord_name']
for i in range(len(skycoord_name)):
randoms[skycoord_name[i]] = skycoord[:,i]
return randoms<|fim▁end|> | |
<|file_name|>get_worker_test_list.py<|end_file_name|><|fim▁begin|>"""
This script splits the console log of a pytest-xdist Jenkins run into the test<|fim▁hole|>
[test-suite] [worker] RESULT test
"""
import io
import os
import re
import shutil
import click
@click.command()
@click.option(
'--log-file',
help="File name of console log .txt file from a Jenkins build "
"that ran pytest-xdist. This can be acquired by running: "
"curl -o console.txt https://build.testeng.edx.org/job/JOBNAME/BUILDNUMBER/consoleText",
required=True
)
@click.option(
'--test-suite',
help="Test suite that the pytest worker ran.",
type=click.Choice(['lms-unit', 'cms-unit', 'commonlib-unit']),
required=True
)
def main(log_file, test_suite):
worker_test_dict = {}
with open(log_file, 'r') as console_file:
for line in console_file:
            regex_search = re.search(r'\[gw(\d+)] (PASSED|FAILED|SKIPPED|ERROR) (\S+)', line)
if regex_search:
worker_num_string = regex_search.group(1)
if worker_num_string not in worker_test_dict:
worker_test_dict[worker_num_string] = []
test = regex_search.group(3)
if test_suite == "commonlib-unit":
if "pavelib" not in test and not test.startswith('scripts'):
test = f"common/lib/{test}"
worker_test_dict[worker_num_string].append(test)
output_folder_name = "worker_list_files"
if os.path.isdir(output_folder_name):
shutil.rmtree(output_folder_name)
os.mkdir(output_folder_name)
for worker_num in worker_test_dict:
output_file_name = f"{output_folder_name}/{test_suite}_gw{worker_num}_test_list.txt"
with open(output_file_name, 'w') as output_file:
for line in worker_test_dict[worker_num]:
output_file.write(line + "\n")
if __name__ == "__main__":
main()<|fim▁end|> | lists of each pytest worker. |
<|file_name|>AbstractFrame.java<|end_file_name|><|fim▁begin|>/*
* Created on May 17, 2004
*
* Paros and its related class files.
*
* Paros is an HTTP/HTTPS proxy for assessing web application security.
* Copyright (C) 2003-2004 Chinotec Technologies Company
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the Clarified Artistic License
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Clarified Artistic License for more details.
*
* You should have received a copy of the Clarified Artistic License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
// ZAP: 2013/01/16 Minor fix to prevent NPE
// ZAP: 2014/10/17 Issue 1308: Updated for latest icons
// ZAP: 2015/02/10 Issue 1528: Support user defined font size
// ZAP: 2015/09/07 Move icon loading to a utility class
package org.parosproxy.paros.view;
import java.awt.Dimension;
import java.awt.Frame;
import java.awt.Image;
import java.awt.Point;
import java.awt.Toolkit;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.WindowEvent;
import java.awt.event.WindowStateListener;
import java.util.ArrayList;
import java.util.List;
import java.util.prefs.BackingStoreException;
import java.util.prefs.Preferences;
import javax.swing.JFrame;
import org.apache.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.zaproxy.zap.utils.DisplayUtils;
/**
* Generic Frame, which handles some basic properties.
* <ul>
* <li>Sets the icon(s) for the frame, which are the ZAP icons</li>
* <li>Centers the frame on screen</li>
* <li>Sets the frame to _not_ visible</li>
* <li>Sets a common font for the frame</li>
* <li>Sets a default title (ZAP application name)</li>
 * <li>Preserves window state, location and size correctly (will survive multiple sessions)</li>
* </ul>
* Hint for implementers: If you use this class,
* don't use {@link #setSize(Dimension)}, but {@link #setPreferredSize(Dimension)}
* instead. Also, don't use {@link #setLocation(Point)}. This abstract class
* will automatically take care of size and position.
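 * <p>
 * A minimal subclass sketch (hypothetical names, for illustration only):
 * <pre>{@code
 * public class MyToolFrame extends AbstractFrame {
 *     public MyToolFrame() {
 *         super();
 *         this.setTitle("My Tool");
 *         this.setPreferredSize(new Dimension(800, 600)); // not setSize()
 *     }
 * }
 * }</pre>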
*/
public abstract class AbstractFrame extends JFrame {
private static final long serialVersionUID = 6751593232255236597L;
private static final String PREF_WINDOW_STATE = "window.state";
private static final String PREF_WINDOW_SIZE = "window.size";
private static final String PREF_WINDOW_POSITION = "window.position";
private static final int WINDOW_DEFAULT_WIDTH = 800;
private static final int WINDOW_DEFAULT_HEIGHT = 600;
/**
* Hint: Preferences are only saved by package.
* We have to use a prefix for separation.
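 * (e.g. a subclass named {@code MainFrame} stores its size under the key {@code MainFrame.window.size})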
*/
private final Preferences preferences;
private final String prefnzPrefix = this.getClass().getSimpleName()+".";
private final Logger logger = Logger.getLogger(AbstractFrame.class);
/**
* This is the default constructor
*/
public AbstractFrame() {
super();
this.preferences = Preferences.userNodeForPackage(getClass());
initialize();
}
/**
* This method initializes this
*/
private void initialize() {
// ZAP: Rebrand
this.setIconImages(DisplayUtils.getZapIconImages());
this.setVisible(false);
this.setTitle(Constant.PROGRAM_NAME);
final Dimension dim = restoreWindowSize();
if (dim == null) {
this.setSize(WINDOW_DEFAULT_WIDTH, WINDOW_DEFAULT_HEIGHT);
}
final Point point = restoreWindowLocation();
if (point == null) {
centerFrame();
}
restoreWindowState();
this.addWindowStateListener(new FrameWindowStateListener());
this.addComponentListener(new FrameResizedListener());
}
/**
* Centre this frame.
*
*/
public void centerFrame() {
final Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
final Dimension frameSize = this.getSize();
if (frameSize.height > screenSize.height) {
frameSize.height = screenSize.height;
}
if (frameSize.width > screenSize.width) {
frameSize.width = screenSize.width;
}
this.setLocation((screenSize.width - frameSize.width) / 2, (screenSize.height - frameSize.height) / 2);
}
/**
* @param windowstate integer value, see {@link JFrame#getExtendedState()}
*/
private void saveWindowState(int windowstate) {
if ((windowstate & Frame.ICONIFIED) == Frame.ICONIFIED) {
preferences.put(prefnzPrefix+PREF_WINDOW_STATE, SimpleWindowState.ICONFIED.toString());
if (logger.isDebugEnabled()) logger.debug("Saving preference "+PREF_WINDOW_STATE+"=" + SimpleWindowState.ICONFIED);
}
if ((windowstate & Frame.MAXIMIZED_BOTH) == Frame.MAXIMIZED_BOTH) {
preferences.put(prefnzPrefix+PREF_WINDOW_STATE, SimpleWindowState.MAXIMIZED.toString());
if (logger.isDebugEnabled()) logger.debug("Saving preference "+PREF_WINDOW_STATE+"=" + SimpleWindowState.MAXIMIZED);
}
		if (windowstate == Frame.NORMAL) { // hint: Frame.NORMAL = 0, that's why no masking
preferences.put(prefnzPrefix+PREF_WINDOW_STATE, SimpleWindowState.NORMAL.toString());
if (logger.isDebugEnabled()) logger.debug("Saving preference "+PREF_WINDOW_STATE+"=" + SimpleWindowState.NORMAL);
}
}
/**
* Loads and sets the last window state of the frame.
* Additionally, the last state will be returned.
*
* @return last window state OR null
*/
private SimpleWindowState restoreWindowState() {
SimpleWindowState laststate = null;
final String statestr = preferences.get(prefnzPrefix+PREF_WINDOW_STATE, null);
if (logger.isDebugEnabled()) logger.debug("Restoring preference "+PREF_WINDOW_STATE+"=" + statestr);
if (statestr != null) {
SimpleWindowState state = null;
try {
state = SimpleWindowState.valueOf(statestr);
} catch (final IllegalArgumentException e) { state = null; }
if (state != null) {
switch (state) {
case ICONFIED: this.setExtendedState(Frame.ICONIFIED); break;
case NORMAL: this.setExtendedState(Frame.NORMAL); break;
case MAXIMIZED: this.setExtendedState(Frame.MAXIMIZED_BOTH); break;
default:
logger.error("Invalid window state (nothing will changed): " + statestr);
}
}
laststate = state;
}
return laststate;
}
/**
	 * Saves the size of this frame, but only if window state is 'normal'.
	 * If window state is iconified or maximized, the size is not saved!
*
* @param size<|fim▁hole|> */
private void saveWindowSize(Dimension size) {
if (size != null) {
if (getExtendedState() == Frame.NORMAL) {
if (logger.isDebugEnabled()) logger.debug("Saving preference " + PREF_WINDOW_SIZE + "=" + size.width + "," + size.height);
this.preferences.put(prefnzPrefix+PREF_WINDOW_SIZE, size.width + "," + size.height);
} else {
if (logger.isDebugEnabled()) logger.debug("Preference " + PREF_WINDOW_SIZE + " not saved, cause window state is not 'normal'.");
}
}
}
/**
	 * Loads and sets the saved size preferences for this frame.
*
* @return the size of the frame OR null, if there wasn't any preference.
*/
private Dimension restoreWindowSize() {
Dimension result = null;
final String sizestr = preferences.get(prefnzPrefix+PREF_WINDOW_SIZE, null);
if (sizestr != null) {
int width = 0;
int height = 0;
final String[] sizes = sizestr.split("[,]");
try {
width = Integer.parseInt(sizes[0].trim());
height = Integer.parseInt(sizes[1].trim());
} catch (final Exception e) {
// ignoring, cause is prevented by default values;
}
if (width > 0 && height > 0) {
result = new Dimension(width, height);
if (logger.isDebugEnabled()) logger.debug("Restoring preference " + PREF_WINDOW_SIZE + "=" + result.width + "," + result.height);
this.setSize(result);
}
}
return result;
}
/**
	 * Saves the location of this frame, but only if window state is 'normal'.
	 * If window state is iconified or maximized, the location is not saved!
*
	 * @param point the current window location to persist
*/
private void saveWindowLocation(Point point) {
if (point != null) {
if (getExtendedState() == Frame.NORMAL) {
if (logger.isDebugEnabled()) logger.debug("Saving preference " + PREF_WINDOW_POSITION + "=" + point.x + "," + point.y);
this.preferences.put(prefnzPrefix+PREF_WINDOW_POSITION, point.x + "," + point.y);
} else {
if (logger.isDebugEnabled()) logger.debug("Preference " + PREF_WINDOW_POSITION + " not saved, cause window state is not 'normal'.");
}
}
}
/**
* Loads and set the saved position preferences for this frame.
*
* @return the size of the frame OR null, if there wasn't any preference.
*/
private Point restoreWindowLocation() {
Point result = null;
final String sizestr = preferences.get(prefnzPrefix+PREF_WINDOW_POSITION, null);
if (sizestr != null) {
int x = 0;
int y = 0;
final String[] sizes = sizestr.split("[,]");
try {
x = Integer.parseInt(sizes[0].trim());
y = Integer.parseInt(sizes[1].trim());
} catch (final Exception e) {
// ignoring, cause is prevented by default values;
}
if (x > 0 && y > 0) {
result = new Point(x, y);
if (logger.isDebugEnabled()) logger.debug("Restoring preference " + PREF_WINDOW_POSITION + "=" + result.x + "," + result.y);
this.setLocation(result);
}
}
return result;
}
/**
* @deprecated (2.4.2) Use {@link DisplayUtils#getZapIconImages()} instead. It will be removed in a future release.
*/
@Deprecated
@SuppressWarnings("javadoc")
protected List<Image> loadIconImages() {
return new ArrayList<>(DisplayUtils.getZapIconImages());
}
@Override
public void dispose() {
super.dispose();
try {
this.preferences.flush();
} catch (final BackingStoreException e) {
logger.error("Error while saving the preferences", e);
}
}
/*
* ========================================================================
*/
private final class FrameWindowStateListener implements WindowStateListener {
@Override
public void windowStateChanged(WindowEvent e) {
saveWindowState(e.getNewState());
}
}
private final class FrameResizedListener extends ComponentAdapter {
@Override
public void componentResized(ComponentEvent e) {
if (e.getComponent() != null) {
saveWindowSize(e.getComponent().getSize());
}
}
@Override
public void componentMoved(ComponentEvent e) {
if (e.getComponent() != null) {
saveWindowLocation(e.getComponent().getLocation());
}
}
}
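    /*
     * Wiring sketch (illustrative only; assumes the frame's constructor is
     * where the listeners get registered -- that part is not in this excerpt):
     *
     *   addWindowStateListener(new FrameWindowStateListener());
     *   addComponentListener(new FrameResizedListener());
     *   restoreWindowState();    // e.g. stored "MAXIMIZED" -> Frame.MAXIMIZED_BOTH
     *   restoreWindowSize();     // e.g. stored "1024,768"  -> new Dimension(1024, 768)
     *   restoreWindowLocation(); // e.g. stored "120,80"    -> new Point(120, 80)
     */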
/**
 * Simplified representation of the window states, for easier handling.
*/
private enum SimpleWindowState {
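        // (sic) ICONFIED is kept misspelled: the enum name is the string persisted
        // in the preferences (parsed via valueOf() above), so renaming it would
        // break previously saved window states.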
ICONFIED,
NORMAL,
MAXIMIZED;
}
} // @jve:visual-info decl-index=0 visual-constraint="31,17"<|fim▁end|> | |
<|file_name|>clean_raxml_parsimony_tree.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# File created on 10 Nov 2011
from __future__ import division
__author__ = "Jesse Stombaugh"
__copyright__ = "Copyright 2011, The QIIME project"<|fim▁hole|>__maintainer__ = "Jesse Stombaugh"
__email__ = "[email protected]"
from qiime.util import parse_command_line_parameters, make_option
from cogent.parse.tree import DndParser
from cogent.core.tree import PhyloNode
from qiime.clean_raxml_parsimony_tree import decorate_numtips, decorate_depth,\
get_insert_dict, drop_duplicate_nodes
scoring_methods = ['depth', 'numtips']
script_info = {}
script_info['brief_description'] = "Remove duplicate tips from Raxml Tree"
script_info[
'script_description'] = "This script allows the user to remove specific duplicate tips from a Raxml tree."
script_info['script_usage'] = []
script_info['script_usage'].append(
("Example (depth):",
"For this case the user can pass in input Raxml tree, duplicate tips, and define an output filepath. When using the depth option, only the deepest replicate is kept. ",
" %prog -i raxml_v730_final_placement.tre -t 6 -o raxml_v730_final_placement_depth.tre"))
script_info['script_usage'].append(
("Example (numtips):",
"For this case the user can pass in input Raxml tree, duplicate tips, and define an output filepath. When using the numtips option, the replicate with the fewest siblings is kept. ",
" %prog -i raxml_v730_final_placement.tre -t 6 -o raxml_v730_final_placement_numtips.tre -s numtips"))
script_info['output_description'] = ""
script_info['required_options'] = [
make_option(
'-i',
'--input_tree',
type="existing_filepath",
help='the input raxml parsimony tree'),
make_option(
'-t',
'--tips_to_keep',
type="string",
help='the input tips to score and retain (comma-separated list)'),
make_option(
'-o',
'--output_fp',
type="new_filepath",
help='the output filepath'),
]
script_info['optional_options'] = [
make_option(
'-s',
'--scoring_method',
type="choice",
help='the scoring method either depth or numtips [default: %default]',
default='depth',
choices=scoring_methods),
]
script_info['version'] = __version__
def main():
option_parser, opts, args =\
parse_command_line_parameters(**script_info)
# get options
tree_fp = opts.input_tree
tips_to_keep = opts.tips_to_keep.split(',')
scoring_method = opts.scoring_method
# load tree
tree = DndParser(open(tree_fp, 'U'), constructor=PhyloNode)
# decorate measurements onto tree (either by depth or by number of
# children)
if scoring_method == 'depth':
tree2 = decorate_depth(tree)
elif scoring_method == 'numtips':
tree2 = decorate_numtips(tree)
# get the nodes for the inserted sequences
nodes_dict = get_insert_dict(tree2, set(tips_to_keep))
# remove nodes accordingly
final_tree = drop_duplicate_nodes(tree2, nodes_dict)
# final_tree.nameUnnamedNodes()
# write out the resulting tree
open_outpath = open(opts.output_fp, 'w')
open_outpath.write(final_tree.getNewick(with_distances=True))
open_outpath.close()
if __name__ == "__main__":
main()<|fim▁end|> | __credits__ = ["Jesse Stombaugh"]
__license__ = "GPL"
__version__ = "1.9.1-dev" |
<|file_name|>dataset.py<|end_file_name|><|fim▁begin|>import os
import numpy as np
class Dataset(object):
"""
    This class represents a dataset: it consists of a list of SongData objects along with some metadata about the dataset.
"""
def __init__(self, songs_data=None):
if songs_data is None:
self.songs_data = []
else:
self.songs_data = songs_data
def add_song(self, song_data):
self.songs_data.append(song_data)
def songs(self):
for s in self.songs_data:
yield s
@property
def num_features(self):
if len(self.songs_data):
return self.songs_data[0].X.shape[1]
@property
def size(self):
return len(self.songs_data)
def __repr__(self):
return ', '.join([s.name for s in self.songs()])
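# Minimal usage sketch (illustrative; the file paths and shapes below are
# hypothetical):
#
#   song = SongData('audio/track01.wav', 'labels/track01.mid')
#   song.X = np.zeros((100, 40))   # 100 frames x 40 features
#   song.Y = np.zeros((100, 88))   # 100 frames x 88 pitches
#   dataset = Dataset()
#   dataset.add_song(song)
#   assert dataset.num_features == 40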
class SongData(object):
"""
This class holds features, labels, and metadata for a song.
"""
def __init__(self, audio_path, label_path):
if not os.path.isfile(audio_path):
raise IOError("Audio file at %s does not exist" % audio_path)
if label_path and not os.path.isfile(label_path):
raise IOError("MIDI file at %s does not exist" % label_path)
self.audio_path = audio_path
self.label_path = label_path
"""
x [num_samples,] is the samples of the song
"""
@property
def x(self):
return self.__x
@x.setter
def x(self, x):
self.__x = x
"""
X [num_frames x num_features] is the feature matrix for the song
"""
@property
def X(self):
return self.__X
@X.setter
def X(self, X):
if hasattr(self, 'Y') and self.Y.shape[0] != X.shape[0]:
raise ValueError("Number of feature frames must equal number of label frames")
self.__X = X
"""
Y [num_frames x num_pitches] is the label matrix for the song
"""
@property
def Y(self):
return self.__Y
@Y.setter
def Y(self, Y):
if hasattr(self, 'X') and self.X.shape[0] != Y.shape[0]:
raise ValueError("Number of label frames must equal number of feature frames")
self.__Y = Y
@property
def num_pitches(self):
if hasattr(self, 'Y'):
return np.shape(self.Y)[1]
return 0
@property
def num_features(self):
if hasattr(self, 'X'):
return self.X.shape[1]
<|fim▁hole|> if hasattr(self, 'X'):
return self.X.shape[0]
@property
def name(self):
return os.path.splitext(os.path.split(self.audio_path)[-1])[0]<|fim▁end|> | @property
def num_frames(self): |
<|file_name|>controllers.py<|end_file_name|><|fim▁begin|>"""<|fim▁hole|>created by: Chris Lemelin ([email protected])
created on: 04/20/18
"""
from flask_socketio import emit
from app.decorators import ensure_dict
from app import socketio, db
from app.committee_notes.models import *
from app.committees.models import *
from app.users.models import Users
from app.committee_notes.committee_notes_response import Response
##
## @brief Creates a committee note. (Must be admin user or committee head)
##
## @param user_data The user data required to create a committee note
##
## All the following fields are required:
## committee - id of the committee
## description - Description of new committee note
##
@socketio.on('create_committee_note')
@ensure_dict
def create_note(user_data):
user = Users.verify_auth(user_data.get("token", ""))
    committee_id = user_data.get('committee', '')
    committee = Committees.query.filter_by(id=committee_id).first()
if committee is not None:
if(user is not None and (user.is_admin or committee.head == user.id)):
committee_note = CommitteeNotes()
committee_note.committee = committee.id
committee_note.description = user_data.get('description',"")
committee_note.author = user.id
committee_note.hidden = False
db.session.add(committee_note)
try:
db.session.commit()
emit('create_committee_note', Response.AddSuccess)
                get_notes(committee.id, broadcast = True)
except Exception as e:
db.session.rollback()
db.session.flush()
emit("create_committee_note", Response.AddError)
else:
emit("create_committee_note", Response.UsrNotAuth)
else:
emit("create_committee_note", Response.CommitteeDoesntExist)
##
## @brief Gets committee notes from a committee
##
## @param committee_id - id of the committee
##
@socketio.on('get_committee_notes')
def get_notes(committee_id, broadcast = False):
notes = CommitteeNotes.query.filter_by(committee= committee_id).all()
note_ser = [
{
"id": c.id,
"author": c.author,
"committee": c.committee,
"description": c.description,
"created_at": c.created_at,
"hidden": c.hidden
}
for c in notes
]
emit("get_committee_notes", note_ser, broadcast = broadcast)
##
## @brief Gets a committee note
##
## @param id - id of committee note.
##
@socketio.on('get_committee_note')
def get_note(id, broadcast = False):
note = CommitteeNotes.query.filter_by(id= id).first()
if note is not None:
note_data = {
"id": note.id,
"author": note.author,
"committee": note.committee,
"description": note.description,
"created_at": note.created_at,
"hidden": note.hidden
}
emit('get_committee_note', note_data, broadcast = broadcast)
else:
emit("get_committee_note", {}, broadcast = broadcast)
##
## @brief Edits a committee note (Must be admin user or committee head to hide,
## only the author can edit the description)
##
## @param user_data The user data to edit a note, must
## contain a token, an id and any of the following
## fields:
## - description
## - hidden
##
## Any other field will be ignored.
##
## @emit Emits a success mesage if edited, errors otherwise.
##
@socketio.on('modify_committee_note')
@ensure_dict
def modify_note(user_data):
user = Users.verify_auth(user_data.get("token",""))
if(user is None):
        emit('modify_committee_note', Response.UsrDoesntExist)
return
committee_note_id = user_data.get("id","")
committee_note = CommitteeNotes.query.filter_by(id=committee_note_id).first()
if(committee_note is None):
        emit('modify_committee_note', Response.CommitteeNoteDoesntExist)
return
committee = Committees.query.filter_by(id= committee_note.committee).first()
if(user.id == committee_note.author):
if "description" in user_data:
committee_note.description = user_data['description']
if(user.id == committee.head or user.is_admin or user.id == committee_note.author):
if "hidden" in user_data:
committee_note.hidden = user_data['hidden']
db.session.add(committee_note)
try:
db.session.commit()
emit('modify_committee_note', Response.ModifySuccess)
#get_note(committee_note.id, broadcast = True)
except Exception as e:
db.session.rollback()
db.session.flush()
emit("modify_committee_note", Response.ModifyError)
else:
emit("modify_committee_note", Response.UsrNotAuth)<|fim▁end|> | filename: controllers.py
description: Controllers for committee notes. |
<|file_name|>credentials-in-url.https.window.js<|end_file_name|><|fim▁begin|>// META: script=/service-workers/service-worker/resources/test-helpers.sub.js
// META: script=resources/utils.js
'use strict';
// "If parsedURL includes credentials, then throw a TypeError."<|fim▁hole|>// "A URL includes credentials if its username or password is not the empty
// string."
// https://url.spec.whatwg.org/#include-credentials
backgroundFetchTest((t, bgFetch) => {
return bgFetch.fetch(uniqueTag(), 'https://example.com');
}, 'fetch without credentials in URL should register ok');
backgroundFetchTest((t, bgFetch) => {
return promise_rejects(
t, new TypeError(),
bgFetch.fetch(uniqueTag(), 'https://username:[email protected]'));
}, 'fetch with username and password in URL should reject');
backgroundFetchTest((t, bgFetch) => {
return promise_rejects(
t, new TypeError(),
bgFetch.fetch(uniqueTag(), 'https://username:@example.com'));
}, 'fetch with username and empty password in URL should reject');
backgroundFetchTest((t, bgFetch) => {
return promise_rejects(
t, new TypeError(),
bgFetch.fetch(uniqueTag(), 'https://:[email protected]'));
}, 'fetch with empty username and password in URL should reject');<|fim▁end|> | // https://fetch.spec.whatwg.org/#dom-request
// (Added by https://github.com/whatwg/fetch/issues/26). |
<|file_name|>FromAddressSpinner.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2012, Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.mail.compose;
import android.content.Context;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Spinner;
import com.android.mail.providers.Account;
import com.android.mail.providers.Message;
import com.android.mail.providers.ReplyFromAccount;
import com.android.mail.utils.AccountUtils;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import java.util.List;
public class FromAddressSpinner extends Spinner implements OnItemSelectedListener {
private List<Account> mAccounts;
private ReplyFromAccount mAccount;
private final List<ReplyFromAccount> mReplyFromAccounts = Lists.newArrayList();
private OnAccountChangedListener mAccountChangedListener;
public FromAddressSpinner(Context context) {
this(context, null);
}
public FromAddressSpinner(Context context, AttributeSet set) {
super(context, set);
}
public void setCurrentAccount(ReplyFromAccount account) {
mAccount = account;
selectCurrentAccount();
}<|fim▁hole|> private void selectCurrentAccount() {
if (mAccount == null) {
return;
}
int currentIndex = 0;
for (ReplyFromAccount acct : mReplyFromAccounts) {
if (TextUtils.equals(mAccount.name, acct.name)
&& TextUtils.equals(mAccount.address, acct.address)) {
setSelection(currentIndex, true);
break;
}
currentIndex++;
}
}
public ReplyFromAccount getMatchingReplyFromAccount(String accountString) {
if (!TextUtils.isEmpty(accountString)) {
for (ReplyFromAccount acct : mReplyFromAccounts) {
if (accountString.equals(acct.address)) {
return acct;
}
}
}
return null;
}
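    /*
     * Illustrative lookup (addresses are hypothetical): with accounts for
     * "[email protected]" and "[email protected]" in mReplyFromAccounts,
     * getMatchingReplyFromAccount("[email protected]") returns that account,
     * while getMatchingReplyFromAccount("[email protected]") returns null.
     */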
public ReplyFromAccount getCurrentAccount() {
return mAccount;
}
/**
* @param action Action being performed; if this is COMPOSE, show all
* accounts. Otherwise, show just the account this was launched
* with.
* @param currentAccount Account used to launch activity.
* @param syncingAccounts
*/
public void initialize(int action, Account currentAccount, Account[] syncingAccounts,
Message refMessage) {
final List<Account> accounts = AccountUtils.mergeAccountLists(mAccounts,
syncingAccounts, true /* prioritizeAccountList */);
if (action == ComposeActivity.COMPOSE) {
mAccounts = accounts;
} else {
// First assume that we are going to use the current account as the reply account
Account replyAccount = currentAccount;
if (refMessage != null && refMessage.accountUri != null) {
// This is a reply or forward of a message access through the "combined" account.
// We want to make sure that the real account is in the spinner
for (Account account : accounts) {
if (account.uri.equals(refMessage.accountUri)) {
replyAccount = account;
break;
}
}
}
mAccounts = ImmutableList.of(replyAccount);
}
initFromSpinner();
}
@VisibleForTesting
protected void initFromSpinner() {
// If there are not yet any accounts in the cached synced accounts
// because this is the first time mail was opened, and it was opened
// directly to the compose activity, don't bother populating the reply
// from spinner yet.
if (mAccounts == null || mAccounts.size() == 0) {
return;
}
FromAddressSpinnerAdapter adapter =
new FromAddressSpinnerAdapter(getContext());
mReplyFromAccounts.clear();
for (Account account : mAccounts) {
mReplyFromAccounts.addAll(account.getReplyFroms());
}
adapter.addAccounts(mReplyFromAccounts);
setAdapter(adapter);
selectCurrentAccount();
setOnItemSelectedListener(this);
}
public List<ReplyFromAccount> getReplyFromAccounts() {
return mReplyFromAccounts;
}
public void setOnAccountChangedListener(OnAccountChangedListener listener) {
mAccountChangedListener = listener;
}
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
ReplyFromAccount selection = (ReplyFromAccount) getItemAtPosition(position);
if (!selection.address.equals(mAccount.address)) {
mAccount = selection;
mAccountChangedListener.onAccountChanged();
}
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
// Do nothing.
}
/**
* Classes that want to know when a different account in the
* FromAddressSpinner has been selected should implement this interface.
* Note: if the user chooses the same account as the one that has already
* been selected, this method will not be called.
*/
public static interface OnAccountChangedListener {
public void onAccountChanged();
}
}<|fim▁end|> | |
<|file_name|>executors.py<|end_file_name|><|fim▁begin|>"""Single and multi-threaded executors."""
import datetime
import functools
import logging
import math
import os
import threading
from abc import ABCMeta, abstractmethod
from threading import Lock
from typing import (
Dict,
Iterable,
List,
MutableSequence,
Optional,
Set,
Tuple,
Union,
cast,
)
import psutil
from schema_salad.exceptions import ValidationException
from schema_salad.sourceline import SourceLine
from .command_line_tool import CallbackJob, ExpressionJob
from .context import RuntimeContext, getdefault
from .errors import WorkflowException
from .job import JobBase
from .loghandler import _logger
from .mutation import MutationManager
from .process import Process, cleanIntermediate, relocateOutputs
from .provenance_profile import ProvenanceProfile
from .task_queue import TaskQueue
from .update import ORIGINAL_CWLVERSION
from .utils import CWLObjectType, JobsType
from .workflow import Workflow
from .workflow_job import WorkflowJob, WorkflowJobStep
TMPDIR_LOCK = Lock()
class JobExecutor(metaclass=ABCMeta):
"""Abstract base job executor."""
def __init__(self) -> None:
"""Initialize."""
self.final_output = [] # type: MutableSequence[Optional[CWLObjectType]]
self.final_status = [] # type: List[str]
self.output_dirs = set() # type: Set[str]
def __call__(
self,
process: Process,
job_order_object: CWLObjectType,
runtime_context: RuntimeContext,
logger: logging.Logger = _logger,
) -> Tuple[Optional[CWLObjectType], str]:
return self.execute(process, job_order_object, runtime_context, logger)
def output_callback(
self, out: Optional[CWLObjectType], process_status: str
) -> None:
"""Collect the final status and outputs."""
self.final_status.append(process_status)
self.final_output.append(out)
@abstractmethod
def run_jobs(
self,
process: Process,
job_order_object: CWLObjectType,
logger: logging.Logger,
runtime_context: RuntimeContext,
) -> None:
"""Execute the jobs for the given Process."""
def execute(
self,
process: Process,
job_order_object: CWLObjectType,
runtime_context: RuntimeContext,
logger: logging.Logger = _logger,
) -> Tuple[Union[Optional[CWLObjectType]], str]:
"""Execute the process."""
if not runtime_context.basedir:
raise WorkflowException("Must provide 'basedir' in runtimeContext")
def check_for_abstract_op(tool: CWLObjectType) -> None:
if tool["class"] == "Operation":
raise SourceLine(
tool, "class", WorkflowException, runtime_context.debug
).makeError("Workflow has unrunnable abstract Operation")
process.visit(check_for_abstract_op)
finaloutdir = None # Type: Optional[str]
original_outdir = runtime_context.outdir
if isinstance(original_outdir, str):
finaloutdir = os.path.abspath(original_outdir)
runtime_context = runtime_context.copy()
outdir = runtime_context.create_outdir()
self.output_dirs.add(outdir)
runtime_context.outdir = outdir
runtime_context.mutation_manager = MutationManager()
runtime_context.toplevel = True
runtime_context.workflow_eval_lock = threading.Condition(threading.RLock())
job_reqs = None # type: Optional[List[CWLObjectType]]
if "https://w3id.org/cwl/cwl#requirements" in job_order_object:
if process.metadata.get(ORIGINAL_CWLVERSION) == "v1.0":
raise WorkflowException(
"`cwl:requirements` in the input object is not part of CWL "
"v1.0. You can adjust to use `cwltool:overrides` instead; or you "
"can set the cwlVersion to v1.1"
)
job_reqs = cast(
List[CWLObjectType],
job_order_object["https://w3id.org/cwl/cwl#requirements"],
)
elif (
"cwl:defaults" in process.metadata
and "https://w3id.org/cwl/cwl#requirements"
in cast(CWLObjectType, process.metadata["cwl:defaults"])<|fim▁hole|> "v1.0. You can adjust to use `cwltool:overrides` instead; or you "
"can set the cwlVersion to v1.1"
)
job_reqs = cast(
Optional[List[CWLObjectType]],
cast(CWLObjectType, process.metadata["cwl:defaults"])[
"https://w3id.org/cwl/cwl#requirements"
],
)
if job_reqs is not None:
for req in job_reqs:
process.requirements.append(req)
self.run_jobs(process, job_order_object, logger, runtime_context)
if (
self.final_output
and self.final_output[0] is not None
and finaloutdir is not None
):
self.final_output[0] = relocateOutputs(
self.final_output[0],
finaloutdir,
self.output_dirs,
runtime_context.move_outputs,
runtime_context.make_fs_access(""),
getdefault(runtime_context.compute_checksum, True),
path_mapper=runtime_context.path_mapper,
)
if runtime_context.rm_tmpdir:
if not runtime_context.cachedir:
output_dirs = self.output_dirs # type: Iterable[str]
else:
output_dirs = filter(
lambda x: not x.startswith(runtime_context.cachedir), # type: ignore
self.output_dirs,
)
cleanIntermediate(output_dirs)
if self.final_output and self.final_status:
if (
runtime_context.research_obj is not None
and isinstance(
process, (JobBase, Process, WorkflowJobStep, WorkflowJob)
)
and process.parent_wf
):
process_run_id = None # type: Optional[str]
name = "primary"
process.parent_wf.generate_output_prov(
self.final_output[0], process_run_id, name
)
process.parent_wf.document.wasEndedBy(
process.parent_wf.workflow_run_uri,
None,
process.parent_wf.engine_uuid,
datetime.datetime.now(),
)
process.parent_wf.finalize_prov_profile(name=None)
return (self.final_output[0], self.final_status[0])
return (None, "permanentFail")
class SingleJobExecutor(JobExecutor):
"""Default single-threaded CWL reference executor."""
def run_jobs(
self,
process: Process,
job_order_object: CWLObjectType,
logger: logging.Logger,
runtime_context: RuntimeContext,
) -> None:
process_run_id = None # type: Optional[str]
# define provenance profile for single commandline tool
if (
not isinstance(process, Workflow)
and runtime_context.research_obj is not None
):
process.provenance_object = ProvenanceProfile(
runtime_context.research_obj,
full_name=runtime_context.cwl_full_name,
host_provenance=False,
user_provenance=False,
orcid=runtime_context.orcid,
# single tool execution, so RO UUID = wf UUID = tool UUID
run_uuid=runtime_context.research_obj.ro_uuid,
fsaccess=runtime_context.make_fs_access(""),
)
process.parent_wf = process.provenance_object
jobiter = process.job(job_order_object, self.output_callback, runtime_context)
try:
for job in jobiter:
if job is not None:
if runtime_context.builder is not None and hasattr(job, "builder"):
job.builder = runtime_context.builder # type: ignore
if job.outdir is not None:
self.output_dirs.add(job.outdir)
if runtime_context.research_obj is not None:
if not isinstance(process, Workflow):
prov_obj = process.provenance_object
else:
prov_obj = job.prov_obj
if prov_obj:
runtime_context.prov_obj = prov_obj
prov_obj.fsaccess = runtime_context.make_fs_access("")
prov_obj.evaluate(
process,
job,
job_order_object,
runtime_context.research_obj,
)
process_run_id = prov_obj.record_process_start(process, job)
runtime_context = runtime_context.copy()
runtime_context.process_run_id = process_run_id
job.run(runtime_context)
else:
logger.error("Workflow cannot make any more progress.")
break
except (
ValidationException,
WorkflowException,
): # pylint: disable=try-except-raise
raise
except Exception as err:
logger.exception("Got workflow error")
raise WorkflowException(str(err)) from err
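# Usage sketch (illustrative; assumes `tool` is a loaded cwltool Process and
# `job_order` a parsed input object -- both hypothetical names here):
#
#   executor = SingleJobExecutor()
#   runtime_context = RuntimeContext({"basedir": os.getcwd()})
#   output, status = executor.execute(tool, job_order, runtime_context)
#   assert status in ("success", "permanentFail")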
class MultithreadedJobExecutor(JobExecutor):
"""
Experimental multi-threaded CWL executor.
Does simple resource accounting, will not start a job unless it
has cores / ram available, but does not make any attempt to
optimize usage.
"""
def __init__(self) -> None:
"""Initialize."""
super().__init__()
self.exceptions = [] # type: List[WorkflowException]
self.pending_jobs = [] # type: List[JobsType]
self.pending_jobs_lock = threading.Lock()
self.max_ram = int(psutil.virtual_memory().available / 2**20) # type: ignore[no-untyped-call]
self.max_cores = float(psutil.cpu_count())
self.allocated_ram = float(0)
self.allocated_cores = float(0)
def select_resources(
self, request: Dict[str, Union[int, float]], runtime_context: RuntimeContext
) -> Dict[str, Union[int, float]]: # pylint: disable=unused-argument
"""Naïve check for available cpu cores and memory."""
result: Dict[str, Union[int, float]] = {}
maxrsc = {"cores": self.max_cores, "ram": self.max_ram}
for rsc in ("cores", "ram"):
rsc_min = request[rsc + "Min"]
if rsc_min > maxrsc[rsc]:
raise WorkflowException(
f"Requested at least {rsc_min} {rsc} but only "
f"{maxrsc[rsc]} available"
)
rsc_max = request[rsc + "Max"]
if rsc_max < maxrsc[rsc]:
result[rsc] = math.ceil(rsc_max)
else:
result[rsc] = maxrsc[rsc]
result["tmpdirSize"] = math.ceil(request["tmpdirMin"])
result["outdirSize"] = math.ceil(request["outdirMin"])
if "cudaDeviceCount" in request:
result["cudaDeviceCount"] = request["cudaDeviceCount"]
return result
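    # Example (illustrative): with max_cores = 8 and max_ram = 16384 MiB, a
    # request of {"coresMin": 1, "coresMax": 4, "ramMin": 256, "ramMax": 1024,
    # "tmpdirMin": 512, "outdirMin": 512} yields
    # {"cores": 4, "ram": 1024, "tmpdirSize": 512, "outdirSize": 512}.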
def _runner(self, job, runtime_context, TMPDIR_LOCK):
# type: (Union[JobBase, WorkflowJob, CallbackJob, ExpressionJob], RuntimeContext, threading.Lock) -> None
"""Job running thread."""
try:
_logger.debug(
"job: {}, runtime_context: {}, TMPDIR_LOCK: {}".format(
job, runtime_context, TMPDIR_LOCK
)
)
job.run(runtime_context, TMPDIR_LOCK)
except WorkflowException as err:
_logger.exception(f"Got workflow error: {err}")
self.exceptions.append(err)
except Exception as err: # pylint: disable=broad-except
_logger.exception(f"Got workflow error: {err}")
self.exceptions.append(WorkflowException(str(err)))
finally:
if runtime_context.workflow_eval_lock:
with runtime_context.workflow_eval_lock:
if isinstance(job, JobBase):
ram = job.builder.resources["ram"]
self.allocated_ram -= ram
cores = job.builder.resources["cores"]
self.allocated_cores -= cores
                    runtime_context.workflow_eval_lock.notify_all()
def run_job(
self,
job: Optional[JobsType],
runtime_context: RuntimeContext,
) -> None:
"""Execute a single Job in a separate thread."""
if job is not None:
with self.pending_jobs_lock:
self.pending_jobs.append(job)
with self.pending_jobs_lock:
n = 0
while (n + 1) <= len(self.pending_jobs):
# Simple greedy resource allocation strategy. Go
# through pending jobs in the order they were
# generated and add them to the queue only if there
# are resources available.
job = self.pending_jobs[n]
if isinstance(job, JobBase):
ram = job.builder.resources["ram"]
cores = job.builder.resources["cores"]
if ram > self.max_ram or cores > self.max_cores:
                        _logger.error(
                            'Job "%s" cannot be run, requests more resources (%s) '
                            "than available on this host (max ram %d, max cores %d)",
                            job.name,
                            job.builder.resources,
                            self.max_ram,
                            self.max_cores,
                        )
self.pending_jobs.remove(job)
return
if (
self.allocated_ram + ram > self.max_ram
or self.allocated_cores + cores > self.max_cores
):
_logger.debug(
'Job "%s" cannot run yet, resources (%s) are not '
"available (already allocated ram is %d, allocated cores is %d, "
"max ram %d, max cores %d",
job.name,
job.builder.resources,
self.allocated_ram,
self.allocated_cores,
self.max_ram,
self.max_cores,
)
n += 1
continue
if isinstance(job, JobBase):
ram = job.builder.resources["ram"]
self.allocated_ram += ram
cores = job.builder.resources["cores"]
self.allocated_cores += cores
self.taskqueue.add(
functools.partial(self._runner, job, runtime_context, TMPDIR_LOCK),
runtime_context.workflow_eval_lock,
)
self.pending_jobs.remove(job)
def wait_for_next_completion(self, runtime_context):
# type: (RuntimeContext) -> None
"""Wait for jobs to finish."""
if runtime_context.workflow_eval_lock is not None:
runtime_context.workflow_eval_lock.wait(timeout=3)
if self.exceptions:
raise self.exceptions[0]
def run_jobs(
self,
process: Process,
job_order_object: CWLObjectType,
logger: logging.Logger,
runtime_context: RuntimeContext,
) -> None:
self.taskqueue = TaskQueue(
threading.Lock(), psutil.cpu_count()
) # type: TaskQueue
try:
jobiter = process.job(
job_order_object, self.output_callback, runtime_context
)
if runtime_context.workflow_eval_lock is None:
raise WorkflowException(
"runtimeContext.workflow_eval_lock must not be None"
)
runtime_context.workflow_eval_lock.acquire()
for job in jobiter:
if job is not None:
if isinstance(job, JobBase):
job.builder = runtime_context.builder or job.builder
if job.outdir is not None:
self.output_dirs.add(job.outdir)
self.run_job(job, runtime_context)
if job is None:
if self.taskqueue.in_flight > 0:
self.wait_for_next_completion(runtime_context)
else:
logger.error("Workflow cannot make any more progress.")
break
self.run_job(None, runtime_context)
while self.taskqueue.in_flight > 0:
self.wait_for_next_completion(runtime_context)
self.run_job(None, runtime_context)
runtime_context.workflow_eval_lock.release()
finally:
self.taskqueue.drain()
self.taskqueue.join()
class NoopJobExecutor(JobExecutor):
"""Do nothing executor, for testing purposes only."""
def run_jobs(
self,
process: Process,
job_order_object: CWLObjectType,
logger: logging.Logger,
runtime_context: RuntimeContext,
) -> None:
pass
def execute(
self,
process: Process,
job_order_object: CWLObjectType,
runtime_context: RuntimeContext,
logger: Optional[logging.Logger] = None,
) -> Tuple[Optional[CWLObjectType], str]:
return {}, "success"<|fim▁end|> | ):
if process.metadata.get(ORIGINAL_CWLVERSION) == "v1.0":
raise WorkflowException(
"`cwl:requirements` in the input object is not part of CWL " |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from __future__ import annotations
from os import getenv
import gc
import sys
from unittest import TestCase, main
from unittest import skip as skip
from unittest import skipIf as skipIf
import logging
from progressivis import Scheduler, log_level
from progressivis.storage import init_temp_dir_if, cleanup_temp_dir
import numpy as np
from typing import Any, Type, Optional
_ = skip # shut-up pylint
__ = skipIf
<|fim▁hole|> WARNING = logging.WARNING
INFO = logging.INFO
DEBUG = logging.DEBUG
NOTSET = logging.NOTSET
levels = {
"CRITICAL": logging.CRITICAL,
"ERROR": logging.ERROR,
"WARNING": logging.WARNING,
"INFO": logging.INFO,
"DEBUG": logging.DEBUG,
"NOTSET": logging.NOTSET,
}
def __init__(self, *args: Any) -> None:
super(ProgressiveTest, self).__init__(*args)
self._output: bool = False
self._scheduler: Optional[Scheduler] = None
self._temp_dir_flag: bool = False
level: Any = getenv("LOGLEVEL")
if level in ProgressiveTest.levels:
level = ProgressiveTest.levels[level]
if level:
print(f"Logger level {level} for {self}", file=sys.stderr)
self.log(int(level))
@staticmethod
def terse(x: Any) -> None:
_ = x
print(".", end="", file=sys.stderr)
@staticmethod
async def _stop(scheduler: Scheduler, run_number: int) -> None:
await scheduler.stop()
def setUp(self) -> None:
np.random.seed(42)
def tearDown(self) -> None:
# print('Logger level for %s back to ERROR' % self, file=sys.stderr)
# self.log()
gc.collect()
logger = logging.getLogger()
logger.setLevel(logging.NOTSET)
while logger.hasHandlers():
logger.removeHandler(logger.handlers[0])
@classmethod
    def cleanup(cls: Type[ProgressiveTest]) -> None:
cleanup_temp_dir()
@classmethod
def setUpClass(cls: Type[ProgressiveTest]) -> None:
cleanup_temp_dir()
init_temp_dir_if()
@classmethod
def tearDownClass(cls: Type[ProgressiveTest]) -> None:
cleanup_temp_dir()
def scheduler(self, clean: bool = False) -> Scheduler:
if self._scheduler is None or clean:
self._scheduler = Scheduler()
return self._scheduler
@staticmethod
def log(level: int = logging.NOTSET, package: str = "progressivis") -> None:
log_level(level, package=package)
@staticmethod
def main() -> None:
main()<|fim▁end|> |
class ProgressiveTest(TestCase):
CRITICAL = logging.CRITICAL
ERROR = logging.ERROR |
<|file_name|>showMeMore.js<|end_file_name|><|fim▁begin|>jQuery.fn.showMeMore = function (options) {
    options = $.extend({
current: 4, // number to be displayed at start
count: 4, // how many show in one click
fadeSpeed: 300, // animation speed
showButton: '', // show button (false / string)
hideButton: '', // hide button
showButtonText: 'showButton', //text at the showButton
        hideButtonText: 'hideButton', //text at the hideButton
enableHide: false, // allow to hide (true / false)
generateBtn: true,// auto generate buttons if they not added by default
list: 'li' //tile elements
}, options);
var make = function () {
var showButton = $(options.showButton),
hideButton = $(options.hideButton),
enableHide = options.enableHide,
count = options.count,
current = options.current,
fadeSpeed = options.fadeSpeed,
list = $(this).find(options.list),//find all 'list' elements
quantity = list.length;//list elements count
//add SHOW button if it is not installed by the user
if (options.generateBtn && options.showButton == '') {
$(this).append('<button class="showButton">' + options.showButtonText + '</button>');
showButton = $(this).find('.showButton');
}
//add HIDE button if it is not installed by the user and if enableHide is true
        if (options.generateBtn && enableHide && options.hideButton == '') {
$(this).append('<button class="hideButton">' + options.hideButtonText + '</button>');
hideButton = $(this).find('.hideButton');
}
list.hide();//hide all elements
        hideButton.hide(); //hide "hideButton"
if (quantity <= current) {
showButton.hide();
}
showItem(0);//show first elements
function switchButtons() {
<|fim▁hole|> } else {
showButton.hide();
hideButton.show();
}
}
//this function show next elements
function showItem(time) {
for (var i = 0; i < current; i++) {
if ($(list[i]).is(':hidden')) {
$(list[i]).fadeIn(time);
}
}
}
//this function hide all elements
function hideAll(time) {
for (var i = current; i < quantity; i++) {
$(list[i]).fadeOut(time);
}
}
showButton.click(function (event) {
event.preventDefault();
current += count;
showItem(fadeSpeed);
if (current >= quantity) {
switchButtons();
}
});
hideButton.click(function (event) {
event.preventDefault();
current = options.current;
hideAll(fadeSpeed);
hideButton.hide();
showButton.show();
});
};
return this.each(make);
};<|fim▁end|> | if (enableHide == false) {
showButton.hide();
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Style sheets and their CSS rules.
mod counter_style_rule;
mod document_rule;
mod font_face_rule;
pub mod font_feature_values_rule;
pub mod import_rule;
pub mod keyframes_rule;
mod loader;
mod media_rule;
mod namespace_rule;
pub mod origin;
mod page_rule;
mod rule_list;
mod rule_parser;
mod rules_iterator;
mod style_rule;
mod stylesheet;
pub mod supports_rule;
pub mod viewport_rule;
use crate::parser::ParserContext;
use crate::shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked};
use crate::shared_lock::{SharedRwLock, SharedRwLockReadGuard, ToCssWithGuard};
use crate::str::CssStringWriter;
use cssparser::{parse_one_rule, Parser, ParserInput};
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use servo_arc::Arc;
use std::fmt;
use style_traits::ParsingMode;
pub use self::counter_style_rule::CounterStyleRule;
pub use self::document_rule::DocumentRule;
pub use self::font_face_rule::FontFaceRule;
pub use self::font_feature_values_rule::FontFeatureValuesRule;
pub use self::import_rule::ImportRule;
pub use self::keyframes_rule::KeyframesRule;
pub use self::loader::StylesheetLoader;
pub use self::media_rule::MediaRule;
pub use self::namespace_rule::NamespaceRule;
pub use self::origin::{Origin, OriginSet, OriginSetIterator, PerOrigin, PerOriginIter};
pub use self::page_rule::PageRule;
pub use self::rule_list::{CssRules, CssRulesHelpers};
pub use self::rule_parser::{InsertRuleContext, State, TopLevelRuleParser};
pub use self::rules_iterator::{AllRules, EffectiveRules};
pub use self::rules_iterator::{NestedRuleIterationCondition, RulesIterator};
pub use self::style_rule::StyleRule;
pub use self::stylesheet::{DocumentStyleSheet, Namespaces, Stylesheet};
pub use self::stylesheet::{StylesheetContents, StylesheetInDocument, UserAgentStylesheets};
pub use self::supports_rule::SupportsRule;
pub use self::viewport_rule::ViewportRule;
/// Extra data that the backend may need to resolve url values.
#[cfg(not(feature = "gecko"))]
pub type UrlExtraData = ::servo_url::ServoUrl;
/// Extra data that the backend may need to resolve url values.
#[cfg(feature = "gecko")]
#[derive(Clone, PartialEq)]
pub struct UrlExtraData(
pub crate::gecko_bindings::sugar::refptr::RefPtr<crate::gecko_bindings::structs::URLExtraData>,
);
#[cfg(feature = "gecko")]
impl UrlExtraData {
/// True if this URL scheme is chrome.
#[inline]
pub fn is_chrome(&self) -> bool {
self.0.mIsChrome
}
/// Create a reference to this `UrlExtraData` from a reference to pointer.
///
/// The pointer must be valid and non null.
///
/// This method doesn't touch refcount.
#[inline]
pub unsafe fn from_ptr_ref(ptr: &*mut crate::gecko_bindings::structs::URLExtraData) -> &Self {
::std::mem::transmute(ptr)
}
}
#[cfg(feature = "gecko")]
impl fmt::Debug for UrlExtraData {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
use crate::gecko_bindings::{bindings, structs};
struct DebugURI(*mut structs::nsIURI);
impl fmt::Debug for DebugURI {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
use nsstring::nsCString;
let mut spec = nsCString::new();
unsafe {
bindings::Gecko_nsIURI_Debug(self.0, &mut spec);
}
spec.fmt(formatter)
}
}
formatter
.debug_struct("URLExtraData")
.field("is_chrome", &self.is_chrome())
.field("base", &DebugURI(self.0.mBaseURI.raw::<structs::nsIURI>()))
.field(
"referrer",
&DebugURI(self.0.mReferrer.raw::<structs::nsIURI>()),
)
.finish()
}
}
// XXX We probably need to figure out whether we should mark Eq here.
// It is currently marked so because properties::UnparsedValue wants Eq.
#[cfg(feature = "gecko")]
impl Eq for UrlExtraData {}
/// A CSS rule.
///
/// TODO(emilio): Lots of spec links should be around.
#[derive(Clone, Debug)]
#[allow(missing_docs)]
pub enum CssRule {
// No Charset here, CSSCharsetRule has been removed from CSSOM
// https://drafts.csswg.org/cssom/#changes-from-5-december-2013
Namespace(Arc<Locked<NamespaceRule>>),
Import(Arc<Locked<ImportRule>>),
Style(Arc<Locked<StyleRule>>),
Media(Arc<Locked<MediaRule>>),
FontFace(Arc<Locked<FontFaceRule>>),
FontFeatureValues(Arc<Locked<FontFeatureValuesRule>>),
CounterStyle(Arc<Locked<CounterStyleRule>>),
Viewport(Arc<Locked<ViewportRule>>),
Keyframes(Arc<Locked<KeyframesRule>>),
Supports(Arc<Locked<SupportsRule>>),
Page(Arc<Locked<PageRule>>),
Document(Arc<Locked<DocumentRule>>),
}
impl CssRule {
/// Measure heap usage.
#[cfg(feature = "gecko")]
fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
match *self {
// Not all fields are currently fully measured. Extra measurement
// may be added later.
CssRule::Namespace(_) => 0,
// We don't need to measure ImportRule::stylesheet because we measure
// it on the C++ side in the child list of the ServoStyleSheet.
CssRule::Import(_) => 0,
CssRule::Style(ref lock) => {
lock.unconditional_shallow_size_of(ops) + lock.read_with(guard).size_of(guard, ops)
},
CssRule::Media(ref lock) => {
lock.unconditional_shallow_size_of(ops) + lock.read_with(guard).size_of(guard, ops)
},
CssRule::FontFace(_) => 0,
CssRule::FontFeatureValues(_) => 0,
CssRule::CounterStyle(_) => 0,
CssRule::Viewport(_) => 0,
CssRule::Keyframes(_) => 0,
CssRule::Supports(ref lock) => {
lock.unconditional_shallow_size_of(ops) + lock.read_with(guard).size_of(guard, ops)
},
CssRule::Page(ref lock) => {
lock.unconditional_shallow_size_of(ops) + lock.read_with(guard).size_of(guard, ops)
},
CssRule::Document(ref lock) => {
lock.unconditional_shallow_size_of(ops) + lock.read_with(guard).size_of(guard, ops)
},
}
}
}
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum CssRuleType {
// https://drafts.csswg.org/cssom/#the-cssrule-interface
Style = 1,
Charset = 2,
Import = 3,
Media = 4,
FontFace = 5,
Page = 6,
// https://drafts.csswg.org/css-animations-1/#interface-cssrule-idl
Keyframes = 7,
Keyframe = 8,
// https://drafts.csswg.org/cssom/#the-cssrule-interface
Margin = 9,<|fim▁hole|> CounterStyle = 11,
// https://drafts.csswg.org/css-conditional-3/#extentions-to-cssrule-interface
Supports = 12,
// https://www.w3.org/TR/2012/WD-css3-conditional-20120911/#extentions-to-cssrule-interface
Document = 13,
// https://drafts.csswg.org/css-fonts-3/#om-fontfeaturevalues
FontFeatureValues = 14,
// https://drafts.csswg.org/css-device-adapt/#css-rule-interface
Viewport = 15,
}
#[allow(missing_docs)]
pub enum RulesMutateError {
Syntax,
IndexSize,
HierarchyRequest,
InvalidState,
}
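// Illustrative mapping (hedged: these are the DOMException names such errors
// are conventionally surfaced as in CSSOM, not something this file defines):
//   Syntax           -> SyntaxError
//   IndexSize        -> IndexSizeError
//   HierarchyRequest -> HierarchyRequestError
//   InvalidState     -> InvalidStateError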
impl CssRule {
/// Returns the CSSOM rule type of this rule.
pub fn rule_type(&self) -> CssRuleType {
match *self {
CssRule::Style(_) => CssRuleType::Style,
CssRule::Import(_) => CssRuleType::Import,
CssRule::Media(_) => CssRuleType::Media,
CssRule::FontFace(_) => CssRuleType::FontFace,
CssRule::FontFeatureValues(_) => CssRuleType::FontFeatureValues,
CssRule::CounterStyle(_) => CssRuleType::CounterStyle,
CssRule::Keyframes(_) => CssRuleType::Keyframes,
CssRule::Namespace(_) => CssRuleType::Namespace,
CssRule::Viewport(_) => CssRuleType::Viewport,
CssRule::Supports(_) => CssRuleType::Supports,
CssRule::Page(_) => CssRuleType::Page,
CssRule::Document(_) => CssRuleType::Document,
}
}
fn rule_state(&self) -> State {
match *self {
// CssRule::Charset(..) => State::Start,
CssRule::Import(..) => State::Imports,
CssRule::Namespace(..) => State::Namespaces,
_ => State::Body,
}
}
/// Parse a CSS rule.
///
/// Returns a parsed CSS rule and the final state of the parser.
///
/// Input state is None for a nested rule
pub fn parse(
css: &str,
insert_rule_context: InsertRuleContext,
parent_stylesheet_contents: &StylesheetContents,
shared_lock: &SharedRwLock,
state: State,
loader: Option<&StylesheetLoader>,
) -> Result<Self, RulesMutateError> {
let url_data = parent_stylesheet_contents.url_data.read();
let context = ParserContext::new(
parent_stylesheet_contents.origin,
&url_data,
None,
ParsingMode::DEFAULT,
parent_stylesheet_contents.quirks_mode,
None,
None,
);
let mut input = ParserInput::new(css);
let mut input = Parser::new(&mut input);
let mut guard = parent_stylesheet_contents.namespaces.write();
// nested rules are in the body state
let mut rule_parser = TopLevelRuleParser {
context,
shared_lock: &shared_lock,
loader,
state,
dom_error: None,
namespaces: &mut *guard,
insert_rule_context: Some(insert_rule_context),
};
parse_one_rule(&mut input, &mut rule_parser)
.map_err(|_| rule_parser.dom_error.unwrap_or(RulesMutateError::Syntax))
}
}
impl DeepCloneWithLock for CssRule {
/// Deep clones this CssRule.
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> CssRule {
match *self {
CssRule::Namespace(ref arc) => {
let rule = arc.read_with(guard);
CssRule::Namespace(Arc::new(lock.wrap(rule.clone())))
},
CssRule::Import(ref arc) => {
let rule = arc
.read_with(guard)
.deep_clone_with_lock(lock, guard, params);
CssRule::Import(Arc::new(lock.wrap(rule)))
},
CssRule::Style(ref arc) => {
let rule = arc.read_with(guard);
CssRule::Style(Arc::new(
lock.wrap(rule.deep_clone_with_lock(lock, guard, params)),
))
},
CssRule::Media(ref arc) => {
let rule = arc.read_with(guard);
CssRule::Media(Arc::new(
lock.wrap(rule.deep_clone_with_lock(lock, guard, params)),
))
},
CssRule::FontFace(ref arc) => {
let rule = arc.read_with(guard);
CssRule::FontFace(Arc::new(lock.wrap(rule.clone())))
},
CssRule::FontFeatureValues(ref arc) => {
let rule = arc.read_with(guard);
CssRule::FontFeatureValues(Arc::new(lock.wrap(rule.clone())))
},
CssRule::CounterStyle(ref arc) => {
let rule = arc.read_with(guard);
CssRule::CounterStyle(Arc::new(lock.wrap(rule.clone())))
},
CssRule::Viewport(ref arc) => {
let rule = arc.read_with(guard);
CssRule::Viewport(Arc::new(lock.wrap(rule.clone())))
},
CssRule::Keyframes(ref arc) => {
let rule = arc.read_with(guard);
CssRule::Keyframes(Arc::new(
lock.wrap(rule.deep_clone_with_lock(lock, guard, params)),
))
},
CssRule::Supports(ref arc) => {
let rule = arc.read_with(guard);
CssRule::Supports(Arc::new(
lock.wrap(rule.deep_clone_with_lock(lock, guard, params)),
))
},
CssRule::Page(ref arc) => {
let rule = arc.read_with(guard);
CssRule::Page(Arc::new(
lock.wrap(rule.deep_clone_with_lock(lock, guard, params)),
))
},
CssRule::Document(ref arc) => {
let rule = arc.read_with(guard);
CssRule::Document(Arc::new(
lock.wrap(rule.deep_clone_with_lock(lock, guard, params)),
))
},
}
}
}
impl ToCssWithGuard for CssRule {
// https://drafts.csswg.org/cssom/#serialize-a-css-rule
fn to_css(&self, guard: &SharedRwLockReadGuard, dest: &mut CssStringWriter) -> fmt::Result {
match *self {
CssRule::Namespace(ref lock) => lock.read_with(guard).to_css(guard, dest),
CssRule::Import(ref lock) => lock.read_with(guard).to_css(guard, dest),
CssRule::Style(ref lock) => lock.read_with(guard).to_css(guard, dest),
CssRule::FontFace(ref lock) => lock.read_with(guard).to_css(guard, dest),
CssRule::FontFeatureValues(ref lock) => lock.read_with(guard).to_css(guard, dest),
CssRule::CounterStyle(ref lock) => lock.read_with(guard).to_css(guard, dest),
CssRule::Viewport(ref lock) => lock.read_with(guard).to_css(guard, dest),
CssRule::Keyframes(ref lock) => lock.read_with(guard).to_css(guard, dest),
CssRule::Media(ref lock) => lock.read_with(guard).to_css(guard, dest),
CssRule::Supports(ref lock) => lock.read_with(guard).to_css(guard, dest),
CssRule::Page(ref lock) => lock.read_with(guard).to_css(guard, dest),
CssRule::Document(ref lock) => lock.read_with(guard).to_css(guard, dest),
}
}
}<|fim▁end|> | Namespace = 10,
// https://drafts.csswg.org/css-counter-styles-3/#extentions-to-cssrule-interface |
<|file_name|>0003_auto__add_orderset.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'OrderSet'
db.create_table(u'costs_orderset', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['myuser.MyUser'], null=True, blank=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=250)),
))
db.send_create_signal(u'costs', ['OrderSet'])
# Adding M2M table for field RVUs on 'OrderSet'
m2m_table_name = db.shorten_name(u'costs_orderset_RVUs')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('orderset', models.ForeignKey(orm[u'costs.orderset'], null=False)),
('rvu', models.ForeignKey(orm[u'costs.rvu'], null=False))
))
db.create_unique(m2m_table_name, ['orderset_id', 'rvu_id'])
def backwards(self, orm):
# Deleting model 'OrderSet'
db.delete_table(u'costs_orderset')
# Removing M2M table for field RVUs on 'OrderSet'
db.delete_table(db.shorten_name(u'costs_orderset_RVUs'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'costs.gpci': {
'Meta': {'unique_together': "(('year', 'location'),)", 'object_name': 'GPCI'},
'facility': ('django.db.models.fields.FloatField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'malpractice': ('django.db.models.fields.FloatField', [], {}),
'work': ('django.db.models.fields.FloatField', [], {}),
'year': ('django.db.models.fields.PositiveSmallIntegerField', [], {})
},
u'costs.medicarefactor': {
'Meta': {'object_name': 'MedicareFactor'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'value': ('django.db.models.fields.FloatField', [], {}),
'year': ('django.db.models.fields.PositiveSmallIntegerField', [], {'unique': 'True'})
},
u'costs.orderset': {
'Meta': {'object_name': 'OrderSet'},
'RVUs': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['costs.RVU']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['myuser.MyUser']", 'null': 'True', 'blank': 'True'})
},
u'costs.rvu': {
'Meta': {'unique_together': "(('year', 'code', 'mod'),)", 'object_name': 'RVU'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'description': ('django.db.models.fields.TextField', [], {}),
'facility': ('django.db.models.fields.FloatField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'malpractice': ('django.db.models.fields.FloatField', [], {}),
'mod': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'work': ('django.db.models.fields.FloatField', [], {}),
'year': ('django.db.models.fields.PositiveSmallIntegerField', [], {})
},
u'myuser.myuser': {
'Meta': {'ordering': "('last_name',)", 'object_name': 'MyUser'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),<|fim▁hole|> 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"})
}
}
complete_apps = ['costs']<|fim▁end|> | 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), |
<|file_name|>PlacementPoliciesImpl.java<|end_file_name|><|fim▁begin|>// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.avs.implementation;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.SimpleResponse;
import com.azure.core.util.Context;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.avs.fluent.PlacementPoliciesClient;
import com.azure.resourcemanager.avs.fluent.models.PlacementPolicyInner;
import com.azure.resourcemanager.avs.models.PlacementPolicies;
import com.azure.resourcemanager.avs.models.PlacementPolicy;
import com.fasterxml.jackson.annotation.JsonIgnore;<|fim▁hole|> private final PlacementPoliciesClient innerClient;
private final com.azure.resourcemanager.avs.AvsManager serviceManager;
public PlacementPoliciesImpl(
PlacementPoliciesClient innerClient, com.azure.resourcemanager.avs.AvsManager serviceManager) {
this.innerClient = innerClient;
this.serviceManager = serviceManager;
}
public PagedIterable<PlacementPolicy> list(String resourceGroupName, String privateCloudName, String clusterName) {
PagedIterable<PlacementPolicyInner> inner =
this.serviceClient().list(resourceGroupName, privateCloudName, clusterName);
return Utils.mapPage(inner, inner1 -> new PlacementPolicyImpl(inner1, this.manager()));
}
public PagedIterable<PlacementPolicy> list(
String resourceGroupName, String privateCloudName, String clusterName, Context context) {
PagedIterable<PlacementPolicyInner> inner =
this.serviceClient().list(resourceGroupName, privateCloudName, clusterName, context);
return Utils.mapPage(inner, inner1 -> new PlacementPolicyImpl(inner1, this.manager()));
}
public PlacementPolicy get(
String resourceGroupName, String privateCloudName, String clusterName, String placementPolicyName) {
PlacementPolicyInner inner =
this.serviceClient().get(resourceGroupName, privateCloudName, clusterName, placementPolicyName);
if (inner != null) {
return new PlacementPolicyImpl(inner, this.manager());
} else {
return null;
}
}
public Response<PlacementPolicy> getWithResponse(
String resourceGroupName,
String privateCloudName,
String clusterName,
String placementPolicyName,
Context context) {
Response<PlacementPolicyInner> inner =
this
.serviceClient()
.getWithResponse(resourceGroupName, privateCloudName, clusterName, placementPolicyName, context);
if (inner != null) {
return new SimpleResponse<>(
inner.getRequest(),
inner.getStatusCode(),
inner.getHeaders(),
new PlacementPolicyImpl(inner.getValue(), this.manager()));
} else {
return null;
}
}
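    // Usage sketch (illustrative; the resource names are hypothetical):
    //
    //   PlacementPolicy policy = manager.placementPolicies()
    //       .get("myResourceGroup", "myPrivateCloud", "cluster1", "policy1");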
public void delete(
String resourceGroupName, String privateCloudName, String clusterName, String placementPolicyName) {
this.serviceClient().delete(resourceGroupName, privateCloudName, clusterName, placementPolicyName);
}
public void delete(
String resourceGroupName,
String privateCloudName,
String clusterName,
String placementPolicyName,
Context context) {
this.serviceClient().delete(resourceGroupName, privateCloudName, clusterName, placementPolicyName, context);
}
public PlacementPolicy getById(String id) {
String resourceGroupName = Utils.getValueFromIdByName(id, "resourceGroups");
if (resourceGroupName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String
.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id)));
}
String privateCloudName = Utils.getValueFromIdByName(id, "privateClouds");
if (privateCloudName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String.format("The resource ID '%s' is not valid. Missing path segment 'privateClouds'.", id)));
}
String clusterName = Utils.getValueFromIdByName(id, "clusters");
if (clusterName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id)));
}
String placementPolicyName = Utils.getValueFromIdByName(id, "placementPolicies");
if (placementPolicyName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String
.format(
"The resource ID '%s' is not valid. Missing path segment 'placementPolicies'.", id)));
}
return this
.getWithResponse(resourceGroupName, privateCloudName, clusterName, placementPolicyName, Context.NONE)
.getValue();
}
public Response<PlacementPolicy> getByIdWithResponse(String id, Context context) {
String resourceGroupName = Utils.getValueFromIdByName(id, "resourceGroups");
if (resourceGroupName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String
.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id)));
}
String privateCloudName = Utils.getValueFromIdByName(id, "privateClouds");
if (privateCloudName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String.format("The resource ID '%s' is not valid. Missing path segment 'privateClouds'.", id)));
}
String clusterName = Utils.getValueFromIdByName(id, "clusters");
if (clusterName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id)));
}
String placementPolicyName = Utils.getValueFromIdByName(id, "placementPolicies");
if (placementPolicyName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String
.format(
"The resource ID '%s' is not valid. Missing path segment 'placementPolicies'.", id)));
}
return this.getWithResponse(resourceGroupName, privateCloudName, clusterName, placementPolicyName, context);
}
public void deleteById(String id) {
String resourceGroupName = Utils.getValueFromIdByName(id, "resourceGroups");
if (resourceGroupName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String
.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id)));
}
String privateCloudName = Utils.getValueFromIdByName(id, "privateClouds");
if (privateCloudName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String.format("The resource ID '%s' is not valid. Missing path segment 'privateClouds'.", id)));
}
String clusterName = Utils.getValueFromIdByName(id, "clusters");
if (clusterName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id)));
}
String placementPolicyName = Utils.getValueFromIdByName(id, "placementPolicies");
if (placementPolicyName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String
.format(
"The resource ID '%s' is not valid. Missing path segment 'placementPolicies'.", id)));
}
this.delete(resourceGroupName, privateCloudName, clusterName, placementPolicyName, Context.NONE);
}
public void deleteByIdWithResponse(String id, Context context) {
String resourceGroupName = Utils.getValueFromIdByName(id, "resourceGroups");
if (resourceGroupName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String
.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id)));
}
String privateCloudName = Utils.getValueFromIdByName(id, "privateClouds");
if (privateCloudName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String.format("The resource ID '%s' is not valid. Missing path segment 'privateClouds'.", id)));
}
String clusterName = Utils.getValueFromIdByName(id, "clusters");
if (clusterName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id)));
}
String placementPolicyName = Utils.getValueFromIdByName(id, "placementPolicies");
if (placementPolicyName == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
String
.format(
"The resource ID '%s' is not valid. Missing path segment 'placementPolicies'.", id)));
}
this.delete(resourceGroupName, privateCloudName, clusterName, placementPolicyName, context);
}
private PlacementPoliciesClient serviceClient() {
return this.innerClient;
}
private com.azure.resourcemanager.avs.AvsManager manager() {
return this.serviceManager;
}
public PlacementPolicyImpl define(String name) {
return new PlacementPolicyImpl(name, this.manager());
}
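    // A typical call path through the fluent layer (sketch; the resource names are
    // hypothetical and `placementPolicies()` is assumed to be the AvsManager accessor
    // for this collection):
    //
    //   PlacementPolicy policy = manager
    //       .placementPolicies()
    //       .get("myResourceGroup", "myPrivateCloud", "cluster1", "policy1");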
}<|fim▁end|> |
public final class PlacementPoliciesImpl implements PlacementPolicies {
@JsonIgnore private final ClientLogger logger = new ClientLogger(PlacementPoliciesImpl.class);
|
<|file_name|>ResteasyUriInfo.java<|end_file_name|><|fim▁begin|>package org.jboss.resteasy.spi;
import org.jboss.resteasy.specimpl.MultivaluedMapImpl;
import org.jboss.resteasy.specimpl.PathSegmentImpl;
import org.jboss.resteasy.specimpl.ResteasyUriBuilder;
import org.jboss.resteasy.util.Encode;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.PathSegment;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.List;
/**
* UriInfo implementation with some extra methods to help process requests
*
* @author <a href="mailto:[email protected]">Bill Burke</a>
* @version $Revision: 1 $
*/
public class ResteasyUriInfo implements UriInfo
{
private String path;
private String encodedPath;
private String matchingPath;
private MultivaluedMap<String, String> queryParameters;
private MultivaluedMap<String, String> encodedQueryParameters;
private MultivaluedMap<String, String> pathParameters;
private MultivaluedMap<String, String> encodedPathParameters;
private MultivaluedMap<String, PathSegment[]> pathParameterPathSegments;
private MultivaluedMap<String, PathSegment[]> encodedPathParameterPathSegments;
private List<PathSegment> pathSegments;
private List<PathSegment> encodedPathSegments;
private URI absolutePath;
private URI requestURI;
private URI baseURI;
private List<String> matchedUris;
private List<String> encodedMatchedUris;
private List<String> encodedMatchedPaths = new ArrayList<String>();
private List<Object> ancestors;
public ResteasyUriInfo(URI base, URI relative)
{
String b = base.toString();
if (!b.endsWith("/")) b += "/";
String r = relative.getRawPath();
if (r.startsWith("/"))
{
encodedPath = r;
path = relative.getPath();
}
else
{
encodedPath = "/" + r;
path = "/" + relative.getPath();
}
requestURI = UriBuilder.fromUri(base).path(relative.getRawPath()).replaceQuery(relative.getRawQuery()).build();
baseURI = base;
encodedPathSegments = PathSegmentImpl.parseSegments(encodedPath, false);
this.pathSegments = new ArrayList<PathSegment>(encodedPathSegments.size());
for (PathSegment segment : encodedPathSegments)
{
pathSegments.add(new PathSegmentImpl(((PathSegmentImpl) segment).getOriginal(), true));
}
extractParameters(requestURI.getRawQuery());
extractMatchingPath(encodedPathSegments);
absolutePath = UriBuilder.fromUri(requestURI).replaceQuery(null).build();
}
public ResteasyUriInfo(URI requestURI)
{
String r = requestURI.getRawPath();
if (r.startsWith("/"))
{
encodedPath = r;
path = requestURI.getPath();
}
else
{
encodedPath = "/" + r;
path = "/" + requestURI.getPath();
}
this.requestURI = requestURI;
baseURI = UriBuilder.fromUri(requestURI).replacePath("").build();
encodedPathSegments = PathSegmentImpl.parseSegments(encodedPath, false);
this.pathSegments = new ArrayList<PathSegment>(encodedPathSegments.size());
for (PathSegment segment : encodedPathSegments)
{
pathSegments.add(new PathSegmentImpl(((PathSegmentImpl) segment).getOriginal(), true));
}
extractParameters(requestURI.getRawQuery());
extractMatchingPath(encodedPathSegments);
absolutePath = UriBuilder.fromUri(requestURI).replaceQuery(null).build();
}
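// A minimal usage sketch (the URIs are hypothetical): construct a ResteasyUriInfo
// for an incoming request and read back the decoded data it pre-computes.
//
//   ResteasyUriInfo info = new ResteasyUriInfo(URI.create("http://localhost/app/items?tag=a&tag=b"));
//   info.getPath();                        // "/app/items"
//   info.getMatchingPath();                // "/app/items", matrix parameters stripped
//   info.getQueryParameters().get("tag");  // ["a", "b"]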
/**
* Computes the matching path, i.e. the path without matrix parameters
*
* @param encodedPathSegments
*/
protected void extractMatchingPath(List<PathSegment> encodedPathSegments)
{
StringBuilder preprocessedPath = new StringBuilder();
for (PathSegment pathSegment : encodedPathSegments)
{
preprocessedPath.append("/").append(pathSegment.getPath());
}
matchingPath = preprocessedPath.toString();
}
/**
* Encoded path without matrix parameters
*
* @return
*/
public String getMatchingPath()
{
return matchingPath;
}
/**
* Create a new UriInfo for the given request URI, relative to the baseURI
*
* @param relative
* @return
*/
public ResteasyUriInfo setRequestUri(URI relative)
{
String rel = relative.toString();
if (rel.startsWith(baseURI.toString()))
{
relative = URI.create(rel.substring(baseURI.toString().length()));
}
return new ResteasyUriInfo(baseURI, relative);
}
public String getPath()
{
return path;
}
public String getPath(boolean decode)
{
if (decode) return getPath();
return encodedPath;
}
public List<PathSegment> getPathSegments()
{
return pathSegments;
}
public List<PathSegment> getPathSegments(boolean decode)
{
if (decode) return getPathSegments();
return encodedPathSegments;
}
public URI getRequestUri()
{
return requestURI;
}
public UriBuilder getRequestUriBuilder()
{
return UriBuilder.fromUri(requestURI);
}
public URI getAbsolutePath()
{
return absolutePath;
}
public UriBuilder getAbsolutePathBuilder()
{
return UriBuilder.fromUri(absolutePath);
}
public URI getBaseUri()
{
return baseURI;
}
public UriBuilder getBaseUriBuilder()
{
return UriBuilder.fromUri(baseURI);
}
public MultivaluedMap<String, String> getPathParameters()
{
if (pathParameters == null)
{
pathParameters = new MultivaluedMapImpl<String, String>();
}
return pathParameters;
}
public void addEncodedPathParameter(String name, String value)
{
getEncodedPathParameters().add(name, value);
String value1 = Encode.decodePath(value);
getPathParameters().add(name, value1);
}
private MultivaluedMap<String, String> getEncodedPathParameters()
{
if (encodedPathParameters == null)
{
encodedPathParameters = new MultivaluedMapImpl<String, String>();
}
return encodedPathParameters;
}
public MultivaluedMap<String, PathSegment[]> getEncodedPathParameterPathSegments()
{
if (encodedPathParameterPathSegments == null)
{
encodedPathParameterPathSegments = new MultivaluedMapImpl<String, PathSegment[]>();
}
return encodedPathParameterPathSegments;
}
public MultivaluedMap<String, PathSegment[]> getPathParameterPathSegments()
{<|fim▁hole|> {
pathParameterPathSegments = new MultivaluedMapImpl<String, PathSegment[]>();
}
return pathParameterPathSegments;
}
public MultivaluedMap<String, String> getPathParameters(boolean decode)
{
if (decode) return getPathParameters();
return getEncodedPathParameters();
}
public MultivaluedMap<String, String> getQueryParameters()
{
if (queryParameters == null)
{
queryParameters = new MultivaluedMapImpl<String, String>();
}
return queryParameters;
}
protected MultivaluedMap<String, String> getEncodedQueryParameters()
{
if (encodedQueryParameters == null)
{
this.encodedQueryParameters = new MultivaluedMapImpl<String, String>();
}
return encodedQueryParameters;
}
public MultivaluedMap<String, String> getQueryParameters(boolean decode)
{
if (decode) return getQueryParameters();
else return getEncodedQueryParameters();
}
protected void extractParameters(String queryString)
{
if (queryString == null || queryString.equals("")) return;
String[] params = queryString.split("&");
for (String param : params)
{
if (param.indexOf('=') >= 0)
{
String[] nv = param.split("=", 2);
try
{
String name = URLDecoder.decode(nv[0], "UTF-8");
String val = nv.length > 1 ? nv[1] : "";
getEncodedQueryParameters().add(name, val);
getQueryParameters().add(name, URLDecoder.decode(val, "UTF-8"));
}
catch (UnsupportedEncodingException e)
{
throw new RuntimeException(e);
}
}
else
{
try
{
String name = URLDecoder.decode(param, "UTF-8");
getEncodedQueryParameters().add(name, "");
getQueryParameters().add(name, "");
}
catch (UnsupportedEncodingException e)
{
throw new RuntimeException(e);
}
}
}
}
public List<String> getMatchedURIs(boolean decode)
{
if (decode)
{
if (matchedUris == null) matchedUris = new ArrayList<String>();
return matchedUris;
}
else
{
if (encodedMatchedUris == null) encodedMatchedUris = new ArrayList<String>();
return encodedMatchedUris;
}
}
public List<String> getMatchedURIs()
{
return getMatchedURIs(true);
}
public List<Object> getMatchedResources()
{
if (ancestors == null) ancestors = new ArrayList<Object>();
return ancestors;
}
public void pushCurrentResource(Object resource)
{
if (ancestors == null) ancestors = new ArrayList<Object>();
ancestors.add(0, resource);
}
public void pushMatchedPath(String encoded)
{
encodedMatchedPaths.add(0, encoded);
}
public List<String> getEncodedMatchedPaths()
{
return encodedMatchedPaths;
}
public void popMatchedPath()
{
encodedMatchedPaths.remove(0);
}
public void pushMatchedURI(String encoded)
{
if (encoded.endsWith("/")) encoded = encoded.substring(0, encoded.length() - 1);
if (encoded.startsWith("/")) encoded = encoded.substring(1);
String decoded = Encode.decode(encoded);
if (encodedMatchedUris == null) encodedMatchedUris = new ArrayList<String>();
encodedMatchedUris.add(0, encoded);
if (matchedUris == null) matchedUris = new ArrayList<String>();
matchedUris.add(0, decoded);
}
@Override
public URI resolve(URI uri)
{
return getBaseUri().resolve(uri);
}
@Override
public URI relativize(URI uri)
{
URI from = getRequestUri();
URI to = uri;
if (uri.getScheme() == null && uri.getHost() == null)
{
to = getBaseUriBuilder().replaceQuery(null).path(uri.getPath()).replaceQuery(uri.getQuery()).fragment(uri.getFragment()).build();
}
return ResteasyUriBuilder.relativize(from, to);
}
}<|fim▁end|> | if (pathParameterPathSegments == null) |
<|file_name|>escape.py<|end_file_name|><|fim▁begin|>from rpython.flowspace.model import Variable
from rpython.rtyper.lltypesystem import lltype
from rpython.translator.simplify import get_graph
from rpython.tool.uid import uid
class CreationPoint(object):
def __init__(self, creation_method, TYPE, op=None):
self.escapes = False
self.returns = False
self.creation_method = creation_method
if creation_method == "constant":
self.escapes = True
self.TYPE = TYPE
self.op = op
def __repr__(self):
return ("CreationPoint(<0x%x>, %r, %s, esc=%s)" %
(uid(self), self.TYPE, self.creation_method, self.escapes))
class VarState(object):
def __init__(self, *creps):
self.creation_points = set()
for crep in creps:
self.creation_points.add(crep)
def contains(self, other):
return other.creation_points.issubset(self.creation_points)
def merge(self, other):
creation_points = self.creation_points.union(other.creation_points)
return VarState(*creation_points)
def setescapes(self):
changed = []
for crep in self.creation_points:
if not crep.escapes:
changed.append(crep)
crep.escapes = True
return changed
def setreturns(self):
changed = []
for crep in self.creation_points:
if not crep.returns:
changed.append(crep)
crep.returns = True
return changed
def does_escape(self):
for crep in self.creation_points:
if crep.escapes:
return True
return False
def does_return(self):
for crep in self.creation_points:
if crep.returns:
return True
return False
def __repr__(self):
return "<VarState %s>" % (self.creation_points, )
class AbstractDataFlowInterpreter(object):
def __init__(self, translation_context):
self.translation_context = translation_context
self.scheduled = {} # block: graph containing it
self.varstates = {} # var-or-const: state
self.creationpoints = {} # var: creationpoint
self.constant_cps = {} # const: creationpoint
self.dependencies = {} # creationpoint: {block: graph containing it}
self.functionargs = {} # graph: list of state of args
self.flown_blocks = {} # block: True
def seen_graphs(self):
return self.functionargs.keys()
def getstate(self, var_or_const):
if not isonheap(var_or_const):
return None
if var_or_const in self.varstates:
return self.varstates[var_or_const]
if isinstance(var_or_const, Variable):
varstate = VarState()
else:
if var_or_const not in self.constant_cps:
crep = CreationPoint("constant", var_or_const.concretetype)
self.constant_cps[var_or_const] = crep
else:
crep = self.constant_cps[var_or_const]
varstate = VarState(crep)
self.varstates[var_or_const] = varstate
return varstate
def getstates(self, varorconstlist):
return [self.getstate(var) for var in varorconstlist]
def setstate(self, var, state):
self.varstates[var] = state
def get_creationpoint(self, var, method="?", op=None):
if var in self.creationpoints:
return self.creationpoints[var]
crep = CreationPoint(method, var.concretetype, op)
self.creationpoints[var] = crep
return crep
def schedule_function(self, graph):
startblock = graph.startblock
if graph in self.functionargs:
args = self.functionargs[graph]
else:
args = []
for var in startblock.inputargs:
if not isonheap(var):
varstate = None
else:
crep = self.get_creationpoint(var, "arg")
varstate = VarState(crep)
self.setstate(var, varstate)
args.append(varstate)
self.scheduled[startblock] = graph
self.functionargs[graph] = args
resultstate = self.getstate(graph.returnblock.inputargs[0])
return resultstate, args
def flow_block(self, block, graph):
self.flown_blocks[block] = True
if block is graph.returnblock:
if isonheap(block.inputargs[0]):
self.returns(self.getstate(block.inputargs[0]))
return
if block is graph.exceptblock:<|fim▁hole|> if isonheap(block.inputargs[1]):
self.escapes(self.getstate(block.inputargs[1]))
return
self.curr_block = block
self.curr_graph = graph
for op in block.operations:
self.flow_operation(op)
for exit in block.exits:
args = self.getstates(exit.args)
targetargs = self.getstates(exit.target.inputargs)
# flow every block at least once
if (multicontains(targetargs, args) and
exit.target in self.flown_blocks):
continue
for prevstate, origstate, var in zip(args, targetargs,
exit.target.inputargs):
if not isonheap(var):
continue
newstate = prevstate.merge(origstate)
self.setstate(var, newstate)
self.scheduled[exit.target] = graph
def flow_operation(self, op):
args = self.getstates(op.args)
opimpl = getattr(self, 'op_' + op.opname, None)
if opimpl is not None:
res = opimpl(op, *args)
if res is not NotImplemented:
self.setstate(op.result, res)
return
if isonheap(op.result) or filter(None, args):
for arg in args:
if arg is not None:
self.escapes(arg)
def complete(self):
while self.scheduled:
block, graph = self.scheduled.popitem()
self.flow_block(block, graph)
def escapes(self, arg):
changed = arg.setescapes()
self.handle_changed(changed)
def returns(self, arg):
changed = arg.setreturns()
self.handle_changed(changed)
def handle_changed(self, changed):
for crep in changed:
if crep not in self.dependencies:
continue
self.scheduled.update(self.dependencies[crep])
def register_block_dependency(self, state, block=None, graph=None):
if block is None:
block = self.curr_block
graph = self.curr_graph
for crep in state.creation_points:
self.dependencies.setdefault(crep, {})[block] = graph
def register_state_dependency(self, state1, state2):
"state1 depends on state2: if state2 does escape/change, so does state1"
# change state1 according to how state2 is now
if state2.does_escape():
self.escapes(state1)
if state2.does_return():
self.returns(state1)
# register a dependency of the current block on state2:
# that means that if state2 changes the current block will be reflown
# triggering this function again and thus updating state1
self.register_block_dependency(state2)
# _____________________________________________________________________
# operation implementations
def op_malloc(self, op, typestate, flagsstate):
assert flagsstate is None
flags = op.args[1].value
if flags != {'flavor': 'gc'}:
return NotImplemented
return VarState(self.get_creationpoint(op.result, "malloc", op))
def op_malloc_varsize(self, op, typestate, flagsstate, lengthstate):
assert flagsstate is None
flags = op.args[1].value
if flags != {'flavor': 'gc'}:
return NotImplemented
return VarState(self.get_creationpoint(op.result, "malloc_varsize", op))
def op_cast_pointer(self, op, state):
return state
def op_setfield(self, op, objstate, fieldname, valuestate):
if valuestate is not None:
# be pessimistic for now:
# everything that gets stored into a structure escapes
self.escapes(valuestate)
return None
def op_setarrayitem(self, op, objstate, indexstate, valuestate):
if valuestate is not None:
# everything that gets stored into a structure escapes
self.escapes(valuestate)
return None
def op_getarrayitem(self, op, objstate, indexstate):
if isonheap(op.result):
return VarState(self.get_creationpoint(op.result, "getarrayitem", op))
def op_getfield(self, op, objstate, fieldname):
if isonheap(op.result):
# assume that getfield creates a new value
return VarState(self.get_creationpoint(op.result, "getfield", op))
def op_getarraysize(self, op, arraystate):
pass
def op_direct_call(self, op, function, *args):
graph = get_graph(op.args[0], self.translation_context)
if graph is None:
for arg in args:
if arg is None:
continue
# an external function can escape every parameter:
self.escapes(arg)
funcargs = [None] * len(args)
else:
result, funcargs = self.schedule_function(graph)
assert len(args) == len(funcargs)
for localarg, funcarg in zip(args, funcargs):
if localarg is None:
assert funcarg is None
continue
if funcarg is not None:
self.register_state_dependency(localarg, funcarg)
if isonheap(op.result):
# assume that a call creates a new value
return VarState(self.get_creationpoint(op.result, "direct_call", op))
def op_indirect_call(self, op, function, *args):
graphs = op.args[-1].value
args = args[:-1]
if graphs is None:
for localarg in args:
if localarg is None:
continue
self.escapes(localarg)
else:
for graph in graphs:
result, funcargs = self.schedule_function(graph)
assert len(args) == len(funcargs)
for localarg, funcarg in zip(args, funcargs):
if localarg is None:
assert funcarg is None
continue
self.register_state_dependency(localarg, funcarg)
if isonheap(op.result):
# assume that a call creates a new value
return VarState(self.get_creationpoint(op.result, "indirect_call", op))
def op_ptr_iszero(self, op, ptrstate):
return None
op_cast_ptr_to_int = op_keepalive = op_ptr_nonzero = op_ptr_iszero
def op_ptr_eq(self, op, ptr1state, ptr2state):
return None
op_ptr_ne = op_ptr_eq
def op_same_as(self, op, objstate):
return objstate
def isonheap(var_or_const):
return isinstance(var_or_const.concretetype, lltype.Ptr)
def multicontains(l1, l2):
assert len(l1) == len(l2)
for a, b in zip(l1, l2):
if a is None:
assert b is None
elif not a.contains(b):
return False
return True
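# A minimal driver sketch (the translator objects are hypothetical): schedule every
# graph, run the dataflow interpreter to a fixed point, then ask which graphs do
# nothing but return a fresh, non-escaping allocation.
#
#   adi = AbstractDataFlowInterpreter(translation_context)
#   for graph in graphs:
#       adi.schedule_function(graph)
#   adi.complete()
#   mallocs = malloc_like_graphs(adi)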
def is_malloc_like(adi, graph, seen):
if graph in seen:
return seen[graph]
return_state = adi.getstate(graph.getreturnvar())
if return_state is None or len(return_state.creation_points) != 1:
seen[graph] = False
return False
crep, = return_state.creation_points
if crep.escapes:
seen[graph] = False
return False
if crep.creation_method in ["malloc", "malloc_varsize"]:
assert crep.returns
seen[graph] = True
return True
if crep.creation_method == "direct_call":
subgraph = get_graph(crep.op.args[0], adi.translation_context)
if subgraph is None:
seen[graph] = False
return False
res = is_malloc_like(adi, subgraph, seen)
seen[graph] = res
return res
seen[graph] = False
return False
def malloc_like_graphs(adi):
seen = {}
return [graph for graph in adi.seen_graphs()
if is_malloc_like(adi, graph, seen)]<|fim▁end|> | if isonheap(block.inputargs[0]):
self.escapes(self.getstate(block.inputargs[0])) |
<|file_name|>p2.d.ts<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2014,Egret-Labs.org
* All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Egret-Labs.org nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY EGRET-LABS.ORG AND CONTRIBUTORS "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL EGRET-LABS.ORG AND CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
declare module p2 {
export class AABB {
upperBound: number[];
lowerBound: number[];
constructor(options?: {
upperBound?: number[];
lowerBound?: number[];
});
setFromPoints(points: number[][], position: number[], angle: number, skinSize: number): void;
copy(aabb: AABB): void;
extend(aabb: AABB): void;
overlaps(aabb: AABB): boolean;
}
export class Broadphase {
static AABB: number;
static BOUNDING_CIRCLE: number;
static NAIVE: number;
static SAP: number;
static boundingRadiusCheck(bodyA: Body, bodyB: Body): boolean;
static aabbCheck(bodyA: Body, bodyB: Body): boolean;
static canCollide(bodyA: Body, bodyB: Body): boolean;
constructor(type: number);
type: number;
result: Body[];
world: World;
boundingVolumeType: number;
setWorld(world: World): void;
getCollisionPairs(world: World): Body[];
boundingVolumeCheck(bodyA: Body, bodyB: Body): boolean;
}
export class GridBroadphase extends Broadphase {
constructor(options?: {
xmin?: number;
xmax?: number;
ymin?: number;
ymax?: number;
nx?: number;
ny?: number;
});
xmin: number;
xmax: number;
ymin: number;
ymax: number;
nx: number;
ny: number;
binsizeX: number;
binsizeY: number;
}
export class NativeBroadphase extends Broadphase {
}
export class Narrowphase {
contactEquations: ContactEquation[];
frictionEquations: FrictionEquation[];
enableFriction: boolean;
slipForce: number;
frictionCoefficient: number;
surfaceVelocity: number;
reuseObjects: boolean;
resuableContactEquations: any[];
reusableFrictionEquations: any[];
restitution: number;
stiffness: number;
relaxation: number;
frictionStiffness: number;
frictionRelaxation: number;
enableFrictionReduction: boolean;
contactSkinSize: number;
collidedLastStep(bodyA: Body, bodyB: Body): boolean;
reset(): void;
createContactEquation(bodyA: Body, bodyB: Body, shapeA: Shape, shapeB: Shape): ContactEquation;
createFrictionFromContact(c: ContactEquation): FrictionEquation;
}
export class SAPBroadphase extends Broadphase {
axisList: Body[];
axisIndex: number;
}
export class Constraint {
static DISTANCE: number;
static GEAR: number;
static LOCK: number;
static PRISMATIC: number;
static REVOLUTE: number;
constructor(bodyA: Body, bodyB: Body, type: number, options?: {
collideConnected?: boolean;
wakeUpBodies?: boolean;
});
type: number;
equeations: Equation[];
bodyA: Body;
bodyB: Body;
collideConnected: boolean;
update(): void;
setStiffness(stiffness: number): void;
setRelaxation(relaxation: number): void;
}
export class DistanceConstraint extends Constraint {
constructor(bodyA: Body, bodyB: Body, options?: {
collideConnected?: boolean;
wakeUpBodies?: boolean;
distance?: number;
localAnchorA?: number[];
localAnchorB?: number[];
maxForce?: number;
});
localAnchorA: number[];
localAnchorB: number[];
distance: number;
maxForce: number;
upperLimitEnabled: boolean;
upperLimit: number;
lowerLimitEnabled: boolean;
lowerLimit: number;
position: number;
setMaxForce(f: number): void;
getMaxForce(): number;
}
export class GearConstraint extends Constraint {
constructor(bodyA: Body, bodyB: Body, options?: {
collideConnected?: boolean;
wakeUpBodies?: boolean;
angle?: number;
ratio?: number;
maxTorque?: number;
});
ratio: number;
angle: number;
setMaxTorque(torque: number): void;
getMaxTorque(): number;
}
export class LockConstraint extends Constraint {
constructor(bodyA: Body, bodyB: Body, options?: {
collideConnected?: boolean;
wakeUpBodies?: boolean;
localOffsetB?: number[];
localAngleB?: number;
maxForce?: number;
});
localOffsetB: number[];
localAngleB: number;
setMaxForce(force: number): void;
getMaxForce(): number;
}
export class PrismaticConstraint extends Constraint {
constructor(bodyA: Body, bodyB: Body, options?: {
collideConnected?: boolean;
wakeUpBodies?: boolean;
maxForce?: number;
localAnchorA?: number[];
localAnchorB?: number[];
localAxisA?: number[];
disableRotationalLock?: boolean;
upperLimit?: number;
lowerLimit?: number;
});
localAnchorA: number[];
localAnchorB: number[];
localAxisA: number[];
position: number;
velocity: number;
lowerLimitEnabled: boolean;
upperLimitEnabled: boolean;
lowerLimit: number;
upperLimit: number;
upperLimitEquation: ContactEquation;
lowerLimitEquation: ContactEquation;
motorEquation: Equation;
motorEnabled: boolean;
motorSpeed: number;
disableRotationalLock: boolean;
enableMotor(): void;
disableMotor(): void;
setLimits(lower: number, upper: number): void;
}
export class RevoluteConstraint extends Constraint {
constructor(bodyA: Body, bodyB: Body, options?: {
collideConnected?: boolean;
wakeUpBodies?: boolean;
worldPivot?: number[];
localPivotA?: number[];
localPivotB?: number[];
maxForce?: number;
});
pivotA: number[];
pivotB: number[];
motorEquation: RotationalVelocityEquation;
motorEnabled: boolean;
angle: number;
lowerLimitEnabled: boolean;
upperLimitEnabled: boolean;
lowerLimit: number;
upperLimit: number;
upperLimitEquation: ContactEquation;
lowerLimitEquation: ContactEquation;
enableMotor(): void;
disableMotor(): void;
motorIsEnabled(): boolean;
setLimits(lower: number, upper: number): void;
setMotorSpeed(speed: number): void;
getMotorSpeed(): number;
}
export class AngleLockEquation extends Equation {
constructor(bodyA: Body, bodyB: Body, options?: {
angle?: number;
ratio?: number;
});
computeGq(): number;
setRatio(ratio: number): number;
setMaxTorque(torque: number): number;
}
export class ContactEquation extends Equation {
constructor(bodyA: Body, bodyB: Body);
contactPointA: number[];
penetrationVec: number[];
contactPointB: number[];
normalA: number[];
restitution: number;
firstImpact: boolean;
shapeA: Shape;
shapeB: Shape;
computeB(a: number, b: number, h: number): number;
}
export class Equation {
static DEFAULT_STIFFNESS: number;
static DEFAULT_RELAXATION: number;
constructor(bodyA: Body, bodyB: Body, minForce?: number, maxForce?: number);
minForce: number;
maxForce: number;
bodyA: Body;
bodyB: Body;
stiffness: number;
relaxation: number;
G: number[];
offset: number;
a: number;
b: number;
epsilon: number;
timeStep: number;
needsUpdate: boolean;
multiplier: number;
relativeVelocity: number;
enabled: boolean;
gmult(G: number[], vi: number[], wi: number[], vj: number[], wj: number[]): number;
computeB(a: number, b: number, h: number): number;
computeGq(): number;
computeGW(): number;
computeGWlambda(): number;
computeGiMf(): number;
computeGiMGt(): number;
addToWlambda(deltalambda: number): number;
computeInvC(eps: number): number;
update():void;
}
export class FrictionEquation extends Equation {
constructor(bodyA: Body, bodyB: Body, slipForce: number);
contactPointA: number[];
contactPointB: number[];
t: number[];
shapeA: Shape;
shapeB: Shape;
frictionCoefficient: number;
setSlipForce(slipForce: number): number;
getSlipForce(): number;
computeB(a: number, b: number, h: number): number;
}
export class RotationalLockEquation extends Equation {
constructor(bodyA: Body, bodyB: Body, options?: {
angle?: number;
});
angle: number;
computeGq(): number;
}
export class RotationalVelocityEquation extends Equation {
constructor(bodyA: Body, bodyB: Body);
computeB(a: number, b: number, h: number): number;
}
export class EventEmitter {
on(type: string, listener: Function): EventEmitter;
has(type: string, listener: Function): boolean;
off(type: string, listener: Function): EventEmitter;
emit(event: any): EventEmitter;
}
export class ContactMaterialOptions {
friction: number;
restitution: number;
stiffness: number;
relaxation: number;
frictionStiffness: number;
frictionRelaxation: number;
surfaceVelocity: number;
}
export class ContactMaterial {
static idCounter: number;
constructor(materialA: Material, materialB: Material, options?: ContactMaterialOptions);
id: number;
materialA: Material;
materialB: Material;
friction: number;
restitution: number;
stiffness: number;
relaxation: number;
frictionStiffness: number;
frictionRelaxation: number;
surfaceVelocity: number;
contactSkinSize: number;
}
export class Material {
static idCounter: number;
constructor(id: number);
id: number;
}
export class vec2 {
static crossLength(a: number[], b: number[]): number;
static crossVZ(out: number[], vec: number[], zcomp: number): number;
static crossZV(out: number[], zcomp: number, vec: number[]): number;
static rotate(out: number[], a: number[], angle: number): void;
static rotate90cw(out: number[], a: number[]): number;
static centroid(out: number[], a: number[], b: number[], c: number[]): number[];
static create(): number[];
static clone(a: number[]): number[];
static fromValues(x: number, y: number): number[];
static copy(out: number[], a: number[]): number[];
static set(out: number[], x: number, y: number): number[];
static toLocalFrame(out: number[], worldPoint: number[], framePosition: number[], frameAngle: number): void;
static toGlobalFrame(out: number[], localPoint: number[], framePosition: number[], frameAngle: number): void;
static add(out: number[], a: number[], b: number[]): number[];
static subtract(out: number[], a: number[], b: number[]): number[];
static sub(out: number[], a: number[], b: number[]): number[];
static multiply(out: number[], a: number[], b: number[]): number[];
static mul(out: number[], a: number[], b: number[]): number[];
static divide(out: number[], a: number[], b: number[]): number[];
static div(out: number[], a: number[], b: number[]): number[];
static scale(out: number[], a: number[], b: number): number[];
static distance(a: number[], b: number[]): number;
static dist(a: number[], b: number[]): number;
static squaredDistance(a: number[], b: number[]): number;
static sqrDist(a: number[], b: number[]): number;
static length(a: number[]): number;
static len(a: number[]): number;
static squaredLength(a: number[]): number;
static sqrLen(a: number[]): number;
static negate(out: number[], a: number[]): number[];
static normalize(out: number[], a: number[]): number[];
static dot(a: number[], b: number[]): number;
static str(a: number[]): string;
}
// export class BodyOptions {
//
// mass: number;
// position: number[];
// velocity: number[];
// angle: number;
// angularVelocity: number;
// force: number[];
// angularForce: number;
// fixedRotation: number;
//
// }
/**
* A rigid body. Has properties such as mass, position and velocity, and a set of shapes used for collision
*
* @class Body
* @constructor
* @extends EventEmitter
* @param {Object} [options]
* @param {Number} [options.mass=0] A number greater than 0. If set to 0, the type property will be set to Body.STATIC.
* @param {Array} [options.position]
* @param {Array} [options.velocity]
* @param {Number} [options.angle=0]
* @param {Number} [options.angularVelocity=0]
* @param {Array} [options.force]
* @param {Number} [options.angularForce=0]
* @param {Number} [options.fixedRotation=false]
*
* @example
* // Create a body
* var body = new Body({
* mass: 1,
* position: [0, 0],
* angle: 0,
* velocity: [0, 0],
* angularVelocity: 0
* });
*
* // Add a circle shape to the body
* body.addShape(new Circle(1));
*
* // Add the body to the world
* world.addBody(body);
*/
export class Body extends EventEmitter {
sleepyEvent: {
type: string;
};
sleepEvent: {
type: string;
};
wakeUpEvent: {
type: string;
};
static DYNAMIC: number;
static STATIC: number;
static KINEMATIC: number;
static AWAKE: number;
static SLEEPY: number;
static SLEEPING: number;
constructor(options?);
/**
* The body id
* @property id
* @type {Number}
*/
id: number;
/**
* The world the body has been added to. Set to null if the body has not been added to a world
* @property world
* @type {World}
*/
world: World;
/**
* The collision shapes of the body
*
* @property shapes
* @type {Array}
*/
shapes: Shape[];
/**
* The offsets of the collision shapes, relative to the body center
* @property shapeOffsets
* @type {Array}
*/
//shapeOffsets: number[][];
/**
* The angle transforms of the collision shapes
* @property shapeAngles
* @type {Array}
*/
//shapeAngles: number[];
/**
* The mass of the body
* @property mass
* @type {number}
*/
mass: number;
/**
* The inertia of the body
* @property inertia
* @type {number}
*/
inertia: number;
/**
* Whether the rotation is fixed
* @property fixedRotation
* @type {Boolean}
*/
fixedRotation: boolean;
/**
* The position of the body
* @property position
* @type {Array}
*/
position: number[];
/**
* The interpolated position of the body
* @property interpolatedPosition
* @type {Array}
*/
interpolatedPosition: number[];
/**
* The interpolated angle of the body
* @property interpolatedAngle
* @type {Number}
*/
interpolatedAngle: number;
/**
* The velocity of the body
* @property velocity
* @type {Array}
*/
velocity: number[];
/**
* The angle of the body
* @property angle
* @type {number}
*/
angle: number;
/**
* The force acting on the body
* @property force
* @type {Array}
*/
force: number[];
/**
* The angular force acting on the body
* @property angularForce
* @type {number}
*/
angularForce: number;
/**
* The linear damping. Value range [0,1]
* @property damping
* @type {Number}
* @default 0.1
*/
damping: number;
/**
* The angular damping. Value range [0,1]
* @property angularDamping
* @type {Number}
* @default 0.1
*/
angularDamping: number;
/**
* The motion type. One of Body.STATIC, Body.DYNAMIC or Body.KINEMATIC
*
* * Static bodies never move, and do not respond to forces or collisions
* * Dynamic bodies move, and respond to forces and collisions
* * Kinematic bodies move according to their own properties only, and do not respond to forces or collisions
*
* @property type
* @type {number}
*
* @example
* // The default type is STATIC
* var body = new Body();
* console.log(body.type == Body.STATIC); // true
*
* @example
* // Setting the mass to a non-zero value makes the body DYNAMIC
* var dynamicBody = new Body({
* mass : 1
* });
* console.log(dynamicBody.type == Body.DYNAMIC); // true
*
* @example
* // A KINEMATIC body only moves when you change its velocity
* var kinematicBody = new Body({
* type: Body.KINEMATIC
* });
*/
type: number;
/**
* The bounding circle radius
* @property boundingRadius
* @type {Number}
*/
boundingRadius: number;
/**
* The axis-aligned bounding box
* @property aabb
* @type {AABB}
*/
aabb: AABB;
/**
* Whether the AABB needs to be updated. Update it by calling the updateAABB method
* @property aabbNeedsUpdate
* @type {Boolean}
* @see updateAABB
*
* @example
* body.aabbNeedsUpdate = true;
* body.updateAABB();
* console.log(body.aabbNeedsUpdate); // false
*/
aabbNeedsUpdate: boolean;
/**
* If true, the body will automatically fall asleep. Body sleeping must also be enabled on the World
* @property allowSleep
* @type {Boolean}
* @default true
*/
allowSleep: boolean;
wantsToSleep: boolean;
/**
* One of Body.AWAKE, Body.SLEEPY or Body.SLEEPING
*
* The default is Body.AWAKE. If the body speed drops below sleepSpeedLimit, the state becomes Body.SLEEPY. After staying Body.SLEEPY for sleepTimeLimit seconds, it becomes Body.SLEEPING.
*
* @property sleepState
* @type {Number}
* @default Body.AWAKE
*/
sleepState: number;
/**
* If the body speed drops below this value, sleepState becomes Body.SLEEPY
* @property sleepSpeedLimit
* @type {Number}
* @default 0.2
*/
sleepSpeedLimit: number;
/**
* After staying Body.SLEEPY for sleepTimeLimit seconds, sleepState becomes Body.SLEEPING
* @property sleepTimeLimit
* @type {Number}
* @default 1
*/
sleepTimeLimit: number;
/**
* Gravity scaling factor. Set it to zero to make the body ignore gravity, or to -1 to invert gravity for it.
* @property {Number} gravityScale
* @default 1
*/
gravityScale: number;
/**
* The display object corresponding to each shape
*/
displays: egret.DisplayObject[];
userData: any;
updateSolveMassProperties(): void;
/**
* Sets the total density of the body
* @method setDensity
*/
setDensity(density: number): void;
/**
* Gets the total area of all shapes of the body
* @method getArea
* @return {Number}
*/
getArea(): number;
/**
* Gets the AABB of the body
* @method getAABB
*/
getAABB(): AABB;
/**
* Updates the AABB of the body
* @method updateAABB
*/
updateAABB(): void;
/**
* Updates the bounding radius
* @method updateBoundingRadius
*/
updateBoundingRadius(): void;
/**
* Adds a shape to the body
*
* @method addShape
* @param {Shape} shape The shape to add
* @param {Array} [offset] The offset from the body center
* @param {Number} [angle] The angle of the shape
*
* @example
* var body = new Body(),
* shape = new Circle();
*
* // At the body center
* body.addShape(shape);
*
* // Offset by one unit along the x axis
* body.addShape(shape,[1,0]);
*
* // Offset by one unit along the y axis, rotated 90 degrees counter-clockwise
* body.addShape(shape,[0,1],Math.PI/2);
*/
addShape(shape: Shape, offset?: number[], angle?: number): void;
/**
* Removes a shape from the body
* @method removeShape
* @param {Shape} shape
* @return {Boolean}
*/
removeShape(shape: Shape): boolean;
/**
* Updates derived properties; call this whenever the structure or the mass changes
*
* @method updateMassProperties
*
* @example
* body.mass += 1;
* body.updateMassProperties();
*/
updateMassProperties(): void;
/**
* Applies a force at a point in world space
* @method applyForce
* @param {Array} force The force vector
* @param {Array} worldPoint The point in world space
*/
applyForce(force: number[], worldPoint: number[]): void;
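/*
 * Example (sketch): push the body up and to the right, applying the force at the
 * body's own position so that no torque is introduced.
 *
 *     body.applyForce([10, 10], body.position);
 */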
/**
* Wake the body up. Normally you should not need this, as the body is automatically awoken at events such as collisions.
* Sets the sleepState to {{#crossLink "Body/AWAKE:property"}}Body.AWAKE{{/crossLink}} and emits the wakeUp event if the body wasn't awake before.
* @method wakeUp
*/
wakeUp(): void;
/**
* Force body sleep
* @method sleep
*/
sleep(): void;
/**
* Called every timestep to update internal sleep timer and change sleep state if needed.
* @method sleepTick
* @param {number} time The world time in seconds
* @param {boolean} dontSleep
* @param {number} dt
*/
sleepTick(time: number, dontSleep: boolean, dt: number): void;
getVelocityFromPosition(story: number[], dt: number): number[];
getAngularVelocityFromPosition(timeStep: number): number;
/**
* Check if the body is overlapping another body. Note that this method only works if the body was added to a World and if at least one step was taken.
* @method overlaps
* @param {Body} body
* @return {boolean}
*/
overlaps(body: Body): boolean;
// functions below were added by ladeng6666
angularVelocity: number;
toWorldFrame(out: number[], localPoint: number[]): void;
toLocalFrame(out: number[], worldPoint: number[]): void;
adjustCenterOfMass(): void;
fromPolygon(vertices: number[][], options?: any): Body;
applyDamping(dt: number): void;
applyImpulse(force: number[], worldPoint: number[]): void;
collisionResponse: boolean;
}
export class Spring {
constructor(bodyA: Body, bodyB: Body, options?: {
stiffness?: number;
damping?: number;
localAnchorA?: number[];
localAnchorB?: number[];
worldAnchorA?: number[];
worldAnchorB?: number[];
});
stiffness: number;
damping: number;
bodyA: Body;
bodyB: Body;
applyForce(): void;
}
export class LinearSpring extends Spring {
localAnchorA: number[];
localAnchorB: number[];
restLength: number;
setWorldAnchorA(worldAnchorA: number[]): void;
setWorldAnchorB(worldAnchorB: number[]): void;
getWorldAnchorA(result: number[]): number[];
getWorldAnchorB(result: number[]): number[];
applyForce(): void;
}
export class RotationalSpring extends Spring {
constructor(bodyA: Body, bodyB: Body, options?: {
restAngle?: number;
stiffness?: number;
damping?: number;
});
restAngle: number;
}
export class Capsule extends Shape {
constructor(length?: number, radius?: number);
length: number;
radius: number;
}
export class Circle extends Shape {
constructor(radius: number);
/**
* The radius of the circle
* @property radius
* @type {number}
*/
radius: number;
}
export class Convex extends Shape {
static triangleArea(a: number[], b: number[], c: number[]): number;
constructor(vertices: number[][], axes?: number[]);
vertices: number[][];
axes: number[];
centerOfMass: number[];
triangles: number[];
boundingRadius: number;
projectOntoLocalAxis(localAxis: number[], result: number[]): void;
projectOntoWorldAxis(localAxis: number[], shapeOffset: number[], shapeAngle: number, result: number[]): void;
updateCenterOfMass(): void;
}
export class Heightfield extends Shape {
constructor(data: number[], options?: {
minValue?: number;
maxValue?: number;
elementWidth: number;
});
data: number[];
maxValue: number;
minValue: number;
elementWidth: number;
}
export class Shape {
static idCounter: number;
static CIRCLE: number;
static PARTICLE: number;
static PLANE: number;
static CONVEX: number;
static LINE: number;
static RECTANGLE: number;
static CAPSULE: number;
static HEIGHTFIELD: number;
constructor(type?: number);
type: number;
id: number;
boundingRadius: number;
collisionGroup: number;
collisionMask: number;
material: Material;
area: number;
sensor: boolean;
vertices: number[][]; //2015-05-12 ladeng6666
angle: number;
position: number[];
computeMomentOfInertia(mass: number): number;
updateBoundingRadius(): number;
updateArea(): void;
computeAABB(out: AABB, position: number[], angle: number): void;
}
export class Line extends Shape {
constructor(length?: number);
length: number;
}
export class Particle extends Shape {
}
export class Plane extends Shape {
}
export class Rectangle extends Shape {
constructor(width?: number, height?: number);
width: number;
height: number;
}
export class Solver extends EventEmitter {
static GS: number;
static ISLAND: number;
constructor(options?: {}, type?: number);
type: number;
equations: Equation[];
equationSortFunction: Equation; //Equation | boolean
solve(dy: number, world: World): void;
solveIsland(dy: number, island: Island): void;
sortEquations(): void;
addEquation(eq: Equation): void;
addEquations(eqs: Equation[]): void;
removeEquation(eq: Equation): void;
removeAllEquations(): void;
tolerance: number;
frictionIterations: number;
}
export class GSSolver extends Solver {
constructor(options?: {
iterations?: number;
tolerance?: number;
});
iterations: number;
tolerance: number;
useZeroRHS: boolean;
frictionIterations: number;
usedIterations: number;
solve(h: number, world: World): void;
}
export class OverlapKeeper {
constructor(bodyA: Body, shapeA: Shape, bodyB: Body, shapeB: Shape);
shapeA: Shape;
shapeB: Shape;
bodyA: Body;
bodyB: Body;
tick(): void;
setOverlapping(bodyA: Body, shapeA: Shape, bodyB: Body, shapeB: Shape): void;
bodiesAreOverlapping(bodyA: Body, bodyB: Body): boolean;
set(bodyA: Body, shapeA: Shape, bodyB: Body, shapeB: Shape): void;
}
export class TupleDictionary {
data: number[];
keys: number[];
getKey(id1: number, id2: number): string;
getByKey(key: number): number;
get(i: number, j: number): number;
set(i: number, j: number, value: number): number;
reset(): void;
copy(dict: TupleDictionary): void;
}
export class Utils {
static appendArray<T>(a: Array<T>, b: Array<T>): Array<T>;
static splice<T>(array: Array<T>, index: number, howMany: number): void;
static extend(a: any, b: any): void;
static defaults(options: any, defaults: any): any;
}
export class Island {
equations: Equation[];
bodies: Body[];
reset(): void;
getBodies(result: any): Body[];
wantsToSleep(): boolean;
sleep(): boolean;
}
export class IslandManager extends Solver {
static getUnvisitedNode(nodes: IslandNode[]): IslandNode; // IslandNode | boolean
equations: Equation[];
islands: Island[];
nodes: IslandNode[];
visit(node: IslandNode, bds: Body[], eqs: Equation[]): void;
bfs(root: IslandNode, bds: Body[], eqs: Equation[]): void;
split(world: World): Island[];
}
export class IslandNode {
constructor(body: Body);
body: Body;
neighbors: IslandNode[];
equations: Equation[];
visited: boolean;
reset(): void;
}
/**
* The world, which contains all bodies
*
* @class World
* @constructor
* @param {Object} [options]
* @param {Solver} [options.solver] Defaults to GSSolver.
* @param {Array} [options.gravity] Defaults to [0,-9.78]
* @param {Broadphase} [options.broadphase] Defaults to NaiveBroadphase
* @param {Boolean} [options.islandSplit=false]
* @param {Boolean} [options.doProfiling=false]
* @extends EventEmitter
*
* @example
* var world = new World({
* gravity: [0, -9.81],
* broadphase: new SAPBroadphase()
* });
*/
export class World extends EventEmitter {
/**
* Fired after step() has been executed
* @event postStep
*/
postStepEvent: {
type: string;
};
/**
* Fired when a Body is added to the world
* @event addBody
* @param {Body} body
*/
addBodyEvent: {
type: string;
};
/**
* Fired when a Body is removed from the world
* @event removeBody
* @param {Body} body
*/
removeBodyEvent: {
type: string;
};
/**
* Fired when a Spring is added to the world
* @event addSpring
* @param {Spring} spring
*/
addSpringEvent: {
type: string;
};
/**
* Fired when two bodies collide for the first time. By the time this fires, the collision step has already completed
* @event impact
* @param {Body} bodyA
* @param {Body} bodyB
*/
impactEvent: {
type: string;
bodyA: Body;
bodyB: Body;
shapeA: Shape;
shapeB: Shape;
contactEquation: ContactEquation;
};
/**
* Fired after the broadphase has collected the collision pairs
* @event postBroadphase
* @param {Array} pairs The array of collision pairs
*/
postBroadphaseEvent: {
type: string;
pairs: Body[];
};
/**
* Fired when two shapes start overlapping
* @event beginContact
* @param {Shape} shapeA
* @param {Shape} shapeB
* @param {Body} bodyA
* @param {Body} bodyB
* @param {Array} contactEquations
*/
beginContactEvent: {
type: string;
shapeA: Shape;
shapeB: Shape;
bodyA: Body;
bodyB: Body;
contactEquations: ContactEquation[];
};
/**
* Fired when two shapes stop overlapping
* @event endContact
* @param {Shape} shapeA
* @param {Shape} shapeB
* @param {Body} bodyA
* @param {Body} bodyB
* @param {Array} contactEquations
*/
endContactEvent: {
type: string;
shapeA: Shape;
shapeB: Shape;
bodyA: Body;
bodyB: Body;
};
/**
* Fired just before equations are added to the solver to be solved. Can be used to control what equations goes into the solver.
* @event preSolve
* @param {Array} contactEquations An array of contacts to be solved.
* @param {Array} frictionEquations An array of friction equations to be solved.
*/
preSolveEvent: {
type: string;
contactEquations: ContactEquation[];
frictionEquations: FrictionEquation[];
};
/**
* Never let bodies sleep
* @static
* @property {number} NO_SLEEPING
*/
static NO_SLEEPING: number;
/**
* Let individual bodies sleep
* @static
* @property {number} BODY_SLEEPING
*/
static BODY_SLEEPING: number;
/**
* Deactivate bodies that are in contact if all of them are nearly sleeping. World.islandSplit must be enabled
* @static
* @property {number} ISLAND_SLEEPING
*/
static ISLAND_SLEEPING: number;
constructor(options?: {
solver?: Solver;
gravity?: number[];
broadphase?: Broadphase;
islandSplit?: boolean;
doProfiling?: boolean;
});
/**
* All springs in the world
* @property springs
* @type {Array}
*/
springs: Spring[];
/**
* All bodies in the world
* @property {Array} bodies
*/
bodies: Body[];
/**
* The solver used to satisfy constraints and contacts. Defaults to GSSolver
* @property {Solver} solver
*/
solver: Solver;
/**
* @property narrowphase
* @type {Narrowphase}
*/
narrowphase: Narrowphase;
/**
* The island manager of this world.
* @property {IslandManager} islandManager
*/
islandManager: IslandManager;
/**
* Gravity. Applied to all bodies at the start of each step()
*
* @property gravity
* @type {Array}
*/
gravity: number[];
/**
* The gravity used for friction computations
* @property {Number} frictionGravity
*/
frictionGravity: number;
/**
* If true, frictionGravity is automatically set to the length of gravity.
* @property {Boolean} useWorldGravityAsFrictionGravity
*/
useWorldGravityAsFrictionGravity: boolean;
/**
* @property broadphase
* @type {Broadphase}
*/
broadphase: Broadphase;
/**
* User-added constraints
*
* @property constraints
* @type {Array}
*/
constraints: Constraint[];
/**
* The default material, used by defaultContactMaterial
* @property {Material} defaultMaterial
*/
defaultMaterial: Material;
/**
* The default contact material, used when no contact material is defined for the colliding materials
* @property {ContactMaterial} defaultContactMaterial
*/
defaultContactMaterial: ContactMaterial;
/**
* Whether to apply spring forces automatically
* @property applySpringForces
* @type {Boolean}
*/
applySpringForces: boolean;
/**
* Whether to apply damping automatically
* @property applyDamping
* @type {Boolean}
*/
applyDamping: boolean;
/**
* Whether to apply gravity automatically
* @property applyGravity
* @type {Boolean}
*/
applyGravity: boolean;
/**
* Whether to solve constraints
* @property solveConstraints<|fim▁hole|> * @type {Boolean}
*/
solveConstraints: boolean;
/**
* The contact materials added to the world
* @property contactMaterials
* @type {Array}
*/
contactMaterials: ContactMaterial[];
/**
* The world time
* @property time
* @type {Number}
*/
time: number;
/**
* Whether the world is currently stepping
* @property {Boolean} stepping
*/
stepping: boolean;
/**
* Whether island splitting is enabled
* @property {Boolean} islandSplit
*/
islandSplit: boolean;
/**
* If true, the world emits impact events. Turning this off can improve performance
* @property emitImpactEvent
* @type {Boolean}
*/
emitImpactEvent: boolean;
/**
* The body sleeping strategy. One of World.NO_SLEEPING, World.BODY_SLEEPING or World.ISLAND_SLEEPING
* @property sleepMode
* @type {number}
* @default World.NO_SLEEPING
*/
sleepMode: number;
/**
* Adds a constraint to the world
* @method addConstraint
* @param {Constraint} c
*/
addConstraint(c: Constraint): void;
/**
* Adds a contact material to the world
* @method addContactMaterial
* @param {ContactMaterial} contactMaterial
*/
addContactMaterial(contactMaterial: ContactMaterial): void;
/**
* Removes a contact material from the world
* @method removeContactMaterial
* @param {ContactMaterial} cm
*/
removeContactMaterial(cm: ContactMaterial): void;
/**
* Gets the contact material for the given pair of materials
* @method getContactMaterial
* @param {Material} materialA
* @param {Material} materialB
* @return {ContactMaterial} The matching contact material, or false if none was found
*/
getContactMaterial(materialA: Material, materialB: Material): ContactMaterial;
/**
* Removes a constraint from the world
* @method removeConstraint
* @param {Constraint} c
*/
removeConstraint(c: Constraint): void;
/**
* Steps the physics world forward in time
*
* @method step
* @param {Number} dt The fixed time step size to use
* @param {Number} [timeSinceLastCalled=0]
* @param {Number} [maxSubSteps=10]
*
* @example
* var world = new World();
* world.step(0.01);
*/
step(dt: number, timeSinceLastCalled?: number, maxSubSteps?: number): void;
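/*
 * A typical fixed-timestep loop driving step() (sketch; the timing values are
 * illustrative):
 *
 *     var fixedTimeStep = 1 / 60, maxSubSteps = 10, lastTime;
 *     function animate(time) {
 *         requestAnimationFrame(animate);
 *         var dt = lastTime ? (time - lastTime) / 1000 : 0;
 *         lastTime = time;
 *         world.step(fixedTimeStep, dt, maxSubSteps);
 *     }
 *     requestAnimationFrame(animate);
 */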
/**
* Adds a spring to the world
*
* @method addSpring
* @param {Spring} s
*/
addSpring(s: Spring): void;
/**
* Removes a spring from the world
*
* @method removeSpring
* @param {Spring} s
*/
removeSpring(s: Spring): void;
/**
* Adds a body to the world
*
* @method addBody
* @param {Body} body
*
* @example
* var world = new World(),
* body = new Body();
* world.addBody(body);
*/
addBody(body: Body): void;
/**
* Removes a body. If called during step(), the removal happens after the current step
*
* @method removeBody
* @param {Body} body
*/
removeBody(body: Body): void;
/**
* Gets a body by its id
* @method getBodyById
* @return {Body|Boolean} The body, or false if it was not found
*/
getBodyById(id: number): Body;
/**
* Disables collision between two bodies
* @method disableCollision
* @param {Body} bodyA
* @param {Body} bodyB
*/
disableBodyCollision(bodyA: Body, bodyB: Body): void;
/**
* Enables collision between two bodies
* @method enableCollision
* @param {Body} bodyA
* @param {Body} bodyB
*/
enableBodyCollision(bodyA: Body, bodyB: Body): void;
/**
* Resets the world
* @method clear
*/
clear(): void;
/**
* Gets a clone of this world
* @method clone
* @return {World}
*/
clone(): World;
/**
* [ladeng6666] Tests which of the given bodies overlap a world-space point, and returns the list of bodies that were hit
* @param {Array} worldPoint The world-space point to test.
* @param {Array} bodies The list of bodies to test.
* @param {number} precision The precision of the test.
*/
hitTest(worldPoint: number[], bodies: Body[], precision?: number): Body[];
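/*
 * Example (sketch): pick the body under a pointer, with the pointer position
 * already converted to physics-world coordinates.
 *
 *     var hits = world.hitTest([physicsX, physicsY], world.bodies);
 *     if (hits.length > 0) { startDragging(hits[0]); }
 */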
//functions below were added by ladeng6666
/*raycastAll(from: number[], to: number[], options: { collisionMask?: number; collisionGroup?: number; skipBackfaces?: boolean; checkCollisionResponse?:boolean}, callback: Function): void;
raycastAny(from: number[], to: number[], options: Object, result: RayCastResult): void;
raycastClosed(from: number[], to: number[], options: Object, callback: Function): void;
*/
raycast(result: RaycastResult, ray: Ray);
}
export class RaycastResult {
constructor();
body: Body;
fraction: number;
shape: Shape;
faceIndex: number;
isStopped: boolean;
normal: number[];
getHitPoint(out:number[],ray:Ray):number[];
getHitDistance(ray:Ray): number;
hasHit(): boolean;
reset();
stop();
}
export class Ray {
static ANY: number;
static CLOSEST: number;
static ALL: number;
constructor(options?: {
to?: number[];
from?: number[];
mode?: number;
callback?: Function;
collisionMask?: number;
collisionGroup?: number;
checkCollisionResponse?: boolean;
skipBackfaces?: boolean;
direction?: number[];
length?: number;
});
to: number[];
from: number[];
mode: number;
callback: Function;
collisionMask: number;
collisionGroup: number;
checkCollisionResponse: boolean;
skipBackfaces: boolean;
direction: number[];
length: number;
}
}<|fim▁end|> | |
<|file_name|>font.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Specified values for font properties
use Atom;
use app_units::Au;
use byteorder::{BigEndian, ByteOrder};
use cssparser::{Parser, Token};
#[cfg(feature = "gecko")]
use gecko_bindings::bindings;
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
use parser::{Parse, ParserContext};
use properties::longhands::system_font::SystemFont;
use std::fmt::{self, Write};
use style_traits::{CssWriter, KeywordsCollectFn, ParseError};
use style_traits::{SpecifiedValueInfo, StyleParseErrorKind, ToCss};
use style_traits::values::SequenceWriter;
use values::CustomIdent;
use values::computed::{Angle as ComputedAngle, Percentage as ComputedPercentage};
use values::computed::{font as computed, Context, Length, NonNegativeLength, ToComputedValue};
use values::computed::font::{FamilyName, FontFamilyList, FontStyleAngle, SingleFontFamily};
use values::generics::NonNegative;
use values::generics::font::{KeywordSize, VariationValue};
use values::generics::font::{self as generics, FeatureTagValue, FontSettings, FontTag};
use values::specified::{AllowQuirks, Angle, Integer, LengthOrPercentage, NoCalcLength, Number, Percentage};
use values::specified::length::{FontBaseSize, AU_PER_PT, AU_PER_PX};
// FIXME(emilio): The system font code is copy-pasta, and should be cleaned up.
macro_rules! system_font_methods {
($ty:ident, $field:ident) => {
system_font_methods!($ty);
fn compute_system(&self, _context: &Context) -> <$ty as ToComputedValue>::ComputedValue {
debug_assert!(matches!(*self, $ty::System(..)));
#[cfg(feature = "gecko")]
{
_context.cached_system_font.as_ref().unwrap().$field.clone()
}
#[cfg(feature = "servo")]
{
unreachable!()
}
}
};
($ty:ident) => {
/// Get a specified value that represents a system font.
pub fn system_font(f: SystemFont) -> Self {
$ty::System(f)
}
        /// Retrieve a SystemFont from the specified value.
pub fn get_system(&self) -> Option<SystemFont> {
if let $ty::System(s) = *self {
Some(s)
} else {
None
}
}
}
}
const DEFAULT_SCRIPT_MIN_SIZE_PT: u32 = 8;
const DEFAULT_SCRIPT_SIZE_MULTIPLIER: f64 = 0.71;
/// The minimum font-weight value per:
///
/// https://drafts.csswg.org/css-fonts-4/#font-weight-numeric-values
pub const MIN_FONT_WEIGHT: f32 = 1.;
/// The maximum font-weight value per:
///
/// https://drafts.csswg.org/css-fonts-4/#font-weight-numeric-values
pub const MAX_FONT_WEIGHT: f32 = 1000.;
/// A specified font-weight value.
///
/// https://drafts.csswg.org/css-fonts-4/#propdef-font-weight
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)]
pub enum FontWeight {
/// `<font-weight-absolute>`
Absolute(AbsoluteFontWeight),
/// Bolder variant
Bolder,
/// Lighter variant
Lighter,
/// System font variant.
#[css(skip)]
System(SystemFont),
}
impl FontWeight {
system_font_methods!(FontWeight, font_weight);
/// `normal`
#[inline]
pub fn normal() -> Self {
FontWeight::Absolute(AbsoluteFontWeight::Normal)
}
/// Get a specified FontWeight from a gecko keyword
pub fn from_gecko_keyword(kw: u32) -> Self {
debug_assert!(kw % 100 == 0);
debug_assert!(kw as f32 <= MAX_FONT_WEIGHT);
FontWeight::Absolute(AbsoluteFontWeight::Weight(Number::new(kw as f32)))
}
}
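// A minimal sanity-test sketch (an addition, not from the original file)
// illustrating the keyword mapping above; it assumes the standard Rust test
// harness and the derives declared on these types.
#[cfg(test)]
mod font_weight_from_gecko_keyword_example {
    use super::*;

    #[test]
    fn gecko_keyword_becomes_absolute_numeric_weight() {
        // 700 is the Gecko keyword value for `bold`.
        assert_eq!(
            FontWeight::from_gecko_keyword(700),
            FontWeight::Absolute(AbsoluteFontWeight::Weight(Number::new(700.)))
        );
    }
}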
impl Parse for FontWeight {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontWeight, ParseError<'i>> {
if let Ok(absolute) = input.try(|input| AbsoluteFontWeight::parse(context, input)) {
return Ok(FontWeight::Absolute(absolute));
}
Ok(try_match_ident_ignore_ascii_case! { input,
"bolder" => FontWeight::Bolder,
"lighter" => FontWeight::Lighter,
})
}
}
impl ToComputedValue for FontWeight {
type ComputedValue = computed::FontWeight;
#[inline]
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
match *self {
FontWeight::Absolute(ref abs) => abs.compute(),
FontWeight::Bolder => context
.builder
.get_parent_font()
.clone_font_weight()
.bolder(),
FontWeight::Lighter => context
.builder
.get_parent_font()
.clone_font_weight()
.lighter(),
FontWeight::System(_) => self.compute_system(context),
}
}
#[inline]
fn from_computed_value(computed: &computed::FontWeight) -> Self {
FontWeight::Absolute(AbsoluteFontWeight::Weight(Number::from_computed_value(
&computed.0,
)))
}
}
/// An absolute font-weight value for a @font-face rule.
///
/// https://drafts.csswg.org/css-fonts-4/#font-weight-absolute-values
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)]
pub enum AbsoluteFontWeight {
/// A `<number>`, with the additional constraints specified in:
///
/// https://drafts.csswg.org/css-fonts-4/#font-weight-numeric-values
Weight(Number),
/// Normal font weight. Same as 400.
Normal,
/// Bold font weight. Same as 700.
Bold,
}
impl AbsoluteFontWeight {
/// Returns the computed value for this absolute font weight.
pub fn compute(&self) -> computed::FontWeight {
match *self {
AbsoluteFontWeight::Weight(weight) => {
computed::FontWeight(weight.get().max(MIN_FONT_WEIGHT).min(MAX_FONT_WEIGHT))
},
AbsoluteFontWeight::Normal => computed::FontWeight::normal(),
AbsoluteFontWeight::Bold => computed::FontWeight::bold(),
}
}
}
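// Illustrative test sketch (added; assumes the test harness): `calc()`
// expressions can escape the parse-time range check in the `Parse` impl
// below, so `compute()` clamps into the valid range.
#[cfg(test)]
mod absolute_font_weight_clamping_example {
    use super::*;

    #[test]
    fn out_of_range_weights_clamp_on_compute() {
        let heavy = AbsoluteFontWeight::Weight(Number::new(1200.));
        assert!(heavy.compute().0 == MAX_FONT_WEIGHT);
        let feather = AbsoluteFontWeight::Weight(Number::new(0.5));
        assert!(feather.compute().0 == MIN_FONT_WEIGHT);
    }
}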
impl Parse for AbsoluteFontWeight {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
if let Ok(number) = input.try(|input| Number::parse(context, input)) {
// We could add another AllowedNumericType value, but it doesn't
// seem worth it just for a single property with such a weird range,
// so we do the clamping here manually.
if !number.was_calc() &&
(number.get() < MIN_FONT_WEIGHT || number.get() > MAX_FONT_WEIGHT)
{
return Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
return Ok(AbsoluteFontWeight::Weight(number));
}
Ok(try_match_ident_ignore_ascii_case! { input,
"normal" => AbsoluteFontWeight::Normal,
"bold" => AbsoluteFontWeight::Bold,
})
}
}
/// The specified value of the `font-style` property, without the system font
/// crap.
pub type SpecifiedFontStyle = generics::FontStyle<Angle>;
impl ToCss for SpecifiedFontStyle {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
match *self {
generics::FontStyle::Normal => dest.write_str("normal"),
generics::FontStyle::Italic => dest.write_str("italic"),
generics::FontStyle::Oblique(ref angle) => {
dest.write_str("oblique")?;
if *angle != Self::default_angle() {
dest.write_char(' ')?;
angle.to_css(dest)?;
}
Ok(())
},
}
}
}
impl Parse for SpecifiedFontStyle {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
Ok(try_match_ident_ignore_ascii_case! { input,
"normal" => generics::FontStyle::Normal,
"italic" => generics::FontStyle::Italic,
"oblique" => {
let angle = input.try(|input| Self::parse_angle(context, input))
.unwrap_or_else(|_| Self::default_angle());
generics::FontStyle::Oblique(angle)
}
})
}
}
impl ToComputedValue for SpecifiedFontStyle {
type ComputedValue = computed::FontStyle;
fn to_computed_value(&self, _: &Context) -> Self::ComputedValue {
match *self {
generics::FontStyle::Normal => generics::FontStyle::Normal,
generics::FontStyle::Italic => generics::FontStyle::Italic,
generics::FontStyle::Oblique(ref angle) => {
generics::FontStyle::Oblique(FontStyleAngle(Self::compute_angle(angle)))
},
}
}
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
match *computed {
generics::FontStyle::Normal => generics::FontStyle::Normal,
generics::FontStyle::Italic => generics::FontStyle::Italic,
generics::FontStyle::Oblique(ref angle) => {
generics::FontStyle::Oblique(Angle::from_computed_value(&angle.0))
},
}
}
}
/// The default angle for `font-style: oblique`.
///
/// NOTE(emilio): As of right now this diverges from the spec, which specifies
/// 20, because it's not updated yet to account for the resolution in:
///
/// https://github.com/w3c/csswg-drafts/issues/2295
pub const DEFAULT_FONT_STYLE_OBLIQUE_ANGLE_DEGREES: f32 = 14.;
/// From https://drafts.csswg.org/css-fonts-4/#valdef-font-style-oblique-angle:
///
/// Values less than -90deg or values greater than 90deg are
/// invalid and are treated as parse errors.
///
/// The maximum angle value that `font-style: oblique` should compute to.
pub const FONT_STYLE_OBLIQUE_MAX_ANGLE_DEGREES: f32 = 90.;
/// The minimum angle value that `font-style: oblique` should compute to.
pub const FONT_STYLE_OBLIQUE_MIN_ANGLE_DEGREES: f32 = -90.;
impl SpecifiedFontStyle {
/// Gets a clamped angle from a specified Angle.
pub fn compute_angle(angle: &Angle) -> ComputedAngle {
ComputedAngle::Deg(
angle
.degrees()
.max(FONT_STYLE_OBLIQUE_MIN_ANGLE_DEGREES)
.min(FONT_STYLE_OBLIQUE_MAX_ANGLE_DEGREES),
)
}
/// Parse a suitable angle for font-style: oblique.
pub fn parse_angle<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Angle, ParseError<'i>> {
let angle = Angle::parse(context, input)?;
if angle.was_calc() {
return Ok(angle);
}
let degrees = angle.degrees();
if degrees < FONT_STYLE_OBLIQUE_MIN_ANGLE_DEGREES ||
degrees > FONT_STYLE_OBLIQUE_MAX_ANGLE_DEGREES
{
return Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
        Ok(angle)
}
/// The default angle for `font-style: oblique`.
pub fn default_angle() -> Angle {
Angle::from_degrees(
DEFAULT_FONT_STYLE_OBLIQUE_ANGLE_DEGREES,
/* was_calc = */ false,
)
}
}
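// Illustrative sketch (added; assumes the test harness and a `degrees()`
// accessor on the computed angle type) of the clamping performed by
// `compute_angle` above.
#[cfg(test)]
mod oblique_angle_clamping_example {
    use super::*;

    #[test]
    fn oblique_angles_clamp_to_plus_minus_ninety_degrees() {
        let clamped = SpecifiedFontStyle::compute_angle(&Angle::from_degrees(120., false));
        assert!(clamped.degrees() == FONT_STYLE_OBLIQUE_MAX_ANGLE_DEGREES);
    }
}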
/// The specified value of the `font-style` property.
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)]
#[allow(missing_docs)]
pub enum FontStyle {
Specified(SpecifiedFontStyle),
#[css(skip)]
System(SystemFont),
}
impl FontStyle {
/// Return the `normal` value.
#[inline]
pub fn normal() -> Self {
FontStyle::Specified(generics::FontStyle::Normal)
}
system_font_methods!(FontStyle, font_style);
}
impl ToComputedValue for FontStyle {
type ComputedValue = computed::FontStyle;
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
match *self {
FontStyle::Specified(ref specified) => specified.to_computed_value(context),
FontStyle::System(..) => self.compute_system(context),
}
}
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
FontStyle::Specified(SpecifiedFontStyle::from_computed_value(computed))
}
}
impl Parse for FontStyle {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
Ok(FontStyle::Specified(SpecifiedFontStyle::parse(
context, input,
)?))
}
}
/// A value for the `font-stretch` property.
///
/// https://drafts.csswg.org/css-fonts-4/#font-stretch-prop
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)]
pub enum FontStretch {
Stretch(Percentage),
Keyword(FontStretchKeyword),
#[css(skip)]
System(SystemFont),
}
/// A keyword value for `font-stretch`.
#[derive(Clone, Copy, Debug, MallocSizeOf, Parse, PartialEq, SpecifiedValueInfo, ToCss)]
#[allow(missing_docs)]
pub enum FontStretchKeyword {
Normal,
Condensed,
UltraCondensed,
ExtraCondensed,
SemiCondensed,
SemiExpanded,
Expanded,
ExtraExpanded,
UltraExpanded,
}
impl FontStretchKeyword {
/// Resolves the value of the keyword as specified in:
///
/// https://drafts.csswg.org/css-fonts-4/#font-stretch-prop
pub fn compute(&self) -> ComputedPercentage {
use self::FontStretchKeyword::*;
ComputedPercentage(match *self {
UltraCondensed => 0.5,
ExtraCondensed => 0.625,
Condensed => 0.75,
SemiCondensed => 0.875,
Normal => 1.,
SemiExpanded => 1.125,
Expanded => 1.25,
ExtraExpanded => 1.5,
UltraExpanded => 2.,
})
}
/// Does the opposite operation to `compute`, in order to serialize keywords
/// if possible.
pub fn from_percentage(percentage: f32) -> Option<Self> {
use self::FontStretchKeyword::*;
// NOTE(emilio): Can't use `match` because of rust-lang/rust#41620.
if percentage == 0.5 {
return Some(UltraCondensed);
}
if percentage == 0.625 {
return Some(ExtraCondensed);
}
if percentage == 0.75 {
return Some(Condensed);
}
if percentage == 0.875 {
return Some(SemiCondensed);
}
if percentage == 1. {
return Some(Normal);
}
if percentage == 1.125 {
return Some(SemiExpanded);
}
if percentage == 1.25 {
return Some(Expanded);
}
if percentage == 1.5 {
return Some(ExtraExpanded);
}
if percentage == 2. {
return Some(UltraExpanded);
}
None
}
}
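// Illustrative round-trip sketch (added; assumes the test harness): keyword
// to percentage via `compute()`, then back via `from_percentage()`.
#[cfg(test)]
mod font_stretch_keyword_roundtrip_example {
    use super::*;

    #[test]
    fn keywords_roundtrip_through_percentages() {
        let pct = FontStretchKeyword::Condensed.compute();
        assert!(pct.0 == 0.75);
        assert_eq!(
            FontStretchKeyword::from_percentage(pct.0),
            Some(FontStretchKeyword::Condensed)
        );
    }
}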
impl FontStretch {
/// `normal`.
pub fn normal() -> Self {
FontStretch::Keyword(FontStretchKeyword::Normal)
}
system_font_methods!(FontStretch, font_stretch);
}
impl Parse for FontStretch {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// From https://drafts.csswg.org/css-fonts-4/#font-stretch-prop:
//
// Values less than 0% are not allowed and are treated as parse
// errors.
if let Ok(percentage) = input.try(|input| Percentage::parse_non_negative(context, input)) {
return Ok(FontStretch::Stretch(percentage));
}
Ok(FontStretch::Keyword(FontStretchKeyword::parse(input)?))
}
}
impl ToComputedValue for FontStretch {
type ComputedValue = computed::FontStretch;
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
match *self {
FontStretch::Stretch(ref percentage) => {
computed::FontStretch(NonNegative(percentage.to_computed_value(context)))
},
FontStretch::Keyword(ref kw) => computed::FontStretch(NonNegative(kw.compute())),
FontStretch::System(_) => self.compute_system(context),
}
}
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
FontStretch::Stretch(Percentage::from_computed_value(&(computed.0).0))
}
}
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)]
/// A specified font-size value
pub enum FontSize {
/// A length; e.g. 10px.
Length(LengthOrPercentage),
/// A keyword value, along with a ratio and absolute offset.
/// The ratio in any specified keyword value
/// will be 1 (with offset 0), but we cascade keywordness even
/// after font-relative (percent and em) values
/// have been applied, which is where the ratio
/// comes in. The offset comes in if we cascaded a calc value,
/// where the font-relative portion (em and percentage) will
/// go into the ratio, and the remaining units all computed together
/// will go into the offset.
/// See bug 1355707.
Keyword(KeywordInfo),
/// font-size: smaller
Smaller,
/// font-size: larger
Larger,
/// Derived from a specified system font.
#[css(skip)]
System(SystemFont),
}
impl From<LengthOrPercentage> for FontSize {
fn from(other: LengthOrPercentage) -> Self {
FontSize::Length(other)
}
}
/// Specifies a prioritized list of font family names or generic family names.
#[derive(Clone, Debug, Eq, Hash, PartialEq, ToCss)]
pub enum FontFamily {
/// List of `font-family`
#[css(comma)]
Values(#[css(iterable)] FontFamilyList),
/// System font
#[css(skip)]
System(SystemFont),
}
impl FontFamily {
system_font_methods!(FontFamily, font_family);
/// Parse a specified font-family value
pub fn parse_specified<'i, 't>(input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
input
.parse_comma_separated(|input| SingleFontFamily::parse(input))
.map(|v| FontFamily::Values(FontFamilyList::new(v.into_boxed_slice())))
}
#[cfg(feature = "gecko")]
/// Return the generic ID if it is a single generic font
pub fn single_generic(&self) -> Option<u8> {
match *self {
FontFamily::Values(ref values) => values.single_generic(),
_ => None,
}
}
}
impl ToComputedValue for FontFamily {
type ComputedValue = computed::FontFamily;
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
match *self {
FontFamily::Values(ref v) => computed::FontFamily(v.clone()),
FontFamily::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontFamily) -> Self {
FontFamily::Values(other.0.clone())
}
}
#[cfg(feature = "gecko")]
impl MallocSizeOf for FontFamily {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
match *self {
FontFamily::Values(ref v) => {
// Although a SharedFontList object is refcounted, we always
// attribute its size to the specified value.
unsafe { bindings::Gecko_SharedFontList_SizeOfIncludingThis(v.0.get()) }
},
FontFamily::System(_) => 0,
}
}
}
impl Parse for FontFamily {
/// <family-name>#
/// <family-name> = <string> | [ <ident>+ ]
/// TODO: <generic-family>
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontFamily, ParseError<'i>> {
FontFamily::parse_specified(input)
}
}
impl SpecifiedValueInfo for FontFamily {}
/// `FamilyName::parse` is based on `SingleFontFamily::parse` and not the other way around
/// because we want the former to exclude generic family keywords.
impl Parse for FamilyName {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
match SingleFontFamily::parse(input) {
Ok(SingleFontFamily::FamilyName(name)) => Ok(name),
Ok(SingleFontFamily::Generic(_)) => {
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
},
Err(e) => Err(e),
}
}
}
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)]
/// Preserve the readability of text when font fallback occurs
pub enum FontSizeAdjust {
/// None variant
None,
/// Number variant
Number(Number),
/// system font
#[css(skip)]
System(SystemFont),
}
impl FontSizeAdjust {
#[inline]
/// Default value of font-size-adjust
pub fn none() -> Self {
FontSizeAdjust::None
}
system_font_methods!(FontSizeAdjust, font_size_adjust);
}
impl ToComputedValue for FontSizeAdjust {
type ComputedValue = computed::FontSizeAdjust;
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
match *self {
FontSizeAdjust::None => computed::FontSizeAdjust::None,
FontSizeAdjust::Number(ref n) => {
computed::FontSizeAdjust::Number(n.to_computed_value(context))
},
FontSizeAdjust::System(_) => self.compute_system(context),
}
}
fn from_computed_value(computed: &computed::FontSizeAdjust) -> Self {
match *computed {
computed::FontSizeAdjust::None => FontSizeAdjust::None,
computed::FontSizeAdjust::Number(ref v) => {
FontSizeAdjust::Number(Number::from_computed_value(v))
},
}
}
}
impl Parse for FontSizeAdjust {
/// none | <number>
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontSizeAdjust, ParseError<'i>> {
if input
.try(|input| input.expect_ident_matching("none"))
.is_ok()
{
return Ok(FontSizeAdjust::None);
}
Ok(FontSizeAdjust::Number(Number::parse_non_negative(
context, input,
)?))
}
}
/// Additional information for specified keyword-derived font sizes.
pub type KeywordInfo = generics::KeywordInfo<NonNegativeLength>;
impl KeywordInfo {
/// Computes the final size for this font-size keyword, accounting for
/// text-zoom.
pub fn to_computed_value(&self, context: &Context) -> NonNegativeLength {
let base = context.maybe_zoom_text(self.kw.to_computed_value(context));
base.scale_by(self.factor) + context.maybe_zoom_text(self.offset)
}
/// Given a parent keyword info (self), apply an additional factor/offset to it
pub fn compose(self, factor: f32, offset: NonNegativeLength) -> Self {
KeywordInfo {
kw: self.kw,
factor: self.factor * factor,
offset: self.offset.scale_by(factor) + offset,
}
}
}
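// Worked example of `compose` (added as a sketch; assumes the test harness
// and the public fields of `KeywordInfo`): factors multiply and offsets are
// scaled before being added, mirroring how nested font-relative values
// cascade.
#[cfg(test)]
mod keyword_info_compose_example {
    use super::*;

    #[test]
    fn composing_multiplies_factors() {
        let base = KeywordInfo {
            kw: KeywordSize::Medium,
            factor: 1.,
            offset: Au(0).into(),
        };
        // Equivalent to inheriting through `font-size: 50%`.
        let composed = base.compose(0.5, Au(0).into());
        assert!(composed.factor == 0.5);
    }
}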
/// This is the ratio applied for font-size: larger
/// and smaller by both Firefox and Chrome
const LARGER_FONT_SIZE_RATIO: f32 = 1.2;
/// The default font size.
pub const FONT_MEDIUM_PX: i32 = 16;
#[cfg(feature = "servo")]
impl ToComputedValue for KeywordSize {
type ComputedValue = NonNegativeLength;
#[inline]
fn to_computed_value(&self, _: &Context) -> NonNegativeLength {
// https://drafts.csswg.org/css-fonts-3/#font-size-prop
match *self {
KeywordSize::XXSmall => Au::from_px(FONT_MEDIUM_PX) * 3 / 5,
KeywordSize::XSmall => Au::from_px(FONT_MEDIUM_PX) * 3 / 4,
KeywordSize::Small => Au::from_px(FONT_MEDIUM_PX) * 8 / 9,
KeywordSize::Medium => Au::from_px(FONT_MEDIUM_PX),
KeywordSize::Large => Au::from_px(FONT_MEDIUM_PX) * 6 / 5,
KeywordSize::XLarge => Au::from_px(FONT_MEDIUM_PX) * 3 / 2,
KeywordSize::XXLarge => Au::from_px(FONT_MEDIUM_PX) * 2,
KeywordSize::XXXLarge => Au::from_px(FONT_MEDIUM_PX) * 3,
}.into()
}
#[inline]
fn from_computed_value(_: &NonNegativeLength) -> Self {
unreachable!()
}
}
#[cfg(feature = "gecko")]
impl ToComputedValue for KeywordSize {
type ComputedValue = NonNegativeLength;
#[inline]
fn to_computed_value(&self, cx: &Context) -> NonNegativeLength {
use context::QuirksMode;
use values::specified::length::au_to_int_px;
// The tables in this function are originally from
// nsRuleNode::CalcFontPointSize in Gecko:
//
// https://dxr.mozilla.org/mozilla-central/rev/35fbf14b9/layout/style/nsRuleNode.cpp#3262-3336
// Mapping from base size and HTML size to pixels
// The first index is (base_size - 9), the second is the
// HTML size. "0" is CSS keyword xx-small, not HTML size 0,
// since HTML size 0 is the same as 1.
//
// xxs xs s m l xl xxl -
// - 0/1 2 3 4 5 6 7
static FONT_SIZE_MAPPING: [[i32; 8]; 8] = [
[9, 9, 9, 9, 11, 14, 18, 27],
[9, 9, 9, 10, 12, 15, 20, 30],
[9, 9, 10, 11, 13, 17, 22, 33],
[9, 9, 10, 12, 14, 18, 24, 36],
[9, 10, 12, 13, 16, 20, 26, 39],
[9, 10, 12, 14, 17, 21, 28, 42],
[9, 10, 13, 15, 18, 23, 30, 45],
[9, 10, 13, 16, 18, 24, 32, 48],
];
// This table gives us compatibility with WinNav4 for the default fonts only.
// In WinNav4, the default fonts were:
//
// Times/12pt == Times/16px at 96ppi
// Courier/10pt == Courier/13px at 96ppi
//
// xxs xs s m l xl xxl -
// - 1 2 3 4 5 6 7
static QUIRKS_FONT_SIZE_MAPPING: [[i32; 8]; 8] = [
[9, 9, 9, 9, 11, 14, 18, 28],
[9, 9, 9, 10, 12, 15, 20, 31],
[9, 9, 9, 11, 13, 17, 22, 34],
[9, 9, 10, 12, 14, 18, 24, 37],
[9, 9, 10, 13, 16, 20, 26, 40],
[9, 9, 11, 14, 17, 21, 28, 42],
[9, 10, 12, 15, 17, 23, 30, 45],
[9, 10, 13, 16, 18, 24, 32, 48],
];
static FONT_SIZE_FACTORS: [i32; 8] = [60, 75, 89, 100, 120, 150, 200, 300];
let ref gecko_font = cx.style().get_font().gecko();
let base_size = unsafe {
Atom::with(gecko_font.mLanguage.mRawPtr, |atom| {
cx.font_metrics_provider
.get_size(atom, gecko_font.mGenericID)
.0
})
};
let base_size_px = au_to_int_px(base_size as f32);
let html_size = self.html_size() as usize;
if base_size_px >= 9 && base_size_px <= 16 {
let mapping = if cx.quirks_mode == QuirksMode::Quirks {
QUIRKS_FONT_SIZE_MAPPING
} else {
FONT_SIZE_MAPPING
};
Au::from_px(mapping[(base_size_px - 9) as usize][html_size]).into()
} else {
Au(FONT_SIZE_FACTORS[html_size] * base_size / 100).into()
}
}
#[inline]
fn from_computed_value(_: &NonNegativeLength) -> Self {
unreachable!()
}
}
impl FontSize {
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-a-legacy-font-size>
pub fn from_html_size(size: u8) -> Self {
FontSize::Keyword(
match size {
// If value is less than 1, let it be 1.
0 | 1 => KeywordSize::XSmall,
2 => KeywordSize::Small,
3 => KeywordSize::Medium,
4 => KeywordSize::Large,
5 => KeywordSize::XLarge,
6 => KeywordSize::XXLarge,
// If value is greater than 7, let it be 7.
_ => KeywordSize::XXXLarge,
}.into(),
)
}
/// Compute it against a given base font size
pub fn to_computed_value_against(
&self,
context: &Context,
base_size: FontBaseSize,
) -> computed::FontSize {
use values::specified::length::FontRelativeLength;
let compose_keyword = |factor| {
context
.style()
.get_parent_font()<|fim▁hole|> let mut info = None;
let size = match *self {
FontSize::Length(LengthOrPercentage::Length(NoCalcLength::FontRelative(value))) => {
if let FontRelativeLength::Em(em) = value {
// If the parent font was keyword-derived, this is too.
// Tack the em unit onto the factor
info = compose_keyword(em);
}
value.to_computed_value(context, base_size).into()
},
FontSize::Length(LengthOrPercentage::Length(NoCalcLength::ServoCharacterWidth(
value,
))) => value.to_computed_value(base_size.resolve(context)).into(),
FontSize::Length(LengthOrPercentage::Length(NoCalcLength::Absolute(ref l))) => {
context.maybe_zoom_text(l.to_computed_value(context).into())
},
FontSize::Length(LengthOrPercentage::Length(ref l)) => {
l.to_computed_value(context).into()
},
FontSize::Length(LengthOrPercentage::Percentage(pc)) => {
// If the parent font was keyword-derived, this is too.
// Tack the % onto the factor
info = compose_keyword(pc.0);
base_size.resolve(context).scale_by(pc.0).into()
},
FontSize::Length(LengthOrPercentage::Calc(ref calc)) => {
let parent = context.style().get_parent_font().clone_font_size();
// if we contain em/% units and the parent was keyword derived, this is too
// Extract the ratio/offset and compose it
if (calc.em.is_some() || calc.percentage.is_some()) && parent.keyword_info.is_some()
{
let ratio = calc.em.unwrap_or(0.) + calc.percentage.map_or(0., |pc| pc.0);
// Compute it, but shave off the font-relative part (em, %).
//
// This will mean that other font-relative units like ex and
// ch will be computed against the old parent font even when
// the font changes.
//
// There's no particular "right answer" for what to do here,
// Gecko recascades as if the font had changed, we instead
// track the changes and reapply, which means that we carry
// over old computed ex/ch values whilst Gecko recomputes
// new ones.
//
// This is enough of an edge case to not really matter.
let abs = calc
.to_computed_value_zoomed(
context,
FontBaseSize::InheritedStyleButStripEmUnits,
).length_component();
info = parent.keyword_info.map(|i| i.compose(ratio, abs.into()));
}
let calc = calc.to_computed_value_zoomed(context, base_size);
calc.to_used_value(Some(base_size.resolve(context)))
.unwrap()
.into()
},
FontSize::Keyword(i) => {
// As a specified keyword, this is keyword derived
info = Some(i);
i.to_computed_value(context)
},
FontSize::Smaller => {
info = compose_keyword(1. / LARGER_FONT_SIZE_RATIO);
FontRelativeLength::Em(1. / LARGER_FONT_SIZE_RATIO)
.to_computed_value(context, base_size)
.into()
},
FontSize::Larger => {
info = compose_keyword(LARGER_FONT_SIZE_RATIO);
FontRelativeLength::Em(LARGER_FONT_SIZE_RATIO)
.to_computed_value(context, base_size)
.into()
},
FontSize::System(_) => {
#[cfg(feature = "servo")]
{
unreachable!()
}
#[cfg(feature = "gecko")]
{
context.cached_system_font.as_ref().unwrap().font_size.size
}
},
};
computed::FontSize {
            size,
keyword_info: info,
}
}
}
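// Illustrative test sketch (added; assumes the test harness) for the legacy
// HTML size clamping documented in `from_html_size` above.
#[cfg(test)]
mod legacy_html_font_size_example {
    use super::*;

    #[test]
    fn html_sizes_clamp_to_the_one_to_seven_range() {
        assert_eq!(FontSize::from_html_size(0), FontSize::from_html_size(1));
        assert_eq!(FontSize::from_html_size(9), FontSize::from_html_size(7));
    }
}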
impl ToComputedValue for FontSize {
type ComputedValue = computed::FontSize;
#[inline]
fn to_computed_value(&self, context: &Context) -> computed::FontSize {
self.to_computed_value_against(context, FontBaseSize::InheritedStyle)
}
#[inline]
fn from_computed_value(computed: &computed::FontSize) -> Self {
FontSize::Length(LengthOrPercentage::Length(
ToComputedValue::from_computed_value(&computed.size.0),
))
}
}
impl FontSize {
system_font_methods!(FontSize);
/// Get initial value for specified font size.
#[inline]
pub fn medium() -> Self {
FontSize::Keyword(KeywordInfo::medium())
}
/// Parses a font-size, with quirks.
pub fn parse_quirky<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
allow_quirks: AllowQuirks,
) -> Result<FontSize, ParseError<'i>> {
if let Ok(lop) =
input.try(|i| LengthOrPercentage::parse_non_negative_quirky(context, i, allow_quirks))
{
return Ok(FontSize::Length(lop));
}
if let Ok(kw) = input.try(KeywordSize::parse) {
return Ok(FontSize::Keyword(kw.into()));
}
try_match_ident_ignore_ascii_case! { input,
"smaller" => Ok(FontSize::Smaller),
"larger" => Ok(FontSize::Larger),
}
}
#[allow(unused_mut)]
/// Cascade `font-size` with specified value
pub fn cascade_specified_font_size(
context: &mut Context,
specified_value: &FontSize,
mut computed: computed::FontSize,
) {
        // We could use clone_language() and clone_font_family() here, but
        // that's expensive. Do it only in Gecko mode for now.
#[cfg(feature = "gecko")]
{
// if the language or generic changed, we need to recalculate
// the font size from the stored font-size origin information.
if context.builder.get_font().gecko().mLanguage.mRawPtr !=
context.builder.get_parent_font().gecko().mLanguage.mRawPtr ||
context.builder.get_font().gecko().mGenericID !=
context.builder.get_parent_font().gecko().mGenericID
{
if let Some(info) = computed.keyword_info {
computed.size = info.to_computed_value(context);
}
}
}
let device = context.builder.device;
let mut font = context.builder.take_font();
let parent_unconstrained = {
let parent_font = context.builder.get_parent_font();
font.apply_font_size(computed, parent_font, device)
};
context.builder.put_font(font);
if let Some(parent) = parent_unconstrained {
let new_unconstrained = specified_value
.to_computed_value_against(context, FontBaseSize::Custom(Au::from(parent)));
context
.builder
.mutate_font()
.apply_unconstrained_font_size(new_unconstrained.size);
}
}
}
impl Parse for FontSize {
/// <length> | <percentage> | <absolute-size> | <relative-size>
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontSize, ParseError<'i>> {
FontSize::parse_quirky(context, input, AllowQuirks::No)
}
}
bitflags! {
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
/// Flags of variant alternates in bit
struct VariantAlternatesParsingFlags: u8 {
/// None of variant alternates enabled
const NORMAL = 0;
/// Historical forms
const HISTORICAL_FORMS = 0x01;
/// Stylistic Alternates
const STYLISTIC = 0x02;
/// Stylistic Sets
const STYLESET = 0x04;
/// Character Variant
const CHARACTER_VARIANT = 0x08;
/// Swash glyphs
const SWASH = 0x10;
/// Ornaments glyphs
const ORNAMENTS = 0x20;
/// Annotation forms
const ANNOTATION = 0x40;
}
}
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)]
/// Set of variant alternates
pub enum VariantAlternates {
/// Enables display of stylistic alternates
#[css(function)]
Stylistic(CustomIdent),
/// Enables display with stylistic sets
#[css(comma, function)]
Styleset(#[css(iterable)] Box<[CustomIdent]>),
/// Enables display of specific character variants
#[css(comma, function)]
CharacterVariant(#[css(iterable)] Box<[CustomIdent]>),
/// Enables display of swash glyphs
#[css(function)]
Swash(CustomIdent),
/// Enables replacement of default glyphs with ornaments
#[css(function)]
Ornaments(CustomIdent),
/// Enables display of alternate annotation forms
#[css(function)]
Annotation(CustomIdent),
/// Enables display of historical forms
HistoricalForms,
}
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)]
/// List of Variant Alternates
pub struct VariantAlternatesList(
#[css(if_empty = "normal", iterable)] pub Box<[VariantAlternates]>,
);
impl VariantAlternatesList {
    /// Returns the number of alternate values specified. Each ident in a
    /// `styleset()` or `character-variant()` list counts individually;
    /// `historical-forms` carries no value and contributes zero.
    pub fn len(&self) -> usize {
self.0.iter().fold(0, |acc, alternate| match *alternate {
VariantAlternates::Swash(_) |
VariantAlternates::Stylistic(_) |
VariantAlternates::Ornaments(_) |
VariantAlternates::Annotation(_) => acc + 1,
VariantAlternates::Styleset(ref slice) |
VariantAlternates::CharacterVariant(ref slice) => acc + slice.len(),
_ => acc,
})
}
}
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)]
/// Control over the selection of these alternate glyphs
pub enum FontVariantAlternates {
/// Use alternative glyph from value
Value(VariantAlternatesList),
/// Use system font glyph
#[css(skip)]
System(SystemFont),
}
impl FontVariantAlternates {
#[inline]
/// Get initial specified value with VariantAlternatesList
pub fn get_initial_specified_value() -> Self {
FontVariantAlternates::Value(VariantAlternatesList(vec![].into_boxed_slice()))
}
system_font_methods!(FontVariantAlternates, font_variant_alternates);
}
impl ToComputedValue for FontVariantAlternates {
type ComputedValue = computed::FontVariantAlternates;
fn to_computed_value(&self, context: &Context) -> computed::FontVariantAlternates {
match *self {
FontVariantAlternates::Value(ref v) => v.clone(),
FontVariantAlternates::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontVariantAlternates) -> Self {
FontVariantAlternates::Value(other.clone())
}
}
impl Parse for FontVariantAlternates {
/// normal |
/// [ stylistic(<feature-value-name>) ||
/// historical-forms ||
/// styleset(<feature-value-name> #) ||
/// character-variant(<feature-value-name> #) ||
/// swash(<feature-value-name>) ||
/// ornaments(<feature-value-name>) ||
/// annotation(<feature-value-name>) ]
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontVariantAlternates, ParseError<'i>> {
let mut alternates = Vec::new();
if input
.try(|input| input.expect_ident_matching("normal"))
.is_ok()
{
return Ok(FontVariantAlternates::Value(VariantAlternatesList(
alternates.into_boxed_slice(),
)));
}
let mut parsed_alternates = VariantAlternatesParsingFlags::empty();
macro_rules! check_if_parsed(
($input:expr, $flag:path) => (
if parsed_alternates.contains($flag) {
return Err($input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
parsed_alternates |= $flag;
)
);
while let Ok(_) = input.try(|input| {
// FIXME: remove clone() when lifetimes are non-lexical
match input.next()?.clone() {
Token::Ident(ref value) if value.eq_ignore_ascii_case("historical-forms") => {
check_if_parsed!(input, VariantAlternatesParsingFlags::HISTORICAL_FORMS);
alternates.push(VariantAlternates::HistoricalForms);
Ok(())
},
Token::Function(ref name) => input.parse_nested_block(|i| {
match_ignore_ascii_case! { &name,
"swash" => {
check_if_parsed!(i, VariantAlternatesParsingFlags::SWASH);
let location = i.current_source_location();
let ident = CustomIdent::from_ident(location, i.expect_ident()?, &[])?;
alternates.push(VariantAlternates::Swash(ident));
Ok(())
},
"stylistic" => {
check_if_parsed!(i, VariantAlternatesParsingFlags::STYLISTIC);
let location = i.current_source_location();
let ident = CustomIdent::from_ident(location, i.expect_ident()?, &[])?;
alternates.push(VariantAlternates::Stylistic(ident));
Ok(())
},
"ornaments" => {
check_if_parsed!(i, VariantAlternatesParsingFlags::ORNAMENTS);
let location = i.current_source_location();
let ident = CustomIdent::from_ident(location, i.expect_ident()?, &[])?;
alternates.push(VariantAlternates::Ornaments(ident));
Ok(())
},
"annotation" => {
check_if_parsed!(i, VariantAlternatesParsingFlags::ANNOTATION);
let location = i.current_source_location();
let ident = CustomIdent::from_ident(location, i.expect_ident()?, &[])?;
alternates.push(VariantAlternates::Annotation(ident));
Ok(())
},
"styleset" => {
check_if_parsed!(i, VariantAlternatesParsingFlags::STYLESET);
let idents = i.parse_comma_separated(|i| {
let location = i.current_source_location();
CustomIdent::from_ident(location, i.expect_ident()?, &[])
})?;
alternates.push(VariantAlternates::Styleset(idents.into_boxed_slice()));
Ok(())
},
"character-variant" => {
check_if_parsed!(i, VariantAlternatesParsingFlags::CHARACTER_VARIANT);
let idents = i.parse_comma_separated(|i| {
let location = i.current_source_location();
CustomIdent::from_ident(location, i.expect_ident()?, &[])
})?;
alternates.push(VariantAlternates::CharacterVariant(idents.into_boxed_slice()));
Ok(())
},
_ => return Err(i.new_custom_error(StyleParseErrorKind::UnspecifiedError)),
}
}),
_ => Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)),
}
}) {}
if parsed_alternates.is_empty() {
return Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
Ok(FontVariantAlternates::Value(VariantAlternatesList(
alternates.into_boxed_slice(),
)))
}
}
macro_rules! impl_variant_east_asian {
{
$(
$(#[$($meta:tt)+])*
$ident:ident / $css:expr => $gecko:ident = $value:expr,
)+
} => {
bitflags! {
#[derive(MallocSizeOf)]
            /// Variant flags for East Asian text
pub struct VariantEastAsian: u16 {
/// None of the features
const NORMAL = 0;
$(
$(#[$($meta)+])*
const $ident = $value;
)+
}
}
impl ToCss for VariantEastAsian {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.is_empty() {
return dest.write_str("normal");
}
let mut writer = SequenceWriter::new(dest, " ");
$(
if self.intersects(VariantEastAsian::$ident) {
writer.raw_item($css)?;
}
)+
Ok(())
}
}
        /// Asserts that each variant-east-asian flag matches its NS_FONT_VARIANT_EAST_ASIAN_* value.
#[cfg(feature = "gecko")]
#[inline]
pub fn assert_variant_east_asian_matches() {
use gecko_bindings::structs;
$(
debug_assert_eq!(structs::$gecko as u16, VariantEastAsian::$ident.bits());
)+
}
impl SpecifiedValueInfo for VariantEastAsian {
fn collect_completion_keywords(f: KeywordsCollectFn) {
f(&["normal", $($css,)+]);
}
}
}
}
impl_variant_east_asian! {
/// Enables rendering of JIS78 forms (OpenType feature: jp78)
JIS78 / "jis78" => NS_FONT_VARIANT_EAST_ASIAN_JIS78 = 0x01,
/// Enables rendering of JIS83 forms (OpenType feature: jp83).
JIS83 / "jis83" => NS_FONT_VARIANT_EAST_ASIAN_JIS83 = 0x02,
/// Enables rendering of JIS90 forms (OpenType feature: jp90).
JIS90 / "jis90" => NS_FONT_VARIANT_EAST_ASIAN_JIS90 = 0x04,
/// Enables rendering of JIS2004 forms (OpenType feature: jp04).
JIS04 / "jis04" => NS_FONT_VARIANT_EAST_ASIAN_JIS04 = 0x08,
/// Enables rendering of simplified forms (OpenType feature: smpl).
SIMPLIFIED / "simplified" => NS_FONT_VARIANT_EAST_ASIAN_SIMPLIFIED = 0x10,
/// Enables rendering of traditional forms (OpenType feature: trad).
TRADITIONAL / "traditional" => NS_FONT_VARIANT_EAST_ASIAN_TRADITIONAL = 0x20,
/// Enables rendering of full-width variants (OpenType feature: fwid).
FULL_WIDTH / "full-width" => NS_FONT_VARIANT_EAST_ASIAN_FULL_WIDTH = 0x40,
/// Enables rendering of proportionally-spaced variants (OpenType feature: pwid).
PROPORTIONAL_WIDTH / "proportional-width" => NS_FONT_VARIANT_EAST_ASIAN_PROP_WIDTH = 0x80,
/// Enables display of ruby variant glyphs (OpenType feature: ruby).
RUBY / "ruby" => NS_FONT_VARIANT_EAST_ASIAN_RUBY = 0x100,
}
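// Serialization sketch (added; assumes the test harness and the
// `to_css_string` helper from `style_traits::ToCss`): flags serialize
// space-separated in declaration order, and the empty set as `normal`.
#[cfg(test)]
mod variant_east_asian_serialization_example {
    use super::*;

    #[test]
    fn flags_serialize_space_separated() {
        let v = VariantEastAsian::JIS78 | VariantEastAsian::RUBY;
        assert_eq!(v.to_css_string(), "jis78 ruby");
        assert_eq!(VariantEastAsian::empty().to_css_string(), "normal");
    }
}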
#[cfg(feature = "gecko")]
impl VariantEastAsian {
/// Obtain a specified value from a Gecko keyword value
///
/// Intended for use with presentation attributes, not style structs
pub fn from_gecko_keyword(kw: u16) -> Self {
Self::from_bits_truncate(kw)
}
/// Transform into gecko keyword
pub fn to_gecko_keyword(self) -> u16 {
self.bits()
}
}
#[cfg(feature = "gecko")]
impl_gecko_keyword_conversions!(VariantEastAsian, u16);
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToCss)]
/// Allows control of glyph substitution and sizing in East Asian text.
pub enum FontVariantEastAsian {
/// Value variant with `variant-east-asian`
Value(VariantEastAsian),
/// System font variant
#[css(skip)]
System(SystemFont),
}
impl FontVariantEastAsian {
#[inline]
/// Get default `font-variant-east-asian` with `empty` variant
pub fn empty() -> Self {
FontVariantEastAsian::Value(VariantEastAsian::empty())
}
system_font_methods!(FontVariantEastAsian, font_variant_east_asian);
}
impl ToComputedValue for FontVariantEastAsian {
type ComputedValue = computed::FontVariantEastAsian;
fn to_computed_value(&self, context: &Context) -> computed::FontVariantEastAsian {
match *self {
FontVariantEastAsian::Value(ref v) => v.clone(),
FontVariantEastAsian::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontVariantEastAsian) -> Self {
FontVariantEastAsian::Value(other.clone())
}
}
impl Parse for FontVariantEastAsian {
/// normal | [ <east-asian-variant-values> || <east-asian-width-values> || ruby ]
/// <east-asian-variant-values> = [ jis78 | jis83 | jis90 | jis04 | simplified | traditional ]
/// <east-asian-width-values> = [ full-width | proportional-width ]
fn parse<'i, 't>(
_context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontVariantEastAsian, ParseError<'i>> {
let mut result = VariantEastAsian::empty();
if input
.try(|input| input.expect_ident_matching("normal"))
.is_ok()
{
return Ok(FontVariantEastAsian::Value(result));
}
while let Ok(flag) = input.try(|input| {
Ok(
match_ignore_ascii_case! { &input.expect_ident().map_err(|_| ())?,
"jis78" =>
exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 |
VariantEastAsian::JIS90 | VariantEastAsian::JIS04 |
VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL
) => VariantEastAsian::JIS78),
"jis83" =>
exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 |
VariantEastAsian::JIS90 | VariantEastAsian::JIS04 |
VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL
) => VariantEastAsian::JIS83),
"jis90" =>
exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 |
VariantEastAsian::JIS90 | VariantEastAsian::JIS04 |
VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL
) => VariantEastAsian::JIS90),
"jis04" =>
exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 |
VariantEastAsian::JIS90 | VariantEastAsian::JIS04 |
VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL
) => VariantEastAsian::JIS04),
"simplified" =>
exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 |
VariantEastAsian::JIS90 | VariantEastAsian::JIS04 |
VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL
) => VariantEastAsian::SIMPLIFIED),
"traditional" =>
exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 |
VariantEastAsian::JIS90 | VariantEastAsian::JIS04 |
VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL
) => VariantEastAsian::TRADITIONAL),
"full-width" =>
exclusive_value!((result, VariantEastAsian::FULL_WIDTH |
VariantEastAsian::PROPORTIONAL_WIDTH
) => VariantEastAsian::FULL_WIDTH),
"proportional-width" =>
exclusive_value!((result, VariantEastAsian::FULL_WIDTH |
VariantEastAsian::PROPORTIONAL_WIDTH
) => VariantEastAsian::PROPORTIONAL_WIDTH),
"ruby" =>
exclusive_value!((result, VariantEastAsian::RUBY) => VariantEastAsian::RUBY),
_ => return Err(()),
},
)
}) {
result.insert(flag);
}
if !result.is_empty() {
Ok(FontVariantEastAsian::Value(result))
} else {
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
}
macro_rules! impl_variant_ligatures {
{
$(
$(#[$($meta:tt)+])*
$ident:ident / $css:expr => $gecko:ident = $value:expr,
)+
} => {
bitflags! {
#[derive(MallocSizeOf)]
/// Variants of ligatures
pub struct VariantLigatures: u16 {
/// Specifies that common default features are enabled
const NORMAL = 0;
$(
$(#[$($meta)+])*
const $ident = $value;
)+
}
}
impl ToCss for VariantLigatures {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.is_empty() {
return dest.write_str("normal");
}
if self.contains(VariantLigatures::NONE) {
return dest.write_str("none");
}
let mut writer = SequenceWriter::new(dest, " ");
$(
if self.intersects(VariantLigatures::$ident) {
writer.raw_item($css)?;
}
)+
Ok(())
}
}
        /// Asserts that each variant-ligatures flag matches its NS_FONT_VARIANT_LIGATURES_* value.
#[cfg(feature = "gecko")]
#[inline]
pub fn assert_variant_ligatures_matches() {
use gecko_bindings::structs;
$(
debug_assert_eq!(structs::$gecko as u16, VariantLigatures::$ident.bits());
)+
}
impl SpecifiedValueInfo for VariantLigatures {
fn collect_completion_keywords(f: KeywordsCollectFn) {
f(&["normal", $($css,)+]);
}
}
}
}
impl_variant_ligatures! {
/// Specifies that all types of ligatures and contextual forms
/// covered by this property are explicitly disabled
NONE / "none" => NS_FONT_VARIANT_LIGATURES_NONE = 0x01,
/// Enables display of common ligatures
COMMON_LIGATURES / "common-ligatures" => NS_FONT_VARIANT_LIGATURES_COMMON = 0x02,
/// Disables display of common ligatures
NO_COMMON_LIGATURES / "no-common-ligatures" => NS_FONT_VARIANT_LIGATURES_NO_COMMON = 0x04,
/// Enables display of discretionary ligatures
DISCRETIONARY_LIGATURES / "discretionary-ligatures" => NS_FONT_VARIANT_LIGATURES_DISCRETIONARY = 0x08,
/// Disables display of discretionary ligatures
NO_DISCRETIONARY_LIGATURES / "no-discretionary-ligatures" => NS_FONT_VARIANT_LIGATURES_NO_DISCRETIONARY = 0x10,
/// Enables display of historical ligatures
HISTORICAL_LIGATURES / "historical-ligatures" => NS_FONT_VARIANT_LIGATURES_HISTORICAL = 0x20,
/// Disables display of historical ligatures
NO_HISTORICAL_LIGATURES / "no-historical-ligatures" => NS_FONT_VARIANT_LIGATURES_NO_HISTORICAL = 0x40,
/// Enables display of contextual alternates
CONTEXTUAL / "contextual" => NS_FONT_VARIANT_LIGATURES_CONTEXTUAL = 0x80,
/// Disables display of contextual alternates
NO_CONTEXTUAL / "no-contextual" => NS_FONT_VARIANT_LIGATURES_NO_CONTEXTUAL = 0x100,
}
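// Serialization sketch (added; assumes the test harness): `NONE` is a real
// bit, not the empty set, so `none` and `normal` serialize differently, as
// handled by the special case in the `ToCss` impl above.
#[cfg(test)]
mod variant_ligatures_serialization_example {
    use super::*;

    #[test]
    fn none_and_normal_are_distinct() {
        assert_eq!(VariantLigatures::NONE.to_css_string(), "none");
        assert_eq!(VariantLigatures::empty().to_css_string(), "normal");
    }
}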
#[cfg(feature = "gecko")]
impl VariantLigatures {
/// Obtain a specified value from a Gecko keyword value
///
/// Intended for use with presentation attributes, not style structs
pub fn from_gecko_keyword(kw: u16) -> Self {
Self::from_bits_truncate(kw)
}
/// Transform into gecko keyword
pub fn to_gecko_keyword(self) -> u16 {
self.bits()
}
}
#[cfg(feature = "gecko")]
impl_gecko_keyword_conversions!(VariantLigatures, u16);
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToCss)]
/// Ligatures and contextual forms are ways of combining glyphs
/// to produce more harmonized forms
pub enum FontVariantLigatures {
/// Value variant with `variant-ligatures`
Value(VariantLigatures),
/// System font variant
#[css(skip)]
System(SystemFont),
}
impl FontVariantLigatures {
system_font_methods!(FontVariantLigatures, font_variant_ligatures);
/// Default value of `font-variant-ligatures` as `empty`
#[inline]
pub fn empty() -> FontVariantLigatures {
FontVariantLigatures::Value(VariantLigatures::empty())
}
#[inline]
/// Get `none` variant of `font-variant-ligatures`
pub fn none() -> FontVariantLigatures {
FontVariantLigatures::Value(VariantLigatures::NONE)
}
}
impl ToComputedValue for FontVariantLigatures {
type ComputedValue = computed::FontVariantLigatures;
fn to_computed_value(&self, context: &Context) -> computed::FontVariantLigatures {
match *self {
FontVariantLigatures::Value(ref v) => v.clone(),
FontVariantLigatures::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontVariantLigatures) -> Self {
FontVariantLigatures::Value(other.clone())
}
}
impl Parse for FontVariantLigatures {
/// normal | none |
/// [ <common-lig-values> ||
/// <discretionary-lig-values> ||
/// <historical-lig-values> ||
/// <contextual-alt-values> ]
/// <common-lig-values> = [ common-ligatures | no-common-ligatures ]
/// <discretionary-lig-values> = [ discretionary-ligatures | no-discretionary-ligatures ]
/// <historical-lig-values> = [ historical-ligatures | no-historical-ligatures ]
/// <contextual-alt-values> = [ contextual | no-contextual ]
fn parse<'i, 't>(
_context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontVariantLigatures, ParseError<'i>> {
let mut result = VariantLigatures::empty();
if input
.try(|input| input.expect_ident_matching("normal"))
.is_ok()
{
return Ok(FontVariantLigatures::Value(result));
}
if input
.try(|input| input.expect_ident_matching("none"))
.is_ok()
{
return Ok(FontVariantLigatures::Value(VariantLigatures::NONE));
}
while let Ok(flag) = input.try(|input| {
Ok(
match_ignore_ascii_case! { &input.expect_ident().map_err(|_| ())?,
"common-ligatures" =>
exclusive_value!((result, VariantLigatures::COMMON_LIGATURES |
VariantLigatures::NO_COMMON_LIGATURES
) => VariantLigatures::COMMON_LIGATURES),
"no-common-ligatures" =>
exclusive_value!((result, VariantLigatures::COMMON_LIGATURES |
VariantLigatures::NO_COMMON_LIGATURES
) => VariantLigatures::NO_COMMON_LIGATURES),
"discretionary-ligatures" =>
exclusive_value!((result, VariantLigatures::DISCRETIONARY_LIGATURES |
VariantLigatures::NO_DISCRETIONARY_LIGATURES
) => VariantLigatures::DISCRETIONARY_LIGATURES),
"no-discretionary-ligatures" =>
exclusive_value!((result, VariantLigatures::DISCRETIONARY_LIGATURES |
VariantLigatures::NO_DISCRETIONARY_LIGATURES
) => VariantLigatures::NO_DISCRETIONARY_LIGATURES),
"historical-ligatures" =>
exclusive_value!((result, VariantLigatures::HISTORICAL_LIGATURES |
VariantLigatures::NO_HISTORICAL_LIGATURES
) => VariantLigatures::HISTORICAL_LIGATURES),
"no-historical-ligatures" =>
exclusive_value!((result, VariantLigatures::HISTORICAL_LIGATURES |
VariantLigatures::NO_HISTORICAL_LIGATURES
) => VariantLigatures::NO_HISTORICAL_LIGATURES),
"contextual" =>
exclusive_value!((result, VariantLigatures::CONTEXTUAL |
VariantLigatures::NO_CONTEXTUAL
) => VariantLigatures::CONTEXTUAL),
"no-contextual" =>
exclusive_value!((result, VariantLigatures::CONTEXTUAL |
VariantLigatures::NO_CONTEXTUAL
) => VariantLigatures::NO_CONTEXTUAL),
_ => return Err(()),
},
)
}) {
result.insert(flag);
}
if !result.is_empty() {
Ok(FontVariantLigatures::Value(result))
} else {
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
}
macro_rules! impl_variant_numeric {
{
$(
$(#[$($meta:tt)+])*
$ident:ident / $css:expr => $gecko:ident = $value:expr,
)+
} => {
bitflags! {
#[derive(MallocSizeOf)]
            /// Variants of numeric values
pub struct VariantNumeric: u8 {
/// None of other variants are enabled.
const NORMAL = 0;
$(
$(#[$($meta)+])*
const $ident = $value;
)+
}
}
impl ToCss for VariantNumeric {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.is_empty() {
return dest.write_str("normal");
}
let mut writer = SequenceWriter::new(dest, " ");
$(
if self.intersects(VariantNumeric::$ident) {
writer.raw_item($css)?;
}
)+
Ok(())
}
}
        /// Asserts that each variant-numeric flag matches its NS_FONT_VARIANT_NUMERIC_* value.
#[cfg(feature = "gecko")]
#[inline]
pub fn assert_variant_numeric_matches() {
use gecko_bindings::structs;
$(
debug_assert_eq!(structs::$gecko as u8, VariantNumeric::$ident.bits());
)+
}
impl SpecifiedValueInfo for VariantNumeric {
fn collect_completion_keywords(f: KeywordsCollectFn) {
f(&["normal", $($css,)+]);
}
}
}
}
impl_variant_numeric! {
/// Enables display of lining numerals.
LINING_NUMS / "lining-nums" => NS_FONT_VARIANT_NUMERIC_LINING = 0x01,
/// Enables display of old-style numerals.
OLDSTYLE_NUMS / "oldstyle-nums" => NS_FONT_VARIANT_NUMERIC_OLDSTYLE = 0x02,
/// Enables display of proportional numerals.
PROPORTIONAL_NUMS / "proportional-nums" => NS_FONT_VARIANT_NUMERIC_PROPORTIONAL = 0x04,
/// Enables display of tabular numerals.
TABULAR_NUMS / "tabular-nums" => NS_FONT_VARIANT_NUMERIC_TABULAR = 0x08,
/// Enables display of lining diagonal fractions.
DIAGONAL_FRACTIONS / "diagonal-fractions" => NS_FONT_VARIANT_NUMERIC_DIAGONAL_FRACTIONS = 0x10,
/// Enables display of lining stacked fractions.
STACKED_FRACTIONS / "stacked-fractions" => NS_FONT_VARIANT_NUMERIC_STACKED_FRACTIONS = 0x20,
/// Enables display of letter forms used with ordinal numbers.
ORDINAL / "ordinal" => NS_FONT_VARIANT_NUMERIC_ORDINAL = 0x80,
/// Enables display of slashed zeros.
SLASHED_ZERO / "slashed-zero" => NS_FONT_VARIANT_NUMERIC_SLASHZERO = 0x40,
}
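// Note (added): ORDINAL (0x80) and SLASHED_ZERO (0x40) are declared in CSS
// canonical serialization order rather than ascending bit order; the values
// still line up with Gecko's NS_FONT_VARIANT_NUMERIC_* constants, which
// `assert_variant_numeric_matches` verifies in Gecko builds.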
#[cfg(feature = "gecko")]
impl VariantNumeric {
/// Obtain a specified value from a Gecko keyword value
///
/// Intended for use with presentation attributes, not style structs
pub fn from_gecko_keyword(kw: u8) -> Self {
Self::from_bits_truncate(kw)
}
/// Transform into gecko keyword
pub fn to_gecko_keyword(self) -> u8 {
self.bits()
}
}
#[cfg(feature = "gecko")]
impl_gecko_keyword_conversions!(VariantNumeric, u8);
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToCss)]
/// Specifies control over numerical forms.
pub enum FontVariantNumeric {
/// Value variant with `variant-numeric`
Value(VariantNumeric),
/// System font
#[css(skip)]
System(SystemFont),
}
impl FontVariantNumeric {
#[inline]
/// Default value of `font-variant-numeric` as `empty`
pub fn empty() -> FontVariantNumeric {
FontVariantNumeric::Value(VariantNumeric::empty())
}
system_font_methods!(FontVariantNumeric, font_variant_numeric);
}
impl ToComputedValue for FontVariantNumeric {
type ComputedValue = computed::FontVariantNumeric;
fn to_computed_value(&self, context: &Context) -> computed::FontVariantNumeric {
match *self {
FontVariantNumeric::Value(ref v) => v.clone(),
FontVariantNumeric::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontVariantNumeric) -> Self {
FontVariantNumeric::Value(other.clone())
}
}
impl Parse for FontVariantNumeric {
/// normal |
/// [ <numeric-figure-values> ||
/// <numeric-spacing-values> ||
/// <numeric-fraction-values> ||
/// ordinal ||
/// slashed-zero ]
/// <numeric-figure-values> = [ lining-nums | oldstyle-nums ]
/// <numeric-spacing-values> = [ proportional-nums | tabular-nums ]
/// <numeric-fraction-values> = [ diagonal-fractions | stacked-fractions ]
fn parse<'i, 't>(
_context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontVariantNumeric, ParseError<'i>> {
let mut result = VariantNumeric::empty();
if input
.try(|input| input.expect_ident_matching("normal"))
.is_ok()
{
return Ok(FontVariantNumeric::Value(result));
}
while let Ok(flag) = input.try(|input| {
Ok(
match_ignore_ascii_case! { &input.expect_ident().map_err(|_| ())?,
"ordinal" =>
exclusive_value!((result, VariantNumeric::ORDINAL) => VariantNumeric::ORDINAL),
"slashed-zero" =>
exclusive_value!((result, VariantNumeric::SLASHED_ZERO) => VariantNumeric::SLASHED_ZERO),
"lining-nums" =>
exclusive_value!((result, VariantNumeric::LINING_NUMS |
VariantNumeric::OLDSTYLE_NUMS
) => VariantNumeric::LINING_NUMS),
"oldstyle-nums" =>
exclusive_value!((result, VariantNumeric::LINING_NUMS |
VariantNumeric::OLDSTYLE_NUMS
) => VariantNumeric::OLDSTYLE_NUMS),
"proportional-nums" =>
exclusive_value!((result, VariantNumeric::PROPORTIONAL_NUMS |
VariantNumeric::TABULAR_NUMS
) => VariantNumeric::PROPORTIONAL_NUMS),
"tabular-nums" =>
exclusive_value!((result, VariantNumeric::PROPORTIONAL_NUMS |
VariantNumeric::TABULAR_NUMS
) => VariantNumeric::TABULAR_NUMS),
"diagonal-fractions" =>
exclusive_value!((result, VariantNumeric::DIAGONAL_FRACTIONS |
VariantNumeric::STACKED_FRACTIONS
) => VariantNumeric::DIAGONAL_FRACTIONS),
"stacked-fractions" =>
exclusive_value!((result, VariantNumeric::DIAGONAL_FRACTIONS |
VariantNumeric::STACKED_FRACTIONS
) => VariantNumeric::STACKED_FRACTIONS),
_ => return Err(()),
},
)
}) {
result.insert(flag);
}
if !result.is_empty() {
Ok(FontVariantNumeric::Value(result))
} else {
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
}
/// This property provides low-level control over OpenType or TrueType font features.
pub type SpecifiedFontFeatureSettings = FontSettings<FeatureTagValue<Integer>>;
/// Define initial settings that apply when the font defined by an @font-face
/// rule is rendered.
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)]
pub enum FontFeatureSettings {
/// Value of `FontSettings`
Value(SpecifiedFontFeatureSettings),
/// System font
#[css(skip)]
System(SystemFont),
}
impl FontFeatureSettings {
#[inline]
/// Get default value of `font-feature-settings` as normal
pub fn normal() -> FontFeatureSettings {
FontFeatureSettings::Value(FontSettings::normal())
}
system_font_methods!(FontFeatureSettings, font_feature_settings);
}
impl ToComputedValue for FontFeatureSettings {
type ComputedValue = computed::FontFeatureSettings;
fn to_computed_value(&self, context: &Context) -> computed::FontFeatureSettings {
match *self {
FontFeatureSettings::Value(ref v) => v.to_computed_value(context),
FontFeatureSettings::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontFeatureSettings) -> Self {
FontFeatureSettings::Value(ToComputedValue::from_computed_value(other))
}
}
impl Parse for FontFeatureSettings {
/// normal | <feature-tag-value>#
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontFeatureSettings, ParseError<'i>> {
SpecifiedFontFeatureSettings::parse(context, input).map(FontFeatureSettings::Value)
}
}
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue)]
/// Whether user agents are allowed to synthesize bold or oblique font faces
/// when a font family lacks bold or italic faces
pub struct FontSynthesis {
/// If a `font-weight` is requested that the font family does not contain,
/// the user agent may synthesize the requested weight from the weights
/// that do exist in the font family.
#[css(represents_keyword)]
pub weight: bool,
/// If a font-style is requested that the font family does not contain,
/// the user agent may synthesize the requested style from the normal face in the font family.
#[css(represents_keyword)]
pub style: bool,
}
impl FontSynthesis {
#[inline]
/// Get the default value of font-synthesis
pub fn get_initial_value() -> Self {
FontSynthesis {
weight: true,
style: true,
}
}
}
impl Parse for FontSynthesis {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontSynthesis, ParseError<'i>> {
let mut result = FontSynthesis {
weight: false,
style: false,
};
try_match_ident_ignore_ascii_case! { input,
"none" => Ok(result),
"weight" => {
result.weight = true;
if input.try(|input| input.expect_ident_matching("style")).is_ok() {
result.style = true;
}
Ok(result)
},
"style" => {
result.style = true;
if input.try(|input| input.expect_ident_matching("weight")).is_ok() {
result.weight = true;
}
Ok(result)
},
}
}
}
impl ToCss for FontSynthesis {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.weight && self.style {
dest.write_str("weight style")
} else if self.style {
dest.write_str("style")
} else if self.weight {
dest.write_str("weight")
} else {
dest.write_str("none")
}
}
}
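// Serialization sketch (added; assumes the test harness): both flags set is
// the initial value and serializes as `weight style`.
#[cfg(test)]
mod font_synthesis_serialization_example {
    use super::*;

    #[test]
    fn initial_value_serializes_both_keywords() {
        assert_eq!(
            FontSynthesis::get_initial_value().to_css_string(),
            "weight style"
        );
    }
}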
#[cfg(feature = "gecko")]
impl From<u8> for FontSynthesis {
fn from(bits: u8) -> FontSynthesis {
use gecko_bindings::structs;
FontSynthesis {
weight: bits & structs::NS_FONT_SYNTHESIS_WEIGHT as u8 != 0,
style: bits & structs::NS_FONT_SYNTHESIS_STYLE as u8 != 0,
}
}
}
#[cfg(feature = "gecko")]
impl From<FontSynthesis> for u8 {
fn from(v: FontSynthesis) -> u8 {
use gecko_bindings::structs;
let mut bits: u8 = 0;
if v.weight {
bits |= structs::NS_FONT_SYNTHESIS_WEIGHT as u8;
}
if v.style {
bits |= structs::NS_FONT_SYNTHESIS_STYLE as u8;
}
bits
}
}
#[derive(Clone, Debug, Eq, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)]
/// Allows authors to explicitly specify the language system of the font,
/// overriding the language system implied by the content language
pub enum FontLanguageOverride {
/// When rendering with OpenType fonts,
/// the content language of the element is
/// used to infer the OpenType language system
Normal,
/// Single three-letter case-sensitive OpenType language system tag,
/// specifies the OpenType language system to be used instead of
/// the language system implied by the language of the element
Override(Box<str>),
/// Use system font
#[css(skip)]
System(SystemFont),
}
impl FontLanguageOverride {
#[inline]
/// Get default value with `normal`
pub fn normal() -> FontLanguageOverride {
FontLanguageOverride::Normal
}
system_font_methods!(FontLanguageOverride, font_language_override);
}
impl ToComputedValue for FontLanguageOverride {
type ComputedValue = computed::FontLanguageOverride;
#[inline]
fn to_computed_value(&self, context: &Context) -> computed::FontLanguageOverride {
match *self {
FontLanguageOverride::Normal => computed::FontLanguageOverride(0),
FontLanguageOverride::Override(ref lang) => {
if lang.is_empty() || lang.len() > 4 || !lang.is_ascii() {
return computed::FontLanguageOverride(0);
}
let mut computed_lang = lang.to_string();
while computed_lang.len() < 4 {
computed_lang.push(' ');
}
let bytes = computed_lang.into_bytes();
computed::FontLanguageOverride(BigEndian::read_u32(&bytes))
},
FontLanguageOverride::System(_) => self.compute_system(context),
}
}
#[inline]
fn from_computed_value(computed: &computed::FontLanguageOverride) -> Self {
if computed.0 == 0 {
return FontLanguageOverride::Normal;
}
let mut buf = [0; 4];
BigEndian::write_u32(&mut buf, computed.0);
FontLanguageOverride::Override(
if cfg!(debug_assertions) {
String::from_utf8(buf.to_vec()).unwrap()
} else {
unsafe { String::from_utf8_unchecked(buf.to_vec()) }
}.into_boxed_str(),
)
}
}
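// Worked example of the tag packing above (assuming ASCII input): the tag
// "TRK" is padded with trailing spaces to four bytes and read big-endian,
// so b"TRK " becomes 0x5452_4B20. `from_computed_value` writes those four
// bytes back verbatim, so the padded form "TRK " is what round-trips.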
impl Parse for FontLanguageOverride {
/// normal | <string>
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontLanguageOverride, ParseError<'i>> {
if input
.try(|input| input.expect_ident_matching("normal"))
.is_ok()
{
return Ok(FontLanguageOverride::Normal);
}
let string = input.expect_string()?;
Ok(FontLanguageOverride::Override(
string.as_ref().to_owned().into_boxed_str(),
))
}
}
/// This property provides low-level control over OpenType or TrueType font
/// variations.
pub type SpecifiedFontVariationSettings = FontSettings<VariationValue<Number>>;
/// Define initial settings that apply when the font defined by an @font-face
/// rule is rendered.
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)]
pub enum FontVariationSettings {
/// Value of `FontSettings`
Value(SpecifiedFontVariationSettings),
/// System font
#[css(skip)]
System(SystemFont),
}
impl FontVariationSettings {
#[inline]
    /// Get the default value of `font-variation-settings`, i.e. `normal`
pub fn normal() -> FontVariationSettings {
FontVariationSettings::Value(FontSettings::normal())
}
system_font_methods!(FontVariationSettings, font_variation_settings);
}
impl ToComputedValue for FontVariationSettings {
type ComputedValue = computed::FontVariationSettings;
fn to_computed_value(&self, context: &Context) -> computed::FontVariationSettings {
match *self {
FontVariationSettings::Value(ref v) => v.to_computed_value(context),
FontVariationSettings::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontVariationSettings) -> Self {
FontVariationSettings::Value(ToComputedValue::from_computed_value(other))
}
}
impl Parse for FontVariationSettings {
/// normal | <variation-tag-value>#
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontVariationSettings, ParseError<'i>> {
SpecifiedFontVariationSettings::parse(context, input).map(FontVariationSettings::Value)
}
}
fn parse_one_feature_value<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Integer, ParseError<'i>> {
if let Ok(integer) = input.try(|i| Integer::parse_non_negative(context, i)) {
return Ok(integer);
}
try_match_ident_ignore_ascii_case! { input,
"on" => Ok(Integer::new(1)),
"off" => Ok(Integer::new(0)),
}
}
impl Parse for FeatureTagValue<Integer> {
/// https://drafts.csswg.org/css-fonts-4/#feature-tag-value
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let tag = FontTag::parse(context, input)?;
let value = input
.try(|i| parse_one_feature_value(context, i))
.unwrap_or_else(|_| Integer::new(1));
Ok(Self { tag, value })
}
}
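// Examples of the `<feature-tag-value>` forms accepted above (illustrative):
//
//   "dlig" 1   -> value parsed as a non-negative integer
//   "smcp" on  -> keyword alias for 1
//   "liga" off -> keyword alias for 0
//   "kern"     -> no value given; `unwrap_or_else` defaults it to 1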
impl Parse for VariationValue<Number> {
/// This is the `<string> <number>` part of the font-variation-settings
/// syntax.
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let tag = FontTag::parse(context, input)?;
let value = Number::parse(context, input)?;
Ok(Self { tag, value })
}
}
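// Example per the `<string> <number>` grammar above (illustrative):
//
//   font-variation-settings: "wght" 700, "slnt" -10;
//
// Each comma-separated entry parses into a VariationValue carrying the
// quoted tag and a (possibly negative, possibly fractional) Number.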
#[derive(
Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss,
)]
/// Text zoom: enabled if true, disabled if false.
pub struct XTextZoom(#[css(skip)] pub bool);
impl Parse for XTextZoom {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<XTextZoom, ParseError<'i>> {
debug_assert!(
false,
"Should be set directly by presentation attributes only."
);
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)]
/// Internal property that reflects the lang attribute
pub struct XLang(#[css(skip)] pub Atom);
impl XLang {
#[inline]
/// Get default value for `-x-lang`
pub fn get_initial_value() -> XLang {
XLang(atom!(""))
}
}
impl Parse for XLang {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<XLang, ParseError<'i>> {
debug_assert!(
false,
"Should be set directly by presentation attributes only."
);
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToCss)]
/// Specifies the minimum font size allowed due to changes in scriptlevel.
/// Ref: https://wiki.mozilla.org/MathML:mstyle
pub struct MozScriptMinSize(pub NoCalcLength);
impl MozScriptMinSize {
#[inline]
/// Calculate initial value of -moz-script-min-size.
pub fn get_initial_value() -> Length {
Length::new(DEFAULT_SCRIPT_MIN_SIZE_PT as f32 * (AU_PER_PT / AU_PER_PX))
}
}
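// Worked example, assuming the conventional gecko constants (8 for
// DEFAULT_SCRIPT_MIN_SIZE_PT, 80 app units per pt, 60 per px; the real
// values live elsewhere in the crate): 8pt * 80/60 yields roughly 10.67
// CSS pixels as the initial minimum size.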
impl Parse for MozScriptMinSize {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<MozScriptMinSize, ParseError<'i>> {
debug_assert!(
false,
"Should be set directly by presentation attributes only."
);
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToCss)]
/// Changes the scriptlevel in effect for the children.
/// Ref: https://wiki.mozilla.org/MathML:mstyle
///
/// The main effect of scriptlevel is to control the font size.
/// https://www.w3.org/TR/MathML3/chapter3.html#presm.scriptlevel
pub enum MozScriptLevel {
/// Change `font-size` relatively.
Relative(i32),
/// Change `font-size` absolutely.
///
/// Should only be serialized by presentation attributes, so even though
/// serialization for this would look the same as for the `Relative`
/// variant, it is unexposed, so no big deal.
#[css(function)]
MozAbsolute(i32),
/// Change `font-size` automatically.
Auto,
}
impl Parse for MozScriptLevel {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<MozScriptLevel, ParseError<'i>> {
// We don't bother to handle calc here.
if let Ok(i) = input.try(|i| i.expect_integer()) {
return Ok(MozScriptLevel::Relative(i));
}
input.expect_ident_matching("auto")?;
Ok(MozScriptLevel::Auto)
}
}
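// Accepted forms for -moz-script-level (illustrative):
//
//   "-2"   -> MozScriptLevel::Relative(-2)
//   "auto" -> MozScriptLevel::Auto
//
// `MozAbsolute` is never produced here; per the comment on that variant it
// is only set via presentation attributes.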
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)]
/// Specifies the multiplier to be used to adjust font size
/// due to changes in scriptlevel.
///
/// Ref: https://www.w3.org/TR/MathML3/chapter3.html#presm.mstyle.attrs
pub struct MozScriptSizeMultiplier(pub f32);
impl MozScriptSizeMultiplier {
#[inline]
/// Get default value of `-moz-script-size-multiplier`
pub fn get_initial_value() -> MozScriptSizeMultiplier {
MozScriptSizeMultiplier(DEFAULT_SCRIPT_SIZE_MULTIPLIER as f32)
}
}
impl Parse for MozScriptSizeMultiplier {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<MozScriptSizeMultiplier, ParseError<'i>> {
debug_assert!(
false,
"Should be set directly by presentation attributes only."
);
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
impl From<f32> for MozScriptSizeMultiplier {
fn from(v: f32) -> Self {
MozScriptSizeMultiplier(v)
}
}
impl From<MozScriptSizeMultiplier> for f32 {
fn from(v: MozScriptSizeMultiplier) -> f32 {
v.0
}
}<|fim▁end|> | .clone_font_size()
.keyword_info
.map(|i| i.compose(factor, Au(0).into()))
}; |
<|file_name|>callResult.ts<|end_file_name|><|fim▁begin|>export class CallResult {<|fim▁hole|> public LoggingUser: string = "";
public Priority: string = "";
public PriorityCss: string = "";
public State: string = "";
public StateCss: string = "";
public Timestamp: string = "";
public Color: string = "";
public Address: string = "";
}<|fim▁end|> | public Id: string = "";
public Name: string = ""; |
<|file_name|>base_class.py<|end_file_name|><|fim▁begin|>"""Base class for IKEA TRADFRI."""
from __future__ import annotations
from collections.abc import Callable
from functools import wraps
import logging
from typing import Any
from pytradfri.command import Command
from pytradfri.device import Device
from pytradfri.device.air_purifier import AirPurifier
from pytradfri.device.air_purifier_control import AirPurifierControl
from pytradfri.device.blind import Blind
from pytradfri.device.blind_control import BlindControl
from pytradfri.device.light import Light
from pytradfri.device.light_control import LightControl
from pytradfri.device.signal_repeater_control import SignalRepeaterControl
from pytradfri.device.socket import Socket
from pytradfri.device.socket_control import SocketControl
from pytradfri.error import PytradfriError<|fim▁hole|>from homeassistant.core import callback
from homeassistant.helpers.entity import DeviceInfo, Entity
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
def handle_error(
func: Callable[[Command | list[Command]], Any]
) -> Callable[[Command | list[Command]], Any]:
"""Handle tradfri api call error."""
@wraps(func)
async def wrapper(command: Command | list[Command]) -> None:
"""Decorate api call."""
try:
await func(command)
except PytradfriError as err:
_LOGGER.error("Unable to execute command %s: %s", command, err)
return wrapper
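# Illustrative use of the decorator above (a sketch; ``api_factory`` is a
# hypothetical pytradfri request executor, not something defined here):
#
#     api = handle_error(api_factory.request)
#     # A failing command is logged by the wrapper instead of raising:
#     await api(device.light_control.set_dimmer(128))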
class TradfriBaseClass(Entity):
"""Base class for IKEA TRADFRI.
All devices and groups should ultimately inherit from this class.
"""
_attr_should_poll = False
def __init__(
self,
device: Device,
api: Callable[[Command | list[Command]], Any],
gateway_id: str,
) -> None:
"""Initialize a device."""
self._api = handle_error(api)
self._device: Device = device
self._device_control: BlindControl | LightControl | SocketControl | SignalRepeaterControl | AirPurifierControl | None = (
None
)
self._device_data: Socket | Light | Blind | AirPurifier | None = None
self._gateway_id = gateway_id
self._refresh(device)
@callback
def _async_start_observe(self, exc: Exception | None = None) -> None:
"""Start observation of device."""
if exc:
self.async_write_ha_state()
_LOGGER.warning("Observation failed for %s", self._attr_name, exc_info=exc)
try:
cmd = self._device.observe(
callback=self._observe_update,
err_callback=self._async_start_observe,
duration=0,
)
self.hass.async_create_task(self._api(cmd))
except PytradfriError as err:
_LOGGER.warning("Observation failed, trying again", exc_info=err)
self._async_start_observe()
async def async_added_to_hass(self) -> None:
"""Start thread when added to hass."""
self._async_start_observe()
@callback
def _observe_update(self, device: Device) -> None:
"""Receive new state data for this device."""
self._refresh(device)
self.async_write_ha_state()
def _refresh(self, device: Device) -> None:
"""Refresh the device data."""
self._device = device
self._attr_name = device.name
class TradfriBaseDevice(TradfriBaseClass):
"""Base class for a TRADFRI device.
All devices should inherit from this class.
"""
@property
def device_info(self) -> DeviceInfo:
"""Return the device info."""
info = self._device.device_info
return DeviceInfo(
identifiers={(DOMAIN, self._device.id)},
manufacturer=info.manufacturer,
model=info.model_number,
name=self._attr_name,
sw_version=info.firmware_version,
via_device=(DOMAIN, self._gateway_id),
)
def _refresh(self, device: Device) -> None:
"""Refresh the device data."""
super()._refresh(device)
self._attr_available = device.reachable<|fim▁end|> | |
<|file_name|>http-client.js<|end_file_name|><|fim▁begin|>import {HttpClientConfiguration} from './http-client-configuration';
import {RequestInit, Interceptor} from './interfaces';
import 'core-js';
/**
* An HTTP client based on the Fetch API.
*
* @constructor
*/
export class HttpClient {
activeRequestCount: number = 0;
isRequesting: boolean = false;
interceptors: Interceptor[] = [];
isConfigured: boolean = false;
baseUrl: string = '';
defaults: RequestInit = null;
/**
* Configure this client with default settings to be used by all requests.
*
* @param config - A function that takes a config argument,
* or a config object, or a string to use as the client's baseUrl.
* @chainable
*/
  configure(config: string | RequestInit | ((config: HttpClientConfiguration) => void)): HttpClient {
let normalizedConfig;
if (typeof config === 'string') {
normalizedConfig = { baseUrl: config };
} else if (typeof config === 'object') {
normalizedConfig = { defaults: config };
} else if (typeof config === 'function') {
normalizedConfig = new HttpClientConfiguration();
config(normalizedConfig);
} else {
throw new Error('invalid config');
}
let defaults = normalizedConfig.defaults;
if (defaults && defaults.headers instanceof Headers) {
// Headers instances are not iterable in all browsers. Require a plain
// object here to allow default headers to be merged into request headers.
throw new Error('Default headers must be a plain object.');
}
this.baseUrl = normalizedConfig.baseUrl;
this.defaults = defaults;
this.interceptors.push(...normalizedConfig.interceptors || []);
this.isConfigured = true;
return this;
}
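  /*
   * Illustrative configuration styles (a sketch; the URL is a placeholder,
   * and withBaseUrl/withDefaults are assumed HttpClientConfiguration
   * helpers):
   *
   *   client.configure('http://example.com/api/');      // string -> baseUrl
   *   client.configure({ credentials: 'same-origin' }); // object -> defaults
   *   client.configure(config => config
   *     .withBaseUrl('http://example.com/api/')
   *     .withDefaults({ headers: { 'Accept': 'application/json' } }));
   */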
/**
* Starts the process of fetching a resource. Default configuration parameters
* will be applied to the Request. The constructed Request will be passed to
* registered request interceptors before being sent. The Response will be passed
* to registered Response interceptors before it is returned.
*
* See also https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API
*
* @param input - The resource that you wish to fetch. Either a
* Request object, or a string containing the URL of the resource.
   * @param init - An options object containing settings to be applied to
* the Request.
*/
fetch(input: Request|string, init?: RequestInit): Promise<Response> {
this::trackRequestStart();
    let request = Promise.resolve().then(() => this::buildRequest(input, init));
let promise = processRequest(request, this.interceptors)
.then(result => {
let response = null;
if (result instanceof Response) {
response = result;
} else if (result instanceof Request) {
response = fetch(result);
} else {
throw new Error(`An invalid result was returned by the interceptor chain. Expected a Request or Response instance, but got [${result}]`);
}
return processResponse(response, this.interceptors);
});
return this::trackRequestEndWith(promise);
}
}
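// Sketch of an interceptor as consumed by applyInterceptors below; each
// hook is optional, and the request/response hooks may return a transformed
// (or entirely new) Request/Response:
//
//   client.interceptors.push({
//     request(request) { return request; },
//     requestError(error) { throw error; },
//     response(response) { return response; },
//     responseError(error) { throw error; }
//   });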
function trackRequestStart() {
this.isRequesting = !!(++this.activeRequestCount);
}
function trackRequestEnd() {
this.isRequesting = !!(--this.activeRequestCount);
}
function trackRequestEndWith(promise) {
let handle = this::trackRequestEnd;
promise.then(handle, handle);
return promise;
}
function buildRequest(input, init = {}) {
let defaults = this.defaults || {};
let source;
let url;
let body;
if (input instanceof Request) {
if (!this.isConfigured) {
// don't copy the request if there are no defaults configured
return input;
}
source = input;
url = input.url;
body = input.blob();
} else {
source = init;
url = input;
body = init.body;
}
let requestInit = Object.assign({}, defaults, source, { body });
let request = new Request((this.baseUrl || '') + url, requestInit);
setDefaultHeaders(request.headers, defaults.headers);
return request;
}
function setDefaultHeaders(headers, defaultHeaders) {
for (let name in defaultHeaders || {}) {
if (defaultHeaders.hasOwnProperty(name) && !headers.has(name)) {<|fim▁hole|> }
}
function processRequest(request, interceptors) {
return applyInterceptors(request, interceptors, 'request', 'requestError');
}
function processResponse(response, interceptors) {
return applyInterceptors(response, interceptors, 'response', 'responseError');
}
function applyInterceptors(input, interceptors, successName, errorName) {
return (interceptors || [])
.reduce((chain, interceptor) => {
let successHandler = interceptor[successName];
let errorHandler = interceptor[errorName];
return chain.then(
successHandler && interceptor::successHandler,
errorHandler && interceptor::errorHandler);
}, Promise.resolve(input));
}<|fim▁end|> | headers.set(name, defaultHeaders[name]);
} |
<|file_name|>breadcrumb.py<|end_file_name|><|fim▁begin|><|fim▁hole|>class Breadcrumb:
def __init__(self, text, url):
self.text = text
self.url = url<|fim▁end|> | |
<|file_name|>delivery.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Vaucher
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from lxml import etree
from openerp import models, fields, api
class PostlogisticsLicense(models.Model):
_name = 'postlogistics.license'
_description = 'PostLogistics Franking License'
_order = 'sequence'
name = fields.Char(string='Description',
translate=True,
required=True)
number = fields.Char(string='Number',
required=True)
company_id = fields.Many2one(comodel_name='res.company',
string='Company',
required=True)
sequence = fields.Integer(
string='Sequence',
help="Gives the sequence on company to define priority on license "
"when multiple licenses are available for the same group of "
"service."
)
class PostlogisticsServiceGroup(models.Model):
_name = 'postlogistics.service.group'
_description = 'PostLogistics Service Group'
name = fields.Char(string='Description', translate=True, required=True)
group_extid = fields.Integer(string='Group ID', required=True)
postlogistics_license_ids = fields.Many2many(
comodel_name='postlogistics.license',
relation='postlogistics_license_service_groups_rel',
column1='license_id',
column2='group_id',
string='PostLogistics Franking License')
_sql_constraints = [
('group_extid_uniq', 'unique(group_extid)',
"A service group ID must be unique.")
]
POSTLOGISTIC_TYPES = [
('label_layout', 'Label Layout'),
('output_format', 'Output Format'),
('resolution', 'Output Resolution'),
('basic', 'Basic Service'),
('additional', 'Additional Service'),
('delivery', 'Delivery Instructions')
]
class DeliveryCarrierTemplateOption(models.Model):
""" Set name translatable and add service group """
_inherit = 'delivery.carrier.template.option'
name = fields.Char(translate=True)
postlogistics_service_group_id = fields.Many2one(
comodel_name='postlogistics.service.group',
string='PostLogistics Service Group',
)
postlogistics_type = fields.Selection(
selection=POSTLOGISTIC_TYPES,
string="PostLogistics option type",
)
    # relation tables to manage compatibility between basic services
    # and other services
postlogistics_basic_service_ids = fields.Many2many(
comodel_name='delivery.carrier.template.option',
relation='postlogistics_compatibility_service_rel',
column1='service_id',
column2='basic_service_id',
string="Basic Services",
domain=[('postlogistics_type', '=', 'basic')],
help="List of basic service for which this service is compatible",
)
    postlogistics_additional_service_ids = fields.Many2many(
comodel_name='delivery.carrier.template.option',
relation='postlogistics_compatibility_service_rel',
column1='basic_service_id',
column2='service_id',
string="Compatible Additional Services",
domain=[('postlogistics_type', '=', 'additional')],
)
postlogistics_delivery_instruction_ids = fields.Many2many(
comodel_name='delivery.carrier.template.option',
relation='postlogistics_compatibility_service_rel',
column1='basic_service_id',
column2='service_id',
string="Compatible Delivery Instructions",
domain=[('postlogistics_type', '=', 'delivery')],
)
class DeliveryCarrierOption(models.Model):
""" Set name translatable and add service group """
_inherit = 'delivery.carrier.option'
name = fields.Char(translate=True)
def fields_view_get(self, cr, uid, view_id=None, view_type='form',
context=None, toolbar=False, submenu=False):
_super = super(DeliveryCarrierOption, self)
result = _super.fields_view_get(cr, uid, view_id=view_id,
view_type=view_type, context=context,
toolbar=toolbar, submenu=submenu)
xmlid = 'delivery_carrier_label_postlogistics.postlogistics'
ref = self.pool['ir.model.data'].xmlid_to_object
postlogistics_partner = ref(cr, uid, xmlid, context=context)
if context.get('default_carrier_id'):
carrier_obj = self.pool['delivery.carrier']
carrier = carrier_obj.browse(cr, uid,
context['default_carrier_id'],
context=context)
if carrier.partner_id == postlogistics_partner:
arch = result['arch']
doc = etree.fromstring(arch)
for node in doc.xpath("//field[@name='tmpl_option_id']"):
node.set(
'domain',
"[('partner_id', '=', %s), "
" ('id', 'in', parent.allowed_option_ids[0][2])]" %
postlogistics_partner.id
)
result['arch'] = etree.tostring(doc)
return result
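    # With the XML manipulation above, the rendered view ends up with a
    # domain along these lines (the partner id is illustrative):
    #
    #   [('partner_id', '=', 42),
    #    ('id', 'in', parent.allowed_option_ids[0][2])]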
class DeliveryCarrier(models.Model):
""" Add service group """
_inherit = 'delivery.carrier'<|fim▁hole|> @api.model
def _get_carrier_type_selection(self):
""" Add postlogistics carrier type """
res = super(DeliveryCarrier, self)._get_carrier_type_selection()
res.append(('postlogistics', 'Postlogistics'))
return res
@api.depends('partner_id',
'available_option_ids',
'available_option_ids.tmpl_option_id',
'available_option_ids.postlogistics_type',
)
def _get_basic_service_ids(self):
""" Search in all options for PostLogistics basic services if set """
xmlid = 'delivery_carrier_label_postlogistics.postlogistics'
postlogistics_partner = self.env.ref(xmlid)
for carrier in self:
if carrier.partner_id != postlogistics_partner:
continue
options = carrier.available_option_ids.filtered(
lambda option: option.postlogistics_type == 'basic'
).mapped('tmpl_option_id')
if not options:
continue
            carrier.postlogistics_basic_service_ids = options
    @api.depends('partner_id',
                 'postlogistics_service_group_id',
                 'postlogistics_basic_service_ids',
                 'available_option_ids',
                 'available_option_ids.postlogistics_type',
                 )
def _get_allowed_option_ids(self):
""" Return a list of possible options
A domain would be too complicated.
We do this to ensure the user first select a basic service. And
then he adds additional services.
"""
option_template_obj = self.env['delivery.carrier.template.option']
xmlid = 'delivery_carrier_label_postlogistics.postlogistics'
postlogistics_partner = self.env.ref(xmlid)
for carrier in self:
allowed = option_template_obj.browse()
if carrier.partner_id != postlogistics_partner:
continue
service_group = carrier.postlogistics_service_group_id
if service_group:
basic_services = carrier.postlogistics_basic_service_ids
services = option_template_obj.search(
[('postlogistics_service_group_id', '=', service_group.id)]
)
allowed |= services
if basic_services:
related_services = option_template_obj.search(
[('postlogistics_basic_service_ids', 'in',
basic_services.ids)]
)
allowed |= related_services
# Allows to set multiple optional single option in order to
# let the user select them
single_option_types = [
'label_layout',
'output_format',
'resolution',
]
selected_single_options = [
opt.tmpl_option_id.postlogistics_type
for opt in carrier.available_option_ids
if opt.postlogistics_type in single_option_types and
opt.mandatory]
if selected_single_options != single_option_types:
services = option_template_obj.search(
[('postlogistics_type', 'in', single_option_types),
('postlogistics_type', 'not in',
selected_single_options)],
)
allowed |= services
carrier.allowed_option_ids = allowed
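    # Example of the narrowing performed above (illustrative): with a
    # service group selected but no basic service yet, `allowed` holds every
    # option of that group plus the still-unset single options
    # (label_layout, output_format, resolution); once a basic service is
    # chosen, the options whose postlogistics_basic_service_ids reference it
    # become available as well.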
postlogistics_license_id = fields.Many2one(
comodel_name='postlogistics.license',
string='PostLogistics Franking License',
)
postlogistics_service_group_id = fields.Many2one(
comodel_name='postlogistics.service.group',
string='PostLogistics Service Group',
help="Service group defines the available options for "
"this delivery method.",
)
postlogistics_basic_service_ids = fields.One2many(
comodel_name='delivery.carrier.template.option',
compute='_get_basic_service_ids',
string='PostLogistics Service Group',
help="Basic Service defines the available "
"additional options for this delivery method",
)
allowed_option_ids = fields.Many2many(
comodel_name='delivery.carrier.template.option',
compute='_get_allowed_option_ids',
string='Allowed options',
help="Compute allowed options according to selected options.",
)<|fim▁end|> |