prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>list40_c.hpp<|end_file_name|><|fim▁begin|>#ifndef AUTOBOOST_MPL_LIST_LIST40_C_HPP_INCLUDED
#define AUTOBOOST_MPL_LIST_LIST40_C_HPP_INCLUDED
// Copyright Aleksey Gurtovoy 2000-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// See http://www.boost.org/libs/mpl for documentation.
// $Id$
// $Date$
// $Revision$
#if !defined(AUTOBOOST_MPL_PREPROCESSING_MODE)
# include <autoboost/mpl/list/list30_c.hpp>
#endif
#include <autoboost/mpl/aux_/config/use_preprocessed.hpp>
#if !defined(AUTOBOOST_MPL_CFG_NO_PREPROCESSED_HEADERS) \
&& !defined(AUTOBOOST_MPL_PREPROCESSING_MODE)
# define AUTOBOOST_MPL_PREPROCESSED_HEADER list40_c.hpp
# include <autoboost/mpl/list/aux_/include_preprocessed.hpp>
#else
# include <autoboost/preprocessor/iterate.hpp><|fim▁hole|>namespace autoboost { namespace mpl {
# define AUTOBOOST_PP_ITERATION_PARAMS_1 \
(3,(31, 40, <autoboost/mpl/list/aux_/numbered_c.hpp>))
# include AUTOBOOST_PP_ITERATE()
}}
#endif // AUTOBOOST_MPL_CFG_NO_PREPROCESSED_HEADERS
#endif // AUTOBOOST_MPL_LIST_LIST40_C_HPP_INCLUDED<|fim▁end|>
| |
<|file_name|>validation.py<|end_file_name|><|fim▁begin|>import re
import mimetypes
<|fim▁hole|>def validate_markdown_flavour(value):
return value in ('markdown', 'markdown_strict', 'markdown_phpextra',
'markdown_github', 'markdown_mmd', 'commonmark')
def validate_mimetype_image(value):
# Empty string is also valid
if not value:
return True
mimetype = mimetypes.guess_type(value)[0]
if mimetype is None:
return False
else:
return mimetype.startswith('image')
def validate_subproject(value):
regex = '(http|https)://(www.|)github.com/([\w\d\.]+)/([\w\d\.]+)(.git|)'
return re.match(regex, value['url']) != None<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import azure.cli.command_modules.consumption._help # pylint: disable=unused-import<|fim▁hole|> import azure.cli.command_modules.consumption._params # pylint: disable=redefined-outer-name, unused-variable
def load_commands():
import azure.cli.command_modules.consumption.commands # pylint: disable=redefined-outer-name, unused-variable<|fim▁end|>
|
def load_params(_):
|
<|file_name|>comp-3766.component.spec.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/<|fim▁hole|>import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { Comp3766Component } from './comp-3766.component';
describe('Comp3766Component', () => {
let component: Comp3766Component;
let fixture: ComponentFixture<Comp3766Component>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ Comp3766Component ]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(Comp3766Component);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
});<|fim▁end|>
| |
<|file_name|>intron_exon_reads.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
"""
Example from pybedtools documentation (:ref:`third example`) to count \
reads in introns and exons using multiple CPUs.
"""
from __future__ import print_function
import pybedtools
import argparse
import os
import sys
import multiprocessing
def featuretype_filter(feature, featuretype):
"""
Only passes features with the specified *featuretype*
"""
if feature[2] == featuretype:
return True
return False
<|fim▁hole|>
def subset_featuretypes(featuretype):
return g.filter(featuretype_filter, featuretype).saveas()
def count_reads_in_features(features):
"""
Callback function to count reads in features
"""
return features.intersect(abam=bam,
b=features.fn,
s=stranded,
bed=True,
stream=True).count()
def main():
"""
Third quick example from the documentation -- count reads introns and
exons, in parallel
"""
ap = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),
usage=__doc__)
ap.add_argument('--gff', required=True,
help='GFF or GTF file containing annotations')
ap.add_argument('--bam', required=True,
help='BAM file containing reads to be counted')
ap.add_argument('--stranded', action='store_true',
help='Use strand-specific merging and overlap. '
'Default is to ignore strand')
ap.add_argument('--no-parallel', dest='noparallel', action='store_true',
help='Disables parallel computation')
ap.add_argument('-o', '--output',
help='Optional file to which results will be written; '
'default is stdout')
ap.add_argument('-v', '--verbose', action='store_true',
help='Verbose (goes to stderr)')
args = ap.parse_args()
gff = args.gff
bam = args.bam
stranded = args.stranded
parallel = not args.noparallel
# Some GFF files have invalid entries -- like chromosomes with negative
# coords or features of length = 0. This line removes them and saves the
# result in a tempfile
g = pybedtools.BedTool(gff).remove_invalid().saveas()
# Decide which version of map to use. If parallel, we only need 3
# processes.
pool = multiprocessing.Pool(processes=3)
# Get separate files for introns and exons in parallel (if specified)
featuretypes = ('intron', 'exon')
introns, exons = pool.map(subset_featuretypes, featuretypes)
# Perform some genome algebra to get unique and shared regions
exon_only = exons.subtract(introns).merge().remove_invalid().saveas()
intron_only = introns.subtract(exons).merge().remove_invalid().saveas()
intron_and_exon = exons\
.intersect(introns).merge().remove_invalid().saveas()
# Do intersections with BAM file in parallel
features = (exon_only, intron_only, intron_and_exon)
results = pool.map(count_reads_in_features, features)
labels = (' exon only:',
' intron only:',
'intron and exon:')
for label, reads in zip(labels, results):
print('%s %s' % (label, reads))
pybedtools.cleanup(verbose=False)
if __name__ == "__main__":
main()<|fim▁end|>
| |
<|file_name|>buffer.ts<|end_file_name|><|fim▁begin|>// Specifically test buffer module regression.
import {
Buffer as ImportedBuffer,
SlowBuffer as ImportedSlowBuffer,
transcode,
TranscodeEncoding,
constants,
kMaxLength,
kStringMaxLength,
Blob,
} from 'buffer';
const utf8Buffer = new Buffer('test');
const base64Buffer = new Buffer('', 'base64');
const octets: Uint8Array = new Uint8Array(123);
const octetBuffer = new Buffer(octets);
const sharedBuffer = new Buffer(octets.buffer);
const copiedBuffer = new Buffer(utf8Buffer);
console.log(Buffer.isBuffer(octetBuffer));
console.log(Buffer.isEncoding('utf8'));
console.log(Buffer.byteLength('xyz123'));
console.log(Buffer.byteLength('xyz123', 'ascii'));
const result1 = Buffer.concat([utf8Buffer, base64Buffer] as ReadonlyArray<Uint8Array>);
const result2 = Buffer.concat([utf8Buffer, base64Buffer] as ReadonlyArray<Uint8Array>, 9999999);
// Module constants
{
const value1: number = constants.MAX_LENGTH;
const value2: number = constants.MAX_STRING_LENGTH;
const value3: number = kMaxLength;
const value4: number = kStringMaxLength;
}
// Class Methods: Buffer.swap16(), Buffer.swa32(), Buffer.swap64()
{
const buf = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]);
buf.swap16();
buf.swap32();
buf.swap64();
}
// Class Method: Buffer.from(data)
{
// Array
const buf1: Buffer = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72] as ReadonlyArray<number>);
// Buffer
const buf2: Buffer = Buffer.from(buf1, 1, 2);
// String
const buf3: Buffer = Buffer.from('this is a tést');
// ArrayBuffer
const arrUint16: Uint16Array = new Uint16Array(2);
arrUint16[0] = 5000;
arrUint16[1] = 4000;
const buf4: Buffer = Buffer.from(arrUint16.buffer);
const arrUint8: Uint8Array = new Uint8Array(2);
const buf5: Buffer = Buffer.from(arrUint8);
const buf6: Buffer = Buffer.from(buf1);
const sb: SharedArrayBuffer = {} as any;
const buf7: Buffer = Buffer.from(sb);
// $ExpectError
Buffer.from({});
}
// Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]])
{
const arr: Uint16Array = new Uint16Array(2);
arr[0] = 5000;
arr[1] = 4000;
let buf: Buffer;
buf = Buffer.from(arr.buffer, 1);
buf = Buffer.from(arr.buffer, 0, 1);
// $ExpectError
Buffer.from("this is a test", 1, 1);
// Ideally passing a normal Buffer would be a type error too, but it's not
// since Buffer is assignable to ArrayBuffer currently
}
// Class Method: Buffer.from(str[, encoding])
{
const buf2: Buffer = Buffer.from('7468697320697320612074c3a97374', 'hex');
/* tslint:disable-next-line no-construct */
Buffer.from(new String("DEADBEEF"), "hex");
// $ExpectError
Buffer.from(buf2, 'hex');
}
// Class Method: Buffer.from(object, [, byteOffset[, length]]) (Implicit coercion)
{
const pseudoBuf = { valueOf() { return Buffer.from([1, 2, 3]); } };
let buf: Buffer = Buffer.from(pseudoBuf);
const pseudoString = { valueOf() { return "Hello"; }};
buf = Buffer.from(pseudoString);
buf = Buffer.from(pseudoString, "utf-8");
// $ExpectError
Buffer.from(pseudoString, 1, 2);
const pseudoArrayBuf = { valueOf() { return new Uint16Array(2); } };
buf = Buffer.from(pseudoArrayBuf, 1, 1);
}
// Class Method: Buffer.alloc(size[, fill[, encoding]])
{
const buf1: Buffer = Buffer.alloc(5);
const buf2: Buffer = Buffer.alloc(5, 'a');
const buf3: Buffer = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
}
// Class Method: Buffer.allocUnsafe(size)
{
const buf: Buffer = Buffer.allocUnsafe(5);
}
// Class Method: Buffer.allocUnsafeSlow(size)
{
const buf: Buffer = Buffer.allocUnsafeSlow(10);
}
// Class Method byteLenght
{
let len: number;
len = Buffer.byteLength("foo");
len = Buffer.byteLength("foo", "utf8");
const b = Buffer.from("bar");
len = Buffer.byteLength(b);
len = Buffer.byteLength(b, "utf16le");
const ab = new ArrayBuffer(15);
len = Buffer.byteLength(ab);
len = Buffer.byteLength(ab, "ascii");
const dv = new DataView(ab);
len = Buffer.byteLength(dv);
len = Buffer.byteLength(dv, "utf16le");
}
// Class Method poolSize
{
let s: number;
s = Buffer.poolSize;
Buffer.poolSize = 4096;
}
// Test that TS 1.6 works with the 'as Buffer' annotation
// on isBuffer.
let a: Buffer | number;
a = new Buffer(10);
if (Buffer.isBuffer(a)) {
a.writeUInt8(3, 4);
}
// write* methods return offsets.
const b = new Buffer(16);
let result: number = b.writeUInt32LE(0, 0);
result = b.writeUInt16LE(0, 4);
result = b.writeUInt8(0, 6);
result = b.writeInt8(0, 7);
result = b.writeDoubleLE(0, 8);
result = b.write('asd');
result = b.write('asd', 'hex');
result = b.write('asd', 123, 'hex');
result = b.write('asd', 123, 123, 'hex');
// fill returns the input buffer.
b.fill('a').fill('b');
{
const buffer = new Buffer('123');
let index: number;
index = buffer.indexOf("23");
index = buffer.indexOf("23", 1);
index = buffer.indexOf("23", 1, "utf8");
index = buffer.indexOf(23);
index = buffer.indexOf(buffer);
}
{
const buffer = new Buffer('123');
let index: number;
index = buffer.lastIndexOf("23");
index = buffer.lastIndexOf("23", 1);
index = buffer.lastIndexOf("23", 1, "utf8");
index = buffer.lastIndexOf(23);
index = buffer.lastIndexOf(buffer);
}
{
const buffer = new Buffer('123');
const val: [number, number] = [1, 1];
/* comment out for --target es5
for (let entry of buffer.entries()) {
val = entry;
}
*/
}
{
const buffer = new Buffer('123');
let includes: boolean;
includes = buffer.includes("23");
includes = buffer.includes("23", 1);
includes = buffer.includes("23", 1, "utf8");
includes = buffer.includes(23);
includes = buffer.includes(23, 1);
includes = buffer.includes(23, 1, "utf8");
includes = buffer.includes(buffer);
includes = buffer.includes(buffer, 1);
includes = buffer.includes(buffer, 1, "utf8");
}
{
const buffer = new Buffer('123');
const val = 1;
/* comment out for --target es5
for (let key of buffer.keys()) {
val = key;
}
*/
}
{
const buffer = new Buffer('123');
const val = 1;
/* comment out for --target es5
for (let value of buffer.values()) {
val = value;
}
*/
}
<|fim▁hole|> b.writeUInt8(0, 6);
const sb = new ImportedSlowBuffer(43);
b.writeUInt8(0, 6);
}
// Buffer has Uint8Array's buffer field (an ArrayBuffer).
{
const buffer = new Buffer('123');
const octets = new Uint8Array(buffer.buffer);
}
// Inherited from Uint8Array but return buffer
{
const b = Buffer.from('asd');
let res: Buffer = b.reverse();
res = b.subarray();
res = b.subarray(1);
res = b.subarray(1, 2);
}
// Buffer module, transcode function
{
transcode(Buffer.from('€'), 'utf8', 'ascii'); // $ExpectType Buffer
const source: TranscodeEncoding = 'utf8';
const target: TranscodeEncoding = 'ascii';
transcode(Buffer.from('€'), source, target); // $ExpectType Buffer
}
{
const a = Buffer.alloc(1000);
a.writeBigInt64BE(123n);
a.writeBigInt64LE(123n);
a.writeBigUInt64BE(123n);
a.writeBigUInt64LE(123n);
let b: bigint = a.readBigInt64BE(123);
b = a.readBigInt64LE(123);
b = a.readBigUInt64LE(123);
b = a.readBigUInt64BE(123);
}
async () => {
const blob = new Blob(['asd', Buffer.from('test'), new Blob(['dummy'])], {
type: 'application/javascript',
encoding: 'base64',
});
blob.size; // $ExpectType number
blob.type; // $ExpectType string
blob.arrayBuffer(); // $ExpectType Promise<ArrayBuffer>
blob.text(); // $ExpectType Promise<string>
blob.slice(); // $ExpectType Blob
blob.slice(1); // $ExpectType Blob
blob.slice(1, 2); // $ExpectType Blob
blob.slice(1, 2, 'other'); // $ExpectType Blob
};
{
atob(btoa('test')); // $ExpectType string
}<|fim▁end|>
|
// Imported Buffer from buffer module works properly
{
const b = new ImportedBuffer('123');
|
<|file_name|>velocity.js<|end_file_name|><|fim▁begin|>/*! VelocityJS.org (1.2.1). (C) 2014 Julian Shapiro. MIT @license: en.wikipedia.org/wiki/MIT_License */
/*************************
Velocity jQuery Shim
*************************/
/*! VelocityJS.org jQuery Shim (1.0.1). (C) 2014 The jQuery Foundation. MIT @license: en.wikipedia.org/wiki/MIT_License. */
/* This file contains the jQuery functions that Velocity relies on, thereby removing Velocity's dependency on a full copy of jQuery, and allowing it to work in any environment. */
/* These shimmed functions are only used if jQuery isn't present. If both this shim and jQuery are loaded, Velocity defaults to jQuery proper. */
/* Browser support: Using this shim instead of jQuery proper removes support for IE8. */
;(function (window) {
/***************
Setup
***************/
/* If jQuery is already loaded, there's no point in loading this shim. */
if (window.jQuery) {
return;
}
/* jQuery base. */
var $ = function (selector, context) {
return new $.fn.init(selector, context);
};
/********************
Private Methods
********************/
/* jQuery */
$.isWindow = function (obj) {
/* jshint eqeqeq: false */
return obj != null && obj == obj.window;
};
/* jQuery */
$.type = function (obj) {
if (obj == null) {
return obj + "";
}
return typeof obj === "object" || typeof obj === "function" ?
class2type[toString.call(obj)] || "object" :
typeof obj;
};
/* jQuery */
$.isArray = Array.isArray || function (obj) {
return $.type(obj) === "array";
};
/* jQuery */
function isArraylike (obj) {
var length = obj.length,
type = $.type(obj);
if (type === "function" || $.isWindow(obj)) {
return false;
}
if (obj.nodeType === 1 && length) {
return true;
}
return type === "array" || length === 0 || typeof length === "number" && length > 0 && (length - 1) in obj;
}
/***************
$ Methods
***************/
/* jQuery: Support removed for IE<9. */
$.isPlainObject = function (obj) {
var key;
if (!obj || $.type(obj) !== "object" || obj.nodeType || $.isWindow(obj)) {
return false;
}
try {
if (obj.constructor &&
!hasOwn.call(obj, "constructor") &&
!hasOwn.call(obj.constructor.prototype, "isPrototypeOf")) {
return false;
}
} catch (e) {
return false;
}
for (key in obj) {}
return key === undefined || hasOwn.call(obj, key);
};
/* jQuery */
$.each = function(obj, callback, args) {
var value,
i = 0,
length = obj.length,
isArray = isArraylike(obj);
if (args) {
if (isArray) {
for (; i < length; i++) {
value = callback.apply(obj[i], args);
if (value === false) {
break;
}
}
} else {
for (i in obj) {
value = callback.apply(obj[i], args);
if (value === false) {
break;
}
}
}
} else {
if (isArray) {
for (; i < length; i++) {
value = callback.call(obj[i], i, obj[i]);
if (value === false) {
break;
}
}
} else {
for (i in obj) {
value = callback.call(obj[i], i, obj[i]);
if (value === false) {
break;
}
}
}
}
return obj;
};
/* Custom */
$.data = function (node, key, value) {
/* $.getData() */
if (value === undefined) {
var id = node[$.expando],
store = id && cache[id];
if (key === undefined) {
return store;
} else if (store) {
if (key in store) {
return store[key];
}
}
/* $.setData() */
} else if (key !== undefined) {
var id = node[$.expando] || (node[$.expando] = ++$.uuid);
cache[id] = cache[id] || {};
cache[id][key] = value;
return value;
}
};
/* Custom */
$.removeData = function (node, keys) {
var id = node[$.expando],
store = id && cache[id];
if (store) {
$.each(keys, function(_, key) {
delete store[key];
});
}
};
/* jQuery */
$.extend = function () {
var src, copyIsArray, copy, name, options, clone,
target = arguments[0] || {},
i = 1,
length = arguments.length,
deep = false;
if (typeof target === "boolean") {
deep = target;
target = arguments[i] || {};
i++;
}
if (typeof target !== "object" && $.type(target) !== "function") {
target = {};
}
if (i === length) {
target = this;
i--;
}
for (; i < length; i++) {
if ((options = arguments[i]) != null) {
for (name in options) {
src = target[name];
copy = options[name];
if (target === copy) {
continue;
}
if (deep && copy && ($.isPlainObject(copy) || (copyIsArray = $.isArray(copy)))) {
if (copyIsArray) {
copyIsArray = false;
clone = src && $.isArray(src) ? src : [];
} else {
clone = src && $.isPlainObject(src) ? src : {};
}
target[name] = $.extend(deep, clone, copy);
} else if (copy !== undefined) {
target[name] = copy;
}
}
}
}
return target;
};
/* jQuery 1.4.3 */
$.queue = function (elem, type, data) {
function $makeArray (arr, results) {
var ret = results || [];
if (arr != null) {
if (isArraylike(Object(arr))) {
/* $.merge */
(function(first, second) {
var len = +second.length,
j = 0,
i = first.length;
while (j < len) {
first[i++] = second[j++];
}
if (len !== len) {
while (second[j] !== undefined) {
first[i++] = second[j++];
}
}
first.length = i;
return first;
})(ret, typeof arr === "string" ? [arr] : arr);
} else {
[].push.call(ret, arr);
}
}
return ret;
}
if (!elem) {
return;
}
type = (type || "fx") + "queue";
var q = $.data(elem, type);
if (!data) {
return q || [];
}
if (!q || $.isArray(data)) {
q = $.data(elem, type, $makeArray(data));
} else {
q.push(data);
}
return q;
};
/* jQuery 1.4.3 */
$.dequeue = function (elems, type) {
/* Custom: Embed element iteration. */
$.each(elems.nodeType ? [ elems ] : elems, function(i, elem) {
type = type || "fx";
var queue = $.queue(elem, type),
fn = queue.shift();
if (fn === "inprogress") {
fn = queue.shift();
}
if (fn) {
if (type === "fx") {
queue.unshift("inprogress");
}
fn.call(elem, function() {
$.dequeue(elem, type);
});
}
});
};
/******************
$.fn Methods
******************/
/* jQuery */
$.fn = $.prototype = {
init: function (selector) {
/* Just return the element wrapped inside an array; don't proceed with the actual jQuery node wrapping process. */
if (selector.nodeType) {
this[0] = selector;
return this;
} else {
throw new Error("Not a DOM node.");
}
},
offset: function () {
/* jQuery altered code: Dropped disconnected DOM node checking. */
var box = this[0].getBoundingClientRect ? this[0].getBoundingClientRect() : { top: 0, left: 0 };
return {
top: box.top + (window.pageYOffset || document.scrollTop || 0) - (document.clientTop || 0),
left: box.left + (window.pageXOffset || document.scrollLeft || 0) - (document.clientLeft || 0)
};
},
position: function () {
/* jQuery */
function offsetParent() {
var offsetParent = this.offsetParent || document;
while (offsetParent && (!offsetParent.nodeType.toLowerCase === "html" && offsetParent.style.position === "static")) {
offsetParent = offsetParent.offsetParent;
}
return offsetParent || document;
}
/* Zepto */
var elem = this[0],
offsetParent = offsetParent.apply(elem),
offset = this.offset(),
parentOffset = /^(?:body|html)$/i.test(offsetParent.nodeName) ? { top: 0, left: 0 } : $(offsetParent).offset()
offset.top -= parseFloat(elem.style.marginTop) || 0;
offset.left -= parseFloat(elem.style.marginLeft) || 0;
if (offsetParent.style) {
parentOffset.top += parseFloat(offsetParent.style.borderTopWidth) || 0
parentOffset.left += parseFloat(offsetParent.style.borderLeftWidth) || 0
}
return {
top: offset.top - parentOffset.top,
left: offset.left - parentOffset.left
};
}
};
/**********************
Private Variables
**********************/
/* For $.data() */
var cache = {};
$.expando = "velocity" + (new Date().getTime());
$.uuid = 0;
/* For $.queue() */
var class2type = {},
hasOwn = class2type.hasOwnProperty,
toString = class2type.toString;
var types = "Boolean Number String Function Array Date RegExp Object Error".split(" ");
for (var i = 0; i < types.length; i++) {
class2type["[object " + types[i] + "]"] = types[i].toLowerCase();
}
/* Makes $(node) possible, without having to call init. */
$.fn.init.prototype = $.fn;
/* Globalize Velocity onto the window, and assign its Utilities property. */
window.Velocity = { Utilities: $ };
})(window);
/******************
Velocity.js
******************/
;(function (factory) {
/* CommonJS module. */
if (typeof module === "object" && typeof module.exports === "object") {
module.exports = factory();
/* AMD module. */
} else if (typeof define === "function" && define.amd) {
define(factory);
/* Browser globals. */
} else {
factory();
}
}(function() {
return function (global, window, document, undefined) {
/***************
Summary
***************/
/*
- CSS: CSS stack that works independently from the rest of Velocity.
- animate(): Core animation method that iterates over the targeted elements and queues the incoming call onto each element individually.
- Pre-Queueing: Prepare the element for animation by instantiating its data cache and processing the call's options.
- Queueing: The logic that runs once the call has reached its point of execution in the element's $.queue() stack.
Most logic is placed here to avoid risking it becoming stale (if the element's properties have changed).
- Pushing: Consolidation of the tween data followed by its push onto the global in-progress calls container.
- tick(): The single requestAnimationFrame loop responsible for tweening all in-progress calls.
- completeCall(): Handles the cleanup process for each Velocity call.
*/
/*********************
Helper Functions
*********************/
/* IE detection. Gist: https://gist.github.com/julianshapiro/9098609 */
var IE = (function() {
if (document.documentMode) {
return document.documentMode;
} else {
for (var i = 7; i > 4; i--) {
var div = document.createElement("div");
div.innerHTML = "<!--[if IE " + i + "]><span></span><![endif]-->";
if (div.getElementsByTagName("span").length) {
div = null;
return i;
}
}
}
return undefined;
})();
/* rAF shim. Gist: https://gist.github.com/julianshapiro/9497513 */
var rAFShim = (function() {
var timeLast = 0;
return window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame || function(callback) {
var timeCurrent = (new Date()).getTime(),
timeDelta;
/* Dynamically set delay on a per-tick basis to match 60fps. */
/* Technique by Erik Moller. MIT license: https://gist.github.com/paulirish/1579671 */
timeDelta = Math.max(0, 16 - (timeCurrent - timeLast));
timeLast = timeCurrent + timeDelta;
return setTimeout(function() { callback(timeCurrent + timeDelta); }, timeDelta);
};
})();
/* Array compacting. Copyright Lo-Dash. MIT License: https://github.com/lodash/lodash/blob/master/LICENSE.txt */
function compactSparseArray (array) {
var index = -1,
length = array ? array.length : 0,
result = [];
while (++index < length) {
var value = array[index];
if (value) {
result.push(value);
}
}
return result;
}
function sanitizeElements (elements) {
/* Unwrap jQuery/Zepto objects. */
if (Type.isWrapped(elements)) {
elements = [].slice.call(elements);
/* Wrap a single element in an array so that $.each() can iterate with the element instead of its node's children. */
} else if (Type.isNode(elements)) {
elements = [ elements ];
}
return elements;
}
var Type = {
isString: function (variable) {
return (typeof variable === "string");
},
isArray: Array.isArray || function (variable) {
return Object.prototype.toString.call(variable) === "[object Array]";
},
isFunction: function (variable) {
return Object.prototype.toString.call(variable) === "[object Function]";
},
isNode: function (variable) {
return variable && variable.nodeType;
},
/* Copyright Martin Bohm. MIT License: https://gist.github.com/Tomalak/818a78a226a0738eaade */
isNodeList: function (variable) {
return typeof variable === "object" &&
/^\[object (HTMLCollection|NodeList|Object)\]$/.test(Object.prototype.toString.call(variable)) &&
variable.length !== undefined &&
(variable.length === 0 || (typeof variable[0] === "object" && variable[0].nodeType > 0));
},
/* Determine if variable is a wrapped jQuery or Zepto element. */
isWrapped: function (variable) {
return variable && (variable.jquery || (window.Zepto && window.Zepto.zepto.isZ(variable)));
},
isSVG: function (variable) {
return window.SVGElement && (variable instanceof window.SVGElement);
},
isEmptyObject: function (variable) {
for (var name in variable) {
return false;
}
return true;
}
};
/*****************
Dependencies
*****************/
var $,
isJQuery = false;
if (global.fn && global.fn.jquery) {
$ = global;
isJQuery = true;
} else {
$ = window.Velocity.Utilities;
}
if (IE <= 8 && !isJQuery) {
throw new Error("Velocity: IE8 and below require jQuery to be loaded before Velocity.");
} else if (IE <= 7) {
/* Revert to jQuery's $.animate(), and lose Velocity's extra features. */
jQuery.fn.velocity = jQuery.fn.animate;
/* Now that $.fn.velocity is aliased, abort this Velocity declaration. */
return;
}
/*****************
Constants
*****************/
var DURATION_DEFAULT = 400,
EASING_DEFAULT = "swing";
/*************
State
*************/
var Velocity = {
/* Container for page-wide Velocity state data. */
State: {
/* Detect mobile devices to determine if mobileHA should be turned on. */
isMobile: /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent),
/* The mobileHA option's behavior changes on older Android devices (Gingerbread, versions 2.3.3-2.3.7). */
isAndroid: /Android/i.test(navigator.userAgent),
isGingerbread: /Android 2\.3\.[3-7]/i.test(navigator.userAgent),
isChrome: window.chrome,
isFirefox: /Firefox/i.test(navigator.userAgent),
/* Create a cached element for re-use when checking for CSS property prefixes. */
prefixElement: document.createElement("div"),
/* Cache every prefix match to avoid repeating lookups. */
prefixMatches: {},
/* Cache the anchor used for animating window scrolling. */
scrollAnchor: null,
/* Cache the browser-specific property names associated with the scroll anchor. */
scrollPropertyLeft: null,
scrollPropertyTop: null,
/* Keep track of whether our RAF tick is running. */
isTicking: false,
/* Container for every in-progress call to Velocity. */
calls: []
},
/* Velocity's custom CSS stack. Made global for unit testing. */
CSS: { /* Defined below. */ },
/* A shim of the jQuery utility functions used by Velocity -- provided by Velocity's optional jQuery shim. */
Utilities: $,
/* Container for the user's custom animation redirects that are referenced by name in place of the properties map argument. */
Redirects: { /* Manually registered by the user. */ },
Easings: { /* Defined below. */ },
/* Attempt to use ES6 Promises by default. Users can override this with a third-party promises library. */
Promise: window.Promise,
/* Velocity option defaults, which can be overriden by the user. */
defaults: {
queue: "",
duration: DURATION_DEFAULT,
easing: EASING_DEFAULT,
begin: undefined,
complete: undefined,
progress: undefined,
display: undefined,
visibility: undefined,
loop: false,
delay: false,
mobileHA: true,
/* Advanced: Set to false to prevent property values from being cached between consecutive Velocity-initiated chain calls. */
_cacheValues: true
},
/* A design goal of Velocity is to cache data wherever possible in order to avoid DOM requerying. Accordingly, each element has a data cache. */
init: function (element) {
$.data(element, "velocity", {
/* Store whether this is an SVG element, since its properties are retrieved and updated differently than standard HTML elements. */
isSVG: Type.isSVG(element),
/* Keep track of whether the element is currently being animated by Velocity.
This is used to ensure that property values are not transferred between non-consecutive (stale) calls. */
isAnimating: false,
/* A reference to the element's live computedStyle object. Learn more here: https://developer.mozilla.org/en/docs/Web/API/window.getComputedStyle */
computedStyle: null,
/* Tween data is cached for each animation on the element so that data can be passed across calls --
in particular, end values are used as subsequent start values in consecutive Velocity calls. */
tweensContainer: null,
/* The full root property values of each CSS hook being animated on this element are cached so that:
1) Concurrently-animating hooks sharing the same root can have their root values' merged into one while tweening.
2) Post-hook-injection root values can be transferred over to consecutively chained Velocity calls as starting root values. */
rootPropertyValueCache: {},
/* A cache for transform updates, which must be manually flushed via CSS.flushTransformCache(). */
transformCache: {}
});
},
/* A parallel to jQuery's $.css(), used for getting/setting Velocity's hooked CSS properties. */
hook: null, /* Defined below. */
/* Velocity-wide animation time remapping for testing purposes. */
mock: false,
version: { major: 1, minor: 2, patch: 1 },
/* Set to 1 or 2 (most verbose) to output debug info to console. */
debug: false
};
/* Retrieve the appropriate scroll anchor and property name for the browser: https://developer.mozilla.org/en-US/docs/Web/API/Window.scrollY */
if (window.pageYOffset !== undefined) {
Velocity.State.scrollAnchor = window;
Velocity.State.scrollPropertyLeft = "pageXOffset";
Velocity.State.scrollPropertyTop = "pageYOffset";
} else {
Velocity.State.scrollAnchor = document.documentElement || document.body.parentNode || document.body;
Velocity.State.scrollPropertyLeft = "scrollLeft";
Velocity.State.scrollPropertyTop = "scrollTop";
}
/* Shorthand alias for jQuery's $.data() utility. */
function Data (element) {
/* Hardcode a reference to the plugin name. */
var response = $.data(element, "velocity");
/* jQuery <=1.4.2 returns null instead of undefined when no match is found. We normalize this behavior. */
return response === null ? undefined : response;
};
/**************
Easing
**************/
/* Step easing generator. */
function generateStep (steps) {
return function (p) {
return Math.round(p * steps) * (1 / steps);
};
}
/* Bezier curve function generator. Copyright Gaetan Renaudeau. MIT License: http://en.wikipedia.org/wiki/MIT_License */
/* Given the two control points (mX1, mY1) and (mX2, mY2) of a CSS-style cubic bezier timing curve, returns an
   easing function mapping progress x in [0, 1] to position y. Returns false for malformed arguments. */
function generateBezier (mX1, mY1, mX2, mY2) {
    var NEWTON_ITERATIONS = 4,
        NEWTON_MIN_SLOPE = 0.001,
        SUBDIVISION_PRECISION = 0.0000001,
        SUBDIVISION_MAX_ITERATIONS = 10,
        kSplineTableSize = 11,
        kSampleStepSize = 1.0 / (kSplineTableSize - 1.0),
        float32ArraySupported = "Float32Array" in window;
    /* Must contain four arguments. */
    if (arguments.length !== 4) {
        return false;
    }
    /* Arguments must be finite numbers. */
    for (var i = 0; i < 4; ++i) {
        if (typeof arguments[i] !== "number" || isNaN(arguments[i]) || !isFinite(arguments[i])) {
            return false;
        }
    }
    /* X values must be clamped to the [0, 1] range. */
    mX1 = Math.min(mX1, 1);
    mX2 = Math.min(mX2, 1);
    mX1 = Math.max(mX1, 0);
    mX2 = Math.max(mX2, 0);
    /* Precomputed samples of the curve's x(t), used to seed the t-for-x inversion in getTForX(). */
    var mSampleValues = float32ArraySupported ? new Float32Array(kSplineTableSize) : new Array(kSplineTableSize);
    /* Coefficients of the cubic polynomial form of the one-axis bezier: ((A*t + B)*t + C)*t. */
    function A (aA1, aA2) { return 1.0 - 3.0 * aA2 + 3.0 * aA1; }
    function B (aA1, aA2) { return 3.0 * aA2 - 6.0 * aA1; }
    function C (aA1) { return 3.0 * aA1; }
    /* Evaluate the bezier polynomial at parameter aT for one axis (x or y, depending on the control values passed). */
    function calcBezier (aT, aA1, aA2) {
        return ((A(aA1, aA2)*aT + B(aA1, aA2))*aT + C(aA1))*aT;
    }
    /* Derivative of calcBezier with respect to t; used by the Newton-Raphson iteration below. */
    function getSlope (aT, aA1, aA2) {
        return 3.0 * A(aA1, aA2)*aT*aT + 2.0 * B(aA1, aA2) * aT + C(aA1);
    }
    /* Refine a guess for the t satisfying x(t) == aX via Newton-Raphson iteration. */
    function newtonRaphsonIterate (aX, aGuessT) {
        for (var i = 0; i < NEWTON_ITERATIONS; ++i) {
            var currentSlope = getSlope(aGuessT, mX1, mX2);
            if (currentSlope === 0.0) return aGuessT;
            var currentX = calcBezier(aGuessT, mX1, mX2) - aX;
            aGuessT -= currentX / currentSlope;
        }
        return aGuessT;
    }
    /* Fill the sample table with evenly spaced x(t) values. */
    function calcSampleValues () {
        for (var i = 0; i < kSplineTableSize; ++i) {
            mSampleValues[i] = calcBezier(i * kSampleStepSize, mX1, mX2);
        }
    }
    /* Solve x(t) == aX via bisection within [aA, aB]; the fallback when the slope is too shallow for Newton-Raphson. */
    function binarySubdivide (aX, aA, aB) {
        var currentX, currentT, i = 0;
        do {
            currentT = aA + (aB - aA) / 2.0;
            currentX = calcBezier(currentT, mX1, mX2) - aX;
            if (currentX > 0.0) {
                aB = currentT;
            } else {
                aA = currentT;
            }
        } while (Math.abs(currentX) > SUBDIVISION_PRECISION && ++i < SUBDIVISION_MAX_ITERATIONS);
        return currentT;
    }
    /* Invert x(t): locate the sample interval bracketing aX, then refine with Newton-Raphson or bisection. */
    function getTForX (aX) {
        var intervalStart = 0.0,
            currentSample = 1,
            lastSample = kSplineTableSize - 1;
        for (; currentSample != lastSample && mSampleValues[currentSample] <= aX; ++currentSample) {
            intervalStart += kSampleStepSize;
        }
        --currentSample;
        /* Linearly interpolate within the bracketing interval for an initial guess at t. */
        var dist = (aX - mSampleValues[currentSample]) / (mSampleValues[currentSample+1] - mSampleValues[currentSample]),
            guessForT = intervalStart + dist * kSampleStepSize,
            initialSlope = getSlope(guessForT, mX1, mX2);
        if (initialSlope >= NEWTON_MIN_SLOPE) {
            return newtonRaphsonIterate(aX, guessForT);
        } else if (initialSlope == 0.0) {
            return guessForT;
        } else {
            return binarySubdivide(aX, intervalStart, intervalStart + kSampleStepSize);
        }
    }
    /* Sample-table computation is deferred until the first call, and skipped entirely for the identity (linear) curve. */
    var _precomputed = false;
    function precompute() {
        _precomputed = true;
        if (mX1 != mY1 || mX2 != mY2) calcSampleValues();
    }
    /* The returned easing function: maps progress aX in [0, 1] to the curve's y value. */
    var f = function (aX) {
        if (!_precomputed) precompute();
        /* Identity curve: linear easing requires no solving. */
        if (mX1 === mY1 && mX2 === mY2) return aX;
        /* Endpoints are exact by definition. */
        if (aX === 0) return 0;
        if (aX === 1) return 1;
        return calcBezier(getTForX(aX), mY1, mY2);
    };
    /* Expose the control points for introspection/serialization. */
    f.getControlPoints = function() { return [{ x: mX1, y: mY1 }, { x: mX2, y: mY2 }]; };
    var str = "generateBezier(" + [mX1, mY1, mX2, mY2] + ")";
    f.toString = function () { return str; };
    return f;
}
/* Runge-Kutta spring physics function generator. Adapted from Framer.js, copyright Koen Bok. MIT License: http://en.wikipedia.org/wiki/MIT_License */
/* Given a tension, friction, and duration, a simulation at 60FPS will first run without a defined duration in order to calculate the full path. A second pass
   then adjusts the time delta -- using the relation between actual time and duration -- to calculate the path for the duration-constrained animation. */
var generateSpringRK4 = (function () {
    /* Spring force on the state: Hooke's-law restoring force plus velocity-proportional friction damping. */
    function springAccelerationForState (state) {
        return (-state.tension * state.x) - (state.friction * state.v);
    }
    /* Evaluate the spring's derivative (dx/dt, dv/dt) at the state advanced by dt along the given derivative. */
    function springEvaluateStateWithDerivative (initialState, dt, derivative) {
        var state = {
            x: initialState.x + derivative.dx * dt,
            v: initialState.v + derivative.dv * dt,
            tension: initialState.tension,
            friction: initialState.friction
        };
        return { dx: state.v, dv: springAccelerationForState(state) };
    }
    /* Advance the state in place by dt using classic fourth-order Runge-Kutta (RK4) integration. */
    function springIntegrateState (state, dt) {
        var a = {
                dx: state.v,
                dv: springAccelerationForState(state)
            },
            b = springEvaluateStateWithDerivative(state, dt * 0.5, a),
            c = springEvaluateStateWithDerivative(state, dt * 0.5, b),
            d = springEvaluateStateWithDerivative(state, dt, c),
            dxdt = 1.0 / 6.0 * (a.dx + 2.0 * (b.dx + c.dx) + d.dx),
            dvdt = 1.0 / 6.0 * (a.dv + 2.0 * (b.dv + c.dv) + d.dv);
        state.x = state.x + dxdt * dt;
        state.v = state.v + dvdt * dt;
        return state;
    }
    /* Factory: returns either the simulation's total running time in ms (when no duration is supplied)
       or an easing function over the precomputed path (when a duration is supplied). */
    return function springRK4Factory (tension, friction, duration) {
        var initState = {
                x: -1,
                v: 0,
                tension: null,
                friction: null
            },
            path = [0],
            time_lapsed = 0,
            /* Rest threshold applied to both position and velocity. */
            tolerance = 1 / 10000,
            /* Frame delta (in seconds) for a 60FPS simulation. */
            DT = 16 / 1000,
            have_duration, dt, last_state;
        /* Malformed inputs fall back to Velocity's default tension/friction values. */
        tension = parseFloat(tension) || 500;
        friction = parseFloat(friction) || 20;
        duration = duration || null;
        initState.tension = tension;
        initState.friction = friction;
        have_duration = duration !== null;
        /* Calculate the actual time it takes for this animation to complete with the provided conditions. */
        if (have_duration) {
            /* Run the simulation without a duration. */
            time_lapsed = springRK4Factory(tension, friction);
            /* Compute the adjusted time delta. */
            dt = time_lapsed / duration * DT;
        } else {
            dt = DT;
        }
        while (true) {
            /* Step the simulation forward by one frame. */
            last_state = springIntegrateState(last_state || initState, dt);
            /* Store the position. */
            path.push(1 + last_state.x);
            time_lapsed += 16;
            /* If the change threshold is reached (both position and velocity settled), break. */
            if (!(Math.abs(last_state.x) > tolerance && Math.abs(last_state.v) > tolerance)) {
                break;
            }
        }
        /* If duration is not defined, return the actual time required for completing this animation. Otherwise, return a closure that holds the
           computed path and returns a snapshot of the position according to a given percentComplete. */
        return !have_duration ? time_lapsed : function(percentComplete) { return path[ (percentComplete * (path.length - 1)) | 0 ]; };
    };
}());
/* Velocity's built-in jQuery-style easings. */
Velocity.Easings = {
    /* No easing: progress maps directly to position. */
    linear: function (p) {
        return p;
    },
    /* jQuery's default sinusoidal easing. */
    swing: function (p) {
        return 0.5 - Math.cos(p * Math.PI) / 2;
    },
    /* Bonus "spring" easing: a damped oscillation, less exaggerated than easeInOutElastic. */
    spring: function (p) {
        return 1 - (Math.cos(p * 4.5 * Math.PI) * Math.exp(-p * 6));
    }
};
/* CSS3 and Robert Penner easings. */
/* Each entry pairs an easing name with the control points of its equivalent cubic-bezier curve; the pairs
   are converted into easing functions via generateBezier() and registered onto Velocity.Easings. */
$.each(
    [
        [ "ease", [ 0.25, 0.1, 0.25, 1.0 ] ],
        [ "ease-in", [ 0.42, 0.0, 1.00, 1.0 ] ],
        [ "ease-out", [ 0.00, 0.0, 0.58, 1.0 ] ],
        [ "ease-in-out", [ 0.42, 0.0, 0.58, 1.0 ] ],
        [ "easeInSine", [ 0.47, 0, 0.745, 0.715 ] ],
        [ "easeOutSine", [ 0.39, 0.575, 0.565, 1 ] ],
        [ "easeInOutSine", [ 0.445, 0.05, 0.55, 0.95 ] ],
        [ "easeInQuad", [ 0.55, 0.085, 0.68, 0.53 ] ],
        [ "easeOutQuad", [ 0.25, 0.46, 0.45, 0.94 ] ],
        [ "easeInOutQuad", [ 0.455, 0.03, 0.515, 0.955 ] ],
        [ "easeInCubic", [ 0.55, 0.055, 0.675, 0.19 ] ],
        [ "easeOutCubic", [ 0.215, 0.61, 0.355, 1 ] ],
        [ "easeInOutCubic", [ 0.645, 0.045, 0.355, 1 ] ],
        [ "easeInQuart", [ 0.895, 0.03, 0.685, 0.22 ] ],
        [ "easeOutQuart", [ 0.165, 0.84, 0.44, 1 ] ],
        [ "easeInOutQuart", [ 0.77, 0, 0.175, 1 ] ],
        [ "easeInQuint", [ 0.755, 0.05, 0.855, 0.06 ] ],
        [ "easeOutQuint", [ 0.23, 1, 0.32, 1 ] ],
        [ "easeInOutQuint", [ 0.86, 0, 0.07, 1 ] ],
        [ "easeInExpo", [ 0.95, 0.05, 0.795, 0.035 ] ],
        [ "easeOutExpo", [ 0.19, 1, 0.22, 1 ] ],
        [ "easeInOutExpo", [ 1, 0, 0, 1 ] ],
        [ "easeInCirc", [ 0.6, 0.04, 0.98, 0.335 ] ],
        [ "easeOutCirc", [ 0.075, 0.82, 0.165, 1 ] ],
        [ "easeInOutCirc", [ 0.785, 0.135, 0.15, 0.86 ] ]
    ], function(i, easingArray) {
        Velocity.Easings[easingArray[0]] = generateBezier.apply(null, easingArray[1]);
    });
/* Resolve the user-supplied easing option into a usable easing (a registered name or a generated function). */
function getEasing(value, duration) {
    var easing = false;
    /* The easing option can either be a string that references a pre-registered easing,
       or a two-/four-item array of integers to be converted into a spring/bezier function. */
    if (Type.isString(value)) {
        /* A string must reference an easing registered on Velocity.Easings. */
        if (Velocity.Easings[value]) {
            easing = value;
        }
    } else if (Type.isArray(value)) {
        switch (value.length) {
            /* One-item array: step easing. */
            case 1:
                easing = generateStep.apply(null, value);
                break;
            /* Two-item array: spring physics, which must be passed the animation's duration. */
            /* Note: If the springRK4 array contains non-numbers, generateSpringRK4() returns an easing
               function generated with default tension and friction values. */
            case 2:
                easing = generateSpringRK4.apply(null, value.concat([ duration ]));
                break;
            /* Four-item array: bezier curve. */
            /* Note: If the bezier array contains non-numbers, generateBezier() returns false. */
            case 4:
                easing = generateBezier.apply(null, value);
                break;
        }
    }
    /* Revert to the Velocity-wide default easing type, or fall back to "swing" (which is also jQuery's default)
       if the Velocity-wide default has been incorrectly modified. */
    if (easing === false) {
        easing = Velocity.Easings[Velocity.defaults.easing] ? Velocity.defaults.easing : EASING_DEFAULT;
    }
    return easing;
}
/*****************
CSS Stack
*****************/
/* The CSS object is a highly condensed and performant CSS stack that fully replaces jQuery's.
It handles the validation, getting, and setting of both standard CSS properties and CSS property hooks. */
/* Note: A "CSS" shorthand is aliased so that our code is easier to read. */
var CSS = Velocity.CSS = {
/*************
RegEx
*************/
RegEx: {
isHex: /^#([A-f\d]{3}){1,2}$/i,
/* Unwrap a property value's surrounding text, e.g. "rgba(4, 3, 2, 1)" ==> "4, 3, 2, 1" and "rect(4px 3px 2px 1px)" ==> "4px 3px 2px 1px". */
valueUnwrap: /^[A-z]+\((.*)\)$/i,
wrappedValueAlreadyExtracted: /[0-9.]+ [0-9.]+ [0-9.]+( [0-9.]+)?/,
/* Split a multi-value property into an array of subvalues, e.g. "rgba(4, 3, 2, 1) 4px 3px 2px 1px" ==> [ "rgba(4, 3, 2, 1)", "4px", "3px", "2px", "1px" ]. */
valueSplit: /([A-z]+\(.+\))|(([A-z0-9#-.]+?)(?=\s|$))/ig
},
/************
Lists
************/
Lists: {
    /* CSS properties that accept color values; each is hooked into its RGBA components for tweening (see Hooks.register). */
    colors: [ "fill", "stroke", "stopColor", "color", "backgroundColor", "borderColor", "borderTopColor", "borderRightColor", "borderBottomColor", "borderLeftColor", "outlineColor" ],
    /* 2D transform subproperties. */
    transformsBase: [ "translateX", "translateY", "scale", "scaleX", "scaleY", "skewX", "skewY", "rotateZ" ],
    /* 3D transform subproperties; merged into transformsBase except on IE<=9 and Android Gingerbread (see Normalizations.register). */
    transforms3D: [ "transformPerspective", "translateZ", "scaleZ", "rotateX", "rotateY" ]
},
/************
Hooks
************/
/* Hooks allow a subproperty (e.g. "boxShadowBlur") of a compound-value CSS property
(e.g. "boxShadow: X Y Blur Spread Color") to be animated as if it were a discrete property. */
/* Note: Beyond enabling fine-grained property animation, hooking is necessary since Velocity only
tweens properties with single numeric values; unlike CSS transitions, Velocity does not interpolate compound-values. */
Hooks: {
    /********************
        Registration
    ********************/
    /* Templates are a concise way of indicating which subproperties must be individually registered for each compound-value CSS property. */
    /* Each template consists of the compound-value's base name, its constituent subproperty names, and those subproperties' default values. */
    templates: {
        "textShadow": [ "Color X Y Blur", "black 0px 0px 0px" ],
        "boxShadow": [ "Color X Y Blur Spread", "black 0px 0px 0px 0px" ],
        "clip": [ "Top Right Bottom Left", "0px 0px 0px 0px" ],
        "backgroundPosition": [ "X Y", "0% 0%" ],
        "transformOrigin": [ "X Y Z", "50% 50% 0px" ],
        "perspectiveOrigin": [ "X Y", "50% 50%" ]
    },
    /* A "registered" hook is one that has been converted from its template form into a live,
       tweenable property. It contains data to associate it with its root property. */
    registered: {
        /* Note: A registered entry looks like this ==> textShadowBlur: [ "textShadow", 3 ];
           it is keyed by the full subproperty name and contains the associated root property's name
           plus the subproperty's position in the root's value. */
    },
/* Convert the templates into individual hooks then append them to the registered object above. */
register: function () {
    /* Color hooks registration: Colors are defaulted to white -- as opposed to black -- since colors that are
       currently set to "transparent" default to their respective template below when color-animated,
       and white is typically a closer match to transparent than black is. An exception is made for text ("color"),
       which is almost always set closer to black than white. */
    for (var i = 0; i < CSS.Lists.colors.length; i++) {
        var rgbComponents = (CSS.Lists.colors[i] === "color") ? "0 0 0 1" : "255 255 255 1";
        CSS.Hooks.templates[CSS.Lists.colors[i]] = [ "Red Green Blue Alpha", rgbComponents ];
    }
    var rootProperty,
        hookTemplate,
        hookNames;
    /* In IE, color values inside compound-value properties are positioned at the end of the value instead of at the beginning.
       Thus, we re-arrange the templates accordingly. */
    if (IE) {
        for (rootProperty in CSS.Hooks.templates) {
            hookTemplate = CSS.Hooks.templates[rootProperty];
            hookNames = hookTemplate[0].split(" ");
            var defaultValues = hookTemplate[1].match(CSS.RegEx.valueSplit);
            if (hookNames[0] === "Color") {
                /* Reposition both the hook's name and its default value to the end of their respective strings. */
                hookNames.push(hookNames.shift());
                defaultValues.push(defaultValues.shift());
                /* Replace the existing template for the hook's root property. */
                CSS.Hooks.templates[rootProperty] = [ hookNames.join(" "), defaultValues.join(" ") ];
            }
        }
    }
    /* Hook registration. */
    for (rootProperty in CSS.Hooks.templates) {
        hookTemplate = CSS.Hooks.templates[rootProperty];
        hookNames = hookTemplate[0].split(" ");
        /* Note: for-in over the hookNames array yields string indices, so hookPosition is stored as a
           string; it still works as an array index in extractValue/injectValue. */
        for (var i in hookNames) {
            var fullHookName = rootProperty + hookNames[i],
                hookPosition = i;
            /* For each hook, register its full name (e.g. textShadowBlur) with its root property (e.g. textShadow)
               and the hook's position in its template's default value string. */
            CSS.Hooks.registered[fullHookName] = [ rootProperty, hookPosition ];
        }
    }
},
/*****************************
Injection and Extraction
*****************************/
/* Look up the root property associated with the hook (e.g. return "textShadow" for "textShadowBlur"). */
/* Since a hook cannot be set directly (the browser won't recognize it), style updating for hooks is routed through the hook's root property. */
getRoot: function (property) {
var hookData = CSS.Hooks.registered[property];
if (hookData) {
return hookData[0];
} else {
/* If there was no hook match, return the property name untouched. */
return property;
}
},
/* Convert any rootPropertyValue, null or otherwise, into a space-delimited list of hook values so that
the targeted hook can be injected or extracted at its standard position. */
cleanRootPropertyValue: function(rootProperty, rootPropertyValue) {
/* If the rootPropertyValue is wrapped with "rgb()", "clip()", etc., remove the wrapping to normalize the value before manipulation. */
if (CSS.RegEx.valueUnwrap.test(rootPropertyValue)) {
rootPropertyValue = rootPropertyValue.match(CSS.RegEx.valueUnwrap)[1];
}
/* If rootPropertyValue is a CSS null-value (from which there's inherently no hook value to extract),
default to the root's default value as defined in CSS.Hooks.templates. */
/* Note: CSS null-values include "none", "auto", and "transparent". They must be converted into their
zero-values (e.g. textShadow: "none" ==> textShadow: "0px 0px 0px black") for hook manipulation to proceed. */
if (CSS.Values.isCSSNullValue(rootPropertyValue)) {
rootPropertyValue = CSS.Hooks.templates[rootProperty][1];
}
return rootPropertyValue;
},
/* Extracted the hook's value from its root property's value. This is used to get the starting value of an animating hook. */
extractValue: function (fullHookName, rootPropertyValue) {
var hookData = CSS.Hooks.registered[fullHookName];
if (hookData) {
var hookRoot = hookData[0],
hookPosition = hookData[1];
rootPropertyValue = CSS.Hooks.cleanRootPropertyValue(hookRoot, rootPropertyValue);
/* Split rootPropertyValue into its constituent hook values then grab the desired hook at its standard position. */
return rootPropertyValue.toString().match(CSS.RegEx.valueSplit)[hookPosition];
} else {
/* If the provided fullHookName isn't a registered hook, return the rootPropertyValue that was passed in. */
return rootPropertyValue;
}
},
/* Inject the hook's value into its root property's value. This is used to piece back together the root property
once Velocity has updated one of its individually hooked values through tweening. */
injectValue: function (fullHookName, hookValue, rootPropertyValue) {
var hookData = CSS.Hooks.registered[fullHookName];
if (hookData) {
var hookRoot = hookData[0],
hookPosition = hookData[1],
rootPropertyValueParts,
rootPropertyValueUpdated;
rootPropertyValue = CSS.Hooks.cleanRootPropertyValue(hookRoot, rootPropertyValue);
/* Split rootPropertyValue into its individual hook values, replace the targeted value with hookValue,
then reconstruct the rootPropertyValue string. */
rootPropertyValueParts = rootPropertyValue.toString().match(CSS.RegEx.valueSplit);
rootPropertyValueParts[hookPosition] = hookValue;
rootPropertyValueUpdated = rootPropertyValueParts.join(" ");
return rootPropertyValueUpdated;
} else {
/* If the provided fullHookName isn't a registered hook, return the rootPropertyValue that was passed in. */
return rootPropertyValue;
}
}
},
/*******************
Normalizations
*******************/
/* Normalizations standardize CSS property manipulation by polyfilling browser-specific implementations (e.g. opacity)
   and reformatting special properties (e.g. clip, rgba) to look like standard ones. */
Normalizations: {
/* Normalizations are passed a normalization target (either the property's name, its extracted value, or its injected value),
the targeted element (which may need to be queried), and the targeted property value. */
registered: {
clip: function (type, element, propertyValue) {
switch (type) {
case "name":
return "clip";
/* Clip needs to be unwrapped and stripped of its commas during extraction. */
case "extract":
var extracted;
/* If Velocity also extracted this value, skip extraction. */
if (CSS.RegEx.wrappedValueAlreadyExtracted.test(propertyValue)) {
extracted = propertyValue;
} else {
/* Remove the "rect()" wrapper. */
extracted = propertyValue.toString().match(CSS.RegEx.valueUnwrap);
/* Strip off commas. */
extracted = extracted ? extracted[1].replace(/,(\s+)?/g, " ") : propertyValue;
}
return extracted;
/* Clip needs to be re-wrapped during injection. */
case "inject":
return "rect(" + propertyValue + ")";
}
},
blur: function(type, element, propertyValue) {
switch (type) {
case "name":
return Velocity.State.isFirefox ? "filter" : "-webkit-filter";
case "extract":
var extracted = parseFloat(propertyValue);
/* If extracted is NaN, meaning the value isn't already extracted. */
if (!(extracted || extracted === 0)) {
var blurComponent = propertyValue.toString().match(/blur\(([0-9]+[A-z]+)\)/i);
/* If the filter string had a blur component, return just the blur value and unit type. */
if (blurComponent) {
extracted = blurComponent[1];
/* If the component doesn't exist, default blur to 0. */
} else {
extracted = 0;
}
}
return extracted;
/* Blur needs to be re-wrapped during injection. */
case "inject":
/* For the blur effect to be fully de-applied, it needs to be set to "none" instead of 0. */
if (!parseFloat(propertyValue)) {
return "none";
} else {
return "blur(" + propertyValue + ")";
}
}
},
/* <=IE8 do not support the standard opacity property. They use filter:alpha(opacity=INT) instead. */
opacity: function (type, element, propertyValue) {
    if (IE <= 8) {
        switch (type) {
            case "name":
                return "filter";
            case "extract":
                /* <=IE8 return a "filter" value of "alpha(opacity=\d{1,3})".
                   Extract the value and convert it to a decimal value to match the standard CSS opacity property's formatting. */
                var extracted = propertyValue.toString().match(/alpha\(opacity=(.*)\)/i);
                if (extracted) {
                    /* Convert to decimal value. */
                    propertyValue = extracted[1] / 100;
                } else {
                    /* When extracting opacity, default to 1 since a null value means opacity hasn't been set. */
                    propertyValue = 1;
                }
                return propertyValue;
            case "inject":
                /* Opacified elements are required to have their zoom property set to a non-zero value. */
                element.style.zoom = 1;
                /* Setting the filter property on elements with certain font property combinations can result in a
                   highly unappealing ultra-bolding effect. There's no way to remedy this throughout a tween, but dropping the
                   value altogether (when opacity hits 1) at least ensures that the glitch is gone post-tweening. */
                if (parseFloat(propertyValue) >= 1) {
                    return "";
                } else {
                    /* As per the filter property's spec, convert the decimal value to a whole number and wrap the value. */
                    return "alpha(opacity=" + parseInt(parseFloat(propertyValue) * 100, 10) + ")";
                }
        }
    /* With all other browsers, normalization is not required; return the same values that were passed in. */
    } else {
        switch (type) {
            case "name":
                return "opacity";
            case "extract":
                return propertyValue;
            case "inject":
                return propertyValue;
        }
    }
}
},
/*****************************
Batched Registrations
*****************************/
/* Note: Batched normalizations extend the CSS.Normalizations.registered object. */
register: function () {
    /*****************
        Transforms
    *****************/
    /* Transforms are the subproperties contained by the CSS "transform" property. Transforms must undergo normalization
       so that they can be referenced in a properties map by their individual names. */
    /* Note: When transforms are "set", they are actually assigned to a per-element transformCache. When all transform
       setting is complete, CSS.flushTransformCache() must be manually called to flush the values to the DOM.
       Transform setting is batched in this way to improve performance: the transform style only needs to be updated
       once when multiple transform subproperties are being animated simultaneously. */
    /* Note: IE9 and Android Gingerbread have support for 2D -- but not 3D -- transforms. Since animating unsupported
       transform properties results in the browser ignoring the *entire* transform string, we prevent these 3D values
       from being normalized for these browsers so that tweening skips these properties altogether
       (since it will ignore them as being unsupported by the browser.) */
    if (!(IE <= 9) && !Velocity.State.isGingerbread) {
        /* Note: Since the standalone CSS "perspective" property and the CSS transform "perspective" subproperty
           share the same name, the latter is given a unique token within Velocity: "transformPerspective". */
        CSS.Lists.transformsBase = CSS.Lists.transformsBase.concat(CSS.Lists.transforms3D);
    }
    for (var i = 0; i < CSS.Lists.transformsBase.length; i++) {
        /* Wrap the dynamically generated normalization function in a new scope so that transformName's value is
           paired with its respective function. (Otherwise, all functions would take the final for loop's transformName.) */
        (function() {
            var transformName = CSS.Lists.transformsBase[i];
            CSS.Normalizations.registered[transformName] = function (type, element, propertyValue) {
                switch (type) {
                    /* The normalized property name is the parent "transform" property -- the property that is actually set in CSS. */
                    case "name":
                        return "transform";
                    /* Transform values are cached onto a per-element transformCache object. */
                    case "extract":
                        /* If this transform has yet to be assigned a value, return its null value. */
                        if (Data(element) === undefined || Data(element).transformCache[transformName] === undefined) {
                            /* Scale transforms default to 1 whereas all other transform properties default to 0. */
                            return /^scale/i.test(transformName) ? 1 : 0;
                        /* When transform values are set, they are wrapped in parentheses as per the CSS spec.
                           Thus, when extracting their values (for tween calculations), we strip off the parentheses. */
                        } else {
                            return Data(element).transformCache[transformName].replace(/[()]/g, "");
                        }
                    case "inject":
                        var invalid = false;
                        /* If an individual transform property contains an unsupported unit type, the browser ignores the *entire* transform property.
                           Thus, protect users from themselves by skipping setting for transform values supplied with invalid unit types. */
                        /* Switch on the base transform type; ignore the axis by removing the last letter from the transform's name. */
                        switch (transformName.substr(0, transformName.length - 1)) {
                            /* Whitelist unit types for each transform. */
                            case "translate":
                                invalid = !/(%|px|em|rem|vw|vh|\d)$/i.test(propertyValue);
                                break;
                            /* Since an axis-free "scale" property is supported as well, a little hack is used here to detect it by chopping off its last letter. */
                            case "scal":
                            case "scale":
                                /* Chrome on Android has a bug in which scaled elements blur if their initial scale
                                   value is below 1 (which can happen with forcefeeding). Thus, we detect a yet-unset scale property
                                   and ensure that its first value is always 1. More info: http://stackoverflow.com/questions/10417890/css3-animations-with-transform-causes-blurred-elements-on-webkit/10417962#10417962 */
                                if (Velocity.State.isAndroid && Data(element).transformCache[transformName] === undefined && propertyValue < 1) {
                                    propertyValue = 1;
                                }
                                invalid = !/(\d)$/i.test(propertyValue);
                                break;
                            case "skew":
                                invalid = !/(deg|\d)$/i.test(propertyValue);
                                break;
                            case "rotate":
                                invalid = !/(deg|\d)$/i.test(propertyValue);
                                break;
                        }
                        if (!invalid) {
                            /* As per the CSS spec, wrap the value in parentheses. */
                            Data(element).transformCache[transformName] = "(" + propertyValue + ")";
                        }
                        /* Although the value is set on the transformCache object, return the newly-updated value for the calling code to process as normal. */
                        return Data(element).transformCache[transformName];
                }
            };
        })();
    }
    /*************
        Colors
    *************/
    /* Since Velocity only animates a single numeric value per property, color animation is achieved by hooking the individual RGBA components of CSS color properties.
       Accordingly, color values must be normalized (e.g. "#ff0000", "red", and "rgb(255, 0, 0)" ==> "255 0 0 1") so that their components can be injected/extracted by CSS.Hooks logic. */
    for (var i = 0; i < CSS.Lists.colors.length; i++) {
        /* Wrap the dynamically generated normalization function in a new scope so that colorName's value is paired with its respective function.
           (Otherwise, all functions would take the final for loop's colorName.) */
        (function () {
            var colorName = CSS.Lists.colors[i];
            /* Note: In IE<=8, which support rgb but not rgba, color properties are reverted to rgb by stripping off the alpha component. */
            CSS.Normalizations.registered[colorName] = function(type, element, propertyValue) {
                switch (type) {
                    case "name":
                        return colorName;
                    /* Convert all color values into the rgb format. (Old IE can return hex values and color names instead of rgb/rgba.) */
                    case "extract":
                        var extracted;
                        /* If the color is already in its hookable form (e.g. "255 255 255 1") due to having been previously extracted, skip extraction. */
                        if (CSS.RegEx.wrappedValueAlreadyExtracted.test(propertyValue)) {
                            extracted = propertyValue;
                        } else {
                            var converted,
                                colorNames = {
                                    black: "rgb(0, 0, 0)",
                                    blue: "rgb(0, 0, 255)",
                                    gray: "rgb(128, 128, 128)",
                                    green: "rgb(0, 128, 0)",
                                    red: "rgb(255, 0, 0)",
                                    white: "rgb(255, 255, 255)"
                                };
                            /* Convert color names to rgb. */
                            if (/^[A-z]+$/i.test(propertyValue)) {
                                if (colorNames[propertyValue] !== undefined) {
                                    converted = colorNames[propertyValue]
                                } else {
                                    /* If an unmatched color name is provided, default to black. */
                                    converted = colorNames.black;
                                }
                            /* Convert hex values to rgb. */
                            } else if (CSS.RegEx.isHex.test(propertyValue)) {
                                converted = "rgb(" + CSS.Values.hexToRgb(propertyValue).join(" ") + ")";
                            /* If the provided color doesn't match any of the accepted color formats, default to black. */
                            } else if (!(/^rgba?\(/i.test(propertyValue))) {
                                converted = colorNames.black;
                            }
                            /* Remove the surrounding "rgb/rgba()" string then replace commas with spaces and strip
                               repeated spaces (in case the value included spaces to begin with). */
                            extracted = (converted || propertyValue).toString().match(CSS.RegEx.valueUnwrap)[1].replace(/,(\s+)?/g, " ");
                        }
                        /* So long as this isn't <=IE8, add a fourth (alpha) component if it's missing and default it to 1 (visible). */
                        if (!(IE <= 8) && extracted.split(" ").length === 3) {
                            extracted += " 1";
                        }
                        return extracted;
                    case "inject":
                        /* If this is IE<=8 and an alpha component exists, strip it off. */
                        if (IE <= 8) {
                            if (propertyValue.split(" ").length === 4) {
                                propertyValue = propertyValue.split(/\s+/).slice(0, 3).join(" ");
                            }
                        /* Otherwise, add a fourth (alpha) component if it's missing and default it to 1 (visible). */
                        } else if (propertyValue.split(" ").length === 3) {
                            propertyValue += " 1";
                        }
                        /* Re-insert the browser-appropriate wrapper ("rgb/rgba()"), insert commas, and strip off decimal units
                           on all values but the fourth (R, G, and B only accept whole numbers). */
                        return (IE <= 8 ? "rgb" : "rgba") + "(" + propertyValue.replace(/\s+/g, ",").replace(/\.(\d)+(?=,)/g, "") + ")";
                }
            };
        })();
    }
}
},
/************************
CSS Property Names
************************/
Names: {
/* Camelcase a property name into its JavaScript notation (e.g. "background-color" ==> "backgroundColor").
Camelcasing is used to normalize property names between and across calls. */
camelCase: function (property) {
return property.replace(/-(\w)/g, function (match, subMatch) {
return subMatch.toUpperCase();
});
},
/* For SVG elements, some properties (namely, dimensional ones) are GET/SET via the element's HTML attributes (instead of via CSS styles). */
SVGAttribute: function (property) {
var SVGAttributes = "width|height|x|y|cx|cy|r|rx|ry|x1|x2|y1|y2";
/* Certain browsers require an SVG transform to be applied as an attribute. (Otherwise, application via CSS is preferable due to 3D support.) */
if (IE || (Velocity.State.isAndroid && !Velocity.State.isChrome)) {
SVGAttributes += "|transform";
}
return new RegExp("^(" + SVGAttributes + ")$", "i").test(property);
},
/* Determine whether a property should be set with a vendor prefix. */
/* If a prefixed version of the property exists, return it. Otherwise, return the original property name.
If the property is not at all supported by the browser, return a false flag. */
prefixCheck: function (property) {
/* If this property has already been checked, return the cached value. */
if (Velocity.State.prefixMatches[property]) {
return [ Velocity.State.prefixMatches[property], true ];
} else {
var vendors = [ "", "Webkit", "Moz", "ms", "O" ];
for (var i = 0, vendorsLength = vendors.length; i < vendorsLength; i++) {
var propertyPrefixed;
if (i === 0) {
propertyPrefixed = property;
} else {
/* Capitalize the first letter of the property to conform to JavaScript vendor prefix notation (e.g. webkitFilter). */
propertyPrefixed = vendors[i] + property.replace(/^\w/, function(match) { return match.toUpperCase(); });
}
/* Check if the browser supports this property as prefixed. */
if (Type.isString(Velocity.State.prefixElement.style[propertyPrefixed])) {
/* Cache the match. */
Velocity.State.prefixMatches[property] = propertyPrefixed;
return [ propertyPrefixed, true ];
}
}
/* If the browser doesn't support this property in any form, include a false flag so that the caller can decide how to proceed. */
return [ property, false ];
}
}
},
/************************
CSS Property Values
************************/
Values: {
/* Hex to RGB conversion. Copyright Tim Down: http://stackoverflow.com/questions/5623838/rgb-to-hex-and-hex-to-rgb */
hexToRgb: function (hex) {
var shortformRegex = /^#?([a-f\d])([a-f\d])([a-f\d])$/i,
longformRegex = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i,
rgbParts;
hex = hex.replace(shortformRegex, function (m, r, g, b) {
return r + r + g + g + b + b;
});
rgbParts = longformRegex.exec(hex);
return rgbParts ? [ parseInt(rgbParts[1], 16), parseInt(rgbParts[2], 16), parseInt(rgbParts[3], 16) ] : [ 0, 0, 0 ];
},
isCSSNullValue: function (value) {
/* The browser defaults CSS values that have not been set to either 0 or one of several possible null-value strings.
Thus, we check for both falsiness and these special strings. */
/* Null-value checking is performed to default the special strings to 0 (for the sake of tweening) or their hook
templates as defined as CSS.Hooks (for the sake of hook injection/extraction). */
/* Note: Chrome returns "rgba(0, 0, 0, 0)" for an undefined color whereas IE returns "transparent". */
return (value == 0 || /^(none|auto|transparent|(rgba\(0, ?0, ?0, ?0\)))$/i.test(value));
},
/* Retrieve a property's default unit type. Used for assigning a unit type when one is not supplied by the user. */
getUnitType: function (property) {
if (/^(rotate|skew)/i.test(property)) {
return "deg";
} else if (/(^(scale|scaleX|scaleY|scaleZ|alpha|flexGrow|flexHeight|zIndex|fontWeight)$)|((opacity|red|green|blue|alpha)$)/i.test(property)) {
/* The above properties are unitless. */
return "";
} else {
/* Default to px for all other properties. */
return "px";
}
},
/* HTML elements default to an associated display type when they're not set to display:none. */
/* Note: This function is used for correctly setting the non-"none" display value in certain Velocity redirects, such as fadeIn/Out. */
getDisplayType: function (element) {
var tagName = element && element.tagName.toString().toLowerCase();
if (/^(b|big|i|small|tt|abbr|acronym|cite|code|dfn|em|kbd|strong|samp|var|a|bdo|br|img|map|object|q|script|span|sub|sup|button|input|label|select|textarea)$/i.test(tagName)) {
return "inline";
} else if (/^(li)$/i.test(tagName)) {
return "list-item";
} else if (/^(tr)$/i.test(tagName)) {
return "table-row";
} else if (/^(table)$/i.test(tagName)) {
return "table";
} else if (/^(tbody)$/i.test(tagName)) {
return "table-row-group";
/* Default to "block" when no match is found. */
} else {
return "block";
}
},
/* The class add/remove functions are used to temporarily apply a "velocity-animating" class to elements while they're animating. */
addClass: function (element, className) {
if (element.classList) {
element.classList.add(className);
} else {
element.className += (element.className.length ? " " : "") + className;
}
},
removeClass: function (element, className) {
if (element.classList) {
element.classList.remove(className);
} else {
element.className = element.className.toString().replace(new RegExp("(^|\\s)" + className.split(" ").join("|") + "(\\s|$)", "gi"), " ");
}
}
},
/****************************
Style Getting & Setting
****************************/
/* The singular getPropertyValue, which routes the logic for all normalizations, hooks, and standard CSS properties. */
getPropertyValue: function (element, property, rootPropertyValue, forceStyleLookup) {
/* Get an element's computed property value. */
/* Note: Retrieving the value of a CSS property cannot simply be performed by checking an element's
style attribute (which only reflects user-defined values). Instead, the browser must be queried for a property's
*computed* value. You can read more about getComputedStyle here: https://developer.mozilla.org/en/docs/Web/API/window.getComputedStyle */
function computePropertyValue (element, property) {
/* When box-sizing isn't set to border-box, height and width style values are incorrectly computed when an
element's scrollbars are visible (which expands the element's dimensions). Thus, we defer to the more accurate
offsetHeight/Width property, which includes the total dimensions for interior, border, padding, and scrollbar.
We subtract border and padding to get the sum of interior + scrollbar. */
var computedValue = 0;
/* IE<=8 doesn't support window.getComputedStyle, thus we defer to jQuery, which has an extensive array
of hacks to accurately retrieve IE8 property values. Re-implementing that logic here is not worth bloating the
codebase for a dying browser. The performance repercussions of using jQuery here are minimal since
Velocity is optimized to rarely (and sometimes never) query the DOM. Further, the $.css() codepath isn't that slow. */
if (IE <= 8) {
computedValue = $.css(element, property); /* GET */
/* All other browsers support getComputedStyle. The returned live object reference is cached onto its
associated element so that it does not need to be refetched upon every GET. */
} else {
/* Browsers do not return height and width values for elements that are set to display:"none". Thus, we temporarily
toggle display to the element type's default value. */
var toggleDisplay = false;
if (/^(width|height)$/.test(property) && CSS.getPropertyValue(element, "display") === 0) {
toggleDisplay = true;
CSS.setPropertyValue(element, "display", CSS.Values.getDisplayType(element));
}
function revertDisplay () {
if (toggleDisplay) {
CSS.setPropertyValue(element, "display", "none");
}
}
if (!forceStyleLookup) {
if (property === "height" && CSS.getPropertyValue(element, "boxSizing").toString().toLowerCase() !== "border-box") {
var contentBoxHeight = element.offsetHeight - (parseFloat(CSS.getPropertyValue(element, "borderTopWidth")) || 0) - (parseFloat(CSS.getPropertyValue(element, "borderBottomWidth")) || 0) - (parseFloat(CSS.getPropertyValue(element, "paddingTop")) || 0) - (parseFloat(CSS.getPropertyValue(element, "paddingBottom")) || 0);
revertDisplay();
return contentBoxHeight;
} else if (property === "width" && CSS.getPropertyValue(element, "boxSizing").toString().toLowerCase() !== "border-box") {
var contentBoxWidth = element.offsetWidth - (parseFloat(CSS.getPropertyValue(element, "borderLeftWidth")) || 0) - (parseFloat(CSS.getPropertyValue(element, "borderRightWidth")) || 0) - (parseFloat(CSS.getPropertyValue(element, "paddingLeft")) || 0) - (parseFloat(CSS.getPropertyValue(element, "paddingRight")) || 0);
revertDisplay();
return contentBoxWidth;
}
}
var computedStyle;
/* For elements that Velocity hasn't been called on directly (e.g. when Velocity queries the DOM on behalf
of a parent of an element its animating), perform a direct getComputedStyle lookup since the object isn't cached. */
if (Data(element) === undefined) {
computedStyle = window.getComputedStyle(element, null); /* GET */
/* If the computedStyle object has yet to be cached, do so now. */
} else if (!Data(element).computedStyle) {
computedStyle = Data(element).computedStyle = window.getComputedStyle(element, null); /* GET */
/* If computedStyle is cached, use it. */
} else {
computedStyle = Data(element).computedStyle;
}
/* IE and Firefox do not return a value for the generic borderColor -- they only return individual values for each border side's color.
Also, in all browsers, when border colors aren't all the same, a compound value is returned that Velocity isn't setup to parse.
So, as a polyfill for querying individual border side colors, we just return the top border's color and animate all borders from that value. */
if (property === "borderColor") {
property = "borderTopColor";
}
/* IE9 has a bug in which the "filter" property must be accessed from computedStyle using the getPropertyValue method
instead of a direct property lookup. The getPropertyValue method is slower than a direct lookup, which is why we avoid it by default. */
if (IE === 9 && property === "filter") {
computedValue = computedStyle.getPropertyValue(property); /* GET */
} else {
computedValue = computedStyle[property];
}
/* Fall back to the property's style value (if defined) when computedValue returns nothing,
which can happen when the element hasn't been painted. */
if (computedValue === "" || computedValue === null) {
computedValue = element.style[property];
}
revertDisplay();
}
/* For top, right, bottom, and left (TRBL) values that are set to "auto" on elements of "fixed" or "absolute" position,
defer to jQuery for converting "auto" to a numeric value. (For elements with a "static" or "relative" position, "auto" has the same
effect as being set to 0, so no conversion is necessary.) */
/* An example of why numeric conversion is necessary: When an element with "position:absolute" has an untouched "left"
property, which reverts to "auto", left's value is 0 relative to its parent element, but is often non-zero relative
to its *containing* (not parent) element, which is the nearest "position:relative" ancestor or the viewport (and always the viewport in the case of "position:fixed"). */
if (computedValue === "auto" && /^(top|right|bottom|left)$/i.test(property)) {
var position = computePropertyValue(element, "position"); /* GET */
/* For absolute positioning, jQuery's $.position() only returns values for top and left;
right and bottom will have their "auto" value reverted to 0. */
/* Note: A jQuery object must be created here since jQuery doesn't have a low-level alias for $.position().
Not a big deal since we're currently in a GET batch anyway. */
if (position === "fixed" || (position === "absolute" && /top|left/i.test(property))) {
/* Note: jQuery strips the pixel unit from its returned values; we re-add it here to conform with computePropertyValue's behavior. */
computedValue = $(element).position()[property] + "px"; /* GET */
}
}
return computedValue;
}
var propertyValue;
/* If this is a hooked property (e.g. "clipLeft" instead of the root property of "clip"),
extract the hook's value from a normalized rootPropertyValue using CSS.Hooks.extractValue(). */
if (CSS.Hooks.registered[property]) {
var hook = property,
hookRoot = CSS.Hooks.getRoot(hook);
/* If a cached rootPropertyValue wasn't passed in (which Velocity always attempts to do in order to avoid requerying the DOM),
query the DOM for the root property's value. */
if (rootPropertyValue === undefined) {
/* Since the browser is now being directly queried, use the official post-prefixing property name for this lookup. */
rootPropertyValue = CSS.getPropertyValue(element, CSS.Names.prefixCheck(hookRoot)[0]); /* GET */
}
/* If this root has a normalization registered, peform the associated normalization extraction. */
if (CSS.Normalizations.registered[hookRoot]) {
rootPropertyValue = CSS.Normalizations.registered[hookRoot]("extract", element, rootPropertyValue);
}
/* Extract the hook's value. */
propertyValue = CSS.Hooks.extractValue(hook, rootPropertyValue);
/* If this is a normalized property (e.g. "opacity" becomes "filter" in <=IE8) or "translateX" becomes "transform"),
normalize the property's name and value, and handle the special case of transforms. */
/* Note: Normalizing a property is mutually exclusive from hooking a property since hook-extracted values are strictly
numerical and therefore do not require normalization extraction. */
} else if (CSS.Normalizations.registered[property]) {
var normalizedPropertyName,
normalizedPropertyValue;
normalizedPropertyName = CSS.Normalizations.registered[property]("name", element);
/* Transform values are calculated via normalization extraction (see below), which checks against the element's transformCache.
At no point do transform GETs ever actually query the DOM; initial stylesheet values are never processed.
This is because parsing 3D transform matrices is not always accurate and would bloat our codebase;
thus, normalization extraction defaults initial transform values to their zero-values (e.g. 1 for scaleX and 0 for translateX). */
if (normalizedPropertyName !== "transform") {
normalizedPropertyValue = computePropertyValue(element, CSS.Names.prefixCheck(normalizedPropertyName)[0]); /* GET */
/* If the value is a CSS null-value and this property has a hook template, use that zero-value template so that hooks can be extracted from it. */
if (CSS.Values.isCSSNullValue(normalizedPropertyValue) && CSS.Hooks.templates[property]) {
normalizedPropertyValue = CSS.Hooks.templates[property][1];
}
}
propertyValue = CSS.Normalizations.registered[property]("extract", element, normalizedPropertyValue);
}
/* If a (numeric) value wasn't produced via hook extraction or normalization, query the DOM. */
if (!/^[\d-]/.test(propertyValue)) {
/* For SVG elements, dimensional properties (which SVGAttribute() detects) are tweened via
their HTML attribute values instead of their CSS style values. */
if (Data(element) && Data(element).isSVG && CSS.Names.SVGAttribute(property)) {
/* Since the height/width attribute values must be set manually, they don't reflect computed values.
Thus, we use use getBBox() to ensure we always get values for elements with undefined height/width attributes. */
if (/^(height|width)$/i.test(property)) {
/* Firefox throws an error if .getBBox() is called on an SVG that isn't attached to the DOM. */
try {
propertyValue = element.getBBox()[property];
} catch (error) {
propertyValue = 0;
}
/* Otherwise, access the attribute value directly. */
} else {
propertyValue = element.getAttribute(property);
}
} else {
propertyValue = computePropertyValue(element, CSS.Names.prefixCheck(property)[0]); /* GET */
}
}
/* Since property lookups are for animation purposes (which entails computing the numeric delta between start and end values),
convert CSS null-values to an integer of value 0. */
if (CSS.Values.isCSSNullValue(propertyValue)) {
propertyValue = 0;
}
if (Velocity.debug >= 2) console.log("Get " + property + ": " + propertyValue);
return propertyValue;
},
/* The singular setPropertyValue, which routes the logic for all normalizations, hooks, and standard CSS properties. */
        setPropertyValue: function(element, property, propertyValue, rootPropertyValue, scrollData) {
            /* Apply a property value to an element, routing through scroll handling, transform caching,
               hook injection, and normalization as appropriate. */
            /* @param {HTMLElement} element - The target element.
               @param {string} property - camelCase property name (or the "scroll" pseudo-property).
               @param {(string|number)} propertyValue - The value to apply.
               @param {string=} rootPropertyValue - Cached value of a hook's root property, used during hook injection to avoid a DOM query.
               @param {Object=} scrollData - For "scroll" only: an object with container, direction, and alternateValue members.
               @returns {Array} [ propertyName, propertyValue ] -- the (possibly normalized/prefixed) pair actually applied. */
            var propertyName = property;
            /* In order to be subjected to call options and element queueing, scroll animation is routed through Velocity as if it were a standard CSS property. */
            if (property === "scroll") {
                /* If a container option is present, scroll the container instead of the browser window. */
                if (scrollData.container) {
                    scrollData.container["scroll" + scrollData.direction] = propertyValue;
                /* Otherwise, Velocity defaults to scrolling the browser window. */
                } else {
                    if (scrollData.direction === "Left") {
                        window.scrollTo(propertyValue, scrollData.alternateValue);
                    } else {
                        window.scrollTo(scrollData.alternateValue, propertyValue);
                    }
                }
            } else {
                /* Transforms (translateX, rotateZ, etc.) are applied to a per-element transformCache object, which is manually flushed via flushTransformCache().
                   Thus, for now, we merely cache transforms being SET. */
                if (CSS.Normalizations.registered[property] && CSS.Normalizations.registered[property]("name", element) === "transform") {
                    /* Perform a normalization injection. */
                    /* Note: The normalization logic handles the transformCache updating. */
                    CSS.Normalizations.registered[property]("inject", element, propertyValue);
                    propertyName = "transform";
                    propertyValue = Data(element).transformCache[property];
                } else {
                    /* Inject hooks. */
                    if (CSS.Hooks.registered[property]) {
                        var hookName = property,
                            hookRoot = CSS.Hooks.getRoot(property);
                        /* If a cached rootPropertyValue was not provided, query the DOM for the hookRoot's current value. */
                        rootPropertyValue = rootPropertyValue || CSS.getPropertyValue(element, hookRoot); /* GET */
                        propertyValue = CSS.Hooks.injectValue(hookName, propertyValue, rootPropertyValue);
                        /* From here on, operate on the hook's root property (the hook's value is now embedded in propertyValue). */
                        property = hookRoot;
                    }
                    /* Normalize names and values. */
                    if (CSS.Normalizations.registered[property]) {
                        propertyValue = CSS.Normalizations.registered[property]("inject", element, propertyValue);
                        property = CSS.Normalizations.registered[property]("name", element);
                    }
                    /* Assign the appropriate vendor prefix before performing an official style update. */
                    propertyName = CSS.Names.prefixCheck(property)[0];
                    /* A try/catch is used for IE<=8, which throws an error when "invalid" CSS values are set, e.g. a negative width.
                       Try/catch is avoided for other browsers since it incurs a performance overhead. */
                    if (IE <= 8) {
                        try {
                            element.style[propertyName] = propertyValue;
                        } catch (error) { if (Velocity.debug) console.log("Browser does not support [" + propertyValue + "] for [" + propertyName + "]"); }
                    /* SVG elements have their dimensional properties (width, height, x, y, cx, etc.) applied directly as attributes instead of as styles. */
                    /* Note: IE8 does not support SVG elements, so it's okay that we skip it for SVG animation. */
                    } else if (Data(element) && Data(element).isSVG && CSS.Names.SVGAttribute(property)) {
                        /* Note: For SVG attributes, vendor-prefixed property names are never used. */
                        /* Note: Not all CSS properties can be animated via attributes, but the browser won't throw an error for unsupported properties. */
                        element.setAttribute(property, propertyValue);
                    } else {
                        element.style[propertyName] = propertyValue;
                    }
                    if (Velocity.debug >= 2) console.log("Set " + property + " (" + propertyName + "): " + propertyValue);
                }
            }
            /* Return the normalized property name and value in case the caller wants to know how these values were modified before being applied to the DOM. */
            return [ propertyName, propertyValue ];
        },
/* To increase performance by batching transform updates into a single SET, transforms are not directly applied to an element until flushTransformCache() is called. */
/* Note: Velocity applies transform properties in the same order that they are chronogically introduced to the element's CSS styles. */
flushTransformCache: function(element) {
var transformString = "";
/* Certain browsers require that SVG transforms be applied as an attribute. However, the SVG transform attribute takes a modified version of CSS's transform string
(units are dropped and, except for skewX/Y, subproperties are merged into their master property -- e.g. scaleX and scaleY are merged into scale(X Y). */
if ((IE || (Velocity.State.isAndroid && !Velocity.State.isChrome)) && Data(element).isSVG) {
/* Since transform values are stored in their parentheses-wrapped form, we use a helper function to strip out their numeric values.
Further, SVG transform properties only take unitless (representing pixels) values, so it's okay that parseFloat() strips the unit suffixed to the float value. */
function getTransformFloat (transformProperty) {
return parseFloat(CSS.getPropertyValue(element, transformProperty));
}
/* Create an object to organize all the transforms that we'll apply to the SVG element. To keep the logic simple,
we process *all* transform properties -- even those that may not be explicitly applied (since they default to their zero-values anyway). */
var SVGTransforms = {
translate: [ getTransformFloat("translateX"), getTransformFloat("translateY") ],
skewX: [ getTransformFloat("skewX") ], skewY: [ getTransformFloat("skewY") ],
/* If the scale property is set (non-1), use that value for the scaleX and scaleY values
(this behavior mimics the result of animating all these properties at once on HTML elements). */
scale: getTransformFloat("scale") !== 1 ? [ getTransformFloat("scale"), getTransformFloat("scale") ] : [ getTransformFloat("scaleX"), getTransformFloat("scaleY") ],
/* Note: SVG's rotate transform takes three values: rotation degrees followed by the X and Y values
defining the rotation's origin point. We ignore the origin values (default them to 0). */
rotate: [ getTransformFloat("rotateZ"), 0, 0 ]
};
/* Iterate through the transform properties in the user-defined property map order.
(This mimics the behavior of non-SVG transform animation.) */
$.each(Data(element).transformCache, function(transformName) {
/* Except for with skewX/Y, revert the axis-specific transform subproperties to their axis-free master
properties so that they match up with SVG's accepted transform properties. */
if (/^translate/i.test(transformName)) {
transformName = "translate";
} else if (/^scale/i.test(transformName)) {
transformName = "scale";
} else if (/^rotate/i.test(transformName)) {
transformName = "rotate";
}
/* Check that we haven't yet deleted the property from the SVGTransforms container. */
if (SVGTransforms[transformName]) {
/* Append the transform property in the SVG-supported transform format. As per the spec, surround the space-delimited values in parentheses. */
transformString += transformName + "(" + SVGTransforms[transformName].join(" ") + ")" + " ";
/* After processing an SVG transform property, delete it from the SVGTransforms container so we don't
re-insert the same master property if we encounter another one of its axis-specific properties. */
delete SVGTransforms[transformName];
}
});
} else {
var transformValue,
perspective;
/* Transform properties are stored as members of the transformCache object. Concatenate all the members into a string. */
$.each(Data(element).transformCache, function(transformName) {
transformValue = Data(element).transformCache[transformName];
/* Transform's perspective subproperty must be set first in order to take effect. Store it temporarily. */
if (transformName === "transformPerspective") {
perspective = transformValue;
return true;
}
/* IE9 only supports one rotation type, rotateZ, which it refers to as "rotate". */
if (IE === 9 && transformName === "rotateZ") {
transformName = "rotate";
}
transformString += transformName + transformValue + " ";
});
/* If present, set the perspective subproperty first. */
if (perspective) {
transformString = "perspective" + perspective + " " + transformString;
}
}
CSS.setPropertyValue(element, "transform", transformString);
}
};
/* Register hooks and normalizations. */
CSS.Hooks.register();
CSS.Normalizations.register();
/* Allow hook setting in the same fashion as jQuery's $.css(). */
Velocity.hook = function (elements, arg2, arg3) {
var value = undefined;
elements = sanitizeElements(elements);
$.each(elements, function(i, element) {
/* Initialize Velocity's per-element data cache if this element hasn't previously been animated. */
if (Data(element) === undefined) {
Velocity.init(element);
}
/* Get property value. If an element set was passed in, only return the value for the first element. */
if (arg3 === undefined) {
if (value === undefined) {
value = Velocity.CSS.getPropertyValue(element, arg2);
}
/* Set property value. */
} else {
/* sPV returns an array of the normalized propertyName/propertyValue pair used to update the DOM. */
var adjustedSet = Velocity.CSS.setPropertyValue(element, arg2, arg3);
/* Transform properties don't automatically set. They have to be flushed to the DOM. */
if (adjustedSet[0] === "transform") {
Velocity.CSS.flushTransformCache(element);
}
value = adjustedSet;
}
});
return value;
};
/*****************
Animation
*****************/
var animate = function() {
/******************
Call Chain
******************/
/* Logic for determining what to return to the call stack when exiting out of Velocity. */
function getChain () {
/* If we are using the utility function, attempt to return this call's promise. If no promise library was detected,
default to null instead of returning the targeted elements so that utility function's return value is standardized. */
if (isUtility) {
return promiseData.promise || null;
/* Otherwise, if we're using $.fn, return the jQuery-/Zepto-wrapped element set. */
} else {
return elementsWrapped;
}
}
/*************************
Arguments Assignment
*************************/
/* To allow for expressive CoffeeScript code, Velocity supports an alternative syntax in which "elements" (or "e"), "properties" (or "p"), and "options" (or "o")
objects are defined on a container object that's passed in as Velocity's sole argument. */
/* Note: Some browsers automatically populate arguments with a "properties" object. We detect it by checking for its default "names" property. */
var syntacticSugar = (arguments[0] && (arguments[0].p || (($.isPlainObject(arguments[0].properties) && !arguments[0].properties.names) || Type.isString(arguments[0].properties)))),
/* Whether Velocity was called via the utility function (as opposed to on a jQuery/Zepto object). */
isUtility,
/* When Velocity is called via the utility function ($.Velocity()/Velocity()), elements are explicitly
passed in as the first parameter. Thus, argument positioning varies. We normalize them here. */
elementsWrapped,
argumentIndex;
var elements,
propertiesMap,
options;
/* Detect jQuery/Zepto elements being animated via the $.fn method. */
if (Type.isWrapped(this)) {
isUtility = false;
argumentIndex = 0;
elements = this;
elementsWrapped = this;
/* Otherwise, raw elements are being animated via the utility function. */
} else {
isUtility = true;
argumentIndex = 1;
elements = syntacticSugar ? (arguments[0].elements || arguments[0].e) : arguments[0];
}
elements = sanitizeElements(elements);
if (!elements) {
return;
}
if (syntacticSugar) {
propertiesMap = arguments[0].properties || arguments[0].p;
options = arguments[0].options || arguments[0].o;
} else {
propertiesMap = arguments[argumentIndex];
options = arguments[argumentIndex + 1];
}
/* The length of the element set (in the form of a nodeList or an array of elements) is defaulted to 1 in case a
single raw DOM element is passed in (which doesn't contain a length property). */
var elementsLength = elements.length,
elementsIndex = 0;
/***************************
Argument Overloading
***************************/
/* Support is included for jQuery's argument overloading: $.animate(propertyMap [, duration] [, easing] [, complete]).
Overloading is detected by checking for the absence of an object being passed into options. */
/* Note: The stop and finish actions do not accept animation options, and are therefore excluded from this check. */
if (!/^(stop|finish)$/i.test(propertiesMap) && !$.isPlainObject(options)) {
/* The utility function shifts all arguments one position to the right, so we adjust for that offset. */
var startingArgumentPosition = argumentIndex + 1;
options = {};
/* Iterate through all options arguments */
for (var i = startingArgumentPosition; i < arguments.length; i++) {
/* Treat a number as a duration. Parse it out. */
/* Note: The following RegEx will return true if passed an array with a number as its first item.
Thus, arrays are skipped from this check. */
if (!Type.isArray(arguments[i]) && (/^(fast|normal|slow)$/i.test(arguments[i]) || /^\d/.test(arguments[i]))) {
options.duration = arguments[i];
/* Treat strings and arrays as easings. */
} else if (Type.isString(arguments[i]) || Type.isArray(arguments[i])) {
options.easing = arguments[i];
/* Treat a function as a complete callback. */
} else if (Type.isFunction(arguments[i])) {
options.complete = arguments[i];
}
}
}
/***************
Promises
***************/
var promiseData = {
promise: null,
resolver: null,
rejecter: null
};
/* If this call was made via the utility function (which is the default method of invocation when jQuery/Zepto are not being used), and if
promise support was detected, create a promise object for this call and store references to its resolver and rejecter methods. The resolve
method is used when a call completes naturally or is prematurely stopped by the user. In both cases, completeCall() handles the associated
call cleanup and promise resolving logic. The reject method is used when an invalid set of arguments is passed into a Velocity call. */
/* Note: Velocity employs a call-based queueing architecture, which means that stopping an animating element actually stops the full call that
triggered it -- not that one element exclusively. Similarly, there is one promise per call, and all elements targeted by a Velocity call are
grouped together for the purposes of resolving and rejecting a promise. */
if (isUtility && Velocity.Promise) {
promiseData.promise = new Velocity.Promise(function (resolve, reject) {
promiseData.resolver = resolve;
promiseData.rejecter = reject;
});
}
/*********************
Action Detection
*********************/
/* Velocity's behavior is categorized into "actions": Elements can either be specially scrolled into view,
or they can be started, stopped, or reversed. If a literal or referenced properties map is passed in as Velocity's
first argument, the associated action is "start". Alternatively, "scroll", "reverse", or "stop" can be passed in instead of a properties map. */
var action;
switch (propertiesMap) {
case "scroll":
action = "scroll";
break;
case "reverse":
action = "reverse";
break;
case "finish":
case "stop":
/*******************
Action: Stop
*******************/
/* Clear the currently-active delay on each targeted element. */
$.each(elements, function(i, element) {
if (Data(element) && Data(element).delayTimer) {
/* Stop the timer from triggering its cached next() function. */
clearTimeout(Data(element).delayTimer.setTimeout);
/* Manually call the next() function so that the subsequent queue items can progress. */
if (Data(element).delayTimer.next) {
Data(element).delayTimer.next();
}
delete Data(element).delayTimer;
}
});
var callsToStop = [];
/* When the stop action is triggered, the elements' currently active call is immediately stopped. The active call might have
been applied to multiple elements, in which case all of the call's elements will be stopped. When an element
is stopped, the next item in its animation queue is immediately triggered. */
/* An additional argument may be passed in to clear an element's remaining queued calls. Either true (which defaults to the "fx" queue)
or a custom queue string can be passed in. */
/* Note: The stop command runs prior to Velocity's Queueing phase since its behavior is intended to take effect *immediately*,
regardless of the element's current queue state. */
/* Iterate through every active call. */
$.each(Velocity.State.calls, function(i, activeCall) {
/* Inactive calls are set to false by the logic inside completeCall(). Skip them. */
if (activeCall) {
/* Iterate through the active call's targeted elements. */
$.each(activeCall[1], function(k, activeElement) {
/* If true was passed in as a secondary argument, clear absolutely all calls on this element. Otherwise, only
clear calls associated with the relevant queue. */
/* Call stopping logic works as follows:
- options === true --> stop current default queue calls (and queue:false calls), including remaining queued ones.
- options === undefined --> stop current queue:"" call and all queue:false calls.
- options === false --> stop only queue:false calls.
- options === "custom" --> stop current queue:"custom" call, including remaining queued ones (there is no functionality to only clear the currently-running queue:"custom" call). */
var queueName = (options === undefined) ? "" : options;
if (queueName !== true && (activeCall[2].queue !== queueName) && !(options === undefined && activeCall[2].queue === false)) {
return true;
}
/* Iterate through the calls targeted by the stop command. */
$.each(elements, function(l, element) {
/* Check that this call was applied to the target element. */
if (element === activeElement) {
/* Optionally clear the remaining queued calls. */
if (options === true || Type.isString(options)) {
/* Iterate through the items in the element's queue. */
$.each($.queue(element, Type.isString(options) ? options : ""), function(_, item) {
/* The queue array can contain an "inprogress" string, which we skip. */
if (Type.isFunction(item)) {
/* Pass the item's callback a flag indicating that we want to abort from the queue call.
(Specifically, the queue will resolve the call's associated promise then abort.) */
item(null, true);
}
});
/* Clearing the $.queue() array is achieved by resetting it to []. */
$.queue(element, Type.isString(options) ? options : "", []);
}
if (propertiesMap === "stop") {
/* Since "reverse" uses cached start values (the previous call's endValues), these values must be
changed to reflect the final value that the elements were actually tweened to. */
/* Note: If only queue:false animations are currently running on an element, it won't have a tweensContainer
object. Also, queue:false animations can't be reversed. */
if (Data(element) && Data(element).tweensContainer && queueName !== false) {
$.each(Data(element).tweensContainer, function(m, activeTween) {
activeTween.endValue = activeTween.currentValue;
});
}
callsToStop.push(i);
} else if (propertiesMap === "finish") {
/* To get active tweens to finish immediately, we forcefully shorten their durations to 1ms so that
they finish upon the next rAf tick then proceed with normal call completion logic. */
activeCall[2].duration = 1;
}
}
});
});
}
});
/* Prematurely call completeCall() on each matched active call. Pass an additional flag for "stop" to indicate
that the complete callback and display:none setting should be skipped since we're completing prematurely. */
if (propertiesMap === "stop") {
$.each(callsToStop, function(i, j) {
completeCall(j, true);
});
if (promiseData.promise) {
/* Immediately resolve the promise associated with this stop call since stop runs synchronously. */
promiseData.resolver(elements);
}
}
/* Since we're stopping, and not proceeding with queueing, exit out of Velocity. */
return getChain();
default:
/* Treat a non-empty plain object as a literal properties map. */
if ($.isPlainObject(propertiesMap) && !Type.isEmptyObject(propertiesMap)) {
action = "start";
/****************
Redirects
****************/
/* Check if a string matches a registered redirect (see Redirects above). */
} else if (Type.isString(propertiesMap) && Velocity.Redirects[propertiesMap]) {
var opts = $.extend({}, options),
durationOriginal = opts.duration,
delayOriginal = opts.delay || 0;
/* If the backwards option was passed in, reverse the element set so that elements animate from the last to the first. */
if (opts.backwards === true) {
elements = $.extend(true, [], elements).reverse();
}
/* Individually trigger the redirect for each element in the set to prevent users from having to handle iteration logic in their redirect. */
$.each(elements, function(elementIndex, element) {
/* If the stagger option was passed in, successively delay each element by the stagger value (in ms). Retain the original delay value. */
if (parseFloat(opts.stagger)) {
opts.delay = delayOriginal + (parseFloat(opts.stagger) * elementIndex);
} else if (Type.isFunction(opts.stagger)) {
opts.delay = delayOriginal + opts.stagger.call(element, elementIndex, elementsLength);
}
/* If the drag option was passed in, successively increase/decrease (depending on the presense of opts.backwards)
the duration of each element's animation, using floors to prevent producing very short durations. */
if (opts.drag) {
/* Default the duration of UI pack effects (callouts and transitions) to 1000ms instead of the usual default duration of 400ms. */
opts.duration = parseFloat(durationOriginal) || (/^(callout|transition)/.test(propertiesMap) ? 1000 : DURATION_DEFAULT);
/* For each element, take the greater duration of: A) animation completion percentage relative to the original duration,
B) 75% of the original duration, or C) a 200ms fallback (in case duration is already set to a low value).
The end result is a baseline of 75% of the redirect's duration that increases/decreases as the end of the element set is approached. */
opts.duration = Math.max(opts.duration * (opts.backwards ? 1 - elementIndex/elementsLength : (elementIndex + 1) / elementsLength), opts.duration * 0.75, 200);
}
/* Pass in the call's opts object so that the redirect can optionally extend it. It defaults to an empty object instead of null to
reduce the opts checking logic required inside the redirect. */
Velocity.Redirects[propertiesMap].call(element, element, opts || {}, elementIndex, elementsLength, elements, promiseData.promise ? promiseData : undefined);
});
/* Since the animation logic resides within the redirect's own code, abort the remainder of this call.
(The performance overhead up to this point is virtually non-existant.) */
/* Note: The jQuery call chain is kept intact by returning the complete element set. */
return getChain();
} else {
var abortError = "Velocity: First argument (" + propertiesMap + ") was not a property map, a known action, or a registered redirect. Aborting.";
if (promiseData.promise) {
promiseData.rejecter(new Error(abortError));
} else {
console.log(abortError);
}
return getChain();
}
}
/**************************
Call-Wide Variables
**************************/
/* A container for CSS unit conversion ratios (e.g. %, rem, and em ==> px) that is used to cache ratios across all elements
being animated in a single Velocity call. Calculating unit ratios necessitates DOM querying and updating, and is therefore
avoided (via caching) wherever possible. This container is call-wide instead of page-wide to avoid the risk of using stale
conversion metrics across Velocity animations that are not immediately consecutively chained. */
var callUnitConversionData = {
lastParent: null,
lastPosition: null,
lastFontSize: null,
lastPercentToPxWidth: null,
lastPercentToPxHeight: null,
lastEmToPx: null,
remToPx: null,
vwToPx: null,
vhToPx: null
};
/* A container for all the ensuing tween data and metadata associated with this call. This container gets pushed to the page-wide
Velocity.State.calls array that is processed during animation ticking. */
var call = [];
/************************
Element Processing
************************/
/* Element processing consists of three parts -- data processing that cannot go stale and data processing that *can* go stale (i.e. third-party style modifications):
1) Pre-Queueing: Element-wide variables, including the element's data storage, are instantiated. Call options are prepared. If triggered, the Stop action is executed.
2) Queueing: The logic that runs once this call has reached its point of execution in the element's $.queue() stack. Most logic is placed here to avoid risking it becoming stale.
3) Pushing: Consolidation of the tween data followed by its push onto the global in-progress calls container.
*/
function processElement () {
/*************************
Part I: Pre-Queueing
*************************/
/***************************
Element-Wide Variables
***************************/
var element = this,
/* The runtime opts object is the extension of the current call's options and Velocity's page-wide option defaults. */
opts = $.extend({}, Velocity.defaults, options),
/* A container for the processed data associated with each property in the propertyMap.
(Each property in the map produces its own "tween".) */
tweensContainer = {},
elementUnitConversionData;
/******************
Element Init
******************/
if (Data(element) === undefined) {
Velocity.init(element);
}
/******************
Option: Delay
******************/
/* Since queue:false doesn't respect the item's existing queue, we avoid injecting its delay here (it's set later on). */
/* Note: Velocity rolls its own delay function since jQuery doesn't have a utility alias for $.fn.delay()
(and thus requires jQuery element creation, which we avoid since its overhead includes DOM querying). */
if (parseFloat(opts.delay) && opts.queue !== false) {
$.queue(element, opts.queue, function(next) {
/* This is a flag used to indicate to the upcoming completeCall() function that this queue entry was initiated by Velocity. See completeCall() for further details. */
Velocity.velocityQueueEntryFlag = true;
/* The ensuing queue item (which is assigned to the "next" argument that $.queue() automatically passes in) will be triggered after a setTimeout delay.
The setTimeout is stored so that it can be subjected to clearTimeout() if this animation is prematurely stopped via Velocity's "stop" command. */
Data(element).delayTimer = {
setTimeout: setTimeout(next, parseFloat(opts.delay)),
next: next
};
});
}
/*********************
Option: Duration
*********************/
/* Support for jQuery's named durations. */
switch (opts.duration.toString().toLowerCase()) {
case "fast":
opts.duration = 200;
break;
case "normal":
opts.duration = DURATION_DEFAULT;
break;
case "slow":
opts.duration = 600;
break;
default:
/* Remove the potential "ms" suffix and default to 1 if the user is attempting to set a duration of 0 (in order to produce an immediate style change). */
opts.duration = parseFloat(opts.duration) || 1;
}
/************************
Global Option: Mock
************************/
if (Velocity.mock !== false) {
/* In mock mode, all animations are forced to 1ms so that they occur immediately upon the next rAF tick.
Alternatively, a multiplier can be passed in to time remap all delays and durations. */
if (Velocity.mock === true) {
opts.duration = opts.delay = 1;
} else {
opts.duration *= parseFloat(Velocity.mock) || 1;
opts.delay *= parseFloat(Velocity.mock) || 1;
}
}
/*******************
Option: Easing
*******************/
opts.easing = getEasing(opts.easing, opts.duration);
/**********************
Option: Callbacks
**********************/
/* Callbacks must be functions. Otherwise, default to null. */
if (opts.begin && !Type.isFunction(opts.begin)) {
opts.begin = null;
}
if (opts.progress && !Type.isFunction(opts.progress)) {
opts.progress = null;
}
if (opts.complete && !Type.isFunction(opts.complete)) {
opts.complete = null;
}
/*********************************
Option: Display & Visibility
*********************************/
/* Refer to Velocity's documentation (VelocityJS.org/#displayAndVisibility) for a description of the display and visibility options' behavior. */
/* Note: We strictly check for undefined instead of falsiness because display accepts an empty string value. */
if (opts.display !== undefined && opts.display !== null) {
opts.display = opts.display.toString().toLowerCase();
/* Users can pass in a special "auto" value to instruct Velocity to set the element to its default display value. */
if (opts.display === "auto") {
opts.display = Velocity.CSS.Values.getDisplayType(element);
}
}
if (opts.visibility !== undefined && opts.visibility !== null) {
opts.visibility = opts.visibility.toString().toLowerCase();
}
/**********************
Option: mobileHA
**********************/
/* When set to true, and if this is a mobile device, mobileHA automatically enables hardware acceleration (via a null transform hack)
on animating elements. HA is removed from the element at the completion of its animation. */
/* Note: Android Gingerbread doesn't support HA. If a null transform hack (mobileHA) is in fact set, it will prevent other tranform subproperties from taking effect. */
/* Note: You can read more about the use of mobileHA in Velocity's documentation: VelocityJS.org/#mobileHA. */
opts.mobileHA = (opts.mobileHA && Velocity.State.isMobile && !Velocity.State.isGingerbread);
/***********************
Part II: Queueing
***********************/
/* When a set of elements is targeted by a Velocity call, the set is broken up and each element has the current Velocity call individually queued onto it.
In this way, each element's existing queue is respected; some elements may already be animating and accordingly should not have this current Velocity call triggered immediately. */
/* In each queue, tween data is processed for each animating property then pushed onto the call-wide calls array. When the last element in the set has had its tweens processed,
the call array is pushed to Velocity.State.calls for live processing by the requestAnimationFrame tick. */
function buildQueue (next) {
/*******************
Option: Begin
*******************/
/* The begin callback is fired once per call -- not once per element -- and is passed the full raw DOM element set as both its context and its first argument. */
if (opts.begin && elementsIndex === 0) {
/* We throw callbacks in a setTimeout so that thrown errors don't halt the execution of Velocity itself. */
try {
opts.begin.call(elements, elements);
} catch (error) {
setTimeout(function() { throw error; }, 1);
}
}
/*****************************************
Tween Data Construction (for Scroll)
*****************************************/
/* Note: In order to be subjected to chaining and animation options, scroll's tweening is routed through Velocity as if it were a standard CSS property animation. */
if (action === "scroll") {
/* The scroll action uniquely takes an optional "offset" option -- specified in pixels -- that offsets the targeted scroll position. */
var scrollDirection = (/^x$/i.test(opts.axis) ? "Left" : "Top"),
scrollOffset = parseFloat(opts.offset) || 0,
scrollPositionCurrent,
scrollPositionCurrentAlternate,
scrollPositionEnd;
/* Scroll also uniquely takes an optional "container" option, which indicates the parent element that should be scrolled --
as opposed to the browser window itself. This is useful for scrolling toward an element that's inside an overflowing parent element. */
if (opts.container) {
/* Ensure that either a jQuery object or a raw DOM element was passed in. */
if (Type.isWrapped(opts.container) || Type.isNode(opts.container)) {
/* Extract the raw DOM element from the jQuery wrapper. */
opts.container = opts.container[0] || opts.container;
/* Note: Unlike other properties in Velocity, the browser's scroll position is never cached since it so frequently changes
(due to the user's natural interaction with the page). */
scrollPositionCurrent = opts.container["scroll" + scrollDirection]; /* GET */
/* $.position() values are relative to the container's currently viewable area (without taking into account the container's true dimensions
-- say, for example, if the container was not overflowing). Thus, the scroll end value is the sum of the child element's position *and*
the scroll container's current scroll position. */
scrollPositionEnd = (scrollPositionCurrent + $(element).position()[scrollDirection.toLowerCase()]) + scrollOffset; /* GET */
/* If a value other than a jQuery object or a raw DOM element was passed in, default to null so that this option is ignored. */
} else {
opts.container = null;
}
} else {
/* If the window itself is being scrolled -- not a containing element -- perform a live scroll position lookup using
the appropriate cached property names (which differ based on browser type). */
scrollPositionCurrent = Velocity.State.scrollAnchor[Velocity.State["scrollProperty" + scrollDirection]]; /* GET */
/* When scrolling the browser window, cache the alternate axis's current value since window.scrollTo() doesn't let us change only one value at a time. */
scrollPositionCurrentAlternate = Velocity.State.scrollAnchor[Velocity.State["scrollProperty" + (scrollDirection === "Left" ? "Top" : "Left")]]; /* GET */
/* Unlike $.position(), $.offset() values are relative to the browser window's true dimensions -- not merely its currently viewable area --
and therefore end values do not need to be compounded onto current values. */
scrollPositionEnd = $(element).offset()[scrollDirection.toLowerCase()] + scrollOffset; /* GET */
}
/* Since there's only one format that scroll's associated tweensContainer can take, we create it manually. */
tweensContainer = {
scroll: {
rootPropertyValue: false,
startValue: scrollPositionCurrent,
currentValue: scrollPositionCurrent,
endValue: scrollPositionEnd,
unitType: "",
easing: opts.easing,
scrollData: {
container: opts.container,
direction: scrollDirection,
alternateValue: scrollPositionCurrentAlternate
}
},
element: element
};
if (Velocity.debug) console.log("tweensContainer (scroll): ", tweensContainer.scroll, element);
/******************************************
Tween Data Construction (for Reverse)
******************************************/
/* Reverse acts like a "start" action in that a property map is animated toward. The only difference is
that the property map used for reverse is the inverse of the map used in the previous call. Thus, we manipulate
the previous call to construct our new map: use the previous map's end values as our new map's start values. Copy over all other data. */
/* Note: Reverse can be directly called via the "reverse" parameter, or it can be indirectly triggered via the loop option. (Loops are composed of multiple reverses.) */
/* Note: Reverse calls do not need to be consecutively chained onto a currently-animating element in order to operate on cached values;
there is no harm to reverse being called on a potentially stale data cache since reverse's behavior is simply defined
as reverting to the element's values as they were prior to the previous *Velocity* call. */
} else if (action === "reverse") {
/* Abort if there is no prior animation data to reverse to. */
if (!Data(element).tweensContainer) {
/* Dequeue the element so that this queue entry releases itself immediately, allowing subsequent queue entries to run. */
$.dequeue(element, opts.queue);
return;
} else {
/*********************
Options Parsing
*********************/
/* If the element was hidden via the display option in the previous call,
revert display to "auto" prior to reversal so that the element is visible again. */
if (Data(element).opts.display === "none") {
Data(element).opts.display = "auto";
}
if (Data(element).opts.visibility === "hidden") {
Data(element).opts.visibility = "visible";
}
/* If the loop option was set in the previous call, disable it so that "reverse" calls aren't recursively generated.
Further, remove the previous call's callback options; typically, users do not want these to be refired. */
Data(element).opts.loop = false;
Data(element).opts.begin = null;
Data(element).opts.complete = null;
/* Since we're extending an opts object that has already been extended with the defaults options object,
we remove non-explicitly-defined properties that are auto-assigned values. */
if (!options.easing) {
delete opts.easing;
}
if (!options.duration) {
delete opts.duration;
}
/* The opts object used for reversal is an extension of the options object optionally passed into this
reverse call plus the options used in the previous Velocity call. */
opts = $.extend({}, Data(element).opts, opts);
/*************************************
Tweens Container Reconstruction
*************************************/
/* Create a deep copy (indicated via the true flag) of the previous call's tweensContainer. */
var lastTweensContainer = $.extend(true, {}, Data(element).tweensContainer);
/* Manipulate the previous tweensContainer by replacing its end values and currentValues with its start values. */
for (var lastTween in lastTweensContainer) {
/* In addition to tween data, tweensContainers contain an element property that we ignore here. */
if (lastTween !== "element") {
var lastStartValue = lastTweensContainer[lastTween].startValue;
lastTweensContainer[lastTween].startValue = lastTweensContainer[lastTween].currentValue = lastTweensContainer[lastTween].endValue;
lastTweensContainer[lastTween].endValue = lastStartValue;
/* Easing is the only option that embeds into the individual tween data (since it can be defined on a per-property basis).
Accordingly, every property's easing value must be updated when an options object is passed in with a reverse call.
The side effect of this extensibility is that all per-property easing values are forcefully reset to the new value. */
if (!Type.isEmptyObject(options)) {
lastTweensContainer[lastTween].easing = opts.easing;
}
if (Velocity.debug) console.log("reverse tweensContainer (" + lastTween + "): " + JSON.stringify(lastTweensContainer[lastTween]), element);
}
}
tweensContainer = lastTweensContainer;
}
/*****************************************
Tween Data Construction (for Start)
*****************************************/
} else if (action === "start") {
/*************************
Value Transferring
*************************/
/* If this queue entry follows a previous Velocity-initiated queue entry *and* if this entry was created
while the element was in the process of being animated by Velocity, then this current call is safe to use
the end values from the prior call as its start values. Velocity attempts to perform this value transfer
process whenever possible in order to avoid requerying the DOM. */
/* If values aren't transferred from a prior call and start values were not forcefed by the user (more on this below),
then the DOM is queried for the element's current values as a last resort. */
/* Note: Conversely, animation reversal (and looping) *always* perform inter-call value transfers; they never requery the DOM. */
var lastTweensContainer;
/* The per-element isAnimating flag is used to indicate whether it's safe (i.e. the data isn't stale)
to transfer over end values to use as start values. If it's set to true and there is a previous
Velocity call to pull values from, do so. */
if (Data(element).tweensContainer && Data(element).isAnimating === true) {
lastTweensContainer = Data(element).tweensContainer;
}
/***************************
Tween Data Calculation
***************************/
/* This function parses property data and defaults endValue, easing, and startValue as appropriate. */
/* Property map values can either take the form of 1) a single value representing the end value,
   or 2) an array in the form of [ endValue, [, easing] [, startValue] ].
   The optional third parameter is a forcefed startValue to be used instead of querying the DOM for
   the element's current value. Read Velocity's documentation to learn more about forcefeeding: VelocityJS.org/#forcefeeding */
/* Normalize a property map entry into its [ endValue, easing, startValue ] triplet.
   Entries arrive either as a bare end value or as an array overload:
   A) [ endValue, easing, startValue ], B) [ endValue, easing ], or C) [ endValue, startValue ]. */
function parsePropertyValue (valueData, skipResolvingEasing) {
    var endValue, easing, startValue;
    if (!Type.isArray(valueData)) {
        /* Single-value shorthand: the datum *is* the end value. */
        endValue = valueData;
    } else {
        /* The end value is always the array's first item; validation of its contents
           is deferred to the property cycling logic that consumes this triplet. */
        endValue = valueData[0];
        var secondItem = valueData[1];
        /* A number-like string, a function, or a hex color in slot two must be a start
           value, since easings are only ever non-hex strings or arrays. */
        var secondIsStartValue = (!Type.isArray(secondItem) && /^[\d-]/.test(secondItem)) || Type.isFunction(secondItem) || CSS.RegEx.isHex.test(secondItem);
        if (secondIsStartValue) {
            startValue = secondItem;
        } else if ((Type.isString(secondItem) && !CSS.RegEx.isHex.test(secondItem)) || Type.isArray(secondItem)) {
            /* Slot two is an easing; resolve it now unless the caller asked us not to. */
            easing = skipResolvingEasing ? secondItem : getEasing(secondItem, opts.duration);
            /* Three-item overload: slot three (if present) is a forcefed start value. */
            if (valueData[2] !== undefined) {
                startValue = valueData[2];
            }
        }
    }
    /* Fall back to the call-wide easing when no per-property easing was supplied. */
    if (!skipResolvingEasing) {
        easing = easing || opts.easing;
    }
    /* Value functions are invoked with the element as context plus its index and the
       set's size, and their return value is used in place of the function. */
    if (Type.isFunction(endValue)) {
        endValue = endValue.call(element, elementsIndex, elementsLength);
    }
    if (Type.isFunction(startValue)) {
        startValue = startValue.call(element, elementsIndex, elementsLength);
    }
    /* startValue stays undefined when not forcefed so downstream code can detect that. */
    return [ endValue || 0, easing, startValue ];
}
/* Cycle through each property in the map, looking for shorthand color properties (e.g. "color" as opposed to "colorRed"). Inject the corresponding
colorRed, colorGreen, and colorBlue RGB component tweens into the propertiesMap (which Velocity understands) and remove the shorthand property. */
$.each(propertiesMap, function(property, value) {
/* Find shorthand color properties that have been passed a hex string. */
if (RegExp("^" + CSS.Lists.colors.join("$|^") + "$").test(property)) {
/* Parse the value data for each shorthand. */
var valueData = parsePropertyValue(value, true),
endValue = valueData[0],
easing = valueData[1],
startValue = valueData[2];
if (CSS.RegEx.isHex.test(endValue)) {
/* Convert the hex strings into their RGB component arrays. */
var colorComponents = [ "Red", "Green", "Blue" ],
endValueRGB = CSS.Values.hexToRgb(endValue),
startValueRGB = startValue ? CSS.Values.hexToRgb(startValue) : undefined;
/* Inject the RGB component tweens into propertiesMap. */
for (var i = 0; i < colorComponents.length; i++) {
var dataArray = [ endValueRGB[i] ];
if (easing) {
dataArray.push(easing);
}
if (startValueRGB !== undefined) {
dataArray.push(startValueRGB[i]);
}
propertiesMap[property + colorComponents[i]] = dataArray;
}
/* Remove the intermediary shorthand property entry now that we've processed it. */
delete propertiesMap[property];
}
}
});
/* Create a tween out of each property, and append its associated data to tweensContainer. */
for (var property in propertiesMap) {
/**************************
Start Value Sourcing
**************************/
/* Parse out endValue, easing, and startValue from the property's data. */
var valueData = parsePropertyValue(propertiesMap[property]),
endValue = valueData[0],
easing = valueData[1],
startValue = valueData[2];
/* Now that the original property name's format has been used for the parsePropertyValue() lookup above,
we force the property to its camelCase styling to normalize it for manipulation. */
property = CSS.Names.camelCase(property);
/* In case this property is a hook, there are circumstances where we will intend to work on the hook's root property and not the hooked subproperty. */
var rootProperty = CSS.Hooks.getRoot(property),
rootPropertyValue = false;
/* Other than for the dummy tween property, properties that are not supported by the browser (and do not have an associated normalization) will
inherently produce no style changes when set, so they are skipped in order to decrease animation tick overhead.
Property support is determined via prefixCheck(), which returns a false flag when no supported is detected. */
/* Note: Since SVG elements have some of their properties directly applied as HTML attributes,
   there is no way to check for their explicit browser support, and so we skip this check for them. */
if (!Data(element).isSVG && rootProperty !== "tween" && CSS.Names.prefixCheck(rootProperty)[1] === false && CSS.Normalizations.registered[rootProperty] === undefined) {
if (Velocity.debug) console.log("Skipping [" + rootProperty + "] due to a lack of browser support.");
continue;
}
/* If the display option is being set to a non-"none" (e.g. "block") and opacity (filter on IE<=8) is being
animated to an endValue of non-zero, the user's intention is to fade in from invisible, thus we forcefeed opacity
a startValue of 0 if its startValue hasn't already been sourced by value transferring or prior forcefeeding. */
if (((opts.display !== undefined && opts.display !== null && opts.display !== "none") || (opts.visibility !== undefined && opts.visibility !== "hidden")) && /opacity|filter/.test(property) && !startValue && endValue !== 0) {
startValue = 0;
}
/* If values have been transferred from the previous Velocity call, extract the endValue and rootPropertyValue
for all of the current call's properties that were *also* animated in the previous call. */
/* Note: Value transferring can optionally be disabled by the user via the _cacheValues option. */
if (opts._cacheValues && lastTweensContainer && lastTweensContainer[property]) {
if (startValue === undefined) {
startValue = lastTweensContainer[property].endValue + lastTweensContainer[property].unitType;
}
/* The previous call's rootPropertyValue is extracted from the element's data cache since that's the
instance of rootPropertyValue that gets freshly updated by the tweening process, whereas the rootPropertyValue
attached to the incoming lastTweensContainer is equal to the root property's value prior to any tweening. */
rootPropertyValue = Data(element).rootPropertyValueCache[rootProperty];
/* If values were not transferred from a previous Velocity call, query the DOM as needed. */
} else {
/* Handle hooked properties. */
if (CSS.Hooks.registered[property]) {
if (startValue === undefined) {
rootPropertyValue = CSS.getPropertyValue(element, rootProperty); /* GET */
/* Note: The following getPropertyValue() call does not actually trigger a DOM query;
getPropertyValue() will extract the hook from rootPropertyValue. */
startValue = CSS.getPropertyValue(element, property, rootPropertyValue);
/* If startValue is already defined via forcefeeding, do not query the DOM for the root property's value;
just grab rootProperty's zero-value template from CSS.Hooks. This overwrites the element's actual
root property value (if one is set), but this is acceptable since the primary reason users forcefeed is
to avoid DOM queries, and thus we likewise avoid querying the DOM for the root property's value. */
} else {
/* Grab this hook's zero-value template, e.g. "0px 0px 0px black". */
rootPropertyValue = CSS.Hooks.templates[rootProperty][1];
}
/* Handle non-hooked properties that haven't already been defined via forcefeeding. */
} else if (startValue === undefined) {
startValue = CSS.getPropertyValue(element, property); /* GET */
}
}
/**************************
Value Data Extraction
**************************/
var separatedValue,
endValueUnitType,
startValueUnitType,
operator = false;
/* Separates a property value into its numeric value and its unit type. */
/* Split a CSS value into its numeric portion and its trailing unit type. */
function separateValue (property, value) {
    var extractedUnit;
    var normalized = (value || "0").toString().toLowerCase();
    /* Peel the unit suffix (e.g. "px", "%", "deg") off the value, capturing it as we go. */
    var numberPart = normalized.replace(/[%A-z]+$/, function(match) {
        extractedUnit = match;
        return "";
    });
    /* With no explicit unit, fall back to the property's conventional unit
       (e.g. "deg" for rotateZ, "px" for width). */
    if (!extractedUnit) {
        extractedUnit = CSS.Values.getUnitType(property);
    }
    return [ numberPart, extractedUnit ];
}
/* Separate startValue. */
separatedValue = separateValue(property, startValue);
startValue = separatedValue[0];
startValueUnitType = separatedValue[1];
/* Separate endValue, and extract a value operator (e.g. "+=", "-=") if one exists. */
separatedValue = separateValue(property, endValue);
endValue = separatedValue[0].replace(/^([+-\/*])=/, function(match, subMatch) {
operator = subMatch;
/* Strip the operator off of the value. */
return "";
});
endValueUnitType = separatedValue[1];
/* Parse float values from endValue and startValue. Default to 0 if NaN is returned. */
startValue = parseFloat(startValue) || 0;
endValue = parseFloat(endValue) || 0;
/***************************************
Property-Specific Value Conversion
***************************************/
/* Custom support for properties that don't actually accept the % unit type, but where pollyfilling is trivial and relatively foolproof. */
if (endValueUnitType === "%") {
/* A %-value fontSize/lineHeight is relative to the parent's fontSize (as opposed to the parent's dimensions),
which is identical to the em unit's behavior, so we piggyback off of that. */
if (/^(fontSize|lineHeight)$/.test(property)) {
/* Convert % into an em decimal value. */
endValue = endValue / 100;
endValueUnitType = "em";
/* For scaleX and scaleY, convert the value into its decimal format and strip off the unit type. */
} else if (/^scale/.test(property)) {
endValue = endValue / 100;
endValueUnitType = "";
/* For RGB components, take the defined percentage of 255 and strip off the unit type. */
} else if (/(Red|Green|Blue)$/i.test(property)) {
endValue = (endValue / 100) * 255;<|fim▁hole|> endValueUnitType = "";
}
}
/***************************
Unit Ratio Calculation
***************************/
/* When queried, the browser returns (most) CSS property values in pixels. Therefore, if an endValue with a unit type of
%, em, or rem is animated toward, startValue must be converted from pixels into the same unit type as endValue in order
for value manipulation logic (increment/decrement) to proceed. Further, if the startValue was forcefed or transferred
from a previous call, startValue may also not be in pixels. Unit conversion logic therefore consists of two steps:
1) Calculating the ratio of %/em/rem/vh/vw relative to pixels
2) Converting startValue into the same unit of measurement as endValue based on these ratios. */
/* Unit conversion ratios are calculated by inserting a sibling node next to the target node, copying over its position property,
setting values with the target unit type then comparing the returned pixel value. */
/* Note: Even if only one of these unit types is being animated, all unit ratios are calculated at once since the overhead
of batching the SETs and GETs together upfront outweights the potential overhead
of layout thrashing caused by re-querying for uncalculated ratios for subsequently-processed properties. */
/* Todo: Shift this logic into the calls' first tick instance so that it's synced with RAF. */
/* Computes this element's unit-conversion ratios and returns them as an object:
{ percentToPxWidth, percentToPxHeight, emToPx, remToPx, vwToPx, vhToPx },
each expressing how many pixels one unit of that type currently equals.
Relies on closure state: element, callUnitConversionData, CSS, Data, Velocity, $. */
function calculateUnitRatios () {
/************************
Same Ratio Checks
************************/
/* The properties below are used to determine whether the element differs sufficiently from this call's
previously iterated element to also differ in its unit conversion ratios. If the properties match up with those
of the prior element, the prior element's conversion ratios are used. Like most optimizations in Velocity,
this is done to minimize DOM querying. */
var sameRatioIndicators = {
myParent: element.parentNode || document.body, /* GET */
position: CSS.getPropertyValue(element, "position"), /* GET */
fontSize: CSS.getPropertyValue(element, "fontSize") /* GET */
},
/* Determine if the same % ratio can be used. % is based on the element's position value and its parent's width and height dimensions. */
samePercentRatio = ((sameRatioIndicators.position === callUnitConversionData.lastPosition) && (sameRatioIndicators.myParent === callUnitConversionData.lastParent)),
/* Determine if the same em ratio can be used. em is relative to the element's fontSize. */
sameEmRatio = (sameRatioIndicators.fontSize === callUnitConversionData.lastFontSize);
/* Store these ratio indicators call-wide for the next element to compare against. */
callUnitConversionData.lastParent = sameRatioIndicators.myParent;
callUnitConversionData.lastPosition = sameRatioIndicators.position;
callUnitConversionData.lastFontSize = sameRatioIndicators.fontSize;
/***************************
Element-Specific Units
***************************/
/* Note: IE8 rounds to the nearest pixel when returning CSS values, thus we perform conversions using a measurement
of 100 (instead of 1) to give our ratios a precision of at least 2 decimal values. */
var measurement = 100,
unitRatios = {};
if (!sameEmRatio || !samePercentRatio) {
/* SVG elements need an SVG-namespaced dummy so that the browser computes styles for it at all. */
var dummy = Data(element).isSVG ? document.createElementNS("http://www.w3.org/2000/svg", "rect") : document.createElement("div");
Velocity.init(dummy);
sameRatioIndicators.myParent.appendChild(dummy);
/* To accurately and consistently calculate conversion ratios, the element's cascaded overflow and box-sizing are stripped.
Similarly, since width/height can be artificially constrained by their min-/max- equivalents, these are controlled for as well. */
/* Note: Overflow must be also be controlled for per-axis since the overflow property overwrites its per-axis values. */
$.each([ "overflow", "overflowX", "overflowY" ], function(i, property) {
Velocity.CSS.setPropertyValue(dummy, property, "hidden");
});
Velocity.CSS.setPropertyValue(dummy, "position", sameRatioIndicators.position);
Velocity.CSS.setPropertyValue(dummy, "fontSize", sameRatioIndicators.fontSize);
Velocity.CSS.setPropertyValue(dummy, "boxSizing", "content-box");
/* width and height act as our proxy properties for measuring the horizontal and vertical % ratios. */
$.each([ "minWidth", "maxWidth", "width", "minHeight", "maxHeight", "height" ], function(i, property) {
Velocity.CSS.setPropertyValue(dummy, property, measurement + "%");
});
/* paddingLeft arbitrarily acts as our proxy property for the em ratio. */
Velocity.CSS.setPropertyValue(dummy, "paddingLeft", measurement + "em");
/* Divide the returned value by the measurement to get the ratio between 1% and 1px. Default to 1 since working with 0 can produce Infinity. */
unitRatios.percentToPxWidth = callUnitConversionData.lastPercentToPxWidth = (parseFloat(CSS.getPropertyValue(dummy, "width", null, true)) || 1) / measurement; /* GET */
unitRatios.percentToPxHeight = callUnitConversionData.lastPercentToPxHeight = (parseFloat(CSS.getPropertyValue(dummy, "height", null, true)) || 1) / measurement; /* GET */
unitRatios.emToPx = callUnitConversionData.lastEmToPx = (parseFloat(CSS.getPropertyValue(dummy, "paddingLeft")) || 1) / measurement; /* GET */
sameRatioIndicators.myParent.removeChild(dummy);
} else {
/* The prior element's cached ratios apply; skip the DOM measurement entirely. */
unitRatios.emToPx = callUnitConversionData.lastEmToPx;
unitRatios.percentToPxWidth = callUnitConversionData.lastPercentToPxWidth;
unitRatios.percentToPxHeight = callUnitConversionData.lastPercentToPxHeight;
}
/***************************
Element-Agnostic Units
***************************/
/* Whereas % and em ratios are determined on a per-element basis, the rem unit only needs to be checked
once per call since it's exclusively dependent upon document.body's fontSize. If this is the first time
that calculateUnitRatios() is being run during this call, remToPx will still be set to its default value of null,
so we calculate it now. */
if (callUnitConversionData.remToPx === null) {
/* Default to browsers' default fontSize of 16px in the case of 0. */
callUnitConversionData.remToPx = parseFloat(CSS.getPropertyValue(document.body, "fontSize")) || 16; /* GET */
}
/* Similarly, viewport units are %-relative to the window's inner dimensions. */
if (callUnitConversionData.vwToPx === null) {
callUnitConversionData.vwToPx = parseFloat(window.innerWidth) / 100; /* GET */
callUnitConversionData.vhToPx = parseFloat(window.innerHeight) / 100; /* GET */
}
unitRatios.remToPx = callUnitConversionData.remToPx;
unitRatios.vwToPx = callUnitConversionData.vwToPx;
unitRatios.vhToPx = callUnitConversionData.vhToPx;
if (Velocity.debug >= 1) console.log("Unit ratios: " + JSON.stringify(unitRatios), element);
return unitRatios;
}
/********************
Unit Conversion
********************/
/* The * and / operators, which are not passed in with an associated unit, inherently use startValue's unit. Skip value and unit conversion. */
if (/[\/*]/.test(operator)) {
endValueUnitType = startValueUnitType;
/* If startValue and endValue differ in unit type, convert startValue into the same unit type as endValue so that if endValueUnitType
is a relative unit (%, em, rem), the values set during tweening will continue to be accurately relative even if the metrics they depend
on are dynamically changing during the course of the animation. Conversely, if we always normalized into px and used px for setting values, the px ratio
would become stale if the original unit being animated toward was relative and the underlying metrics change during the animation. */
/* Since 0 is 0 in any unit type, no conversion is necessary when startValue is 0 -- we just start at 0 with endValueUnitType. */
} else if ((startValueUnitType !== endValueUnitType) && startValue !== 0) {
/* Unit conversion is also skipped when endValue is 0, but *startValueUnitType* must be used for tween values to remain accurate. */
/* Note: Skipping unit conversion here means that if endValueUnitType was originally a relative unit, the animation won't relatively
match the underlying metrics if they change, but this is acceptable since we're animating toward invisibility instead of toward visibility,
which remains past the point of the animation's completion. */
if (endValue === 0) {
endValueUnitType = startValueUnitType;
} else {
/* By this point, we cannot avoid unit conversion (it's undesirable since it causes layout thrashing).
If we haven't already, we trigger calculateUnitRatios(), which runs once per element per call. */
elementUnitConversionData = elementUnitConversionData || calculateUnitRatios();
/* The following RegEx matches CSS properties that have their % values measured relative to the x-axis. */
/* Note: W3C spec mandates that all of margin and padding's properties (even top and bottom) are %-relative to the *width* of the parent element. */
var axis = (/margin|padding|left|right|width|text|word|letter/i.test(property) || /X$/.test(property) || property === "x") ? "x" : "y";
/* In order to avoid generating n^2 bespoke conversion functions, unit conversion is a two-step process:
1) Convert startValue into pixels. 2) Convert this new pixel value into endValue's unit type. */
switch (startValueUnitType) {
case "%":
/* Note: translateX and translateY are the only properties that are %-relative to an element's own dimensions -- not its parent's dimensions.
Velocity does not include a special conversion process to account for this behavior. Therefore, animating translateX/Y from a % value
to a non-% value will produce an incorrect start value. Fortunately, this sort of cross-unit conversion is rarely done by users in practice. */
startValue *= (axis === "x" ? elementUnitConversionData.percentToPxWidth : elementUnitConversionData.percentToPxHeight);
break;
case "px":
/* px acts as our midpoint in the unit conversion process; do nothing. */
break;
default:
startValue *= elementUnitConversionData[startValueUnitType + "ToPx"];
}
/* Invert the px ratios to convert into to the target unit. */
switch (endValueUnitType) {
case "%":
startValue *= 1 / (axis === "x" ? elementUnitConversionData.percentToPxWidth : elementUnitConversionData.percentToPxHeight);
break;
case "px":
/* startValue is already in px, do nothing; we're done. */
break;
default:
startValue *= 1 / elementUnitConversionData[endValueUnitType + "ToPx"];
}
}
}
/*********************
Relative Values
*********************/
/* Operator logic must be performed last since it requires unit-normalized start and end values. */
/* Note: Relative *percent values* do not behave how most people think; while one would expect "+=50%"
to increase the property 1.5x its current value, it in fact increases the percent units in absolute terms:
50 points is added on top of the current % value. */
switch (operator) {
case "+":
endValue = startValue + endValue;
break;
case "-":
endValue = startValue - endValue;
break;
case "*":
endValue = startValue * endValue;
break;
case "/":
endValue = startValue / endValue;
break;
}
/**************************
tweensContainer Push
**************************/
/* Construct the per-property tween object, and push it to the element's tweensContainer. */
tweensContainer[property] = {
rootPropertyValue: rootPropertyValue,
startValue: startValue,
currentValue: startValue,
endValue: endValue,
unitType: endValueUnitType,
easing: easing
};
if (Velocity.debug) console.log("tweensContainer (" + property + "): " + JSON.stringify(tweensContainer[property]), element);
}
/* Along with its property data, store a reference to the element itself onto tweensContainer. */
tweensContainer.element = element;
}
/*****************
Call Push
*****************/
/* Note: tweensContainer can be empty if all of the properties in this call's property map were skipped due to not
being supported by the browser. The element property is used for checking that the tweensContainer has been appended to. */
if (tweensContainer.element) {
/* Apply the "velocity-animating" indicator class. */
CSS.Values.addClass(element, "velocity-animating");
/* The call array houses the tweensContainers for each element being animated in the current call. */
call.push(tweensContainer);
/* Store the tweensContainer and options if we're working on the default effects queue, so that they can be used by the reverse command. */
if (opts.queue === "") {
Data(element).tweensContainer = tweensContainer;
Data(element).opts = opts;
}
/* Switch on the element's animating flag. */
Data(element).isAnimating = true;
/* Once the final element in this call's element set has been processed, push the call array onto
Velocity.State.calls for the animation tick to immediately begin processing. */
if (elementsIndex === elementsLength - 1) {
/* Add the current call plus its associated metadata (the element set and the call's options) onto the global call container.
Anything on this call container is subjected to tick() processing. */
Velocity.State.calls.push([ call, elements, opts, null, promiseData.resolver ]);
/* If the animation tick isn't running, start it. (Velocity shuts it off when there are no active calls to process.) */
if (Velocity.State.isTicking === false) {
Velocity.State.isTicking = true;
/* Start the tick loop. */
tick();
}
} else {
elementsIndex++;
}
}
}
/* When the queue option is set to false, the call skips the element's queue and fires immediately. */
if (opts.queue === false) {
/* Since this buildQueue call doesn't respect the element's existing queue (which is where a delay option would have been appended),
we manually inject the delay property here with an explicit setTimeout. */
if (opts.delay) {
setTimeout(buildQueue, opts.delay);
} else {
buildQueue();
}
/* Otherwise, the call undergoes element queueing as normal. */
/* Note: To interoperate with jQuery, Velocity uses jQuery's own $.queue() stack for queuing logic. */
} else {
$.queue(element, opts.queue, function(next, clearQueue) {
/* If the clearQueue flag was passed in by the stop command, resolve this call's promise. (Promises can only be resolved once,
so it's fine if this is repeatedly triggered for each element in the associated call.) */
if (clearQueue === true) {
if (promiseData.promise) {
promiseData.resolver(elements);
}
/* Do not continue with animation queueing. */
return true;
}
/* This flag indicates to the upcoming completeCall() function that this queue entry was initiated by Velocity.
See completeCall() for further details. */
Velocity.velocityQueueEntryFlag = true;
buildQueue(next);
});
}
/*********************
Auto-Dequeuing
*********************/
/* As per jQuery's $.queue() behavior, to fire the first non-custom-queue entry on an element, the element
must be dequeued if its queue stack consists *solely* of the current call. (This can be determined by checking
for the "inprogress" item that jQuery prepends to active queue stack arrays.) Regardless, whenever the element's
queue is further appended with additional items -- including $.delay()'s or even $.animate() calls, the queue's
first entry is automatically fired. This behavior contrasts that of custom queues, which never auto-fire. */
/* Note: When an element set is being subjected to a non-parallel Velocity call, the animation will not begin until
each one of the elements in the set has reached the end of its individually pre-existing queue chain. */
/* Note: Unfortunately, most people don't fully grasp jQuery's powerful, yet quirky, $.queue() function.
Learn more here: http://stackoverflow.com/questions/1058158/can-somebody-explain-jquery-queue-to-me */
if ((opts.queue === "" || opts.queue === "fx") && $.queue(element)[0] !== "inprogress") {
$.dequeue(element);
}
}
/**************************
Element Set Iteration
**************************/
/* If the "nodeType" property exists on the elements variable, we're animating a single element.
Place it in an array so that $.each() can iterate over it. */
$.each(elements, function(i, element) {
/* Ensure each element in a set has a nodeType (is a real element) to avoid throwing errors. */
if (Type.isNode(element)) {
processElement.call(element);
}
});
/******************
Option: Loop
******************/
/* The loop option accepts an integer indicating how many times the element should loop between the values in the
current call's properties map and the element's property values prior to this call. */
/* Note: The loop option's logic is performed here -- after element processing -- because the current call needs
to undergo its queue insertion prior to the loop option generating its series of constituent "reverse" calls,
which chain after the current call. Two reverse calls (two "alternations") constitute one loop. */
var opts = $.extend({}, Velocity.defaults, options),
reverseCallsCount;
opts.loop = parseInt(opts.loop);
reverseCallsCount = (opts.loop * 2) - 1;
if (opts.loop) {
/* Double the loop count to convert it into its appropriate number of "reverse" calls.
Subtract 1 from the resulting value since the current call is included in the total alternation count. */
for (var x = 0; x < reverseCallsCount; x++) {
/* Since the logic for the reverse action occurs inside Queueing and therefore this call's options object
isn't parsed until then as well, the current call's delay option must be explicitly passed into the reverse
call so that the delay logic that occurs inside *Pre-Queueing* can process it. */
var reverseOptions = {
delay: opts.delay,
progress: opts.progress
};
/* If a complete callback was passed into this call, transfer it to the loop redirect's final "reverse" call
so that it's triggered when the entire redirect is complete (and not when the very first animation is complete). */
if (x === reverseCallsCount - 1) {
reverseOptions.display = opts.display;
reverseOptions.visibility = opts.visibility;
reverseOptions.complete = opts.complete;
}
animate(elements, "reverse", reverseOptions);
}
}
/***************
Chaining
***************/
/* Return the elements back to the call chain, with wrapped elements taking precedence in case Velocity was called via the $.fn. extension. */
return getChain();
};
/* Turn Velocity into the animation function, extended with the pre-existing Velocity object. */
Velocity = $.extend(animate, Velocity);
/* For legacy support, also expose the literal animate method. */
Velocity.animate = animate;
/**************
Timing
**************/
/* Ticker function. */
var ticker = window.requestAnimationFrame || rAFShim;
/* Inactive browser tabs pause rAF, which results in all active animations immediately sprinting to their completion states when the tab refocuses.
To get around this, we dynamically switch rAF to setTimeout (which the browser *doesn't* pause) when the tab loses focus. We skip this for mobile
devices to avoid wasting battery power on inactive tabs. */
/* Note: Tab focus detection doesn't work on older versions of IE, but that's okay since they don't support rAF to begin with. */
if (!Velocity.State.isMobile && document.hidden !== undefined) {
document.addEventListener("visibilitychange", function() {
/* Reassign the rAF function (which the global tick() function uses) based on the tab's focus state. */
if (document.hidden) {
ticker = function(callback) {
/* The tick function needs a truthy first argument in order to pass its internal timestamp check. */
return setTimeout(function() { callback(true) }, 16);
};
/* The rAF loop has been paused by the browser, so we manually restart the tick. */
tick();
} else {
ticker = window.requestAnimationFrame || rAFShim;
}
});
}
/************
Tick
************/
/* Note: All calls to Velocity are pushed to the Velocity.State.calls array, which is fully iterated through upon each tick. */
function tick (timestamp) {
/* An empty timestamp argument indicates that this is the first tick occurence since ticking was turned on.
We leverage this metadata to fully ignore the first tick pass since RAF's initial pass is fired whenever
the browser's next tick sync time occurs, which results in the first elements subjected to Velocity
calls being animated out of sync with any elements animated immediately thereafter. In short, we ignore
the first RAF tick pass so that elements being immediately consecutively animated -- instead of simultaneously animated
by the same Velocity call -- are properly batched into the same initial RAF tick and consequently remain in sync thereafter. */
if (timestamp) {
/* We ignore RAF's high resolution timestamp since it can be significantly offset when the browser is
under high stress; we opt for choppiness over allowing the browser to drop huge chunks of frames. */
var timeCurrent = (new Date).getTime();
/********************
Call Iteration
********************/
var callsLength = Velocity.State.calls.length;
/* To speed up iterating over this array, it is compacted (falsey items -- calls that have completed -- are removed)
when its length has ballooned to a point that can impact tick performance. This only becomes necessary when animation
has been continuous with many elements over a long period of time; whenever all active calls are completed, completeCall() clears Velocity.State.calls. */
if (callsLength > 10000) {
Velocity.State.calls = compactSparseArray(Velocity.State.calls);
}
/* Iterate through each active call. */
for (var i = 0; i < callsLength; i++) {
/* When a Velocity call is completed, its Velocity.State.calls entry is set to false. Continue on to the next call. */
if (!Velocity.State.calls[i]) {
continue;
}
/************************
Call-Wide Variables
************************/
var callContainer = Velocity.State.calls[i],
call = callContainer[0],
opts = callContainer[2],
timeStart = callContainer[3],
firstTick = !!timeStart,
tweenDummyValue = null;
/* If timeStart is undefined, then this is the first time that this call has been processed by tick().
We assign timeStart now so that its value is as close to the real animation start time as possible.
(Conversely, had timeStart been defined when this call was added to Velocity.State.calls, the delay
between that time and now would cause the first few frames of the tween to be skipped since
percentComplete is calculated relative to timeStart.) */
/* Further, subtract 16ms (the approximate resolution of RAF) from the current time value so that the
first tick iteration isn't wasted by animating at 0% tween completion, which would produce the
same style value as the element's current value. */
if (!timeStart) {
timeStart = Velocity.State.calls[i][3] = timeCurrent - 16;
}
/* The tween's completion percentage is relative to the tween's start time, not the tween's start value
(which would result in unpredictable tween durations since JavaScript's timers are not particularly accurate).
Accordingly, we ensure that percentComplete does not exceed 1. */
var percentComplete = Math.min((timeCurrent - timeStart) / opts.duration, 1);
/**********************
Element Iteration
**********************/
/* For every call, iterate through each of the elements in its set. */
for (var j = 0, callLength = call.length; j < callLength; j++) {
var tweensContainer = call[j],
element = tweensContainer.element;
/* Check to see if this element has been deleted midway through the animation by checking for the
continued existence of its data cache. If it's gone, skip animating this element. */
if (!Data(element)) {
continue;
}
var transformPropertyExists = false;
/**********************************
Display & Visibility Toggling
**********************************/
/* If the display option is set to non-"none", set it upfront so that the element can become visible before tweening begins.
(Otherwise, display's "none" value is set in completeCall() once the animation has completed.) */
if (opts.display !== undefined && opts.display !== null && opts.display !== "none") {
if (opts.display === "flex") {
var flexValues = [ "-webkit-box", "-moz-box", "-ms-flexbox", "-webkit-flex" ];
$.each(flexValues, function(i, flexValue) {
CSS.setPropertyValue(element, "display", flexValue);
});
}
CSS.setPropertyValue(element, "display", opts.display);
}
/* Same goes with the visibility option, but its "none" equivalent is "hidden". */
if (opts.visibility !== undefined && opts.visibility !== "hidden") {
CSS.setPropertyValue(element, "visibility", opts.visibility);
}
/************************
Property Iteration
************************/
/* For every element, iterate through each property. */
for (var property in tweensContainer) {
/* Note: In addition to property tween data, tweensContainer contains a reference to its associated element. */
if (property !== "element") {
var tween = tweensContainer[property],
currentValue,
/* Easing can either be a pre-genereated function or a string that references a pre-registered easing
on the Velocity.Easings object. In either case, return the appropriate easing *function*. */
easing = Type.isString(tween.easing) ? Velocity.Easings[tween.easing] : tween.easing;
/******************************
Current Value Calculation
******************************/
/* If this is the last tick pass (if we've reached 100% completion for this tween),
ensure that currentValue is explicitly set to its target endValue so that it's not subjected to any rounding. */
if (percentComplete === 1) {
currentValue = tween.endValue;
/* Otherwise, calculate currentValue based on the current delta from startValue. */
} else {
var tweenDelta = tween.endValue - tween.startValue;
currentValue = tween.startValue + (tweenDelta * easing(percentComplete, opts, tweenDelta));
/* If no value change is occurring, don't proceed with DOM updating. */
if (!firstTick && (currentValue === tween.currentValue)) {
continue;
}
}
tween.currentValue = currentValue;
/* If we're tweening a fake 'tween' property in order to log transition values, update the one-per-call variable so that
it can be passed into the progress callback. */
if (property === "tween") {
tweenDummyValue = currentValue;
} else {
/******************
Hooks: Part I
******************/
/* For hooked properties, the newly-updated rootPropertyValueCache is cached onto the element so that it can be used
for subsequent hooks in this call that are associated with the same root property. If we didn't cache the updated
rootPropertyValue, each subsequent update to the root property in this tick pass would reset the previous hook's
updates to rootPropertyValue prior to injection. A nice performance byproduct of rootPropertyValue caching is that
subsequently chained animations using the same hookRoot but a different hook can use this cached rootPropertyValue. */
if (CSS.Hooks.registered[property]) {
var hookRoot = CSS.Hooks.getRoot(property),
rootPropertyValueCache = Data(element).rootPropertyValueCache[hookRoot];
if (rootPropertyValueCache) {
tween.rootPropertyValue = rootPropertyValueCache;
}
}
/*****************
DOM Update
*****************/
/* setPropertyValue() returns an array of the property name and property value post any normalization that may have been performed. */
/* Note: To solve an IE<=8 positioning bug, the unit type is dropped when setting a property value of 0. */
var adjustedSetData = CSS.setPropertyValue(element, /* SET */
property,
tween.currentValue + (parseFloat(currentValue) === 0 ? "" : tween.unitType),
tween.rootPropertyValue,
tween.scrollData);
/*******************
Hooks: Part II
*******************/
/* Now that we have the hook's updated rootPropertyValue (the post-processed value provided by adjustedSetData), cache it onto the element. */
if (CSS.Hooks.registered[property]) {
/* Since adjustedSetData contains normalized data ready for DOM updating, the rootPropertyValue needs to be re-extracted from its normalized form. ?? */
if (CSS.Normalizations.registered[hookRoot]) {
Data(element).rootPropertyValueCache[hookRoot] = CSS.Normalizations.registered[hookRoot]("extract", null, adjustedSetData[1]);
} else {
Data(element).rootPropertyValueCache[hookRoot] = adjustedSetData[1];
}
}
/***************
Transforms
***************/
/* Flag whether a transform property is being animated so that flushTransformCache() can be triggered once this tick pass is complete. */
if (adjustedSetData[0] === "transform") {
transformPropertyExists = true;
}
}
}
}
/****************
mobileHA
****************/
/* If mobileHA is enabled, set the translate3d transform to null to force hardware acceleration.
It's safe to override this property since Velocity doesn't actually support its animation (hooks are used in its place). */
if (opts.mobileHA) {
/* Don't set the null transform hack if we've already done so. */
if (Data(element).transformCache.translate3d === undefined) {
/* All entries on the transformCache object are later concatenated into a single transform string via flushTransformCache(). */
Data(element).transformCache.translate3d = "(0px, 0px, 0px)";
transformPropertyExists = true;
}
}
if (transformPropertyExists) {
CSS.flushTransformCache(element);
}
}
/* The non-"none" display value is only applied to an element once -- when its associated call is first ticked through.
Accordingly, it's set to false so that it isn't re-processed by this call in the next tick. */
if (opts.display !== undefined && opts.display !== "none") {
Velocity.State.calls[i][2].display = false;
}
if (opts.visibility !== undefined && opts.visibility !== "hidden") {
Velocity.State.calls[i][2].visibility = false;
}
/* Pass the elements and the timing data (percentComplete, msRemaining, timeStart, tweenDummyValue) into the progress callback. */
if (opts.progress) {
opts.progress.call(callContainer[1],
callContainer[1],
percentComplete,
Math.max(0, (timeStart + opts.duration) - timeCurrent),
timeStart,
tweenDummyValue);
}
/* If this call has finished tweening, pass its index to completeCall() to handle call cleanup. */
if (percentComplete === 1) {
completeCall(i);
}
}
}
/* Note: completeCall() sets the isTicking flag to false when the last call on Velocity.State.calls has completed. */
if (Velocity.State.isTicking) {
ticker(tick);
}
}
/**********************
Call Completion
**********************/
/* Note: Unlike tick(), which processes all active calls at once, call completion is handled on a per-call basis. */
function completeCall (callIndex, isStopped) {
/* Ensure the call exists. */
if (!Velocity.State.calls[callIndex]) {
return false;
}
/* Pull the metadata from the call. */
var call = Velocity.State.calls[callIndex][0],
elements = Velocity.State.calls[callIndex][1],
opts = Velocity.State.calls[callIndex][2],
resolver = Velocity.State.calls[callIndex][4];
var remainingCallsExist = false;
/*************************
Element Finalization
*************************/
for (var i = 0, callLength = call.length; i < callLength; i++) {
var element = call[i].element;
/* If the user set display to "none" (intending to hide the element), set it now that the animation has completed. */
/* Note: display:none isn't set when calls are manually stopped (via Velocity("stop"). */
/* Note: Display gets ignored with "reverse" calls and infinite loops, since this behavior would be undesirable. */
if (!isStopped && !opts.loop) {
if (opts.display === "none") {
CSS.setPropertyValue(element, "display", opts.display);
}
if (opts.visibility === "hidden") {
CSS.setPropertyValue(element, "visibility", opts.visibility);
}
}
/* If the element's queue is empty (if only the "inprogress" item is left at position 0) or if its queue is about to run
a non-Velocity-initiated entry, turn off the isAnimating flag. A non-Velocity-initiated queue entry's logic might alter
an element's CSS values and thereby cause Velocity's cached value data to go stale. To detect if a queue entry was initiated by Velocity,
we check for the existence of our special velocityQueueEntryFlag declaration, which minifiers won't rename since the flag
is assigned to jQuery's global $ object and thus exists out of Velocity's own scope. */
if (opts.loop !== true && ($.queue(element)[1] === undefined || !/\.velocityQueueEntryFlag/i.test($.queue(element)[1]))) {
/* The element may have been deleted. Ensure that its data cache still exists before acting on it. */
if (Data(element)) {
Data(element).isAnimating = false;
/* Clear the element's rootPropertyValueCache, which will become stale. */
Data(element).rootPropertyValueCache = {};
var transformHAPropertyExists = false;
/* If any 3D transform subproperty is at its default value (regardless of unit type), remove it. */
$.each(CSS.Lists.transforms3D, function(i, transformName) {
var defaultValue = /^scale/.test(transformName) ? 1 : 0,
currentValue = Data(element).transformCache[transformName];
if (Data(element).transformCache[transformName] !== undefined && new RegExp("^\\(" + defaultValue + "[^.]").test(currentValue)) {
transformHAPropertyExists = true;
delete Data(element).transformCache[transformName];
}
});
/* Mobile devices have hardware acceleration removed at the end of the animation in order to avoid hogging the GPU's memory. */
if (opts.mobileHA) {
transformHAPropertyExists = true;
delete Data(element).transformCache.translate3d;
}
/* Flush the subproperty removals to the DOM. */
if (transformHAPropertyExists) {
CSS.flushTransformCache(element);
}
/* Remove the "velocity-animating" indicator class. */
CSS.Values.removeClass(element, "velocity-animating");
}
}
/*********************
Option: Complete
*********************/
/* Complete is fired once per call (not once per element) and is passed the full raw DOM element set as both its context and its first argument. */
/* Note: Callbacks aren't fired when calls are manually stopped (via Velocity("stop"). */
if (!isStopped && opts.complete && !opts.loop && (i === callLength - 1)) {
/* We throw callbacks in a setTimeout so that thrown errors don't halt the execution of Velocity itself. */
try {
opts.complete.call(elements, elements);
} catch (error) {
setTimeout(function() { throw error; }, 1);
}
}
/**********************
Promise Resolving
**********************/
/* Note: Infinite loops don't return promises. */
if (resolver && opts.loop !== true) {
resolver(elements);
}
/****************************
Option: Loop (Infinite)
****************************/
if (opts.loop === true && !isStopped) {
/* If a rotateX/Y/Z property is being animated to 360 deg with loop:true, swap tween start/end values to enable
continuous iterative rotation looping. (Otherise, the element would just rotate back and forth.) */
$.each(Data(element).tweensContainer, function(propertyName, tweenContainer) {
if (/^rotate/.test(propertyName) && parseFloat(tweenContainer.endValue) === 360) {
tweenContainer.endValue = 0;
tweenContainer.startValue = 360;
}
if (/^backgroundPosition/.test(propertyName) && parseFloat(tweenContainer.endValue) === 100 && tweenContainer.unitType === "%") {
tweenContainer.endValue = 0;
tweenContainer.startValue = 100;
}
});
Velocity(element, "reverse", { loop: true, delay: opts.delay });
}
/***************
Dequeueing
***************/
/* Fire the next call in the queue so long as this call's queue wasn't set to false (to trigger a parallel animation),
which would have already caused the next call to fire. Note: Even if the end of the animation queue has been reached,
$.dequeue() must still be called in order to completely clear jQuery's animation queue. */
if (opts.queue !== false) {
$.dequeue(element, opts.queue);
}
}
/************************
Calls Array Cleanup
************************/
/* Since this call is complete, set it to false so that the rAF tick skips it. This array is later compacted via compactSparseArray().
(For performance reasons, the call is set to false instead of being deleted from the array: http://www.html5rocks.com/en/tutorials/speed/v8/) */
Velocity.State.calls[callIndex] = false;
/* Iterate through the calls array to determine if this was the final in-progress animation.
If so, set a flag to end ticking and clear the calls array. */
for (var j = 0, callsLength = Velocity.State.calls.length; j < callsLength; j++) {
if (Velocity.State.calls[j] !== false) {
remainingCallsExist = true;
break;
}
}
if (remainingCallsExist === false) {
/* tick() will detect this flag upon its next iteration and subsequently turn itself off. */
Velocity.State.isTicking = false;
/* Clear the calls array so that its length is reset. */
delete Velocity.State.calls;
Velocity.State.calls = [];
}
}
/******************
Frameworks
******************/
/* Both jQuery and Zepto allow their $.fn object to be extended to allow wrapped elements to be subjected to plugin calls.
If either framework is loaded, register a "velocity" extension pointing to Velocity's core animate() method. Velocity
also registers itself onto a global container (window.jQuery || window.Zepto || window) so that certain features are
accessible beyond just a per-element scope. This master object contains an .animate() method, which is later assigned to $.fn
(if jQuery or Zepto are present). Accordingly, Velocity can both act on wrapped DOM elements and stand alone for targeting raw DOM elements. */
global.Velocity = Velocity;
if (global !== window) {
/* Assign the element function to Velocity's core animate() method. */
global.fn.velocity = animate;
/* Assign the object function's defaults to Velocity's global defaults object. */
global.fn.velocity.defaults = Velocity.defaults;
}
/***********************
Packaged Redirects
***********************/
/* slideUp, slideDown */
$.each([ "Down", "Up" ], function(i, direction) {
Velocity.Redirects["slide" + direction] = function (element, options, elementsIndex, elementsSize, elements, promiseData) {
var opts = $.extend({}, options),
begin = opts.begin,
complete = opts.complete,
computedValues = { height: "", marginTop: "", marginBottom: "", paddingTop: "", paddingBottom: "" },
inlineValues = {};
if (opts.display === undefined) {
/* Show the element before slideDown begins and hide the element after slideUp completes. */
/* Note: Inline elements cannot have dimensions animated, so they're reverted to inline-block. */
opts.display = (direction === "Down" ? (Velocity.CSS.Values.getDisplayType(element) === "inline" ? "inline-block" : "block") : "none");
}
opts.begin = function() {
/* If the user passed in a begin callback, fire it now. */
begin && begin.call(elements, elements);
/* Cache the elements' original vertical dimensional property values so that we can animate back to them. */
for (var property in computedValues) {
inlineValues[property] = element.style[property];
/* For slideDown, use forcefeeding to animate all vertical properties from 0. For slideUp,
use forcefeeding to start from computed values and animate down to 0. */
var propertyValue = Velocity.CSS.getPropertyValue(element, property);
computedValues[property] = (direction === "Down") ? [ propertyValue, 0 ] : [ 0, propertyValue ];
}
/* Force vertical overflow content to clip so that sliding works as expected. */
inlineValues.overflow = element.style.overflow;
element.style.overflow = "hidden";
}
opts.complete = function() {
/* Reset element to its pre-slide inline values once its slide animation is complete. */
for (var property in inlineValues) {
element.style[property] = inlineValues[property];
}
/* If the user passed in a complete callback, fire it now. */
complete && complete.call(elements, elements);
promiseData && promiseData.resolver(elements);
};
Velocity(element, computedValues, opts);
};
});
/* fadeIn, fadeOut */
$.each([ "In", "Out" ], function(i, direction) {
Velocity.Redirects["fade" + direction] = function (element, options, elementsIndex, elementsSize, elements, promiseData) {
var opts = $.extend({}, options),
propertiesMap = { opacity: (direction === "In") ? 1 : 0 },
originalComplete = opts.complete;
/* Since redirects are triggered individually for each element in the animated set, avoid repeatedly triggering
callbacks by firing them only when the final element has been reached. */
if (elementsIndex !== elementsSize - 1) {
opts.complete = opts.begin = null;
} else {
opts.complete = function() {
if (originalComplete) {
originalComplete.call(elements, elements);
}
promiseData && promiseData.resolver(elements);
}
}
/* If a display was passed in, use it. Otherwise, default to "none" for fadeOut or the element-specific default for fadeIn. */
/* Note: We allow users to pass in "null" to skip display setting altogether. */
if (opts.display === undefined) {
opts.display = (direction === "In" ? "auto" : "none");
}
Velocity(this, propertiesMap, opts);
};
});
return Velocity;
}((window.jQuery || window.Zepto || window), window, document);
}));
/******************
Known Issues
******************/
/* The CSS spec mandates that the translateX/Y/Z transforms are %-relative to the element itself -- not its parent.
Velocity, however, doesn't make this distinction. Thus, converting to or from the % unit with these subproperties
will produce an inaccurate conversion value. The same issue exists with the cx/cy attributes of SVG circles and ellipses. */<|fim▁end|>
| |
<|file_name|>day09.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
""" 2018 AOC Day 09 """
import argparse
import typing
import unittest
class Node(object):
''' Class representing node in cyclic linked list '''
    def __init__(self, prev: 'Node', next: 'Node', value: int):
        ''' Create a node with explicit parameters.

        ``prev``/``next`` are the neighbouring nodes in the cyclic list.
        ``default()`` passes None for both and patches them afterwards
        (hence its ``# type: ignore``), so both may transiently be None.
        ``value`` is the integer payload carried by this node.
        '''
        self._prev = prev   # previous node in the cyclic list
        self._next = next   # next node in the cyclic list
        self._value = value  # integer payload (the marble number in this puzzle)
@staticmethod
def default() -> 'Node':
''' Create a node linked to itself with value 0 '''<|fim▁hole|> return node
def forward(self, n: int = 1) -> 'Node':
''' Go forward n nodes '''
current = self
for _ in range(n):
current = current._next
return current
def back(self, n: int = 1) -> 'Node':
''' Go backward n nodes '''
current = self
for _ in range(n):
current = current._prev
return current
def insert(self, value: int) -> 'Node':
''' Insert new node after current node with given value, and return newly inserted Node '''
new_node = Node(self, self._next, value)
self._next._prev = new_node
self._next = new_node
return self._next
def remove(self) -> 'Node':
''' Remove current Node and return the following Node '''
self._prev._next = self._next
self._next._prev = self._prev
return self._next
    def value(self) -> int:
        ''' Return the integer payload stored in this node. '''
        # Plain read-only accessor; the payload is fixed at construction time.
        return self._value
def chain_values(self):
values = [self.value()]
current = self.forward()
while current != self:
values.append(current.value())
current = current.forward()
return values
def part1(nplayers: int, highest_marble: int) -> int:
    """ Play the marble game and return the highest score among the players. """
    # Scores indexed by player; marble k is placed by player (k - 1) % nplayers.
    scores = [0] * nplayers
    current = Node.default()
    for marble in range(1, highest_marble + 1):
        who = (marble - 1) % nplayers
        if marble % 23:
            # Ordinary marble: insert it after the marble one step clockwise.
            current = current.forward().insert(marble)
        else:
            # Scoring marble: keep it, plus the marble 7 steps back, which is
            # removed; its clockwise neighbour becomes the current marble.
            current = current.back(7)
            scores[who] += marble + current.value()
            current = current.remove()
    return max(scores)
def part2(nplayers: int, highest_node: int) -> int:
    """ Solve part 2 (same rules as part 1; the caller scales the last marble). """
    # main() passes highest_marble * 100, so this is a straight delegation.
    return part1(nplayers, highest_node)
def main():
    """ Parse the command line and print the answers for both parts. """
    parser = argparse.ArgumentParser(description='Advent of Code 2018 Day 09')
    parser.add_argument('nplayers', type=int, help='# of players')
    parser.add_argument('highest_marble', type=int, help='highest-valued marble')
    args = parser.parse_args()
    # Part 2 replays the same game with a 100x larger final marble.
    print('Part 1:', part1(args.nplayers, args.highest_marble))
    print('Part 2:', part2(args.nplayers, args.highest_marble * 100))


if __name__ == '__main__':
    main()
class ExampleTest(unittest.TestCase):
    """ Worked examples from the puzzle statement. """

    def test_part1(self):
        cases = [
            ((9, 25), 32),
            ((10, 1618), 8317),
            ((13, 7999), 146373),
            ((17, 1104), 2764),
            ((21, 6111), 54718),
            ((30, 5807), 37305),
        ]
        for args, want in cases:
            self.assertEqual(part1(*args), want)
|
node = Node(None, None, 0) # type: ignore
node._prev = node
node._next = node
|
<|file_name|>string.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2016 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:<|fim▁hole|> * copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/// Convert a snake case string to a camel case.
pub fn snake_to_camel(string: &str) -> String {
    // Capitalize the very first character, if there is one.
    let mut chars = string.chars();
    let capitalized = match chars.next() {
        Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
        None => String::new(),
    };
    // Drop underscores, upper-casing whichever character follows a run of them.
    let mut camel = String::new();
    let mut after_underscore = false;
    for ch in capitalized.chars() {
        if ch == '_' {
            after_underscore = true;
            continue;
        }
        if after_underscore {
            camel.extend(ch.to_uppercase());
        } else {
            camel.push(ch);
        }
        after_underscore = false;
    }
    camel
}
/// Transform a camel case command name to its dashed version.
/// WinOpen is transformed to win-open.
pub fn to_dash_name(name: &str) -> String {
    let mut dashed = String::new();
    for (position, ch) in name.chars().enumerate() {
        // A dash goes in front of every capital except a leading one.
        if position > 0 && ch.is_uppercase() {
            dashed.push('-');
        }
        dashed.extend(ch.to_lowercase());
    }
    dashed
}
|
*
* The above copyright notice and this permission notice shall be included in all
|
<|file_name|>AVComponentCollection.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> * The number of items in the collection.
*/
public readonly length: number;
/**
* Return the item at position index in the collection.
*
* @param index The index of the item to be returned
*
* @see http://www.oipf.tv/docs/OIPF-T1-R1-Specification-Volume-5-Declarative-Application-Environment-v1_2-2012-09-19.PDF#page=211
*/
item( index: number ): OIPF.AVComponent;
}
}<|fim▁end|>
|
declare namespace OIPF {
export class AVComponentCollection {
/**
|
<|file_name|>component_jni_registrar.cc<|end_file_name|><|fim▁begin|>// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/spellcheck/browser/android/component_jni_registrar.h"
#include "base/android/jni_android.h"<|fim▁hole|>
namespace android {
static base::android::RegistrationMethod kSpellcheckRegisteredMethods[] = {
{"SpellCheckerSessionBridge", SpellCheckerSessionBridge::RegisterJNI},
};
bool RegisterSpellcheckJni(JNIEnv* env) {
return base::android::RegisterNativeMethods(
env, kSpellcheckRegisteredMethods,
std::size(kSpellcheckRegisteredMethods));
}
} // namespace android
} // namespace spellcheck<|fim▁end|>
|
#include "base/android/jni_registrar.h"
#include "components/spellcheck/browser/spellchecker_session_bridge_android.h"
namespace spellcheck {
|
<|file_name|>_configuration.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from ._version import VERSION
class SynapseClientConfiguration(Configuration):
"""Configuration for SynapseClient
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: azure.core.credentials.TokenCredential
:param synapse_dns_suffix: Gets the DNS suffix used as the base for all Synapse service requests.
:type synapse_dns_suffix: str
:param livy_api_version: Valid api-version for the request.
:type livy_api_version: str
"""
def __init__(
self,
credential, # type: "TokenCredential"
synapse_dns_suffix="dev.azuresynapse.net", # type: str
livy_api_version="2019-11-01-preview", # type: str
**kwargs # type: Any
):
# type: (...) -> None
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if synapse_dns_suffix is None:
raise ValueError("Parameter 'synapse_dns_suffix' must not be None.")
if livy_api_version is None:
raise ValueError("Parameter 'livy_api_version' must not be None.")
super(SynapseClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.synapse_dns_suffix = synapse_dns_suffix
self.livy_api_version = livy_api_version
self.api_version = "2019-11-01-preview"
self.credential_scopes = ['https://dev.azuresynapse.net/.default']
self._configure(**kwargs)
self.user_agent_policy.add_user_agent('azsdk-python-synapseclient/{}'.format(VERSION))
def _configure(
self,
**kwargs # type: Any
):
# type: (...) -> None
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)<|fim▁hole|><|fim▁end|>
|
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
|
<|file_name|>cite.js<|end_file_name|><|fim▁begin|>// Script by Bo Tranberg
// http://botranberg.dk
// https://github.com/tranberg/citations
//
// This script requires jQuery and jQuery UI
$(function() {
// Inser html for dialog just before the button to open it
var butt = document.getElementById('citations');
butt.insertAdjacentHTML('beforeBegin',
'\
<div id="dialog" title="Cite this paper" style="text-align:left"> \
<p style="text-align: center;"><b>Copy and paste one of the formatted citations into your bibliography manager.</b></p> \
<table style="border-collapse:separate; border-spacing:2em"> \
<tr style="vertical-align:top;"> \
<td><strong>APA</strong></td> \
<td><span id="APA1"></span><span id="APA2"></span><span id="APA3"></span><span id="APA4" style="font-style: italic"></span></td> \
</tr> \
<tr style="vertical-align:top;"> \
<td><strong>Bibtex</strong></td> \
<td> \
@article{<span id="bibtag"></span>,<br> \
title={<span id="bibtitle"></span>},<br> \
author={<span id="bibauthor"></span>},<br> \
journal={<span id="bibjournal"></span>},<br> \
year={<span id="bibyear"></span>},<br> \
url={<span id="biburl"></span>},<br> \
} \
</td> \
</tr> \
</table> \
</div>');
// Definitions of citations dialog
$("#dialog").dialog({
autoOpen: false,
show: {
effect: "fade",
duration: 200
},
hide: {
effect: "fade",
duration: 200
},
maxWidth:600,
maxHeight: 600,
width: 660,
height: 400,
modal: true,
});
// Open citation dialog on click
$("#citations").click(function() {
$("#dialog").dialog("open");
});
// Find authors
var metas = document.getElementsByTagName('meta');
var author = ''
<|fim▁hole|> numAuthors += 1
};
};
// Build a string of authors for Bibtex
var authorIndex = 0
for (i=0; i<metas.length; i++) {
if (metas[i].getAttribute("name") == "citation_author") {
authorIndex += 1
if (authorIndex>1) {
if (authorIndex<=numAuthors) {
author = author+' and '
};
};
author = author+metas[i].getAttribute("content")
};
};
// Populate formatted citations in Bibtex
var title = $("meta[name='citation_title']").attr('content')
// The following test might seem stupid, but it's needed because some php function at OpenPsych appends two whitespaces to the start of the title in the meta data
if (title[1] == ' ') {
title = title.slice(2)
};
var journal = $("meta[name='citation_journal_title']").attr('content')
var pubyear = $("meta[name='citation_publication_date']").attr('content').substring(0,4)
var puburl = document.URL
// Build a string for the Bibtex tag
if (author.indexOf(',') < author.indexOf(' ')) {
var firstAuthor = author.substr(0,author.indexOf(','));
} else {
var firstAuthor = author.substr(0,author.indexOf(' '));
};
if (title.indexOf(',')<title.indexOf('0')) {
var startTitle = title.substr(0,title.indexOf(','));
} else {
var startTitle = title.substr(0,title.indexOf(' '));
};
$('#bibtag').html(firstAuthor+pubyear)
$('#bibtitle').html(title)
$('#bibauthor').html(author)
$('#bibjournal').html(journal)
$('#bibyear').html(pubyear)
$('#biburl').html(puburl)
//Build a string of authors for APA
var author = ''
var authorIndex = 0
for (i=0; i<metas.length; i++) {
if (metas[i].getAttribute("name") == "citation_author") {
authorIndex += 1
if (authorIndex>1) {
if (authorIndex<numAuthors) {
author = author+', '
};
};
if (authorIndex>1) {
if (authorIndex===numAuthors) {
author = author+', & '
};
};
// Check if author only has a single name
if (metas[i].getAttribute("content").indexOf(', ')>0) {
// Append author string with the surnames and first letter of next author's name
author = author+metas[i].getAttribute("content").substr(0,metas[i].getAttribute("content").indexOf(', ')+3)+'.'
// If the author has several names, append the first letter of these to the string
if (metas[i].getAttribute("content").indexOf(', ') < metas[i].getAttribute("content").lastIndexOf(' ')-1) {
var extraNames = metas[i].getAttribute("content").substr(metas[i].getAttribute("content").indexOf(', ')+2)
var addNames = extraNames.substr(extraNames.indexOf(' '))
author = author+addNames.substr(addNames.indexOf(' '))
};
} else {
author = author+metas[i].getAttribute("content")
};
};
};
// Populate formatted citations in APA
$('#APA1').html(author)
$('#APA2').html(' ('+pubyear+').')
$('#APA3').html(' '+title+'.')
$('#APA4').html(' '+journal+'.')
});<|fim▁end|>
|
// Determine number of authors
var numAuthors = 0
for (i=0; i<metas.length; i++) {
if (metas[i].getAttribute("name") == "citation_author") {
|
<|file_name|>example_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 someonegg. All rights reserscoreed.
// Use of this source code is goscoreerned by a BSD-style
// license that can be found in the LICENSE file.
package skiplist_test
import (
"fmt"
"github.com/someonegg/gocontainer/skiplist"<|fim▁hole|>
// item is the payload type stored in the skiplist; entries are ordered by score.
type item struct {
	score int
	// other...
}
// itemCompare orders skiplist entries by score.  Either operand may be a raw
// int or an *item; any other dynamic type is a programming error and panics
// with the offending value.
func itemCompare(l, r skiplist.Scorable) int {
	score := func(v skiplist.Scorable) int {
		switch t := v.(type) {
		case int:
			return t
		case *item:
			return t.score
		default:
			panic(v)
		}
	}
	return score(l) - score(r)
}
func Example() {
//rand.Seed(time.Now().Unix())
// An empty skiplist and put some numbers in it.
l := skiplist.NewList(itemCompare)
// add and print rank
e1 := l.Add(&item{score: 4})
fmt.Println(l.Rank(e1))
e2 := l.Add(&item{score: 1})
fmt.Println(l.Rank(e2))
e3 := l.Add(&item{score: 3})
fmt.Println(l.Rank(e3))
e4 := l.Add(&item{score: 2})
fmt.Println(l.Rank(e4))
e5 := l.Add(&item{score: 4})
fmt.Println(l.Rank(e5))
e6 := l.Add(&item{score: 6})
fmt.Println(l.Rank(e6))
fmt.Println()
// print list after add.
for e := l.Front(); e != nil; e = e.Next() {
fmt.Println(e.Value.(*item).score)
}
fmt.Println()
l.Remove(l.Find(6))
l.Remove(l.Get(4))
// print list after remoscoree.
for e := l.Front(); e != nil; e = e.Next() {
fmt.Println(e.Value.(*item).score)
}
// Output:
// 0
// 0
// 1
// 1
// 3
// 5
//
// 1
// 2
// 3
// 4
// 4
// 6
//
// 1
// 2
// 3
// 4
}<|fim▁end|>
|
//"math/rand"
//"time"
)
|
<|file_name|>JSONWorldDataHelper.java<|end_file_name|><|fim▁begin|>// --------------------------------------------------------------------------------------------------
// Copyright (c) 2016 Microsoft Corporation
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files (the "Software"), to deal in the Software without restriction,
// including without limitation the rights to use, copy, modify, merge, publish, distribute,
// sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or
// substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
// NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// --------------------------------------------------------------------------------------------------
package com.microsoft.Malmo.Utils;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.stats.StatBase;
import net.minecraft.stats.StatFileWriter;
import net.minecraft.stats.StatList;
import net.minecraft.util.BlockPos;
import net.minecraft.util.ResourceLocation;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
/**
* Helper class for building the "World data" to be passed from Minecraft back to the agent.<br>
* This class contains helper methods to build up a JSON tree of useful information, such as health, XP, food levels, distance travelled, etc.etc.<br>
* It can also build up a grid of the block types around the player.
* Call this on the Server side only.
*/
public class JSONWorldDataHelper
{
/**
* Simple class to hold the dimensions of the environment around the player
* that we want to return in the World Data.<br>
* Min and max define an inclusive range, where the player's feet are situated at (0,0,0)
*/
static public class ImmediateEnvironmentDimensions {
public int xMin;
public int xMax;
public int yMin;
public int yMax;
public int zMin;
public int zMax;
/**
* Default constructor asks for an environment just big enough to contain
* the player and one block all around him.
*/
public ImmediateEnvironmentDimensions() {
this.xMin = -1; this.xMax = 1;
this.zMin = -1; this.zMax = 1;
this.yMin = -1; this.yMax = 2;
}
/**
* Convenient constructor - effectively specifies the margin around the player<br>
* Passing (1,1,1) will have the same effect as the default constructor.
* @param xMargin number of blocks to the left and right of the player
* @param yMargin number of blocks above and below player
* @param zMargin number of blocks in front of and behind player
*/
public ImmediateEnvironmentDimensions(int xMargin, int yMargin, int zMargin) {
this.xMin = -xMargin; this.xMax = xMargin;
this.yMin = -yMargin; this.yMax = yMargin + 1; // +1 because the player is two blocks tall.
this.zMin = -zMargin; this.zMax = zMargin;
}
/**
* Convenient constructor for the case where all that is required is the flat patch of ground<br>
* around the player's feet.
* @param xMargin number of blocks around the player in the x-axis
* @param zMargin number of blocks around the player in the z-axis
*/
public ImmediateEnvironmentDimensions(int xMargin, int zMargin) {
this.xMin = -xMargin; this.xMax = xMargin;
this.yMin = -1; this.yMax = -1; // Flat patch of ground at the player's feet.
this.zMin = -zMargin; this.zMax = zMargin;
}
};
/** Builds the basic achievement world data to be used as observation signals by the listener.
* @param json a JSON object into which the achievement stats will be added.
*/
public static void buildAchievementStats(JsonObject json, EntityPlayerMP player)
{
StatFileWriter sfw = player.getStatFile();
json.addProperty("DistanceTravelled",
sfw.readStat((StatBase)StatList.distanceWalkedStat) <|fim▁hole|> + sfw.readStat((StatBase)StatList.distanceDoveStat)
+ sfw.readStat((StatBase)StatList.distanceFallenStat)
); // TODO: there are many other ways of moving!
json.addProperty("TimeAlive", sfw.readStat((StatBase)StatList.timeSinceDeathStat));
json.addProperty("MobsKilled", sfw.readStat((StatBase)StatList.mobKillsStat));
json.addProperty("DamageTaken", sfw.readStat((StatBase)StatList.damageTakenStat));
/* Other potential reinforcement signals that may be worth researching:
json.addProperty("BlocksDestroyed", sfw.readStat((StatBase)StatList.objectBreakStats) - but objectBreakStats is an array of 32000 StatBase objects - indexed by block type.);
json.addProperty("Blocked", ev.player.isMovementBlocked()) - but isMovementBlocker() is a protected method (can get round this with reflection)
*/
}
    /** Builds the basic life world data to be used as observation signals by the listener.
     * @param json a JSON object into which the life stats will be added.
     * @param player the server-side player whose vitals are sampled.
     */
    public static void buildLifeStats(JsonObject json, EntityPlayerMP player)
    {
        json.addProperty("Life", player.getHealth());
        json.addProperty("Score", player.getScore()); // Might always be the same as XP?
        json.addProperty("Food", player.getFoodStats().getFoodLevel());
        json.addProperty("XP", player.experienceTotal);
        json.addProperty("IsAlive", !player.isDead);
        json.addProperty("Air", player.getAir());
    }
    /** Builds the player position data to be used as observation signals by the listener.
     * @param json a JSON object into which the positional information will be added.
     * @param player the server-side player whose position and orientation are sampled.
     */
    public static void buildPositionStats(JsonObject json, EntityPlayerMP player)
    {
        // World-space coordinates of the player entity.
        json.addProperty("XPos", player.posX);
        json.addProperty("YPos", player.posY);
        json.addProperty("ZPos", player.posZ);
        // Orientation, in degrees.
        json.addProperty("Pitch", player.rotationPitch);
        json.addProperty("Yaw", player.rotationYaw);
    }
/**
* Build a signal for the cubic block grid centred on the player.<br>
* Default is 3x3x4. (One cube all around the player.)<br>
* Blocks are returned as a 1D array, in order
* along the x, then z, then y axes.<br>
* Data will be returned in an array called "Cells"
* @param json a JSON object into which the info for the object under the mouse will be added.
* @param environmentDimensions object which specifies the required dimensions of the grid to be returned.
* @param jsonName name to use for identifying the returned JSON array.
*/
public static void buildGridData(JsonObject json, ImmediateEnvironmentDimensions environmentDimensions, EntityPlayerMP player, String jsonName)
{
if (player == null || json == null)
return;
JsonArray arr = new JsonArray();
BlockPos pos = player.getPosition();
for (int y = environmentDimensions.yMin; y <= environmentDimensions.yMax; y++)
{
for (int z = environmentDimensions.zMin; z <= environmentDimensions.zMax; z++)
{
for (int x = environmentDimensions.xMin; x <= environmentDimensions.xMax; x++)
{
BlockPos p = pos.add(x, y, z);
String name = "";
IBlockState state = player.worldObj.getBlockState(p);
Object blockName = Block.blockRegistry.getNameForObject(state.getBlock());
if (blockName instanceof ResourceLocation)
{
name = ((ResourceLocation)blockName).getResourcePath();
}
JsonElement element = new JsonPrimitive(name);
arr.add(element);
}
}
}
json.add(jsonName, arr);
}
}<|fim▁end|>
|
+ sfw.readStat((StatBase)StatList.distanceSwumStat)
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
install_requires = [<|fim▁hole|>
setup(
name='linkedin-auth',
version='0.1',
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='A simple Django app for linkedin authentcation.',
long_description=README,
url='https://github.com/technoarch-softwares/linkedin-auth',
author='Pankul Mittal',
author_email='[email protected]',
install_requires = install_requires,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8', # replace "X.Y" as appropriate
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License', # example license
'Operating System :: OS Independent',
'Programming Language :: Python',
# Replace these appropriately if you are stuck on Python 2.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
zip_safe=False,
)<|fim▁end|>
|
'requests==2.8.1'
]
|
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015-2020 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use std::fmt::Write;
use toml::Value;
use anyhow::Result;
// With the "early-panic" feature enabled, `if_cfg_panic!` panics immediately,
// surfacing logic errors loudly during development builds. (A no-op variant
// of the same macro is compiled when the feature is disabled.)
#[cfg(feature = "early-panic")]
#[macro_export]
macro_rules! if_cfg_panic {
    () => { panic!() };
    ($msg:expr) => { panic!($msg) };
    ($fmt:expr, $($arg:tt)+) => { panic!($fmt, $($arg),+) };
}
#[cfg(not(feature = "early-panic"))]
#[macro_export]
macro_rules! if_cfg_panic {
() => { };<|fim▁hole|>
/// Split a serialized entry into its TOML header and its content.
///
/// `buf` is expected to begin with a `"---"` line; the lines up to the next
/// `"---"` are parsed as TOML, and everything after it becomes the content
/// (reproduced verbatim, without adding a trailing newline).
pub fn entry_buffer_to_header_content(buf: &str) -> Result<(Value, String)> {
    debug!("Building entry from string");

    let mut header_buf = String::new();
    let mut content_buf = String::new();
    let mut in_header = true;

    // Skip the opening "---" marker line.
    let mut lines = buf.split('\n').skip(1).peekable();
    while let Some(line) = lines.next() {
        if in_header && line == "---" {
            // Header terminator: consume it and switch to content mode.
            in_header = false;
        } else if in_header {
            writeln!(header_buf, "{}", line)?;
        } else if lines.peek().is_none() {
            // Final line: no trailing newline appended.
            write!(content_buf, "{}", line)?;
        } else {
            writeln!(content_buf, "{}", line)?;
        }
    }

    Ok((::toml::de::from_str(&header_buf)?, content_buf))
}
// Unit tests: each one checks that the content part survives a header/content
// round-trip through entry_buffer_to_header_content unchanged.
#[cfg(test)]
mod test {
    extern crate env_logger;

    use super::entry_buffer_to_header_content;

    fn setup_logging() {
        let _ = env_logger::try_init();
    }

    // Wrap `content` in a minimal valid entry (header + "---" separator).
    fn mkfile(content: &str) -> String {
        format!(r#"---
[imag]
version = '{version}'
---
{content}"#, version = env!("CARGO_PKG_VERSION"), content = content)
    }

    #[test]
    fn test_entry_buffer_to_header_content_1() {
        let content = "Hai";

        let file = format!(r#"---
[imag]
version = '{version}'
---
{content}"#, version = env!("CARGO_PKG_VERSION"), content = content);

        let res = entry_buffer_to_header_content(&file);
        assert!(res.is_ok());
        let (_, res_content) = res.unwrap();

        assert_eq!(res_content, content)
    }

    #[test]
    fn test_entry_buffer_to_header_content_2() {
        setup_logging();
        // Content with a trailing newline.
        let content = r#"Hai
"#;

        let file = mkfile(&content);
        debug!("FILE: <<<{}>>>", file);

        let res = entry_buffer_to_header_content(&file);
        assert!(res.is_ok());

        let (_, res_content) = res.unwrap();
        debug!("CONTENT: <<<{}>>>", res_content);

        assert_eq!(res_content, content)
    }

    #[test]
    fn test_entry_buffer_to_header_content_3() {
        // Content containing a blank line.
        let content = r#"Hai

barbar
"#;

        let file = mkfile(&content);

        let res = entry_buffer_to_header_content(&file);
        assert!(res.is_ok());
        let (_, res_content) = res.unwrap();

        assert_eq!(res_content, content)
    }

    #[test]
    fn test_entry_buffer_to_header_content_4() {
        // Content that itself contains "---" lines; only the first separator
        // after the header may be consumed.
        let content = r#"Hai
---
barbar
---
"#;

        let file = mkfile(&content);

        let res = entry_buffer_to_header_content(&file);
        assert!(res.is_ok());
        let (_, res_content) = res.unwrap();

        assert_eq!(res_content, content)
    }

    #[test]
    fn test_entry_buffer_to_header_content_5() {
        // NOTE(review): this test appears identical to test 4 above —
        // presumably one of them was meant to use a different fixture; verify.
        let content = r#"Hai
---
barbar
---
"#;

        let file = mkfile(&content);

        let res = entry_buffer_to_header_content(&file);
        assert!(res.is_ok());
        let (_, res_content) = res.unwrap();

        assert_eq!(res_content, content)
    }
}
|
($msg:expr) => { };
($fmt:expr, $($arg:tt)+) => { };
}
|
<|file_name|>interface.rs<|end_file_name|><|fim▁begin|>// Copyleft (ↄ) meh. <[email protected]> | http://meh.schizofreni.co
//
// This file is part of screenruster.
//
// screenruster is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// screenruster is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with screenruster. If not, see <http://www.gnu.org/licenses/>.
use std::path::Path;
use std::time::{SystemTime, Duration};
use std::thread;
use std::sync::Arc;
use std::ops::Deref;
use channel::{self, Receiver, Sender, SendError};
use dbus::{
Message,
blocking::{
LocalConnection as Connection,
stdintf::org_freedesktop_dbus::RequestNameReply,
BlockingSender as _,
},
channel::{Sender as _}
};
use log::error;
use crate::error;
use crate::config;
/// The DBus interface.
///
/// It mimics the GNOME screensaver interface for simple integration with a
/// GNOME environment, and also implements some ScreenRuster specific
/// interfaces.
///
/// It listens for relevant system events:
///
/// - `PrepareForSleep` from SystemD
pub struct Interface {
    /// Requests produced by the DBus handler threads (consumed via `Deref`).
    receiver: Receiver<Request>,
    /// Channel used to answer requests that expect a return value.
    sender: Sender<Response>,
    /// Channel used to emit DBus signals from the session handler loop.
    signals: Sender<Signal>,
}
#[derive(Debug)]
pub enum Request {
    /// Reload the configuration file, optionally from the given path.
    Reload(Option<String>),

    /// Lock the screen.
    Lock,

    /// Cycle the saver.
    Cycle,

    /// Simulate user activity.
    SimulateUserActivity,

    /// Inhibit the starting of screen saving.
    Inhibit {
        application: String,
        reason: String,
    },

    /// Remove a previous Inhibit (payload is the cookie it returned).
    UnInhibit(u32),

    /// Throttle the resource usage of the screen saving.
    Throttle {
        application: String,
        reason: String,
    },

    /// Remove a previous Throttle (payload is the cookie it returned).
    UnThrottle(u32),

    /// Suspend any screen saver activity.
    Suspend {
        application: String,
        reason: String,
    },

    /// Remove a previous Suspend (payload is the cookie it returned).
    Resume(u32),

    /// Change the active status of the screen saver.
    SetActive(bool),

    /// Get the active status of the screen saver.
    GetActive,

    /// Get how many seconds the screen saver has been active.
    GetActiveTime,

    /// Get the idle status of the session.
    GetSessionIdle,

    /// Get how many seconds the session has been idle.
    GetSessionIdleTime,

    /// The system is preparing for sleep (`Some(time)`) or waking up (`None`).
    PrepareForSleep(Option<SystemTime>),
}

#[derive(Debug)]
pub enum Response {
    /// Whether the reload was successful or not.
    Reload(bool),

    /// The cookie for the inhibition.
    Inhibit(u32),

    /// The cookie for the throttle.
    Throttle(u32),

    /// The cookie for the suspend.
    Suspend(u32),

    /// Whether the screen is active or not.
    Active(bool),

    /// How many seconds the saver has been active.
    ActiveTime(u64),

    /// Whether the session is idle or not.
    SessionIdle(bool),

    /// How many seconds the session has been idle.
    SessionIdleTime(u64),
}

#[derive(Debug)]
pub enum Signal {
    /// The saver has been activated (`true`) or deactivated (`false`).
    Active(bool),

    /// The session has become idle (`true`) or active (`false`).
    SessionIdle(bool),

    /// An authentication request was initiated (`true`) or completed (`false`).
    AuthenticationRequest(bool),
}
impl Interface {
/// Send a reload request.
pub fn reload<P: AsRef<Path>>(path: Option<P>) -> error::Result<()> {
let mut message = Message::new_method_call(
"meh.rust.ScreenSaver",
"/meh/rust/ScreenSaver",
"meh.rust.ScreenSaver",
"Reload")?;
if let Some(value) = path {
message = message.append1(value.as_ref().to_string_lossy().into_owned());
}
Connection::new_session()?.send(message)?;
Ok(())
}
    /// Send a lock request (`org.gnome.ScreenSaver.Lock`).
    pub fn lock() -> error::Result<()> {
        Connection::new_session()?.send(Message::new_method_call(
            "org.gnome.ScreenSaver",
            "/org/gnome/ScreenSaver",
            "org.gnome.ScreenSaver",
            "Lock")?)?;

        Ok(())
    }

    /// Send an activation request (`SetActive(true)`).
    pub fn activate() -> error::Result<()> {
        Connection::new_session()?.send(Message::new_method_call(
            "org.gnome.ScreenSaver",
            "/org/gnome/ScreenSaver",
            "org.gnome.ScreenSaver",
            "SetActive")?
            .append1(true))?;

        Ok(())
    }

    /// Send a deactivation request, implemented as simulated user activity.
    pub fn deactivate() -> error::Result<()> {
        Connection::new_session()?.send(Message::new_method_call(
            "org.gnome.ScreenSaver",
            "/org/gnome/ScreenSaver",
            "org.gnome.ScreenSaver",
            "SimulateUserActivity")?)?;

        Ok(())
    }
/// Send an inhibition request.
pub fn inhibit() -> error::Result<u32> {
Connection::new_session()?.send_with_reply_and_block(Message::new_method_call(
"org.gnome.ScreenSaver",
"/org/gnome/ScreenSaver",
"org.gnome.ScreenSaver",
"Inhibit")?
.append2("screenruster", "requested by user")
, Duration::from_millis(5_000))?
.get1::<u32>()
.ok_or(dbus::Error::new_custom("inibhition", "wrong response").into())
}
    /// Send an uninhibition request; `cookie` must be a value previously
    /// returned by `inhibit`.
    pub fn uninhibit(cookie: u32) -> error::Result<()> {
        Connection::new_session()?.send(Message::new_method_call(
            "org.gnome.ScreenSaver",
            "/org/gnome/ScreenSaver",
            "org.gnome.ScreenSaver",
            "UnInhibit")?
            .append1(cookie))?;

        Ok(())
    }

    /// Send a throttle request; returns the cookie identifying it.
    /// Fails if no reply arrives within 5 seconds or it carries no cookie.
    pub fn throttle() -> error::Result<u32> {
        Connection::new_session()?.send_with_reply_and_block(Message::new_method_call(
            "org.gnome.ScreenSaver",
            "/org/gnome/ScreenSaver",
            "org.gnome.ScreenSaver",
            "Throttle")?
            .append2("screenruster", "requested by user")
            , Duration::from_millis(5_000))?
            .get1::<u32>()
            .ok_or(dbus::Error::new_custom("throttle", "wrong response").into())
    }

    /// Send an unthrottle request; `cookie` must be a value previously
    /// returned by `throttle`.
    pub fn unthrottle(cookie: u32) -> error::Result<()> {
        Connection::new_session()?.send(Message::new_method_call(
            "org.gnome.ScreenSaver",
            "/org/gnome/ScreenSaver",
            "org.gnome.ScreenSaver",
            "UnThrottle")?
            .append1(cookie))?;

        Ok(())
    }
    /// Send a suspension request (ScreenRuster-specific interface); returns
    /// the cookie identifying it. Fails if no reply arrives within 5 seconds
    /// or it carries no cookie.
    pub fn suspend() -> error::Result<u32> {
        Connection::new_session()?.send_with_reply_and_block(Message::new_method_call(
            "meh.rust.ScreenSaver",
            "/meh/rust/ScreenSaver",
            "meh.rust.ScreenSaver",
            "Suspend")?
            .append2("screenruster", "requested by user")
            , Duration::from_millis(5_000))?
            .get1::<u32>()
            .ok_or(dbus::Error::new_custom("suspend", "wrong response").into())
    }

    /// Send a resume request; `cookie` must be a value previously returned
    /// by `suspend`.
    pub fn resume(cookie: u32) -> error::Result<()> {
        Connection::new_session()?.send(Message::new_method_call(
            "meh.rust.ScreenSaver",
            "/meh/rust/ScreenSaver",
            "meh.rust.ScreenSaver",
            "Resume")?
            .append1(cookie))?;

        Ok(())
    }
/// Spawn a DBus interface with the given configuration.
pub fn spawn(config: config::Interface) -> error::Result<Interface> {
let (sender, i_receiver) = channel::unbounded();
let (i_sender, receiver) = channel::unbounded();
let (s_sender, signals) = channel::unbounded();
let (g_sender, g_receiver) = channel::unbounded::<error::Result<()>>();
macro_rules! dbus {
(connect system) => (
Connection::new_system()
);
(connect session) => (
match Connection::new_session() {
Ok(value) => {
value
}
Err(error) => {
g_sender.send(Err(error.into())).unwrap();
return;
}
}
);
(register $conn:expr, $name:expr) => (
match $conn.request_name($name, false, false, true) {
Ok(RequestNameReply::Exists) => {
g_sender.send(Err(error::DBus::AlreadyRegistered.into())).unwrap();
return;
}
Err(error) => {
g_sender.send(Err(error.into())).unwrap();
return;
}
Ok(value) => {
value
}
}
);
(watch $conn:expr, $filter:expr) => (
$conn.add_match_no_cb($filter)
);
(ready) => (
g_sender.send(Ok(())).unwrap();
);
(check) => (
g_receiver.recv().unwrap()
);
(try $body:expr) => (
match $body {
Ok(value) => {
value
}
Err(err) => {
error!("{:?}", err);
return None;
}
}
);
}
macro_rules! cloning {
([$($var:ident),*] $closure:expr) => ({
$(let $var = $var.clone();)*
$closure
});
}
// System DBus handler.
{
let sender = sender.clone();
thread::spawn(move || {
/// Inhibits system suspension temporarily.
fn inhibit(c: &Connection) -> Option<dbus::arg::OwnedFd> {
dbus!(try c.send_with_reply_and_block(dbus!(try Message::new_method_call(
"org.freedesktop.login1",
"/org/freedesktop/login1",
"org.freedesktop.login1.Manager",
"Inhibit"))
.append1("sleep")
.append1("ScreenRuster")
.append1("Preparing for sleep.")
.append1("delay"), Duration::from_millis(1_000)))
.get1()
}
let system = dbus!(connect system).unwrap();
// Delay the next suspension.
let mut inhibitor = inhibit(&system);
// Watch for PrepareForSleep events from SystemD.
dbus!(watch system, "path='/org/freedesktop/login1',interface='org.freedesktop.login1.Manager',member='PrepareForSleep'").unwrap();
#[derive(Debug)]
pub struct PrepareForSleep {
pub arg0: bool,
}
impl dbus::arg::AppendAll for PrepareForSleep {
fn append(&self, i: &mut dbus::arg::IterAppend) {
dbus::arg::RefArg::append(&self.arg0, i);
}
}
impl dbus::arg::ReadAll for PrepareForSleep {
fn read(i: &mut dbus::arg::Iter) -> Result<Self, dbus::arg::TypeMismatchError> {
Ok(PrepareForSleep {
arg0: i.read()?,
})
}
}
impl dbus::message::SignalArgs for PrepareForSleep {
const NAME: &'static str = "PrepareForSleep";
const INTERFACE: &'static str = "org.freedesktop.login1.Manager";
}
system.with_proxy("org.freedesktop.login1.Manager", "/org/freedesktop/login1", Duration::from_micros(5_000))
.match_signal(|p: PrepareForSleep, _: &Connection, _: &Message| {
sender.send(Request::PrepareForSleep(
if p.arg0 { Some(SystemTime::now()) } else { None })).unwrap();
// In case the system is suspending, unlock the suspension,
// otherwise delay the next.
if p.arg0 {
inhibitor.take();
}
else {
inhibitor = inhibit(&system);
}
true
});
});
}
// Session DBus handler.
{
let sender = sender.clone();
thread::spawn(move || {
let mut session = dbus!(connect session);
let f = dbus::tree::Factory::new_sync::<()>();
dbus!(register session, "org.gnome.ScreenSaver");
dbus!(register session, "meh.rust.ScreenSaver");
dbus!(ready);
// GNOME screensaver signals.
let active = Arc::new(f.signal("ActiveChanged", ()).sarg::<bool, _>("status"));
let idle = Arc::new(f.signal("SessionIdleChanged", ()).sarg::<bool, _>("status"));
let begin = Arc::new(f.signal("AuthenticationRequestBegin", ()));
let end = Arc::new(f.signal("AuthenticationRequestEnd", ()));
let tree = f.tree(())
// ScreenRuster interface.
.add(f.object_path("/meh/rust/ScreenSaver", ()).introspectable().add(f.interface("meh.rust.ScreenSaver", ())
.add_m(f.method("Reload", (), cloning!([config, sender, receiver] move |m| {
if config.ignores("reload") {
return Err(dbus::tree::MethodErr::failed(&"Reload is ignored"));
}
sender.send(Request::Reload(m.msg.get1())).unwrap();
if let Response::Reload(value) = receiver.recv().unwrap() {
Ok(vec![m.msg.method_return().append1(value)])
}
else {
unreachable!();
}
})).inarg::<String, _>("path").outarg::<bool, _>("success"))
.add_m(f.method("Suspend", (), cloning!([config, sender, receiver] move |m| {
if config.ignores("suspend") {
return Err(dbus::tree::MethodErr::failed(&"Suspend is ignored"));
}
if let (Some(application), Some(reason)) = m.msg.get2() {
sender.send(Request::Suspend {
application: application,
reason: reason
}).unwrap();
if let Response::Suspend(value) = receiver.recv().unwrap() {
Ok(vec![m.msg.method_return().append1(value)])
}
else {
unreachable!();
}
}
else {
Err(dbus::tree::MethodErr::no_arg())
}
})).in_args(vec![dbus::Signature::make::<String>(), dbus::Signature::make::<String>()]))
.add_m(f.method("Resume", (), cloning!([config, sender] move |m| {
if config.ignores("suspend") {
return Err(dbus::tree::MethodErr::failed(&"Suspend is ignored"));
}
if let Some(cookie) = m.msg.get1() {
sender.send(Request::Resume(cookie)).unwrap();
Ok(vec![m.msg.method_return()])
}
else {
Err(dbus::tree::MethodErr::no_arg())
}
})).inarg::<u32, _>("cookie"))))
// GNOME screensaver interface.
.add(f.object_path("/org/gnome/ScreenSaver", ()).introspectable().add(f.interface("org.gnome.ScreenSaver", ())
.add_m(f.method("Lock", (), cloning!([sender] move |m| {
sender.send(Request::Lock).unwrap();
Ok(vec![m.msg.method_return()])
})))
.add_m(f.method("Cycle", (), cloning!([sender] move |m| {
sender.send(Request::Cycle).unwrap();
Ok(vec![m.msg.method_return()])
})))
.add_m(f.method("SimulateUserActivity", (), cloning!([sender] move |m| {
sender.send(Request::SimulateUserActivity).unwrap();
Ok(vec![m.msg.method_return()])
})))
.add_m(f.method("Inhibit", (), cloning!([config, sender, receiver] move |m| {
if config.ignores("inhibit") {
return Err(dbus::tree::MethodErr::failed(&"Inhibit is ignored"));
}
if let (Some(application), Some(reason)) = m.msg.get2() {
sender.send(Request::Inhibit {
application: application,
reason: reason
}).unwrap();
if let Response::Inhibit(value) = receiver.recv().unwrap() {
Ok(vec![m.msg.method_return().append1(value)])
}
else {
unreachable!();
}
}
else {
Err(dbus::tree::MethodErr::no_arg())
}
})).in_args(vec![dbus::Signature::make::<String>(), dbus::Signature::make::<String>()]))
.add_m(f.method("UnInhibit", (), cloning!([config, sender] move |m| {
if config.ignores("inhibit") {
return Err(dbus::tree::MethodErr::failed(&"Inhibit is ignored"));
}
if let Some(cookie) = m.msg.get1() {
sender.send(Request::UnInhibit(cookie)).unwrap();
Ok(vec![m.msg.method_return()])
}
else {
Err(dbus::tree::MethodErr::no_arg())
}
})).inarg::<u32, _>("cookie"))
.add_m(f.method("Throttle", (), cloning!([config, sender, receiver] move |m| {
if config.ignores("throttle") {
return Err(dbus::tree::MethodErr::failed(&"Inhibit is ignored"));
}
if let (Some(application), Some(reason)) = m.msg.get2() {
sender.send(Request::Throttle {
application: application,
reason: reason
}).unwrap();
if let Response::Throttle(value) = receiver.recv().unwrap() {
Ok(vec![m.msg.method_return().append1(value)])
}
else {
unreachable!();<|fim▁hole|> else {
Err(dbus::tree::MethodErr::no_arg())
}
})).in_args(vec![dbus::Signature::make::<String>(), dbus::Signature::make::<String>()]))
.add_m(f.method("UnThrottle", (), cloning!([config, sender] move |m| {
if config.ignores("throttle") {
return Err(dbus::tree::MethodErr::failed(&"Inhibit is ignored"));
}
if let Some(cookie) = m.msg.get1() {
sender.send(Request::UnThrottle(cookie)).unwrap();
Ok(vec![m.msg.method_return()])
}
else {
Err(dbus::tree::MethodErr::no_arg())
}
})).inarg::<u32, _>("cookie"))
.add_m(f.method("SetActive", (), cloning!([sender] move |m| {
if let Some(value) = m.msg.get1() {
sender.send(Request::SetActive(value)).unwrap();
Ok(vec![m.msg.method_return()])
}
else {
Err(dbus::tree::MethodErr::no_arg())
}
})).inarg::<bool, _>("active"))
.add_m(f.method("GetActive", (), cloning!([sender, receiver] move |m| {
sender.send(Request::GetActive).unwrap();
if let Response::Active(value) = receiver.recv().unwrap() {
Ok(vec![m.msg.method_return().append1(value)])
}
else {
unreachable!();
}
})).outarg::<bool, _>("active"))
.add_m(f.method("GetActiveTime", (), cloning!([sender, receiver] move |m| {
sender.send(Request::GetActiveTime).unwrap();
if let Response::ActiveTime(time) = receiver.recv().unwrap() {
Ok(vec![m.msg.method_return().append1(time)])
}
else {
unreachable!();
}
})).outarg::<u64, _>("time"))
.add_m(f.method("GetSessionIdle", (), cloning!([sender, receiver] move |m| {
sender.send(Request::GetSessionIdle).unwrap();
if let Response::SessionIdle(value) = receiver.recv().unwrap() {
Ok(vec![m.msg.method_return().append1(value)])
}
else {
unreachable!();
}
})).outarg::<bool, _>("idle"))
.add_m(f.method("GetSessionIdleTime", (), cloning!([sender, receiver] move |m| {
sender.send(Request::GetSessionIdleTime).unwrap();
if let Response::SessionIdleTime(time) = receiver.recv().unwrap() {
Ok(vec![m.msg.method_return().append1(time)])
}
else {
unreachable!();
}
})).outarg::<u64, _>("time"))
.add_s(active.clone())
.add_s(idle.clone())
.add_s(begin.clone())
.add_s(end.clone())));
tree.start_receive(&session);
loop {
session.process(Duration::from_millis(500));
while let Ok(signal) = signals.try_recv() {
session.send(match signal {
Signal::Active(status) =>
active.msg(&"/meh/rust/ScreenSaver".into(), &"org.gnome.ScreenSaver".into()).append1(status),
Signal::SessionIdle(status) =>
idle.msg(&"/meh/rust/ScreenSaver".into(), &"org.gnome.ScreenSaver".into()).append1(status),
Signal::AuthenticationRequest(true) =>
begin.msg(&"/meh/rust/ScreenSaver".into(), &"org.gnome.ScreenSaver".into()),
Signal::AuthenticationRequest(false) =>
end.msg(&"/meh/rust/ScreenSaver".into(), &"org.gnome.ScreenSaver".into()),
}).unwrap();
}
}
});
}
dbus!(check)?;
Ok(Interface {
receiver: i_receiver,
sender: i_sender,
signals: s_sender,
})
}
    /// Deliver the answer to a previously received `Request` back to the
    /// DBus handler thread that is blocked waiting for it.
    pub fn response(&self, value: Response) -> Result<(), SendError<Response>> {
        self.sender.send(value)
    }

    /// Queue a DBus signal for emission by the session handler loop.
    pub fn signal(&self, value: Signal) -> Result<(), SendError<Signal>> {
        self.signals.send(value)
    }
}
// Dereference to the request receiver, so callers can invoke `recv()` /
// `try_recv()` directly on the `Interface`.
impl Deref for Interface {
    type Target = Receiver<Request>;

    fn deref(&self) -> &Receiver<Request> {
        &self.receiver
    }
}
|
}
}
|
<|file_name|>ConcurrentLinkedQueueExtendsHandler.java<|end_file_name|><|fim▁begin|>package com.mocha.util.queue;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
/**
* <strong>Title : ConcurrentLinkedQueueExtendsHandler </strong>. <br>
* <strong>Description : 队列管理.</strong> <br>
* <strong>Create on : 2014年8月14日 下午2:31:02 </strong>. <br>
* <p>
* <strong>Copyright (C) Mocha Software Co.,Ltd.</strong> <br>
* </p>
*
* @author 刘军 [email protected] <br>
* @version <strong>Mocha JavaOA v7.0.0</strong> <br>
* <br>
* <strong>修改历史: .</strong> <br>
* 修改人 修改日期 修改描述<br>
* -------------------------------------------<br>
* <br>
* <br>
*/
public class ConcurrentLinkedQueueExtendsHandler<E> implements QueueHandler<E> {

	/**
	 * Singleton instance.
	 * NOTE(review): unused since the holder-based getInstance() below was
	 * introduced; candidate for removal together with the commented-out
	 * getInstance() variant.
	 */
	private static ConcurrentLinkedQueueExtendsHandler instance = null;

	// public static ConcurrentLinkedQueueExtendsHandler getInstance() {
	// if (instance == null) {
	// instance = new ConcurrentLinkedQueueExtendsHandler(); //
	// }
	// return instance;
	// }

	// Initialization-on-demand holder: the JVM guarantees the instance is
	// created lazily and thread-safely on first access.
	private static class ConcurrentLinkedQueueExtendsSingletonHolder {
		/**
		 * Singleton instance.
		 */
		static final ConcurrentLinkedQueueExtendsHandler INSTANCE = new ConcurrentLinkedQueueExtendsHandler();
	}

	public static ConcurrentLinkedQueueExtendsHandler getInstance() {
		return ConcurrentLinkedQueueExtendsSingletonHolder.INSTANCE;
	}

	/**
	 * Private constructor prevents direct instantiation from outside.
	 */
	private ConcurrentLinkedQueueExtendsHandler() {

	}

	// Container holding all managed queues (parallel to queueNames below).
	List<ConcurrentLinkedQueueExtends<E>> queueList = new LinkedList<ConcurrentLinkedQueueExtends<E>>();
	// Container holding the queue names; index-aligned with queueList.
	List<String> queueNames = new ArrayList<String>();
	// ========================= Operations on queues

	/**
	 * Dynamically creates an empty queue with the given name.
	 *
	 * @param name
	 *            the queue name; must be non-empty and not already in use
	 * @return the newly created queue
	 * @throws Exception
	 *             if the name is empty or already registered
	 */
	@Override
	public ConcurrentLinkedQueueExtends<E> createQueueByName(String name)
			throws Exception {
		if (null == name || "".equals(name)) {
			throw new Exception("队列名称不能为空。");
		}
		if (queueNames.contains(name)) {
			throw new Exception("此名称已被使用,请另起其他名字。");
		}
		ConcurrentLinkedQueueExtends<E> queue = new ConcurrentLinkedQueueExtends<E>(
				name);
		// Register the queue in the container
		queueList.add(queue);
		// Register its name in the name container
		queueNames.add(name);
		return queue;
	}

	/**
	 * Dynamically creates one empty queue per given name.
	 *
	 * @param names
	 *            the queue names; each must be non-empty and not already used
	 * @throws Exception
	 *             if any name is empty or already registered (queues created
	 *             before the failing name remain registered)
	 */
	@Override
	public void createQueueByNames(String[] names) throws Exception {
		for (String name : names) {
			if (null == name || "".equals(name)) {
				throw new Exception("队列名称不能为空。");
			}
			if (queueNames.contains(name)) {
				throw new Exception("此名称已被使用,请另起其他名字。");
			}
			ConcurrentLinkedQueueExtends<E> queue = new ConcurrentLinkedQueueExtends<E>(
					name);
			// Register the queue in the container
			queueList.add(queue);
			// Register its name in the name container
			queueNames.add(name);
		}
	}

	/**
	 * Dynamically creates an empty queue with the given name and a maximum
	 * capacity.
	 *
	 * @param name
	 *            the queue name; must be non-empty and not already in use
	 * @param maxSize
	 *            maximum number of elements the queue may hold; must be > 0
	 * @return the newly created queue
	 * @throws Exception
	 *             if the name is invalid/in use or maxSize is not positive
	 */
	@Override
	public ConcurrentLinkedQueueExtends<E> createQueueByName(String name,
			int maxSize) throws Exception {
		if (null == name || "".equals(name)) {
			throw new Exception("队列名称不能为空。");
		}
		if (queueNames.contains(name)) {
			throw new Exception("此名称已被使用,请另起其他名字。");
		}
		if (maxSize <= 0) {
			throw new Exception("队列大小必须大于零");
		}
		ConcurrentLinkedQueueExtends<E> queue = new ConcurrentLinkedQueueExtends<E>(
				name, maxSize);
		// Register the queue in the container
		queueList.add(queue);
		// Register its name in the name container
		queueNames.add(name);
		return queue;
	}
public boolean checkqueueName(String name) {
boolean flag = false;
if (queueNames.contains(name)) {
flag = true;
}
return flag;
}
	/**
	 * Looks up a queue by name.
	 *
	 * @param name
	 *            the queue name
	 * @return the registered queue with that name
	 * @throws Exception
	 *             if no queue with that name exists
	 */
	@Override
	public ConcurrentLinkedQueueExtends<E> getQueueByName(String name)
			throws Exception {
		if (queueNames.contains(name)) {
			return queueList.get(queueNames.indexOf(name));
		} else
			throw new Exception("不存在名称为 " + name + "的队列");
	}

	/**
	 * Removes the queue with the given name; no-op if it does not exist.
	 *
	 * @param name
	 *            the queue name
	 */
	@Override
	public void removeQueueByName(String name) {
		if (queueNames.contains(name)) {
			queueList.remove(queueNames.indexOf(name));
			queueNames.remove(name);
		}
	}
	// ========================= Operations on queue elements

	// 1. Insertion

	/**
	 * Adds an element to the named queue; throws if the queue is full.
	 *
	 * @param queueName
	 *            the queue name
	 * @param e
	 *            the element to add
	 * @return {@code true} if the element was added
	 * @throws Exception
	 *             if the queue does not exist or is full
	 */
	@Override
	public boolean add(String queueName, E e) throws Exception {
		ConcurrentLinkedQueueExtends<E> queue = this.getQueueByName(queueName);
		if (queue.size() >= queue.getMaxSize()) {
			throw new Exception("队列已满,不允许继续添加元素。");
		}
		return queue.add(e);
	}

	/**
	 * Adds a collection of elements to the named queue; throws if the queue
	 * is full or the collection would exceed its capacity.
	 *
	 * @param queueName
	 *            the queue name
	 * @param c
	 *            collection containing elements to be added to this queue
	 * @return <tt>true</tt> if this queue changed as a result of the call
	 * @throws Exception
	 *             if the queue does not exist, is full, or lacks capacity
	 */
	@Override
	public boolean addAll(String queueName, Collection<? extends E> c)
			throws Exception {
		ConcurrentLinkedQueueExtends<E> queue = this.getQueueByName(queueName);
		if (queue.size() >= queue.getMaxSize()) {
			throw new Exception("队列已满,不允许继续添加元素。");
		} else if (queue.size() + c.size() > queue.getMaxSize()) {
			throw new Exception("新增的集合中的元素太多,以致超出队列容量上限");
		}
		return queue.addAll(c);
	}

	/**
	 * Offers an element to the named queue; throws if the queue is full.
	 *
	 * @param queueName
	 *            the queue name
	 * @param e
	 *            the element to offer
	 * @return {@code true} if the element was accepted, {@code false} otherwise
	 * @throws Exception
	 *             if the queue does not exist or is full
	 */
	@Override
	public boolean offer(String queueName, E e) throws Exception {
		ConcurrentLinkedQueueExtends<E> queue = this.getQueueByName(queueName);
		if (queue.size() >= queue.getMaxSize()) {
			throw new Exception("队列已满,不允许继续添加元素。");
		}
		return queue.offer(e);
	}
	// 2. Retrieve without removing

	/**
	 * Retrieves, but does not remove, the head of the named queue.
	 *
	 * @param queueName
	 *            the queue name
	 * @return the head of the queue, or {@code null} if the queue is empty
	 * @throws Exception
	 *             if the queue does not exist
	 */
	@Override
	public E peek(String queueName) throws Exception {
		return this.getQueueByName(queueName).peek();
	}

	/**
	 * Retrieves, but does not remove, the head of the named queue; throws if
	 * the queue is empty.
	 *
	 * @param queueName
	 *            the queue name
	 * @return the head of the queue
	 * @throws Exception
	 *             if the queue does not exist or is empty
	 */
	@Override
	public E element(String queueName) throws Exception {
		return this.getQueueByName(queueName).element();
	}

	// 3. Retrieve and remove

	/**
	 * Retrieves and removes the head of the named queue.
	 *
	 * @param queueName
	 *            the queue name
	 * @return the head of the queue, or {@code null} if the queue is empty
	 * @throws Exception
	 *             if the queue does not exist
	 */
	@Override
	public E poll(String queueName) throws Exception {
		return this.getQueueByName(queueName).poll();
	}

	/**
	 * Retrieves and removes the head of the named queue; throws if the queue
	 * is empty.
	 *
	 * @param queueName
	 *            the queue name
	 * @return the head of the queue
	 * @throws Exception
	 *             if the queue does not exist or is empty
	 */
	@Override
	public E remove(String queueName) throws Exception {
		return this.getQueueByName(queueName).remove();
	}

	/**
	 * Removes a single occurrence of the given element from the named queue.
	 *
	 * @param queueName
	 *            the queue name
	 * @param o
	 *            the element to remove
	 * @return {@code true} if an element was removed
	 * @throws Exception
	 *             if the queue does not exist
	 */
	@Override
	public boolean remove(String queueName, Object o) throws Exception {
		return this.getQueueByName(queueName).remove(o);
	}

	/**
	 * Removes all elements of the given collection from the named queue.
	 *
	 * @param queueName
	 *            the queue name
	 * @param c
	 *            the elements to remove
	 * @return {@code true} if the queue changed as a result of the call
	 * @throws Exception
	 *             if the queue does not exist
	 */
	@Override
	public boolean removeAll(String queueName, Collection<?> c)
			throws Exception {
		return this.getQueueByName(queueName).removeAll(c);
	}

	/**
	 * Removes all elements from the named queue.
	 *
	 * @param queueName
	 *            the queue name
	 * @throws Exception
	 *             if the queue does not exist
	 */
	@Override
	public void clear(String queueName) throws Exception {
		this.getQueueByName(queueName).clear();
	}
	/**
	 * Returns whether the named queue contains no elements.
	 *
	 * @param queueName
	 *            the queue name
	 * @return {@code true} if the queue is empty
	 * @throws Exception
	 *             if the queue does not exist
	 */
	@Override
	public boolean isEmpty(String queueName) throws Exception {
		return this.getQueueByName(queueName).isEmpty();
	}

	/**
	 * Returns the number of elements currently held by the named queue.
	 *
	 * @param queueName
	 *            the queue name
	 * @return the element count
	 * @throws Exception
	 *             if the queue does not exist
	 */
	@Override
	public int size(String queueName) throws Exception {
		return this.getQueueByName(queueName).size();
	}

	/**
	 * Returns whether the named queue contains the given element.
	 *
	 * @param queueName
	 *            the queue name
	 * @param o
	 *            the element to look for
	 * @return {@code true} if the element is present
	 * @throws Exception
	 *             if the queue does not exist
	 */
	@Override
	public boolean contains(String queueName, Object o) throws Exception {
		return this.getQueueByName(queueName).contains(o);
	}

	/**
	 * Returns whether the named queue contains all the given elements.
	 *
	 * @param queueName
	 *            the queue name
	 * @param c
	 *            the elements to look for
	 * @return {@code true} if every element of {@code c} is present
	 * @throws Exception
	 *             if the queue does not exist
	 */
	@Override
	public boolean containsAll(String queueName, Collection<?> c)
			throws Exception {
		return this.getQueueByName(queueName).containsAll(c);
	}
	/**
	 * Returns the elements of the named queue as an Object array.
	 *
	 * @param queueName
	 *            the queue name
	 * @return the queue contents as an array
	 * @throws Exception
	 *             if the queue does not exist
	 */
	@Override
	public Object[] toArray(String queueName) throws Exception {
		return this.getQueueByName(queueName).toArray();
	}

	/**
	 * Returns the elements of the named queue as a typed array.
	 *
	 * @param queueName
	 *            the queue name
	 * @param a
	 *            the array into which the elements are to be stored, if big
	 *            enough
	 * @return the queue contents as a typed array
	 * @throws Exception
	 *             if the queue does not exist
	 */
	@Override
	public <T> T[] toArray(String queueName, T[] a) throws Exception {
		return this.getQueueByName(queueName).toArray(a);
	}

	/**
	 * Returns an iterator over the elements of the named queue.
	 *
	 * @param queueName
	 *            the queue name
	 * @return an iterator over the queue
	 * @throws Exception
	 *             if the queue does not exist
	 */
	@Override
	public Iterator<E> iterator(String queueName) throws Exception {
		return this.getQueueByName(queueName).iterator();
	}

	// NOTE(review): unimplemented stub — always returns null, which will NPE
	// at the caller on first use; either implement (e.g. a chained iterator
	// over the named queues) or throw UnsupportedOperationException.
	@Override
	public Iterator<E> iterator(String[] queueName) throws Exception {
		// TODO Auto-generated method stub
		return null;
	}
}
|
* @param c
* collection containing elements to be added to this queue
* @return <tt>true</tt> if this queue changed as a result of the call
|
<|file_name|>lxqt-config-globalkeyshortcuts_ar.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="ar">
<context>
<name>DefaultModel</name>
<message>
<location filename="../default_model.cpp" line="51"/>
<source>Command</source>
<translation>أمر</translation>
</message>
<message>
<location filename="../default_model.cpp" line="52"/>
<source>DBus call</source>
<translation>نداء DBus</translation>
</message>
<message>
<location filename="../default_model.cpp" line="53"/>
<source>Client</source>
<translation>عميل</translation>
</message>
<message>
<location filename="../default_model.cpp" line="143"/>
<source>Id</source>
<translation>المعرّف</translation>
</message>
<message>
<location filename="../default_model.cpp" line="146"/>
<source>Shortcut</source>
<translation>الاختصار</translation>
</message>
<message>
<location filename="../default_model.cpp" line="149"/>
<source>Description</source>
<translation>الوصف</translation>
</message>
<message>
<location filename="../default_model.cpp" line="152"/>
<source>Type</source>
<translation>النوع</translation>
</message>
<message>
<location filename="../default_model.cpp" line="155"/>
<source>Info</source>
<translation>معلومات</translation>
</message>
</context>
<context>
<name>EditActionDialog</name>
<message>
<location filename="../edit_action_dialog.ui" line="14"/>
<source>Edit Action</source>
<translation>حرّر الإجراء</translation>
</message>
<message>
<location filename="../edit_action_dialog.ui" line="31"/>
<source>&Shortcut:</source>
<translation>الا&ختصار:</translation>
</message>
<message>
<location filename="../edit_action_dialog.ui" line="74"/>
<source>&Description:</source>
<translation>الو&صف:</translation>
</message>
<message>
<location filename="../edit_action_dialog.ui" line="87"/>
<source>&Enabled</source>
<translation>م&فعّل</translation>
</message>
<message>
<location filename="../edit_action_dialog.ui" line="102"/>
<source>&Command</source>
<translation>أ&مر</translation>
</message>
<message>
<location filename="../edit_action_dialog.ui" line="109"/>
<source>&DBus message</source>
<translation>ر&سالة DBus</translation>
</message>
<message>
<location filename="../edit_action_dialog.ui" line="118"/>
<source>Type:</source>
<translation>النوع:</translation>
</message>
<message>
<location filename="../edit_action_dialog.ui" line="164"/>
<source>Co&mmand:</source>
<translation>الأم&ر:</translation>
</message>
<message>
<location filename="../edit_action_dialog.ui" line="206"/>
<source>S&ervice:</source>
<translation>ال&خدمة:</translation>
</message>
<message>
<location filename="../edit_action_dialog.ui" line="219"/>
<source>&Path:</source>
<translation>الم&سار:</translation>
</message>
<message>
<location filename="../edit_action_dialog.ui" line="232"/>
<source>&Interface:</source>
<translation>الوا&جهة:</translation>
</message>
<message>
<location filename="../edit_action_dialog.ui" line="245"/>
<source>&Method:</source>
<translation>ال&طريقة:</translation>
</message>
</context>
<context>
<name>MainWindow</name>
<message>
<location filename="../main_window.ui" line="14"/>
<source>Global Actions Manager</source>
<translation>مدير الإجراءات العمومية</translation>
</message>
<message>
<location filename="../main_window.ui" line="24"/>
<source>Search</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../main_window.ui" line="69"/>
<source>Add ...</source>
<translation>أضِف…</translation>
</message>
<message>
<location filename="../main_window.ui" line="79"/>
<source>Remove</source>
<translation>أزِل</translation>
</message>
<message>
<location filename="../main_window.ui" line="89"/>
<source>Modify ...</source>
<translation>عدّل…</translation>
</message>
<message>
<location filename="../main_window.ui" line="106"/>
<source>Swap</source>
<translation>أبدِل</translation>
</message>
<message>
<location filename="../main_window.ui" line="132"/>
<source>Multiple actions behaviour:</source><|fim▁hole|> <message>
<location filename="../main_window.ui" line="140"/>
<source>First</source>
<comment>globalkeys/config/multipleActions</comment>
<translation type="unfinished">الأول</translation>
</message>
<message>
<location filename="../main_window.ui" line="145"/>
<source>Last</source>
<comment>globalkeys/config/multipleActions</comment>
<translation type="unfinished">الأخير</translation>
</message>
<message>
<location filename="../main_window.ui" line="150"/>
<source>None</source>
<comment>globalkeys/config/multipleActions</comment>
<translation type="unfinished">بلا</translation>
</message>
<message>
<location filename="../main_window.ui" line="155"/>
<source>All</source>
<comment>globalkeys/config/multipleActions</comment>
<translation type="unfinished">الكل</translation>
</message>
<message>
<location filename="../main_window.ui" line="180"/>
<source>Default</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../main_window.ui" line="200"/>
<source>Close</source>
<translation>أغلِق</translation>
</message>
<message>
<location filename="../main_window.cpp" line="191"/>
<source>Restore Default</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../main_window.cpp" line="192"/>
<source>Your defined shortcuts will be removed.
Do you want to proceed?</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS><|fim▁end|>
|
<translation>السلوك عند وجود عدّة إجراءات:</translation>
</message>
|
<|file_name|>AverageSlopeToDivide.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2011-2012 Dr. John Lindsay <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package plugins;
import java.util.Date;
import whitebox.geospatialfiles.WhiteboxRaster;
import whitebox.interfaces.WhiteboxPlugin;
import whitebox.interfaces.WhiteboxPluginHost;
/**
* This tool calculates the average slope gradient (i.e., slope steepness in degrees) of the flowpaths that run through each grid cell in an input digital elevation model (DEM) to the upslope divide cells.
*
* @author Dr. John Lindsay email: [email protected]
*/
public class AverageSlopeToDivide implements WhiteboxPlugin {
    // Host application that receives feedback/progress callbacks; stays null
    // when the plugin runs outside the Whitebox GUI.
    private WhiteboxPluginHost myHost = null;
    // Raw user-supplied tool parameters, populated via setArgs().
    private String[] args;
    // Constants
    private static final double LnOf2 = 0.693147180559945; // ln(2); used in run() to decode power-of-two flow-direction codes
    /**
     * Retrieves the plugin tool's short, unique, space-free identifier
     * (used programmatically rather than shown to end users).
     *
     * @return String containing the plugin name.
     */
    @Override
    public String getName() {
        return "AverageSlopeToDivide";
    }
    /**
     * Retrieves the human-readable tool name (may contain spaces) shown when
     * the tool is listed in the Whitebox interface.
     *
     * @return String containing the plugin descriptive name.
     */
    @Override
    public String getDescriptiveName() {
        return "Average Flowpath Slope From Cell To Divide";
    }
/**
* Used to retrieve a short description of what the plugin tool does.
*
* @return String containing the plugin's description.
*/
@Override
public String getToolDescription() {
return "Measures the average slope gradient from each grid cell to all "
+ "upslope divide cells.";
}
/**
* Used to identify which toolboxes this plugin tool should be listed in.
*
* @return Array of Strings.
*/
@Override
public String[] getToolbox() {
String[] ret = {"FlowpathTAs"};
return ret;
}
    /**
     * Sets the WhiteboxPluginHost to which the plugin tool is tied; all
     * feedback messages, progress updates, and return objects are routed
     * through this host.
     *
     * @param host The WhiteboxPluginHost that called the plugin tool.
     */
    @Override
    public void setPluginHost(WhiteboxPluginHost host) {
        myHost = host;
    }
    /**
     * Shows a feedback pop-up message via the host, or falls back to stdout
     * when no host is attached (e.g. headless runs).
     *
     * @param message String containing the text to display.
     */
    private void showFeedback(String message) {
        if (myHost != null) {
            myHost.showFeedback(message);
        } else {
            System.out.println(message);
        }
    }
    /**
     * Hands a return object (such as an output WhiteboxRaster header path)
     * back to the host; silently dropped when no host is attached.
     *
     * @param ret Object, such as an output WhiteboxRaster.
     */
    private void returnData(Object ret) {
        if (myHost != null) {
            myHost.returnData(ret);
        }
    }
    // Last progress value/label forwarded to the host; used by the
    // updateProgress() overloads to suppress duplicate updates.
    private int previousProgress = 0;
    private String previousProgressLabel = "";
/**
* Used to communicate a progress update between a plugin tool and the main
* Whitebox user interface.
*
* @param progressLabel A String to use for the progress label.
* @param progress Float containing the progress value (between 0 and 100).
*/
private void updateProgress(String progressLabel, int progress) {
if (myHost != null && ((progress != previousProgress)
|| (!progressLabel.equals(previousProgressLabel)))) {
myHost.updateProgress(progressLabel, progress);
}
previousProgress = progress;
previousProgressLabel = progressLabel;
}
    /**
     * Forwards an unlabelled progress update to the host, skipping calls that
     * repeat the previous value.
     *
     * @param progress int containing the progress value (between 0 and 100).
     */
    private void updateProgress(int progress) {
        if (myHost != null && progress != previousProgress) {
            myHost.updateProgress(progress);
        }
        previousProgress = progress;
    }
    /**
     * Sets the arguments (parameters) used by the plugin; the array is
     * defensively cloned so later caller-side mutation has no effect.
     *
     * @param args An array of string arguments.
     */
    @Override
    public void setArgs(String[] args) {
        this.args = args.clone();
    }
    private boolean cancelOp = false; // set via setCancelOp(); polled periodically inside run()
    /**
     * Used to communicate a cancel operation from the Whitebox GUI; run()
     * checks the flag between processing rows and aborts when it is set.
     *
     * @param cancel Set to true if the plugin should be canceled.
     */
    @Override
    public void setCancelOp(boolean cancel) {
        cancelOp = cancel;
    }
    // Notifies the user and resets the progress bar after a cancellation.
    private void cancelOperation() {
        showFeedback("Operation cancelled.");
        updateProgress("Progress: ", 0);
    }
    private boolean amIActive = false; // true while run() is executing
    /**
     * Used by the Whitebox GUI to tell if this plugin is still running.
     *
     * @return a boolean describing whether or not the plugin is actively being
     * used (true from the start of run() until the host is told it completed).
     */
    @Override
    public boolean isActive() {
        return amIActive;
    }
/**
* Used to execute this plugin tool.
*/
@Override
public void run() {
amIActive = true;
String inputHeader = null;
String outputHeader = null;
String DEMHeader = null;
int row, col, x, y;
int progress = 0;
double z, val, val2, val3;
int i, c;
int[] dX = new int[]{1, 1, 1, 0, -1, -1, -1, 0};
int[] dY = new int[]{-1, 0, 1, 1, 1, 0, -1, -1};
double[] inflowingVals = new double[]{16, 32, 64, 128, 1, 2, 4, 8};
boolean flag = false;
double flowDir = 0;
double flowLength = 0;
double numUpslopeFlowpaths = 0;
double flowpathLengthToAdd = 0;
double conversionFactor = 1;
double divideElevToAdd = 0;
double radToDeg = 180 / Math.PI;
if (args.length <= 0) {
showFeedback("Plugin parameters have not been set.");
return;
}
inputHeader = args[0];
DEMHeader = args[1];
outputHeader = args[2];
conversionFactor = Double.parseDouble(args[3]);
// check to see that the inputHeader and outputHeader are not null.
if ((inputHeader == null) || (outputHeader == null)) {
showFeedback("One or more of the input parameters have not been set properly.");
return;
}
try {
WhiteboxRaster pntr = new WhiteboxRaster(inputHeader, "r");
int rows = pntr.getNumberRows();
int cols = pntr.getNumberColumns();
double noData = pntr.getNoDataValue();
double gridResX = pntr.getCellSizeX();
double gridResY = pntr.getCellSizeY();
double diagGridRes = Math.sqrt(gridResX * gridResX + gridResY * gridResY);
double[] gridLengths = new double[]{diagGridRes, gridResX, diagGridRes, gridResY, diagGridRes, gridResX, diagGridRes, gridResY};
WhiteboxRaster DEM = new WhiteboxRaster(DEMHeader, "r");
if (DEM.getNumberRows() != rows || DEM.getNumberColumns() != cols) {
showFeedback("The input files must have the same dimensions, i.e. number of "
+ "rows and columns.");
return;
}
WhiteboxRaster output = new WhiteboxRaster(outputHeader, "rw",
inputHeader, WhiteboxRaster.DataType.FLOAT, -999);
output.setPreferredPalette("blueyellow.pal");
output.setDataScale(WhiteboxRaster.DataScale.CONTINUOUS);
output.setZUnits(pntr.getXYUnits());
WhiteboxRaster numInflowingNeighbours = new WhiteboxRaster(outputHeader.replace(".dep",
"_temp1.dep"), "rw", inputHeader, WhiteboxRaster.DataType.FLOAT, 0);
numInflowingNeighbours.isTemporaryFile = true;
WhiteboxRaster numUpslopeDivideCells = new WhiteboxRaster(outputHeader.replace(".dep",
"_temp2.dep"), "rw", inputHeader, WhiteboxRaster.DataType.FLOAT, 0);
numUpslopeDivideCells.isTemporaryFile = true;
WhiteboxRaster totalFlowpathLength = new WhiteboxRaster(outputHeader.replace(".dep",
"_temp3.dep"), "rw", inputHeader, WhiteboxRaster.DataType.FLOAT, 0);
totalFlowpathLength.isTemporaryFile = true;
WhiteboxRaster totalUpslopeDivideElev = new WhiteboxRaster(outputHeader.replace(".dep",
"_temp4.dep"), "rw", inputHeader, WhiteboxRaster.DataType.FLOAT, 0);
totalUpslopeDivideElev.isTemporaryFile = true;
updateProgress("Loop 1 of 3:", 0);
for (row = 0; row < rows; row++) {
for (col = 0; col < cols; col++) {
if (pntr.getValue(row, col) != noData) {
z = 0;
for (i = 0; i < 8; i++) {
if (pntr.getValue(row + dY[i], col + dX[i]) ==
inflowingVals[i]) { z++; }
}
if (z > 0) {
numInflowingNeighbours.setValue(row, col, z);
} else {
numInflowingNeighbours.setValue(row, col, -1);
}
} else {
output.setValue(row, col, noData);
}
}
if (cancelOp) {
cancelOperation();
return;
}
progress = (int) (100f * row / (rows - 1));
updateProgress("Loop 1 of 3:", progress);
}
updateProgress("Loop 2 of 3:", 0);
for (row = 0; row < rows; row++) {
for (col = 0; col < cols; col++) {
val = numInflowingNeighbours.getValue(row, col);
if (val <= 0 && val != noData) {
flag = false;
x = col;
y = row;
do {
val = numInflowingNeighbours.getValue(y, x);
if (val <= 0 && val != noData) {
//there are no more inflowing neighbours to visit; carry on downslope
if (val == -1) {
//it's the start of a flowpath
numUpslopeDivideCells.setValue(y, x, 0);
numUpslopeFlowpaths = 1;
divideElevToAdd = DEM.getValue(y, x);
} else {
numUpslopeFlowpaths = numUpslopeDivideCells.getValue(y, x);
divideElevToAdd = totalUpslopeDivideElev.getValue(y, x);
}
numInflowingNeighbours.setValue(y, x, noData);
// find it's downslope neighbour
flowDir = pntr.getValue(y, x);
if (flowDir > 0) {
// what's the flow direction as an int?
c = (int) (Math.log(flowDir) / LnOf2);
flowLength = gridLengths[c];
val2 = totalFlowpathLength.getValue(y, x);
flowpathLengthToAdd = val2 + numUpslopeFlowpaths * flowLength;
//move x and y accordingly
x += dX[c];
y += dY[c];
numUpslopeDivideCells.setValue(y, x,
numUpslopeDivideCells.getValue(y, x) <|fim▁hole|> totalUpslopeDivideElev.setValue(y, x,
totalUpslopeDivideElev.getValue(y, x)
+ divideElevToAdd);
numInflowingNeighbours.setValue(y, x,
numInflowingNeighbours.getValue(y, x) - 1);
} else { // you've hit the edge or a pit cell.
flag = true;
}
} else {
flag = true;
}
} while (!flag);
}
}
if (cancelOp) {
cancelOperation();
return;
}
progress = (int) (100f * row / (rows - 1));
updateProgress("Loop 2 of 3:", progress);
}
numUpslopeDivideCells.flush();
totalFlowpathLength.flush();
totalUpslopeDivideElev.flush();
numInflowingNeighbours.close();
updateProgress("Loop 3 of 3:", 0);
double[] data1 = null;
double[] data2 = null;
double[] data3 = null;
double[] data4 = null;
double[] data5 = null;
for (row = 0; row < rows; row++) {
data1 = numUpslopeDivideCells.getRowValues(row);
data2 = totalFlowpathLength.getRowValues(row);
data3 = pntr.getRowValues(row);
data4 = totalUpslopeDivideElev.getRowValues(row);
data5 = DEM.getRowValues(row);
for (col = 0; col < cols; col++) {
if (data3[col] != noData) {
if (data1[col] > 0) {
val = data2[col] / data1[col];
val2 = (data4[col] / data1[col] - data5[col]) * conversionFactor;
val3 = Math.atan(val2 / val) * radToDeg;
output.setValue(row, col, val3);
} else {
output.setValue(row, col, 0);
}
} else {
output.setValue(row, col, noData);
}
}
if (cancelOp) {
cancelOperation();
return;
}
progress = (int) (100f * row / (rows - 1));
updateProgress("Loop 3 of 3:", progress);
}
output.addMetadataEntry("Created by the "
+ getDescriptiveName() + " tool.");
output.addMetadataEntry("Created on " + new Date());
pntr.close();
DEM.close();
numUpslopeDivideCells.close();
totalFlowpathLength.close();
totalUpslopeDivideElev.close();
output.close();
// returning a header file string displays the image.
returnData(outputHeader);
} catch (OutOfMemoryError oe) {
myHost.showFeedback("An out-of-memory error has occurred during operation.");
} catch (Exception e) {
myHost.showFeedback("An error has occurred during operation. See log file for details.");
myHost.logException("Error in " + getDescriptiveName(), e);
} finally {
updateProgress("Progress: ", 0);
// tells the main application that this process is completed.
amIActive = false;
myHost.pluginComplete();
}
}
}<|fim▁end|>
|
+ numUpslopeFlowpaths);
totalFlowpathLength.setValue(y, x,
totalFlowpathLength.getValue(y, x)
+ flowpathLengthToAdd);
|
<|file_name|>basic-aliasing.js<|end_file_name|><|fim▁begin|>var Shapes;
(function (Shapes) {
var Polygons;
(function (Polygons) {
var Triangle = (function () {
function Triangle() {
}
return Triangle;
})();
Polygons.Triangle = Triangle;
var Square = (function () {
function Square() {<|fim▁hole|> Polygons.Square = Square;
})(Polygons = Shapes.Polygons || (Shapes.Polygons = {}));
})(Shapes || (Shapes = {}));
var polygons = Shapes.Polygons;
var sq = new polygons.Square();<|fim▁end|>
|
}
return Square;
})();
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-<|fim▁hole|># Filename: lychee/__init__.py
# Purpose: Initialize Lychee.
#
# Copyright (C) 2016, 2017 Christopher Antila
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#--------------------------------------------------------------------------------------------------
"""
Initialize Lychee.
"""
# Public submodules re-exported by the package.
__all__ = [
    'converters',
    'document',
    'exceptions',
    'logs',
    'namespaces',
    'signals',
    'tui',
    'workflow',
    'vcs',
    'views',
]

# Pull the submodules listed in __all__ into the package namespace.
# NOTE(review): a package star-importing itself is unusual; kept as-is.
from lychee import *

# Version string presumably generated by versioneer (_version.py) — confirm.
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
#--------------------------------------------------------------------------------------------------
# Program Name: Lychee
# Program Description: MEI document manager for formalized document control
#
|
<|file_name|>mesh.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright © 2017 No0n3Left <[email protected]>
*
* This file is part of Remnant
*
* Remnant is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Remnant is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Remnant. If not, see <http://www.gnu.org/licenses/>.
*/
#include <assimp/cimport.h>
#include <assimp/postprocess.h>
#include <assimp/scene.h>
#include <functional>
#include "opengl/mesh.h"
Mesh::Mesh(const std::string& filename, unsigned int index): hash(Mesh::pre_hash(filename, index))
{
const aiScene* scene = aiImportFile(filename.c_str(), aiProcess_Triangulate | aiProcess_CalcTangentSpace);
const aiMesh* mesh = scene->mMeshes[index];
this->name = mesh->mName.C_Str();
this->vertex_count = mesh->mNumVertices;
std::cout << "Vertices = " << this->vertex_count << std::endl;
if (mesh->HasPositions()) {
GLfloat* vertex_positions = new GLfloat[this->vertex_count * 3];
for (unsigned int i = 0; i < this->vertex_count; ++i) {
const aiVector3D* vertex_position = &(mesh->mVertices[i]);
vertex_positions[i * 3 + 0] = static_cast<GLfloat>(vertex_position->x);
vertex_positions[i * 3 + 1] = static_cast<GLfloat>(vertex_position->y);
vertex_positions[i * 3 + 2] = static_cast<GLfloat>(vertex_position->z);
}
this->position_vbo.set_data(3 * sizeof(GLfloat) * this->vertex_count, vertex_positions, GL_STATIC_DRAW);
this->position_vbo.bind();
this->vao.enable_attrib(0);
this->vao.attrib_pointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
GLERRORDEBUG();
delete[] vertex_positions;
}
if (mesh->HasTextureCoords(0)) {
GLfloat* vertex_texcoords = new GLfloat[this->vertex_count * 2];
for (unsigned int i = 0; i < this->vertex_count; ++i) {
const aiVector3D* vertex_texcoord = &(mesh->mTextureCoords[0][i]);
vertex_texcoords[i * 2 + 0] = static_cast<GLfloat>(vertex_texcoord->x);
vertex_texcoords[i * 2 + 1] = static_cast<GLfloat>(vertex_texcoord->y);
}
this->texcoord_vbo.set_data(3 * sizeof(GLfloat) * this->vertex_count, vertex_texcoords, GL_STATIC_DRAW);
delete[] vertex_texcoords;
this->texcoord_vbo.bind();
this->vao.enable_attrib(1);
this->vao.attrib_pointer(1, 2, GL_FLOAT, GL_FALSE, 0, 0);<|fim▁hole|> GLERRORDEBUG();
}
if (mesh->HasNormals()) {
GLfloat* vertex_normals = new GLfloat[this->vertex_count * 3];
for (unsigned int i = 0; i < this->vertex_count; ++i) {
const aiVector3D* vertex_normal = &(mesh->mNormals[i]);
vertex_normals[i * 3 + 0] = static_cast<GLfloat>(vertex_normal->x);
vertex_normals[i * 3 + 1] = static_cast<GLfloat>(vertex_normal->y);
vertex_normals[i * 3 + 2] = static_cast<GLfloat>(vertex_normal->z);
}
this->normal_vbo.set_data(3 * sizeof(GLfloat) * this->vertex_count, vertex_normals, GL_STATIC_DRAW);
delete[] vertex_normals;
this->normal_vbo.bind();
this->vao.enable_attrib(2);
this->vao.attrib_pointer(2, 3, GL_FLOAT, GL_FALSE, 0, 0);
GLERRORDEBUG();
}
if(mesh->HasTangentsAndBitangents()) {
GLfloat* vertex_tangents = new GLfloat[this->vertex_count * 3];
for (unsigned int i = 0; i < this->vertex_count; ++i) {
const aiVector3D* vertex_tangent = &(mesh->mTangents[i]);
vertex_tangents[i * 3 + 0] = static_cast<GLfloat>(vertex_tangent->x);
vertex_tangents[i * 3 + 1] = static_cast<GLfloat>(vertex_tangent->y);
vertex_tangents[i * 3 + 2] = static_cast<GLfloat>(vertex_tangent->z);
}
this->tangent_vbo.set_data(3 * sizeof(GLfloat) * this->vertex_count, vertex_tangents, GL_STATIC_DRAW);
delete[] vertex_tangents;
this->tangent_vbo.bind();
this->vao.enable_attrib(3);
this->vao.attrib_pointer(3, 3, GL_FLOAT, GL_FALSE, 0, 0);
GLERRORDEBUG();
}
this->material = std::make_shared<Material>(scene->mMaterials[mesh->mMaterialIndex]);
aiReleaseImport(scene);
}
// Move constructor: transfers the precomputed hash, the material, the VAO and
// every VBO from `other`, leaving `other` in a valid moved-from state.
Mesh::Mesh(Mesh&& other)
    : hash(std::move(other.hash)), material(std::move(other.material)), vao(std::move(other.vao)), vertex_count(std::move(other.vertex_count)),
    position_vbo(std::move(other.position_vbo)), texcoord_vbo(std::move(other.texcoord_vbo)), normal_vbo(std::move(other.normal_vbo)), tangent_vbo(std::move(other.tangent_vbo)) {}
Mesh::~Mesh() {} // nothing to do here; presumably the VAO/VBO wrapper members release their GL resources in their own destructors — confirm
/*
Mesh& Mesh::operator=(Mesh&& other)
{
this->hash = std::move(other.hash);
this->material = std::move(other.material);
this->object_name = std::move(other.object_name);
this->vertex_count = std::move(other.vertex_count);
return *this;
}
*/
// Binds this mesh's vertex array object so subsequent draw calls use its
// attribute bindings.
void Mesh::bind() const
{
    this->vao.bind();
    GLERRORDEBUG();
}
// Returns the material built from the mesh's aiMaterial at load time.
const std::shared_ptr<Material>& Mesh::get_material() const noexcept
{
    return this->material;
}
// Exposes the underlying OpenGL VAO handle.
GLuint Mesh::get_object_name() const noexcept
{
    return this->vao.get_object_name();
}
// Number of vertices uploaded for this mesh (fixed at construction time).
unsigned int Mesh::get_vertex_count() const noexcept
{
    return this->vertex_count;
}
// Identity hash for a (filename, mesh-index) pair, computed before the mesh
// itself is loaded. The previous version hashed only the filename (flagged
// "XXX BAD REDO" by the author), so two different meshes loaded from the same
// file collided; the mesh index is now mixed in using the boost::hash_combine
// recipe.
std::size_t Mesh::pre_hash(const std::string& filename, unsigned int index)
{
    const std::size_t h1 = std::hash<std::string>()(filename);
    const std::size_t h2 = std::hash<unsigned int>()(index);
    return h1 ^ (h2 + 0x9e3779b9UL + (h1 << 6) + (h1 >> 2));
}
| |
<|file_name|>IBehaviourTreeBlackboard.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2015 Rubén Héctor García ([email protected])
*
* This program is free software: you can redistribute it and/or modify<|fim▁hole|> * it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.velonuboso.made.core.abm.api;
/**
*
* @author Rubén Héctor García ([email protected])
*/
public interface IBehaviourTreeBlackboard {
    // Marker interface: declares no methods here. Presumably tags types that
    // act as behaviour-tree blackboards (shared state holders) — confirm
    // against the implementations elsewhere in the project.
}
| |
<|file_name|>EntityCacheImpl.java<|end_file_name|><|fim▁begin|>/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.impl.persistence.cache;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.flowable.engine.common.impl.persistence.entity.Entity;
/**
* @author Joram Barrez
*/
public class EntityCacheImpl implements EntityCache {
protected Map<Class<?>, Map<String, CachedEntity>> cachedObjects = new HashMap<Class<?>, Map<String,CachedEntity>>();
    @Override
    public CachedEntity put(Entity entity, boolean storeState) {
        // Lazily create the per-class sub-map on first use of this class.
        Map<String, CachedEntity> classCache = cachedObjects.get(entity.getClass());
        if (classCache == null) {
            classCache = new HashMap<String, CachedEntity>();
            cachedObjects.put(entity.getClass(), classCache);
        }
        // Wrap the entity; storeState is forwarded to CachedEntity (presumably
        // to snapshot the state for later dirty-checking — see CachedEntity).
        CachedEntity cachedObject = new CachedEntity(entity, storeState);
        classCache.put(entity.getId(), cachedObject);
        return cachedObject;
    }
@Override
@SuppressWarnings("unchecked")
public <T> T findInCache(Class<T> entityClass, String id) {
CachedEntity cachedObject = null;
Map<String, CachedEntity> classCache = cachedObjects.get(entityClass);
<|fim▁hole|> }
if (classCache != null) {
cachedObject = classCache.get(id);
}
if (cachedObject != null) {
return (T) cachedObject.getEntity();
}
return null;
}
    /**
     * Fallback lookup: linearly scans all cached classes and returns the
     * sub-map of the first class assignable to {@code entityClass}.
     * NOTE(review): HashMap key-set iteration order is unspecified, so when
     * several subclasses match, which one is returned is arbitrary.
     *
     * @param entityClass the (super)class to look for
     * @return the cache map of the first matching subclass, or null if none
     */
    protected Map<String, CachedEntity> findClassCacheByCheckingSubclasses(Class<?> entityClass) {
        for (Class<?> clazz : cachedObjects.keySet()) {
            if (entityClass.isAssignableFrom(clazz)) {
                return cachedObjects.get(clazz);
            }
        }
        return null;
    }
@Override
public void cacheRemove(Class<?> entityClass, String entityId) {
Map<String, CachedEntity> classCache = cachedObjects.get(entityClass);
if (classCache == null) {
return;
}
classCache.remove(entityId);
}
    /**
     * Returns all cached wrappers registered under exactly {@code entityClass}.
     * Unlike {@link #findInCache(Class)}, no subclass fallback is attempted and
     * {@code null} (not an empty collection) is returned on a miss.
     */
    @Override
    public <T> Collection<CachedEntity> findInCacheAsCachedObjects(Class<T> entityClass) {
        Map<String, CachedEntity> classCache = cachedObjects.get(entityClass);
        if (classCache != null) {
            return classCache.values();
        }
        return null;
    }
    /**
     * Returns all cached entities of the given class, falling back to a
     * subclass scan when the exact class has no entry. Returns an empty list
     * (never null) when nothing matches.
     */
    @Override
    @SuppressWarnings("unchecked")
    public <T> List<T> findInCache(Class<T> entityClass) {
        Map<String, CachedEntity> classCache = cachedObjects.get(entityClass);
        if (classCache == null) {
            classCache = findClassCacheByCheckingSubclasses(entityClass);
        }
        if (classCache != null) {
            // Unwrap the CachedEntity holders into plain entities.
            List<T> entities = new ArrayList<T>(classCache.size());
            for (CachedEntity cachedObject : classCache.values()) {
                entities.add((T) cachedObject.getEntity());
            }
            return entities;
        }
        return Collections.emptyList();
    }
    /** Exposes the raw two-level cache map (class -> entity id -> cached entity). */
    public Map<Class<?>, Map<String, CachedEntity>> getAllCachedEntities() {
        return cachedObjects;
    }
    /** No-op: this in-memory cache holds no external resources to close. */
    @Override
    public void close() {
    }
    /** No-op: entries are written straight into the maps, nothing to flush. */
    @Override
    public void flush() {
    }
}<|fim▁end|>
|
if (classCache == null) {
classCache = findClassCacheByCheckingSubclasses(entityClass);
|
<|file_name|>camera.py<|end_file_name|><|fim▁begin|>"""Support for Axis camera streaming."""
from homeassistant.components.camera import SUPPORT_STREAM
from homeassistant.components.mjpeg.camera import (
CONF_MJPEG_URL,
CONF_STILL_IMAGE_URL,
MjpegCamera,
filter_urllib3_logging,
)
from homeassistant.const import (
CONF_AUTHENTICATION,
CONF_DEVICE,
CONF_HOST,
CONF_MAC,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
HTTP_DIGEST_AUTHENTICATION,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .axis_base import AxisEntityBase
from .const import DOMAIN as AXIS_DOMAIN
# URL templates for the Axis endpoints: HTTP still image, HTTP MJPEG stream,
# and the RTSP H.264 stream (with inline credentials).
AXIS_IMAGE = "http://{}:{}/axis-cgi/jpg/image.cgi"
AXIS_VIDEO = "http://{}:{}/axis-cgi/mjpg/video.cgi"
AXIS_STREAM = "rtsp://{}:{}@{}/axis-media/media.amp?videocodec=h264"
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Axis camera video stream for a config entry."""
    # Quieten urllib3's per-request log spam produced by the MJPEG client.
    filter_urllib3_logging()
    # The integration registered the device under its MAC address.
    serial_number = config_entry.data[CONF_MAC]
    device = hass.data[AXIS_DOMAIN][serial_number]
    # Build the MjpegCamera config: still/MJPEG URLs over HTTP, digest auth.
    config = {
        CONF_NAME: config_entry.data[CONF_NAME],
        CONF_USERNAME: config_entry.data[CONF_DEVICE][CONF_USERNAME],
        CONF_PASSWORD: config_entry.data[CONF_DEVICE][CONF_PASSWORD],
        CONF_MJPEG_URL: AXIS_VIDEO.format(
            config_entry.data[CONF_DEVICE][CONF_HOST],
            config_entry.data[CONF_DEVICE][CONF_PORT],
        ),
        CONF_STILL_IMAGE_URL: AXIS_IMAGE.format(
            config_entry.data[CONF_DEVICE][CONF_HOST],
            config_entry.data[CONF_DEVICE][CONF_PORT],
        ),
        CONF_AUTHENTICATION: HTTP_DIGEST_AUTHENTICATION,
    }
    async_add_entities([AxisCamera(config, device)])
class AxisCamera(AxisEntityBase, MjpegCamera):<|fim▁hole|> """Representation of a Axis camera."""
    def __init__(self, config, device):
        """Initialize Axis Communications camera component.

        Both bases are initialized explicitly because they take different
        constructor arguments (the Axis device vs. the MJPEG config dict).
        """
        AxisEntityBase.__init__(self, device)
        MjpegCamera.__init__(self, config)
    async def async_added_to_hass(self):
        """Subscribe camera events.

        Registers a dispatcher listener so the stream URLs are rebuilt when
        the device announces a new network address. The unsubscribe callback
        is appended to ``unsub_dispatcher`` — presumably cleaned up by
        AxisEntityBase on removal; confirm.
        """
        self.unsub_dispatcher.append(
            async_dispatcher_connect(
                self.hass, self.device.event_new_address, self._new_address
            )
        )
        await super().async_added_to_hass()
    @property
    def supported_features(self):
        """Return supported features (RTSP streaming only)."""
        return SUPPORT_STREAM
    async def stream_source(self):
        """Return the RTSP H.264 stream URL with embedded credentials."""
        return AXIS_STREAM.format(
            self.device.config_entry.data[CONF_DEVICE][CONF_USERNAME],
            self.device.config_entry.data[CONF_DEVICE][CONF_PASSWORD],
            self.device.host,
        )
    def _new_address(self):
        """Set new device address for video stream.

        Invoked via the dispatcher when the device host changes; only the
        host part varies — the port is reused from the config entry.
        """
        port = self.device.config_entry.data[CONF_DEVICE][CONF_PORT]
        self._mjpeg_url = AXIS_VIDEO.format(self.device.host, port)
        self._still_image_url = AXIS_IMAGE.format(self.device.host, port)
    @property
    def unique_id(self):
        """Return a unique identifier for this device (serial-number based)."""
        return f"{self.device.serial}-camera"
| |
<|file_name|>ExaquteTaskPyCOMPSs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright 2002-2019 Barcelona Supercomputing Center (www.bsc.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from exaqute.ExaquteTask import *
from pycompss.api.task import task
from pycompss.api.api import compss_wait_on
from pycompss.api.api import compss_barrier
from pycompss.api.api import compss_delete_object
from pycompss.api.api import compss_delete_file
from pycompss.api.parameter import *
from pycompss.api.implement import implement
from pycompss.api.constraint import *
class ExaquteTask(object):
    """ExaQUte task decorator backed by the PyCOMPSs ``@task`` decorator.

    All decorator arguments are forwarded unchanged to PyCOMPSs.
    """
    def __init__(self, *args, **kwargs):
        # Record globally which scheduler backend is active; read elsewhere
        # through the exaqute API for diagnostics.
        global scheduler
        scheduler = "Current scheduler is PyCOMPSs"
        # The real PyCOMPSs task decorator instance doing the work.
        self.task_instance = task(*args, **kwargs)
    def __call__(self, f):
        # Delegate decoration of f to the wrapped PyCOMPSs task decorator.
        return self.task_instance.__call__(f)
def barrier():  # Wait
    """Block until all previously submitted tasks have completed."""
    # NOTE: a stray extraction token was fused to this definition; removed.
    compss_barrier()
def get_value_from_remote(obj):  # Gather
    """Synchronize on *obj*: wait for its task to finish and return the value."""
    obj = compss_wait_on(obj)
    return obj
def delete_object(obj):  # Release
    """Tell the PyCOMPSs runtime that *obj* is no longer needed."""
    compss_delete_object(obj)
def delete_file(file_path):
    """Tell the PyCOMPSs runtime that the file at *file_path* can be removed."""
    compss_delete_file(file_path)
def compute(obj):  # Submit task
    """Return *obj* unchanged.

    With PyCOMPSs, task submission happens when the decorated function is
    called, so this is an identity function kept for API compatibility
    with other ExaQUte schedulers.
    """
    # NOTE: a stray extraction token was fused to the return line; removed.
    return obj
| |
<|file_name|>ua_handler.js<|end_file_name|><|fim▁begin|>var fs = require('fs'), path = require('path');
var uaList = fs.readFileSync(path.join(__dirname, 'ua.txt'));
uaList = uaList.toString().split('\n');
var OSs = {};
var browsers = {};
var browserVersions = {};
var unknown = [];
var osMatch = {
ios: /iOS|iPhone OS/i,
android: /Android/i,
linux: /Linux/i,
win: /Windows/i,
osx: /Mac|OS X/i,
blackberry: /BlackBerry|BB10/i,
series60: /Series 60|Series60/i,
series40: /Series 40|Series40/i,
j2me: /J2ME|MIDP/i
};
var browserMatch = {
opera: /opera/i,
ie: /msie|trident\//i,
chrome: /chrome/i,
chromium: /chromium/i,
safari: /safari|AppleWebKit/i,
firefox: /firefox/i,
blackberry: /BlackBerry/i
};
var featureMatch = {
ipad: /ipad/i,<|fim▁hole|>
uaList.forEach(function (uaName) {
var os = 'unknown';
for (var curOs in osMatch) {
if (uaName.match(osMatch[curOs])) {
os = curOs;
break;
}
}
var browser = 'unknown';
for (var curBrowser in browserMatch) {
if (uaName.match(browserMatch[curBrowser])) {
browser = curBrowser;
break;
}
}
var version = (
uaName.match(/MSIE ([\d.]+)/) ||
uaName.match( /.+(?:me|ox|on|rv|it|era|opr|ie)[\/: ]([\d.]+)/) ||
[0,'0']
)[1];
if (!OSs[os]) {
OSs[os] = 1;
} else {
OSs[os]++;
}
if (!browsers[os + ' ' + browser]) {
browsers[os + ' ' + browser] = 1;
} else {
browsers[os + ' ' + browser]++;
}
if (os == 'unknown' || browser == 'unknown') {
unknown.push(uaName);
}
if (!browserVersions[os + ' ' + browser + ' ' + version]) {
browserVersions[os + ' ' + browser + ' ' + version] = 1;
} else {
browserVersions[os + ' ' + browser + ' ' + version]++;
}
})
console.log(OSs);
console.log(browsers);
console.log(browserVersions);
// console.log(unknown);<|fim▁end|>
|
opera_mobile: /opera mini|opera mobi/i,
opera_mini: /opera mini/i,
blackberry: /blackberry/i
};
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
## OOPS this is a custom widget that works for initializing
## tinymce instances on stacked and tabular inlines
## for flatpages, just use the tinymce packaged one.
#from content.widgets import TinyMCE
from tinymce.widgets import TinyMCE
class PageForm(FlatpageForm):
class Meta:
model = FlatPage
widgets = {
'content': TinyMCE(attrs={'cols': 100, 'rows': 15}),<|fim▁hole|> """
Page Admin
"""
form = PageForm
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, PageAdmin)<|fim▁end|>
|
}
class PageAdmin(FlatPageAdmin):
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python<|fim▁hole|># -*- coding: utf-8 -*-
#
# Copyright 2010 TÜBİTAK UEKAE
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/copyleft/gpl.txt.<|fim▁end|>
| |
<|file_name|>BattleGroundHandler.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2005-2011 MaNGOS <http://getmangos.com/>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "Common.h"
#include "SharedDefines.h"
#include "WorldPacket.h"
#include "Opcodes.h"
#include "Log.h"
#include "Player.h"
#include "ObjectMgr.h"
#include "WorldSession.h"
#include "Object.h"
#include "Chat.h"
#include "BattleGroundMgr.h"
#include "BattleGroundWS.h"
#include "BattleGround.h"
#include "ArenaTeam.h"
#include "Language.h"
#include "ScriptMgr.h"
#include "World.h"
// Client clicked a battlemaster NPC: validate the NPC, resolve which
// battleground it offers, check the player's level access, and reply with
// the battleground list window.
void WorldSession::HandleBattlemasterHelloOpcode(WorldPacket & recv_data)
{
    ObjectGuid guid;
    recv_data >> guid;
    DEBUG_LOG("WORLD: Recvd CMSG_BATTLEMASTER_HELLO Message from %s", guid.GetString().c_str());
    Creature *pCreature = GetPlayer()->GetMap()->GetCreature(guid);
    if (!pCreature)
        return;
    if (!pCreature->isBattleMaster())                       // it's not battlemaster
        return;
    // Stop the npc if moving
    if (!pCreature->IsStopped())
        pCreature->StopMoving();
    // Which battleground type this battlemaster NPC offers (by creature entry).
    BattleGroundTypeId bgTypeId = sBattleGroundMgr.GetBattleMasterBG(pCreature->GetEntry());
    if (bgTypeId == BATTLEGROUND_TYPE_NONE)
        return;
    if (!_player->GetBGAccessByLevel(bgTypeId))
    {
        // temp, must be gossip message...
        SendNotification(LANG_YOUR_BG_LEVEL_REQ_ERROR);
        return;
    }
    SendBattlegGroundList(guid, bgTypeId);
}
// Build and transmit the battleground list window for the given
// battlemaster guid and battleground type.
void WorldSession::SendBattlegGroundList( ObjectGuid guid, BattleGroundTypeId bgTypeId )
{
    WorldPacket packet;
    sBattleGroundMgr.BuildBattleGroundListPacket(&packet, guid, _player, bgTypeId, 0);
    SendPacket(&packet);
}
// CMSG_BATTLEMASTER_JOIN: put the player (or their whole group) into a
// battleground queue. Validates the requested type, rejects joins from
// inside instances, handles the random-BG queue exclusivity rules, then
// adds to the queue and sends the "in queue" status packet(s).
void WorldSession::HandleBattlemasterJoinOpcode( WorldPacket & recv_data )
{
    ObjectGuid guid;
    uint32 bgTypeId_;
    uint32 instanceId;
    uint8 joinAsGroup;
    bool isPremade = false;
    Group * grp;
    recv_data >> guid;                                      // battlemaster guid
    recv_data >> bgTypeId_;                                 // battleground type id (DBC id)
    recv_data >> instanceId;                                // instance id, 0 if First Available selected
    recv_data >> joinAsGroup;                               // join as group
    if (!sBattlemasterListStore.LookupEntry(bgTypeId_))
    {
        sLog.outError("Battleground: invalid bgtype (%u) received. possible cheater? player guid %u",bgTypeId_,_player->GetGUIDLow());
        return;
    }
    BattleGroundTypeId bgTypeId = BattleGroundTypeId(bgTypeId_);
    DEBUG_LOG( "WORLD: Recvd CMSG_BATTLEMASTER_JOIN Message from %s", guid.GetString().c_str());
    // can do this, since it's battleground, not arena
    BattleGroundQueueTypeId bgQueueTypeId = BattleGroundMgr::BGQueueTypeId(bgTypeId, ARENA_TYPE_NONE);
    BattleGroundQueueTypeId bgQueueTypeIdRandom = BattleGroundMgr::BGQueueTypeId(BATTLEGROUND_RB, ARENA_TYPE_NONE);
    // ignore if player is already in BG
    if (_player->InBattleGround())
        return;
    // prevent joining from instances; only the listed continent maps are
    // allowed (Eastern Kingdoms, Kalimdor, Outland, Northrend, map 13).
    uint32 mapid = _player->GetMapId();
    if(mapid != 0 && mapid != 1 && mapid != 530 && mapid != 571 && mapid !=13)
    {
        SendNotification("You cannot join from here");
        return;
    }
    // get bg instance or bg template if instance not found
    BattleGround *bg = NULL;
    if (instanceId)
        bg = sBattleGroundMgr.GetBattleGroundThroughClientInstance(instanceId, bgTypeId);
    if (!bg && !(bg = sBattleGroundMgr.GetBattleGroundTemplate(bgTypeId)))
    {
        sLog.outError("Battleground: no available bg / template found");
        return;
    }
    // expected bracket entry
    PvPDifficultyEntry const* bracketEntry = GetBattlegroundBracketByLevel(bg->GetMapId(),_player->getLevel());
    if (!bracketEntry)
        return;
    GroupJoinBattlegroundResult err;
    // check queue conditions
    if (!joinAsGroup)
    {
        // check Deserter debuff
        if (!_player->CanJoinToBattleground())
        {
            WorldPacket data;
            sBattleGroundMgr.BuildGroupJoinedBattlegroundPacket(&data, ERR_GROUP_JOIN_BATTLEGROUND_DESERTERS);
            _player->GetSession()->SendPacket(&data);
            return;
        }
        if (_player->GetBattleGroundQueueIndex(bgQueueTypeIdRandom) < PLAYER_MAX_BATTLEGROUND_QUEUES)
        {
            //player is already in random queue
            WorldPacket data;
            sBattleGroundMgr.BuildGroupJoinedBattlegroundPacket(&data, ERR_IN_RANDOM_BG);
            _player->GetSession()->SendPacket(&data);
            return;
        }
        if(_player->InBattleGroundQueue() && bgTypeId == BATTLEGROUND_RB)
        {
            //player is already in queue, can't start random queue
            WorldPacket data;
            sBattleGroundMgr.BuildGroupJoinedBattlegroundPacket(&data, ERR_IN_NON_RANDOM_BG);
            _player->GetSession()->SendPacket(&data);
            return;
        }
        // check if already in queue
        if (_player->GetBattleGroundQueueIndex(bgQueueTypeId) < PLAYER_MAX_BATTLEGROUND_QUEUES)
            //player is already in this queue
            return;
        // check if has free queue slots
        if (!_player->HasFreeBattleGroundQueueId())
        {
            WorldPacket data;
            sBattleGroundMgr.BuildGroupJoinedBattlegroundPacket(&data, ERR_BATTLEGROUND_TOO_MANY_QUEUES);
            _player->GetSession()->SendPacket(&data);
            return;
        }
    }
    else
    {
        grp = _player->GetGroup();
        // no group found, error
        if (!grp)
            return;
        if (grp->GetLeaderGuid() != _player->GetObjectGuid())
            return;
        // Playerbot members cannot queue as premade; demote to a solo join.
        bool have_bots = false;
        for (GroupReference* itr = grp->GetFirstMember(); itr != NULL; itr = itr->next())
        {
            Player* member = itr->getSource();
            if (!member)
                continue;                                   // this should never happen
            if (member->GetPlayerbotAI())
            {
                ChatHandler(_player).PSendSysMessage("|cffff0000You cannot get in battleground queue as premade because you have bots in your group. Adding you in queue as single player.");
                have_bots = true;
                joinAsGroup = false;
                break;
            }
        }
        if (!have_bots)
        {
            // NOTE(review): err is only assigned on this path; joinAsGroup is
            // false whenever it is left unset, so the later read is guarded.
            err = grp->CanJoinBattleGroundQueue(bg, bgQueueTypeId, 0, bg->GetMaxPlayersPerTeam(), false, 0);
            isPremade = sWorld.getConfig(CONFIG_UINT32_BATTLEGROUND_PREMADE_GROUP_WAIT_FOR_MATCH) &&
                (grp->GetMembersCount() >= bg->GetMinPlayersPerTeam());
        }
    }
    // if we're here, then the conditions to join a bg are met. We can proceed in joining.
    // _player->GetGroup() was already checked, grp is already initialized
    BattleGroundQueue& bgQueue = sBattleGroundMgr.m_BattleGroundQueues[bgQueueTypeId];
    if (joinAsGroup)
    {
        GroupQueueInfo *ginfo = NULL;
        uint32 avgTime = 0;
        if(err > 0)
        {
            DEBUG_LOG("Battleground: the following players are joining as group:");
            ginfo = bgQueue.AddGroup(_player, grp, bgTypeId, bracketEntry, ARENA_TYPE_NONE, false, isPremade, 0, 0);
            avgTime = bgQueue.GetAverageQueueWaitTime(ginfo, bracketEntry->GetBracketId());
        }
        for(GroupReference *itr = grp->GetFirstMember(); itr != NULL; itr = itr->next())
        {
            Player *member = itr->getSource();
            if(!member)
                continue;                                   // this should never happen
            WorldPacket data;
            if(err <= 0)
            {
                // On failure each member only gets the error result packet.
                sBattleGroundMgr.BuildGroupJoinedBattlegroundPacket(&data, err);
                member->GetSession()->SendPacket(&data);
                continue;
            }
            // add to queue
            uint32 queueSlot = member->AddBattleGroundQueueId(bgQueueTypeId);
            // send status packet (in queue)
            sBattleGroundMgr.BuildBattleGroundStatusPacket(&data, bg, queueSlot, STATUS_WAIT_QUEUE, avgTime, 0, ginfo->arenaType);
            member->GetSession()->SendPacket(&data);
            sBattleGroundMgr.BuildGroupJoinedBattlegroundPacket(&data, err);
            member->GetSession()->SendPacket(&data);
            DEBUG_LOG("Battleground: player joined queue for bg queue type %u bg type %u: GUID %u, NAME %s",bgQueueTypeId,bgTypeId,member->GetGUIDLow(), member->GetName());
        }
        DEBUG_LOG("Battleground: group end");
    }
    else
    {
        GroupQueueInfo * ginfo = bgQueue.AddGroup(_player, NULL, bgTypeId, bracketEntry, ARENA_TYPE_NONE, false, isPremade, 0, 0);
        uint32 avgTime = bgQueue.GetAverageQueueWaitTime(ginfo, bracketEntry->GetBracketId());
        // already checked if queueSlot is valid, now just get it
        uint32 queueSlot = _player->AddBattleGroundQueueId(bgQueueTypeId);
        WorldPacket data;
        // send status packet (in queue)
        sBattleGroundMgr.BuildBattleGroundStatusPacket(&data, bg, queueSlot, STATUS_WAIT_QUEUE, avgTime, 0, ginfo->arenaType);
        SendPacket(&data);
        DEBUG_LOG("Battleground: player joined queue for bg queue type %u bg type %u: GUID %u, NAME %s",bgQueueTypeId,bgTypeId,_player->GetGUIDLow(), _player->GetName());
    }
    // Kick the queue matcher so the new entrant can be matched promptly.
    sBattleGroundMgr.ScheduleQueueUpdate(0, ARENA_TYPE_NONE, bgQueueTypeId, bgTypeId, bracketEntry->GetBracketId());
}
// MSG_BATTLEGROUND_PLAYER_POSITIONS: client asks for tracked player
// positions. Only Warsong Gulch reports real data (the flag carriers);
// other battleground types answer with an empty/default packet.
void WorldSession::HandleBattleGroundPlayerPositionsOpcode( WorldPacket & /*recv_data*/ )
{
    // empty opcode
    DEBUG_LOG("WORLD: Recvd MSG_BATTLEGROUND_PLAYER_POSITIONS Message");
    BattleGround *bg = _player->GetBattleGround();
    if(!bg)                                                 // can't be received if player not in battleground
        return;
    switch( bg->GetTypeID(true) )
    {
        case BATTLEGROUND_WS:
        {
            uint32 count1 = 0;                              // always constant zero?
            uint32 count2 = 0;                              // count of next fields
            Player *ali_plr = sObjectMgr.GetPlayer(((BattleGroundWS*)bg)->GetAllianceFlagPickerGuid());
            if (ali_plr)
                ++count2;
            Player *horde_plr = sObjectMgr.GetPlayer(((BattleGroundWS*)bg)->GetHordeFlagPickerGuid());
            if (horde_plr)
                ++count2;
            WorldPacket data(MSG_BATTLEGROUND_PLAYER_POSITIONS, (4+4+16*count1+16*count2));
            data << count1;                                 // alliance flag holders count - obsolete, now always 0
            /*for(uint8 i = 0; i < count1; ++i)
            {
                data << ObjectGuid(0);                      // guid
                data << (float)0;                           // x
                data << (float)0;                           // y
            }*/
            data << count2;                                 // horde flag holders count - obsolete, now count of next fields
            if (ali_plr)
            {
                data << ObjectGuid(ali_plr->GetObjectGuid());
                data << float(ali_plr->GetPositionX());
                data << float(ali_plr->GetPositionY());
            }
            if (horde_plr)
            {
                data << ObjectGuid(horde_plr->GetObjectGuid());
                data << float(horde_plr->GetPositionX());
                data << float(horde_plr->GetPositionY());
            }
            SendPacket(&data);
        }
        break;
        case BATTLEGROUND_EY:
            //TODO : fix me!
            break;
        case BATTLEGROUND_AB:
        case BATTLEGROUND_AV:
        {
            //for other BG types - send default
            WorldPacket data(MSG_BATTLEGROUND_PLAYER_POSITIONS, (4+4));
            data << uint32(0);
            data << uint32(0);
            SendPacket(&data);
        }
        break;
        default:
            //maybe it is sent also in arena - do nothing
            break;
    }
}
// MSG_PVP_LOG_DATA: send the scoreboard for the player's current
// battleground. Arenas are skipped here because their final log is sent
// directly from BattleGround::EndBattleGround.
void WorldSession::HandlePVPLogDataOpcode( WorldPacket & /*recv_data*/ )
{
    DEBUG_LOG( "WORLD: Recvd MSG_PVP_LOG_DATA Message");
    BattleGround *bg = _player->GetBattleGround();
    if (!bg)
        return;
    // arena finish version will send in BattleGround::EndBattleGround directly
    if (bg->isArena())
        return;
    WorldPacket data;
    sBattleGroundMgr.BuildPvpLogDataPacket(&data, bg);
    SendPacket(&data);
    DEBUG_LOG( "WORLD: Sent MSG_PVP_LOG_DATA Message");
}
// CMSG_BATTLEFIELD_LIST: client requests the battleground instance list,
// either from a battlemaster dialog or from the UI's instance-info query.
void WorldSession::HandleBattlefieldListOpcode( WorldPacket &recv_data )
{
    DEBUG_LOG( "WORLD: Recvd CMSG_BATTLEFIELD_LIST Message");
    uint32 bgTypeId;
    recv_data >> bgTypeId;                                  // id from DBC
    uint8 fromWhere;
    recv_data >> fromWhere;                                 // 0 - battlemaster (lua: ShowBattlefieldList), 1 - UI (lua: RequestBattlegroundInstanceInfo)
    uint8 unk1;
    recv_data >> unk1;                                      // unknown 3.2.2
    // Validate the client-supplied type against the DBC store before use.
    BattlemasterListEntry const* bl = sBattlemasterListStore.LookupEntry(bgTypeId);
    if (!bl)
    {
        sLog.outError("Battleground: invalid bgtype received.");
        return;
    }
    WorldPacket data;
    sBattleGroundMgr.BuildBattleGroundListPacket(&data, ObjectGuid(), _player, BattleGroundTypeId(bgTypeId), fromWhere);
    SendPacket( &data );
}
// CMSG_BATTLEFIELD_PORT: player accepted a battleground invite (action 1)
// or left the queue (action 0). Validates the request against the queue
// state, then either teleports into the instance or removes the queue slot.
// FIX: restores the error-path "return; }" after the bg-template lookup
// failure that had been displaced by extraction garbling.
void WorldSession::HandleBattleFieldPortOpcode( WorldPacket &recv_data )
{
    DEBUG_LOG( "WORLD: Recvd CMSG_BATTLEFIELD_PORT Message");
    uint8 type;                                             // arenatype if arena
    uint8 unk2;                                             // unk, can be 0x0 (may be if was invited?) and 0x1
    uint32 bgTypeId_;                                       // type id from dbc
    uint16 unk;                                             // 0x1F90 constant?
    uint8 action;                                           // enter battle 0x1, leave queue 0x0
    recv_data >> type >> unk2 >> bgTypeId_ >> unk >> action;
    if (!sBattlemasterListStore.LookupEntry(bgTypeId_))
    {
        sLog.outError("BattlegroundHandler: invalid bgtype (%u) received.", bgTypeId_);
        return;
    }
    if (type && !IsArenaTypeValid(ArenaType(type)))
    {
        sLog.outError("BattlegroundHandler: Invalid CMSG_BATTLEFIELD_PORT received from player (%u), arena type wrong: %u.", _player->GetGUIDLow(), type);
        return;
    }
    if (!_player->InBattleGroundQueue())
    {
        sLog.outError("BattlegroundHandler: Invalid CMSG_BATTLEFIELD_PORT received from player (%u), he is not in bg_queue.", _player->GetGUIDLow());
        return;
    }
    //get GroupQueueInfo from BattleGroundQueue
    BattleGroundTypeId bgTypeId = BattleGroundTypeId(bgTypeId_);
    BattleGroundQueueTypeId bgQueueTypeId = BattleGroundMgr::BGQueueTypeId(bgTypeId, ArenaType(type));
    BattleGroundQueue& bgQueue = sBattleGroundMgr.m_BattleGroundQueues[bgQueueTypeId];
    //we must use temporary variable, because GroupQueueInfo pointer can be deleted in BattleGroundQueue::RemovePlayer() function
    GroupQueueInfo ginfo;
    if (!bgQueue.GetPlayerGroupInfoData(_player->GetObjectGuid(), &ginfo))
    {
        sLog.outError("BattlegroundHandler: itrplayerstatus not found.");
        return;
    }
    // if action == 1, then instanceId is required
    if (!ginfo.IsInvitedToBGInstanceGUID && action == 1)
    {
        sLog.outError("BattlegroundHandler: instance not found.");
        return;
    }
    BattleGround *bg = sBattleGroundMgr.GetBattleGround(ginfo.IsInvitedToBGInstanceGUID, bgTypeId);
    // bg template might and must be used in case of leaving queue, when instance is not created yet
    if (!bg && action == 0)
        bg = sBattleGroundMgr.GetBattleGroundTemplate(bgTypeId);
    if (!bg)
    {
        sLog.outError("BattlegroundHandler: bg_template not found for type id %u.", bgTypeId);
        return;
    }
    // expected bracket entry
    PvPDifficultyEntry const* bracketEntry = GetBattlegroundBracketByLevel(bg->GetMapId(),_player->getLevel());
    if (!bracketEntry)
        return;
    //some checks if player isn't cheating - it is not exactly cheating, but we cannot allow it
    if (action == 1 && ginfo.arenaType == ARENA_TYPE_NONE)
    {
        //if player is trying to enter battleground (not arena!) and he has deserter debuff, we must just remove him from queue
        if (!_player->CanJoinToBattleground())
        {
            //send bg command result to show nice message
            WorldPacket data2;
            sBattleGroundMgr.BuildGroupJoinedBattlegroundPacket(&data2, ERR_GROUP_JOIN_BATTLEGROUND_DESERTERS);
            _player->GetSession()->SendPacket(&data2);
            action = 0;                                     // demote to "leave queue"
            DEBUG_LOG("Battleground: player %s (%u) has a deserter debuff, do not port him to battleground!", _player->GetName(), _player->GetGUIDLow());
        }
        //if player don't match battleground max level, then do not allow him to enter! (this might happen when player leveled up during his waiting in queue
        if (_player->getLevel() > bg->GetMaxLevel())
        {
            sLog.outError("Battleground: Player %s (%u) has level (%u) higher than maxlevel (%u) of battleground (%u)! Do not port him to battleground!",
                _player->GetName(), _player->GetGUIDLow(), _player->getLevel(), bg->GetMaxLevel(), bg->GetTypeID());
            action = 0;
        }
    }
    uint32 queueSlot = _player->GetBattleGroundQueueIndex(bgQueueTypeId);
    WorldPacket data;
    switch( action )
    {
        case 1:                                             // port to battleground
            if (!_player->IsInvitedForBattleGroundQueueType(bgQueueTypeId))
                return;                                     // cheating?
            if (!_player->InBattleGround())
                _player->SetBattleGroundEntryPoint();
            // resurrect the player
            if (!_player->isAlive())
            {
                _player->ResurrectPlayer(1.0f);
                _player->SpawnCorpseBones();
            }
            // stop taxi flight at port
            if (_player->IsTaxiFlying())
            {
                _player->GetMotionMaster()->MovementExpired();
                _player->m_taxi.ClearTaxiDestinations();
            }
            sBattleGroundMgr.BuildBattleGroundStatusPacket(&data, bg, queueSlot, STATUS_IN_PROGRESS, 0, bg->GetStartTime(), bg->GetArenaType());
            _player->GetSession()->SendPacket(&data);
            // remove battleground queue status from BGmgr
            bgQueue.RemovePlayer(_player->GetObjectGuid(), false);
            // this is still needed here if battleground "jumping" shouldn't add deserter debuff
            // also this is required to prevent stuck at old battleground after SetBattleGroundId set to new
            if (BattleGround *currentBg = _player->GetBattleGround())
                currentBg->RemovePlayerAtLeave(_player->GetObjectGuid(), false, true);
            // set the destination instance id
            _player->SetBattleGroundId(bg->GetInstanceID(), bgTypeId);
            // set the destination team
            _player->SetBGTeam(ginfo.GroupTeam);
            // bg->HandleBeforeTeleportToBattleGround(_player);
            _player->RemoveSpellsCausingAura(SPELL_AURA_MOUNTED);
            _player->RemoveSpellsCausingAura(SPELL_AURA_FLY);
            sBattleGroundMgr.SendToBattleGround(_player, ginfo.IsInvitedToBGInstanceGUID, bgTypeId);
            // add only in HandleMoveWorldPortAck()
            // bg->AddPlayer(_player,team);
            DEBUG_LOG("Battleground: player %s (%u) joined battle for bg %u, bgtype %u, queue type %u.", _player->GetName(), _player->GetGUIDLow(), bg->GetInstanceID(), bg->GetTypeID(), bgQueueTypeId);
            break;
        case 0:                                             // leave queue
            // if player leaves rated arena match before match start, it is counted as he played but he lost
            /*if (ginfo.IsRated && ginfo.IsInvitedToBGInstanceGUID)
            {
                ArenaTeam * at = sObjectMgr.GetArenaTeamById(ginfo.ArenaTeamId);
                if (at)
                {
                    DEBUG_LOG("UPDATING memberLost's personal arena rating for %s by opponents rating: %u, because he has left queue!", _player->GetGuidStr().c_str(), ginfo.OpponentsTeamRating);
                    at->MemberLost(_player, ginfo.OpponentsTeamRating);
                    at->SaveToDB();
                }
            }*/
            _player->RemoveBattleGroundQueueId(bgQueueTypeId);  // must be called this way, because if you move this call to queue->removeplayer, it causes bugs
            sBattleGroundMgr.BuildBattleGroundStatusPacket(&data, bg, queueSlot, STATUS_NONE, 0, 0, ARENA_TYPE_NONE);
            bgQueue.RemovePlayer(_player->GetObjectGuid(), true);
            // player left queue, we should update it - do not update Arena Queue
            if (ginfo.arenaType == ARENA_TYPE_NONE)
                sBattleGroundMgr.ScheduleQueueUpdate(ginfo.ArenaTeamRating, ginfo.arenaType, bgQueueTypeId, bgTypeId, bracketEntry->GetBracketId());
            SendPacket(&data);
            DEBUG_LOG("Battleground: player %s (%u) left queue for bgtype %u, queue type %u.", _player->GetName(), _player->GetGUIDLow(), bg->GetTypeID(), bgQueueTypeId);
            break;
        default:
            sLog.outError("Battleground port: unknown action %u", action);
            break;
    }
}
{
DEBUG_LOG( "WORLD: Recvd CMSG_LEAVE_BATTLEFIELD Message");
recv_data.read_skip<uint8>(); // unk1
recv_data.read_skip<uint8>(); // unk2
recv_data.read_skip<uint32>(); // BattleGroundTypeId
recv_data.read_skip<uint16>(); // unk3
//if(bgTypeId >= MAX_BATTLEGROUND_TYPES) // cheating? but not important in this case
// return;
// not allow leave battleground in combat
if (_player->isInCombat())
if (BattleGround* bg = _player->GetBattleGround())
if (bg->GetStatus() != STATUS_WAIT_LEAVE)
return;
_player->LeaveBattleground();
}
void WorldSession::HandleBattlefieldStatusOpcode( WorldPacket & /*recv_data*/ )
{
// empty opcode
DEBUG_LOG( "WORLD: Battleground status" );
WorldPacket data;
// we must update all queues here
BattleGround *bg = NULL;
for (uint8 i = 0; i < PLAYER_MAX_BATTLEGROUND_QUEUES; ++i)
{
BattleGroundQueueTypeId bgQueueTypeId = _player->GetBattleGroundQueueTypeId(i);
if (!bgQueueTypeId)
continue;
BattleGroundTypeId bgTypeId = BattleGroundMgr::BGTemplateId(bgQueueTypeId);
ArenaType arenaType = BattleGroundMgr::BGArenaType(bgQueueTypeId);
if (bgTypeId == _player->GetBattleGroundTypeId())
{
bg = _player->GetBattleGround();
//i cannot check any variable from player class because player class doesn't know if player is in 2v2 / 3v3 or 5v5 arena
//so i must use bg pointer to get that information
if (bg && bg->GetArenaType() == arenaType)
{
// this line is checked, i only don't know if GetStartTime is changing itself after bg end!
// send status in BattleGround
sBattleGroundMgr.BuildBattleGroundStatusPacket(&data, bg, i, STATUS_IN_PROGRESS, bg->GetEndTime(), bg->GetStartTime(), arenaType);
SendPacket(&data);
continue;
}
}
//we are sending update to player about queue - he can be invited there!
//get GroupQueueInfo for queue status
BattleGroundQueue& bgQueue = sBattleGroundMgr.m_BattleGroundQueues[bgQueueTypeId];
GroupQueueInfo ginfo;
if (!bgQueue.GetPlayerGroupInfoData(_player->GetObjectGuid(), &ginfo))
continue;
if (ginfo.IsInvitedToBGInstanceGUID)
{
bg = sBattleGroundMgr.GetBattleGround(ginfo.IsInvitedToBGInstanceGUID, bgTypeId);
if (!bg)
continue;
uint32 remainingTime = WorldTimer::getMSTimeDiff(WorldTimer::getMSTime(), ginfo.RemoveInviteTime);
// send status invited to BattleGround
sBattleGroundMgr.BuildBattleGroundStatusPacket(&data, bg, i, STATUS_WAIT_JOIN, remainingTime, 0, arenaType);
SendPacket(&data);
}
else
{
bg = sBattleGroundMgr.GetBattleGroundTemplate(bgTypeId);
if (!bg)
continue;
// expected bracket entry
PvPDifficultyEntry const* bracketEntry = GetBattlegroundBracketByLevel(bg->GetMapId(),_player->getLevel());
if (!bracketEntry)
continue;
uint32 avgTime = bgQueue.GetAverageQueueWaitTime(&ginfo, bracketEntry->GetBracketId());
// send status in BattleGround Queue
sBattleGroundMgr.BuildBattleGroundStatusPacket(&data, bg, i, STATUS_WAIT_QUEUE, avgTime, WorldTimer::getMSTimeDiff(ginfo.JoinTime, WorldTimer::getMSTime()), arenaType);
SendPacket(&data);
}
}
}
void WorldSession::HandleAreaSpiritHealerQueryOpcode( WorldPacket & recv_data )
{
DEBUG_LOG("WORLD: CMSG_AREA_SPIRIT_HEALER_QUERY");
BattleGround *bg = _player->GetBattleGround();
if (!bg)
return;
ObjectGuid guid;
recv_data >> guid;
Creature *unit = GetPlayer()->GetMap()->GetCreature(guid);
if (!unit)
return;
if(!unit->isSpiritService()) // it's not spirit service
return;
unit->SendAreaSpiritHealerQueryOpcode(GetPlayer());
}
void WorldSession::HandleAreaSpiritHealerQueueOpcode( WorldPacket & recv_data )
{
DEBUG_LOG("WORLD: CMSG_AREA_SPIRIT_HEALER_QUEUE");
BattleGround *bg = _player->GetBattleGround();
if (!bg)
return;
ObjectGuid guid;
recv_data >> guid;
Creature *unit = GetPlayer()->GetMap()->GetCreature(guid);
if (!unit)
return;
if(!unit->isSpiritService()) // it's not spirit service
return;
sScriptMgr.OnGossipHello(GetPlayer(), unit);
}
void WorldSession::HandleBattlemasterJoinArena( WorldPacket & recv_data )
{
DEBUG_LOG("WORLD: CMSG_BATTLEMASTER_JOIN_ARENA");
//recv_data.hexlike();
ObjectGuid guid; // arena Battlemaster guid
uint8 arenaslot; // 2v2, 3v3 or 5v5
uint8 asGroup; // asGroup
uint8 isRated; // isRated
recv_data >> guid >> arenaslot >> asGroup >> isRated;
// ignore if we already in BG or BG queue
if (_player->InBattleGround())
return;
Creature *unit = GetPlayer()->GetMap()->GetCreature(guid);
if (!unit)
return;
if(!unit->isBattleMaster()) // it's not battle master
return;
ArenaType arenatype;
uint32 arenaRating = 0;
switch(arenaslot)
{
case 0:
arenatype = ARENA_TYPE_2v2;
break;
case 1:
arenatype = ARENA_TYPE_3v3;
break;
case 2:
arenatype = ARENA_TYPE_5v5;
break;
default:
sLog.outError("Unknown arena slot %u at HandleBattlemasterJoinArena()", arenaslot);
return;
}
// check existence
BattleGround* bg = sBattleGroundMgr.GetBattleGroundTemplate(BATTLEGROUND_AA);
if (!bg)
{
sLog.outError("Battleground: template bg (all arenas) not found");
return;
}
BattleGroundTypeId bgTypeId = bg->GetTypeID();
BattleGroundQueueTypeId bgQueueTypeId = BattleGroundMgr::BGQueueTypeId(bgTypeId, arenatype);
PvPDifficultyEntry const* bracketEntry = GetBattlegroundBracketByLevel(bg->GetMapId(),_player->getLevel());
if (!bracketEntry)
return;
GroupJoinBattlegroundResult err;
Group * grp = NULL;
// check queue conditions
if (!asGroup)
{
// you can't join in this way by client
if (isRated)
return;
// check if already in queue
if (_player->GetBattleGroundQueueIndex(bgQueueTypeId) < PLAYER_MAX_BATTLEGROUND_QUEUES)
//player is already in this queue
return;
// check if has free queue slots
if (!_player->HasFreeBattleGroundQueueId())
return;
}
else
{
grp = _player->GetGroup();
// no group found, error
if (!grp)
return;
if (grp->GetLeaderGuid() != _player->GetObjectGuid())
return;
// may be Group::CanJoinBattleGroundQueue should be moved to player class...
err = grp->CanJoinBattleGroundQueue(bg, bgQueueTypeId, arenatype, arenatype, (bool)isRated, arenaslot);
}
uint32 ateamId = 0;
if (isRated)
{
ateamId = _player->GetArenaTeamId(arenaslot);
// check real arena team existence only here (if it was moved to group->CanJoin .. () then we would have to get it twice)
ArenaTeam * at = sObjectMgr.GetArenaTeamById(ateamId);
if (!at)
{
_player->GetSession()->SendNotInArenaTeamPacket(arenatype);
return;
}
// get the team rating for queue
arenaRating = at->GetRating();
// the arena team id must match for everyone in the group
// get the personal ratings for queue
uint32 avg_pers_rating = 0;
for(Group::member_citerator citr = grp->GetMemberSlots().begin(); citr != grp->GetMemberSlots().end(); ++citr)
{
ArenaTeamMember const* at_member = at->GetMember(citr->guid);
if (!at_member) // group member joining to arena must be in leader arena team
return;
avg_pers_rating += at_member->matchmaker_rating;
}
avg_pers_rating /= grp->GetMembersCount();
/* Save mmr before enter arena (matchmaker rating fix) */
at->SetBattleRating(avg_pers_rating);
arenaRating = avg_pers_rating;
}
BattleGroundQueue &bgQueue = sBattleGroundMgr.m_BattleGroundQueues[bgQueueTypeId];
if (asGroup)
{
uint32 avgTime = 0;
if(err > 0)
{
DEBUG_LOG("Battleground: arena join as group start");
if (isRated)
DEBUG_LOG("Battleground: arena team id %u, leader %s queued with rating %u for type %u",_player->GetArenaTeamId(arenaslot),_player->GetName(),arenaRating,arenatype);
GroupQueueInfo * ginfo = bgQueue.AddGroup(_player, grp, bgTypeId, bracketEntry, arenatype, isRated, false, arenaRating, ateamId);
avgTime = bgQueue.GetAverageQueueWaitTime(ginfo, bracketEntry->GetBracketId());
}
for(GroupReference *itr = grp->GetFirstMember(); itr != NULL; itr = itr->next())
{
Player *member = itr->getSource();
if(!member)
continue;
WorldPacket data;
if(err <= 0)
{
sBattleGroundMgr.BuildGroupJoinedBattlegroundPacket(&data, err);
member->GetSession()->SendPacket(&data);
continue;
}
// add to queue
uint32 queueSlot = member->AddBattleGroundQueueId(bgQueueTypeId);
// send status packet (in queue)
sBattleGroundMgr.BuildBattleGroundStatusPacket(&data, bg, queueSlot, STATUS_WAIT_QUEUE, avgTime, 0, arenatype);
member->GetSession()->SendPacket(&data);
sBattleGroundMgr.BuildGroupJoinedBattlegroundPacket(&data, err);
member->GetSession()->SendPacket(&data);
DEBUG_LOG("Battleground: player joined queue for arena as group bg queue type %u bg type %u: GUID %u, NAME %s", bgQueueTypeId, bgTypeId, member->GetGUIDLow(), member->GetName());
}
DEBUG_LOG("Battleground: arena join as group end");
}
else
{
GroupQueueInfo * ginfo = bgQueue.AddGroup(_player, NULL, bgTypeId, bracketEntry, arenatype, isRated, false, arenaRating, ateamId);
uint32 avgTime = bgQueue.GetAverageQueueWaitTime(ginfo, bracketEntry->GetBracketId());
uint32 queueSlot = _player->AddBattleGroundQueueId(bgQueueTypeId);
WorldPacket data;
// send status packet (in queue)
sBattleGroundMgr.BuildBattleGroundStatusPacket(&data, bg, queueSlot, STATUS_WAIT_QUEUE, avgTime, 0, arenatype);
SendPacket(&data);
DEBUG_LOG("Battleground: player joined queue for arena, skirmish, bg queue type %u bg type %u: GUID %u, NAME %s",bgQueueTypeId,bgTypeId,_player->GetGUIDLow(), _player->GetName());
}
sBattleGroundMgr.ScheduleQueueUpdate(arenaRating, arenatype, bgQueueTypeId, bgTypeId, bracketEntry->GetBracketId());
}
void WorldSession::HandleReportPvPAFK( WorldPacket & recv_data )
{
ObjectGuid playerGuid;
recv_data >> playerGuid;
Player *reportedPlayer = sObjectMgr.GetPlayer(playerGuid);
if (!reportedPlayer)
{
DEBUG_LOG("WorldSession::HandleReportPvPAFK: player not found");
return;
}
DEBUG_LOG("WorldSession::HandleReportPvPAFK: %s reported %s", _player->GetName(), reportedPlayer->GetName());
reportedPlayer->ReportedAfkBy(_player);
}<|fim▁end|>
|
return;
}
|
<|file_name|>collect.py<|end_file_name|><|fim▁begin|># (c) Nelen & Schuurmans. GPL licensed, see LICENSE.rst.
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import print_function
import logging
import requests
logger = logging.getLogger(__name__)
def collect_filters(url):
"""Return filters from FEWS, cleaned and ready for storing as json."""
from_fews = _download(url)
result = []
for filter_dict in from_fews:
result.append(_process_filter_dict(filter_dict))
return result<|fim▁hole|>
def collect_parameters(url):
from_fews = _download(url)
# TODO
return from_fews
def collect_locations(url):
from_fews = _download(url)
# TODO
return from_fews
def _download(url):
r = requests.get(url)
r.raise_for_status() # Only raises an error when not succesful.
return r.json()
def _process_filter_dict(filter_dict):
# {'filter': {name, childfilters, etc}
content = filter_dict['filter']
name = content['name']
description = content['description']
if name == description:
# Description is only interesting if it is different from the name.
# Often it is the same, so we've got to filter it out.
description = ''
children = [_process_filter_dict(child_filter_dict)
for child_filter_dict in content.get('childFilters', [])]
result = {'id': content['id'],
'name': name,
'description': description,
'children': children}
return result<|fim▁end|>
| |
<|file_name|>launchtree_loader.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
from roslaunch.xmlloader import XmlLoader, loader
from rosgraph.names import get_ros_namespace
from rqt_launchtree.launchtree_context import LaunchtreeContext
class LaunchtreeLoader(XmlLoader):
def _include_tag(self, tag, context, ros_config, default_machine, is_core, verbose):
inc_filename = self.resolve_args(tag.attributes['file'].value, context)
ros_config.push_level(inc_filename, unique=True)
result = super(LaunchtreeLoader, self)._include_tag(tag, context, ros_config, default_machine, is_core, verbose)
ros_config.pop_level()
return result
def _node_tag(self, tag, context, ros_config, default_machine, is_test=False, verbose=True):
try:
if is_test:
self._check_attrs(tag, context, ros_config, XmlLoader.TEST_ATTRS)
(name,) = self.opt_attrs(tag, context, ('name',))
test_name, time_limit, retry = self._test_attrs(tag, context)
if not name:
name = test_name
else:
self._check_attrs(tag, context, ros_config, XmlLoader.NODE_ATTRS)
(name,) = self.reqd_attrs(tag, context, ('name',))
except Exception as e:
pass # will be handled in super
ros_config.push_level(name)
result = super(LaunchtreeLoader, self)._node_tag(tag, context, ros_config, default_machine, is_test, verbose)
ros_config.pop_level()
return result
def _rosparam_tag(self, tag, context, ros_config, verbose):
param_file = tag.attributes['file'].value \
if tag.attributes.has_key('file') else ''
if param_file != '':
param_filename = self.resolve_args(param_file, context)
level_name = ros_config.push_level(param_filename, unique=True)
result = super(LaunchtreeLoader, self)._rosparam_tag(tag, context, ros_config, verbose)<|fim▁hole|> return result
def _load_launch(self, launch, ros_config, is_core=False, filename=None, argv=None, verbose=True):
if argv is None:
argv = sys.argv
self._launch_tag(launch, ros_config, filename)
self.root_context = LaunchtreeContext(get_ros_namespace(), filename, config=ros_config)
loader.load_sysargs_into_context(self.root_context, argv)
if len(launch.getElementsByTagName('master')) > 0:
print "WARNING: ignoring defunct <master /> tag"
self._recurse_load(ros_config, launch.childNodes, self.root_context, None, is_core, verbose)<|fim▁end|>
|
if param_file != '':
ros_config.pop_level()
context.add_rosparam(tag.attributes.get('command', 'load'), param_filename, level_name)
|
<|file_name|>268-missing-number.py<|end_file_name|><|fim▁begin|>class Solution(object):
def missingNumber(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
xor = len(nums)
for i, n in enumerate(nums):
xor ^= n
xor ^= i
return xor
inputs = [
[0],
[1],
[3,0,1],
[9,6,4,2,3,5,7,0,1]
]
<|fim▁hole|><|fim▁end|>
|
s = Solution()
for i in inputs:
print s.missingNumber(i)
|
<|file_name|>simple.js<|end_file_name|><|fim▁begin|>const Homeassistant = require('./../index')
// Connect to home-assistant
let ha = new Homeassistant({
host: '192.168.1.166'
})
ha.connect()
.then(() => {
// subscribe to state changes
ha.on('state:media_player.spotify', data => {
console.log(data)
})
// access current state
console.log(ha.state('sun.sun'))
// call a service
return ha.call({
domain: 'light',
service: 'turn_on'
})<|fim▁hole|> .catch(console.error)
ha.on('connection', info => {
console.log('connection changed', info)
})<|fim▁end|>
|
})
|
<|file_name|>steamstoreprice.py<|end_file_name|><|fim▁begin|>from steamstoreprice.exception import UrlNotSteam, PageNotFound, RequestGenericError
from bs4 import BeautifulSoup
import requests
class SteamStorePrice:
def normalizeurl(self, url):
"""
clean the url from referal and other stuff
:param url(string): amazon url
:return: string(url cleaned)
"""
if "://store.steampowered.com/app" in url:
return url
else:
raise UrlNotSteam("Please check the url, it doesn't contain store.steampowered.com/app*")<|fim▁hole|> remove the currenty from price
:param price(string): price tag find on amazon store
:return: float(price cleaned)
"""
listreplace = ["€", "$", "£", "\t", "\r\n"]
for replacestring in listreplace:
price = price.replace(replacestring, "")
return float(price.replace(",", "."))
def getpage(self, url):
"""
Get the page and raise if status_code is not equal to 200
:param url(string): normalized(url)
:return: bs4(html)
"""
url = self.normalizeurl(url)
req = requests.get(url)
if req.status_code == 200:
return BeautifulSoup(req.text, "html.parser")
elif req.status_code == 404:
raise PageNotFound("Page not found, please check url")
else:
raise RequestGenericError("Return Code: %s, please check url" % req.status_code)
def getprice(self, url):
"""
Find the price on AmazonStore starting from URL
:param url(string): url
:return: float(price cleaned)
"""
body_content = self.getpage(self.normalizeurl(url))
try:
return self.normalizeprice(body_content.find("div", {"class": "game_purchase_price"}).contents[0])
except AttributeError:
return self.normalizeprice(body_content.find("div", {"class": "discount_final_price"}).contents[0])<|fim▁end|>
|
def normalizeprice(self, price):
"""
|
<|file_name|>scrape.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package retrieval
import (
"fmt"
"io"
"net/http"
"sync"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/expfmt"
"github.com/prometheus/common/log"
"github.com/prometheus/common/model"
"github.com/prometheus/common/version"
"golang.org/x/net/context"
"golang.org/x/net/context/ctxhttp"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/storage/local"
"github.com/prometheus/prometheus/util/httputil"
)
const (
scrapeHealthMetricName = "up"
scrapeDurationMetricName = "scrape_duration_seconds"
scrapeSamplesMetricName = "scrape_samples_scraped"
samplesPostRelabelMetricName = "scrape_samples_post_metric_relabeling"
)
var (
targetIntervalLength = prometheus.NewSummaryVec(
prometheus.SummaryOpts{
Name: "prometheus_target_interval_length_seconds",
Help: "Actual intervals between scrapes.",
Objectives: map[float64]float64{0.01: 0.001, 0.05: 0.005, 0.5: 0.05, 0.90: 0.01, 0.99: 0.001},
},
[]string{"interval"},
)
targetSkippedScrapes = prometheus.NewCounter(
prometheus.CounterOpts{
Name: "prometheus_target_skipped_scrapes_total",
Help: "Total number of scrapes that were skipped because the metric storage was throttled.",
},
)
targetReloadIntervalLength = prometheus.NewSummaryVec(
prometheus.SummaryOpts{
Name: "prometheus_target_reload_length_seconds",
Help: "Actual interval to reload the scrape pool with a given configuration.",
Objectives: map[float64]float64{0.01: 0.001, 0.05: 0.005, 0.5: 0.05, 0.90: 0.01, 0.99: 0.001},
},
[]string{"interval"},
)
targetSyncIntervalLength = prometheus.NewSummaryVec(
prometheus.SummaryOpts{
Name: "prometheus_target_sync_length_seconds",
Help: "Actual interval to sync the scrape pool.",
Objectives: map[float64]float64{0.01: 0.001, 0.05: 0.005, 0.5: 0.05, 0.90: 0.01, 0.99: 0.001},
},
[]string{"scrape_job"},
)
targetScrapePoolSyncsCounter = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "prometheus_target_scrape_pool_sync_total",
Help: "Total number of syncs that were executed on a scrape pool.",
},
[]string{"scrape_job"},
)
targetScrapeSampleLimit = prometheus.NewCounter(
prometheus.CounterOpts{
Name: "prometheus_target_scrapes_exceeded_sample_limit_total",
Help: "Total number of scrapes that hit the sample limit and were rejected.",
},
)
)
func init() {
prometheus.MustRegister(targetIntervalLength)
prometheus.MustRegister(targetSkippedScrapes)
prometheus.MustRegister(targetReloadIntervalLength)
prometheus.MustRegister(targetSyncIntervalLength)
prometheus.MustRegister(targetScrapePoolSyncsCounter)
prometheus.MustRegister(targetScrapeSampleLimit)
}
// scrapePool manages scrapes for sets of targets.
type scrapePool struct {
appender storage.SampleAppender
ctx context.Context
mtx sync.RWMutex
config *config.ScrapeConfig
client *http.Client
// Targets and loops must always be synchronized to have the same
// set of hashes.
targets map[uint64]*Target
loops map[uint64]loop
// Constructor for new scrape loops. This is settable for testing convenience.
newLoop func(context.Context, scraper, storage.SampleAppender, model.LabelSet, *config.ScrapeConfig) loop
}
func newScrapePool(ctx context.Context, cfg *config.ScrapeConfig, app storage.SampleAppender) *scrapePool {
client, err := httputil.NewClientFromConfig(cfg.HTTPClientConfig)
if err != nil {
// Any errors that could occur here should be caught during config validation.
log.Errorf("Error creating HTTP client for job %q: %s", cfg.JobName, err)
}
return &scrapePool{
appender: app,
config: cfg,
ctx: ctx,
client: client,
targets: map[uint64]*Target{},
loops: map[uint64]loop{},
newLoop: newScrapeLoop,
}
}
// stop terminates all scrape loops and returns after they all terminated.
func (sp *scrapePool) stop() {
var wg sync.WaitGroup
sp.mtx.Lock()
defer sp.mtx.Unlock()
for fp, l := range sp.loops {
wg.Add(1)
go func(l loop) {
l.stop()
wg.Done()
}(l)
delete(sp.loops, fp)
delete(sp.targets, fp)
}
wg.Wait()
}
// reload the scrape pool with the given scrape configuration. The target state is preserved
// but all scrape loops are restarted with the new scrape configuration.
// This method returns after all scrape loops that were stopped have fully terminated.
func (sp *scrapePool) reload(cfg *config.ScrapeConfig) {
start := time.Now()
sp.mtx.Lock()
defer sp.mtx.Unlock()
client, err := httputil.NewClientFromConfig(cfg.HTTPClientConfig)
if err != nil {
// Any errors that could occur here should be caught during config validation.
log.Errorf("Error creating HTTP client for job %q: %s", cfg.JobName, err)
}
sp.config = cfg
sp.client = client
var (
wg sync.WaitGroup
interval = time.Duration(sp.config.ScrapeInterval)
timeout = time.Duration(sp.config.ScrapeTimeout)
)
for fp, oldLoop := range sp.loops {
var (
t = sp.targets[fp]
s = &targetScraper{
Target: t,
client: sp.client,
timeout: timeout,
}
newLoop = sp.newLoop(sp.ctx, s, sp.appender, t.Labels(), sp.config)
)
wg.Add(1)
go func(oldLoop, newLoop loop) {
oldLoop.stop()
wg.Done()
go newLoop.run(interval, timeout, nil)
}(oldLoop, newLoop)
sp.loops[fp] = newLoop
}
wg.Wait()
targetReloadIntervalLength.WithLabelValues(interval.String()).Observe(
time.Since(start).Seconds(),
)
}
// Sync converts target groups into actual scrape targets and synchronizes
// the currently running scraper with the resulting set.
func (sp *scrapePool) Sync(tgs []*config.TargetGroup) {
start := time.Now()
var all []*Target
for _, tg := range tgs {
targets, err := targetsFromGroup(tg, sp.config)
if err != nil {
log.With("err", err).Error("creating targets failed")
continue
}
all = append(all, targets...)
}
sp.sync(all)
targetSyncIntervalLength.WithLabelValues(sp.config.JobName).Observe(
time.Since(start).Seconds(),
)
targetScrapePoolSyncsCounter.WithLabelValues(sp.config.JobName).Inc()
}
// sync takes a list of potentially duplicated targets, deduplicates them, starts
// scrape loops for new targets, and stops scrape loops for disappeared targets.
// It returns after all stopped scrape loops terminated.
func (sp *scrapePool) sync(targets []*Target) {
sp.mtx.Lock()
defer sp.mtx.Unlock()
var (
uniqueTargets = map[uint64]struct{}{}
interval = time.Duration(sp.config.ScrapeInterval)
timeout = time.Duration(sp.config.ScrapeTimeout)
)
for _, t := range targets {
hash := t.hash()
uniqueTargets[hash] = struct{}{}
if _, ok := sp.targets[hash]; !ok {
s := &targetScraper{
Target: t,
client: sp.client,
timeout: timeout,
}
l := sp.newLoop(sp.ctx, s, sp.appender, t.Labels(), sp.config)
sp.targets[hash] = t
sp.loops[hash] = l
go l.run(interval, timeout, nil)
}
}
var wg sync.WaitGroup
// Stop and remove old targets and scraper loops.
for hash := range sp.targets {
if _, ok := uniqueTargets[hash]; !ok {
wg.Add(1)
go func(l loop) {
l.stop()
wg.Done()
}(sp.loops[hash])
delete(sp.loops, hash)
delete(sp.targets, hash)
}
}
// Wait for all potentially stopped scrapers to terminate.
// This covers the case of flapping targets. If the server is under high load, a new scraper
// may be active and tries to insert. The old scraper that didn't terminate yet could still
// be inserting a previous sample set.
wg.Wait()
}
// A scraper retrieves samples and accepts a status report at the end.
type scraper interface {
scrape(ctx context.Context, ts time.Time) (model.Samples, error)
report(start time.Time, dur time.Duration, err error)
offset(interval time.Duration) time.Duration
}
// targetScraper implements the scraper interface for a target.
type targetScraper struct {
*Target
client *http.Client
timeout time.Duration
}
const acceptHeader = `application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.7,text/plain;version=0.0.4;q=0.3,*/*;q=0.1`
var userAgentHeader = fmt.Sprintf("Prometheus/%s", version.Version)
func (s *targetScraper) scrape(ctx context.Context, ts time.Time) (model.Samples, error) {
req, err := http.NewRequest("GET", s.URL().String(), nil)
if err != nil {
return nil, err
}
req.Header.Add("Accept", acceptHeader)
req.Header.Set("User-Agent", userAgentHeader)
req.Header.Set("X-Prometheus-Scrape-Timeout-Seconds", fmt.Sprintf("%f", s.timeout.Seconds()))
resp, err := ctxhttp.Do(ctx, s.client, req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("server returned HTTP status %s", resp.Status)
}
var (
allSamples = make(model.Samples, 0, 200)
decSamples = make(model.Vector, 0, 50)
)
sdec := expfmt.SampleDecoder{
Dec: expfmt.NewDecoder(resp.Body, expfmt.ResponseFormat(resp.Header)),
Opts: &expfmt.DecodeOptions{
Timestamp: model.TimeFromUnixNano(ts.UnixNano()),
},
}
for {
if err = sdec.Decode(&decSamples); err != nil {
break
}
allSamples = append(allSamples, decSamples...)
decSamples = decSamples[:0]
}
if err == io.EOF {
// Set err to nil since it is used in the scrape health recording.
err = nil
}
return allSamples, err
}
// A loop can run and be stopped again. It must not be reused after it was stopped.
type loop interface {
run(interval, timeout time.Duration, errc chan<- error)
stop()
}
type scrapeLoop struct {
scraper scraper
// Where samples are ultimately sent.
appender storage.SampleAppender
targetLabels model.LabelSet
metricRelabelConfigs []*config.RelabelConfig
honorLabels bool
sampleLimit uint
done chan struct{}
ctx context.Context
cancel func()
}
func newScrapeLoop(
ctx context.Context,
sc scraper,
appender storage.SampleAppender,
targetLabels model.LabelSet,
config *config.ScrapeConfig,
) loop {
sl := &scrapeLoop{
scraper: sc,
appender: appender,
targetLabels: targetLabels,
metricRelabelConfigs: config.MetricRelabelConfigs,
honorLabels: config.HonorLabels,
sampleLimit: config.SampleLimit,
done: make(chan struct{}),
}
sl.ctx, sl.cancel = context.WithCancel(ctx)
return sl
}
func (sl *scrapeLoop) run(interval, timeout time.Duration, errc chan<- error) {
defer close(sl.done)
select {
case <-time.After(sl.scraper.offset(interval)):
// Continue after a scraping offset.
case <-sl.ctx.Done():
return
}
var last time.Time
ticker := time.NewTicker(interval)
defer ticker.Stop()
for {
select {
case <-sl.ctx.Done():
return
default:
}
if !sl.appender.NeedsThrottling() {
var (
start = time.Now()
scrapeCtx, _ = context.WithTimeout(sl.ctx, timeout)
numPostRelabelSamples = 0
)
// Only record after the first scrape.
if !last.IsZero() {
targetIntervalLength.WithLabelValues(interval.String()).Observe(
time.Since(last).Seconds(),
)
}
samples, err := sl.scraper.scrape(scrapeCtx, start)
if err == nil {
numPostRelabelSamples, err = sl.append(samples)
}
if err != nil && errc != nil {
errc <- err
}
sl.report(start, time.Since(start), len(samples), numPostRelabelSamples, err)
last = start
} else {
targetSkippedScrapes.Inc()
}
select {
case <-sl.ctx.Done():
return
case <-ticker.C:<|fim▁hole|> }
}
}
func (sl *scrapeLoop) stop() {
sl.cancel()
<-sl.done
}
// wrapAppender wraps a SampleAppender for relabeling. It returns the wrappend
// appender and an innermost countingAppender that counts the samples actually
// appended in the end.
func (sl *scrapeLoop) wrapAppender(app storage.SampleAppender) (storage.SampleAppender, *countingAppender) {
// Innermost appender is a countingAppender to count how many samples
// are left in the end.
countingAppender := &countingAppender{
SampleAppender: app,
}
app = countingAppender
// The relabelAppender has to be inside the label-modifying appenders so
// the relabeling rules are applied to the correct label set.
if len(sl.metricRelabelConfigs) > 0 {
app = relabelAppender{
SampleAppender: app,
relabelings: sl.metricRelabelConfigs,
}
}
if sl.honorLabels {
app = honorLabelsAppender{
SampleAppender: app,
labels: sl.targetLabels,
}
} else {
app = ruleLabelsAppender{
SampleAppender: app,
labels: sl.targetLabels,
}
}
return app, countingAppender
}
func (sl *scrapeLoop) append(samples model.Samples) (int, error) {
var (
numOutOfOrder = 0
numDuplicates = 0
app = sl.appender
countingApp *countingAppender
)
if sl.sampleLimit > 0 {
// We need to check for the sample limit, so append everything
// to a wrapped bufferAppender first. Then point samples to the
// result.
bufApp := &bufferAppender{buffer: make(model.Samples, 0, len(samples))}
var wrappedBufApp storage.SampleAppender
wrappedBufApp, countingApp = sl.wrapAppender(bufApp)
for _, s := range samples {
// Ignore errors as bufferedAppender always succeeds.
wrappedBufApp.Append(s)
}
samples = bufApp.buffer
if uint(countingApp.count) > sl.sampleLimit {
targetScrapeSampleLimit.Inc()
return countingApp.count, fmt.Errorf(
"%d samples exceeded limit of %d", countingApp.count, sl.sampleLimit,
)
}
} else {
// No need to check for sample limit. Wrap sl.appender directly.
app, countingApp = sl.wrapAppender(sl.appender)
}
for _, s := range samples {
if err := app.Append(s); err != nil {
switch err {
case local.ErrOutOfOrderSample:
numOutOfOrder++
log.With("sample", s).With("error", err).Debug("Sample discarded")
case local.ErrDuplicateSampleForTimestamp:
numDuplicates++
log.With("sample", s).With("error", err).Debug("Sample discarded")
default:
log.With("sample", s).With("error", err).Warn("Sample discarded")
}
}
}
if numOutOfOrder > 0 {
log.With("numDropped", numOutOfOrder).Warn("Error on ingesting out-of-order samples")
}
if numDuplicates > 0 {
log.With("numDropped", numDuplicates).Warn("Error on ingesting samples with different value but same timestamp")
}
return countingApp.count, nil
}
func (sl *scrapeLoop) report(start time.Time, duration time.Duration, scrapedSamples, postRelabelSamples int, err error) {
sl.scraper.report(start, duration, err)
ts := model.TimeFromUnixNano(start.UnixNano())
var health model.SampleValue
if err == nil {
health = 1
}
healthSample := &model.Sample{
Metric: model.Metric{
model.MetricNameLabel: scrapeHealthMetricName,
},
Timestamp: ts,
Value: health,
}
durationSample := &model.Sample{
Metric: model.Metric{
model.MetricNameLabel: scrapeDurationMetricName,
},
Timestamp: ts,
Value: model.SampleValue(duration.Seconds()),
}
countSample := &model.Sample{
Metric: model.Metric{
model.MetricNameLabel: scrapeSamplesMetricName,
},
Timestamp: ts,
Value: model.SampleValue(scrapedSamples),
}
postRelabelSample := &model.Sample{
Metric: model.Metric{
model.MetricNameLabel: samplesPostRelabelMetricName,
},
Timestamp: ts,
Value: model.SampleValue(postRelabelSamples),
}
reportAppender := ruleLabelsAppender{
SampleAppender: sl.appender,
labels: sl.targetLabels,
}
if err := reportAppender.Append(healthSample); err != nil {
log.With("sample", healthSample).With("error", err).Warn("Scrape health sample discarded")
}
if err := reportAppender.Append(durationSample); err != nil {
log.With("sample", durationSample).With("error", err).Warn("Scrape duration sample discarded")
}
if err := reportAppender.Append(countSample); err != nil {
log.With("sample", durationSample).With("error", err).Warn("Scrape sample count sample discarded")
}
if err := reportAppender.Append(postRelabelSample); err != nil {
log.With("sample", durationSample).With("error", err).Warn("Scrape sample count post-relabeling sample discarded")
}
}<|fim▁end|>
| |
<|file_name|>test_udp_socket.rs<|end_file_name|><|fim▁begin|>use mio::*;
use mio::udp::*;
use bytes::{Buf, RingBuf, SliceBuf, MutBuf};
use super::localhost;
use std::str;
const LISTENER: Token = Token(0);
const SENDER: Token = Token(1);
pub struct UdpHandler {
tx: UdpSocket,
rx: UdpSocket,
msg: &'static str,
buf: SliceBuf<'static>,
rx_buf: RingBuf
}
impl UdpHandler {
fn new(tx: UdpSocket, rx: UdpSocket, msg : &'static str) -> UdpHandler {
UdpHandler {
tx: tx,
rx: rx,
msg: msg,
buf: SliceBuf::wrap(msg.as_bytes()),
rx_buf: RingBuf::new(1024)
}
}
}<|fim▁hole|> type Timeout = usize;
type Message = ();
fn ready(&mut self, event_loop: &mut EventLoop<UdpHandler>, token: Token, events: EventSet) {
if events.is_readable() {
match token {
LISTENER => {
debug!("We are receiving a datagram now...");
let (cnt, _) = unsafe {
self.rx.recv_from(self.rx_buf.mut_bytes()).unwrap()
.unwrap()
};
MutBuf::advance(&mut self.rx_buf, cnt);
assert!(str::from_utf8(self.rx_buf.bytes()).unwrap() == self.msg);
event_loop.shutdown();
},
_ => ()
}
}
if events.is_writable() {
match token {
SENDER => {
let addr = self.rx.local_addr().unwrap();
let cnt = self.tx.send_to(self.buf.bytes(), &addr).unwrap()
.unwrap();
self.buf.advance(cnt);
},
_ => {}
}
}
}
}
fn assert_send<T: Send>() {
}
#[test]
pub fn test_udp_socket() {
debug!("Starting TEST_UDP_SOCKETS");
let mut event_loop = EventLoop::new().unwrap();
let addr = localhost();
let any = str::FromStr::from_str("0.0.0.0:0").unwrap();
let tx = UdpSocket::bound(&any).unwrap();
let rx = UdpSocket::bound(&addr).unwrap();
assert_send::<UdpSocket>();
// ensure that the sockets are non-blocking
let mut buf = [0; 128];
assert!(rx.recv_from(&mut buf).unwrap().is_none());
info!("Registering SENDER");
event_loop.register(&tx, SENDER, EventSet::writable(), PollOpt::edge()).unwrap();
info!("Registering LISTENER");
event_loop.register(&rx, LISTENER, EventSet::readable(), PollOpt::edge()).unwrap();
info!("Starting event loop to test with...");
event_loop.run(&mut UdpHandler::new(tx, rx, "hello world")).unwrap();
}<|fim▁end|>
|
impl Handler for UdpHandler {
|
<|file_name|>build.py<|end_file_name|><|fim▁begin|>from abc import ABC, abstractmethod
from .utils import FileUtils, LoggingUtils
import os
import subprocess
class Build(ABC):
def __init__(self, configuration):
super().__init__()
self.configuration = configuration
@abstractmethod
def before_build(self):
raise NotImplementedError()
@abstractmethod
def build(self):
raise NotImplementedError()
@abstractmethod
def after_build(self):
raise NotImplementedError()
def build_application(self):
assert self.configuration.source_folder is not None
assert self.configuration.build_command is not None
FileUtils.chdir(self.configuration.source_folder)
if isinstance(self.configuration.build_command, list):
_cmdline = self.configuration.build_command
else:
_cmdline = self.configuration.build_command.split(' ')
print(_cmdline)
subprocess.call(_cmdline)
def _setup_tokens_before_build(self):
assert self.configuration.tokens_before_build is not None
self._tokens(self.configuration.tokens_before_build)
def _setup_tokens_after_build(self):
assert self.configuration.tokens_after_build is not None
self._tokens(self.configuration.tokens_after_build)
def _create_build_version_folder(self):
assert self.configuration.build_parent_path is not None
FileUtils.make_folders(self.configuration.build_parent_path)
def _tokens(self, tokens, replace_tokens=True):
for _filename in tokens:
_source_pathname = f'{self.configuration.source_folder}{os.sep}{_filename}'<|fim▁hole|> if replace_tokens and tokens[_filename]:
for _token in tokens[_filename]:
self._replace_token_filename_content(_source_pathname, _token, tokens[_filename][_token])
def _replace_token_filename_content(self, source_filename, token, text_to_insert):
_start_token = f'// BUILD_START_TOKEN: {token}'
_end_token = f'// BUILD_END_TOKEN: {token}'
FileUtils.replace_text_between_tags(
source_filename,
_start_token,
_end_token,
text_to_insert)
def _revert_git_file(self, git_filename):
assert git_filename is not None
FileUtils.chdir(os.path.dirname(git_filename))
LoggingUtils.log(f'(Git Reset) {git_filename}')
_cmd_line = 'git checkout --quiet ' + os.path.basename(git_filename)
subprocess.call(_cmd_line.split(' '))
""" Public Members """
configuration = None<|fim▁end|>
|
self._revert_git_file(_source_pathname)
|
<|file_name|>camera.cpp<|end_file_name|><|fim▁begin|>#include "camera.h"
#include "../window.h"
D3DXMATRIX Camera::GetViewMatrix()
{
if (recalculateViewMatrix)
updateViewMatrix();
return viewMatrix;
}
D3DXMATRIX Camera::GetProjectionMatrix()
{
if (recalculateProjectionMatrix)
updateProjectionMatrix();
return projectionMatrix;
}
D3DXVECTOR3 Camera::ScreenPointToWorldDirection(D3DXVECTOR2& point)
{
if (recalculateProjectionMatrix)
updateProjectionMatrix();
if (recalculateViewMatrix)
updateViewMatrix();
float px = point.x, py = point.y;
D3DXMATRIX proj = GetProjectionMatrix();
px = 2.0f * px / Window::GetWidth() - 1.0f;
py = -2.0f * py / Window::GetHeight() + 1.0f;
<|fim▁hole|> py /= proj._22;
D3DXMatrixInverse(&proj, null, &GetViewMatrix());
D3DXVECTOR3 dir;
dir.x = (px * proj._11) + (py * proj._21) + proj._31;
dir.y = (px * proj._12) + (py * proj._22) + proj._32;
dir.z = (px * proj._13) + (py * proj._23) + proj._33;
D3DXVec3Normalize(&dir, &dir);
return dir;
}<|fim▁end|>
|
px /= proj._11;//proj matrix actually
|
<|file_name|>005_tle_error.py<|end_file_name|><|fim▁begin|>class Solution(object):
def longestPalindrome(self, s):
max_len = 0
max_str = ''
if len(s) <= 2:
return s
for i, ch in enumerate(s):
delta = 1
count = 0
# center is ch
while (i - delta) >= 0 and (i + delta) < len(s):
if s[i-delta] != s[i+delta]:
break
count += 1
delta += 1
if count * 2 + 1 > max_len:
max_len = count * 2 + 1
max_str = s[i-count:i+1+count]
# center is ch right<|fim▁hole|> delta = 0.5
count = 0
j = i + 0.5
while (j - delta) >= 0 and (j + delta) < len(s):
if s[int(j - delta)] != s[int(j + delta)]:
break
count += 1
delta += 1
if count * 2 > max_len:
max_len = count * 2
max_str = s[i-count+1:i+count+1]
return max_str
def test(self):
assert self.longestPalindrome('a') == 'a'
assert self.longestPalindrome('abcba') == 'abcba'
assert self.longestPalindrome('eabcbae') == 'eabcbae'
assert self.longestPalindrome('abba') == 'abba'
assert self.longestPalindrome('abbc') == 'bb'
assert self.longestPalindrome('dbabba') == 'abba'
assert self.longestPalindrome('decababace') == 'ecababace'
assert self.longestPalindrome('decababaceehgagbgnag') == 'ecababace'
if __name__ == '__main__':
s = Solution()
s.test()<|fim▁end|>
| |
<|file_name|>thermal_monitor.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# vim: ai:ts=4:sw=4:sts=4:et:fileencoding=utf-8
#
# Thermal monitor
#
# Copyright 2013 Michal Belica <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.<|fim▁hole|>
import serial
import sys
import signal
import select
import re
import time
import subprocess
from optparse import OptionParser
class ThermalMonitor(object):
def zabbix_sender(self):
proc = subprocess.Popen(['zabbix_sender', '-z', self.options.zabbix, '-p',
self.options.port, '-s', self.options.host, '-i', '-'], stdin=subprocess.PIPE)
for addr,temp in self.data.items():
proc.communicate('- %s[%s] %g\n' % (self.options.key, addr, temp))
proc.stdin.close()
proc.wait()
def parse_options(self):
parser = OptionParser()
parser.add_option("-d", "--device", dest="device",
help="read from serial port DEVICE (required)", metavar="DEVICE")
parser.add_option("-s", "--speed", dest="speed", type="int", default=9600,
help="serial port baud rate (default: 9600)", metavar="BAUD")
parser.add_option("-i", "--interval", dest="interval", type="int", default=10,
help="sampling interval (default: 10)", metavar="SECONDS")
parser.add_option("-z", "--zabbix", dest="zabbix",
help="Zabbix server (required)", metavar="ADDR")
parser.add_option("-p", "--port", dest="port", default="10051",
help="listening port of Zabbix server (default: 10051)", metavar="PORT")
parser.add_option("-n", "--host", dest="host",
help="name of host in Zabbix (required)", metavar="NAME")
parser.add_option("-k", "--key", dest="key", default="thermal_monitor",
help="item key base name; device address will be added as an argument, "
+"e.g. thermal_monitor[addr] (default: thermal_monitor)", metavar="key")
(self.options, self.args) = parser.parse_args()
# check for required options
for opt in ['device', 'zabbix', 'host']:
if opt not in self.options.__dict__ or self.options.__dict__[opt] is None:
parser.error("parameter --%s is required" % opt)
def sighandler_terminate(self, signum, frame):
self.running = False
def register_signals(self, ignore=[],
terminate=[signal.SIGINT, signal.SIGTERM, signal.SIGHUP]):
for sig in ignore:
signal.signal(sig, signal.SIG_IGN)
for sig in terminate:
signal.signal(sig, self.sighandler_terminate)
def open_serial(self):
self.ser = serial.Serial(self.options.device, self.options.speed)
self.ser.readline() # ignore first (incomplete) line
def __init__(self):
self.running = False
self.data = dict()
self.register_signals()
self.parse_options()
self.open_serial()
self.cre = re.compile(r"R=(?P<addr>\w+)\s+T=(?P<temp>[.0-9]+)\r?$")
def start(self):
self.running = True
next = time.time()
sent = False
while self.running:
try:
line = self.ser.readline()
except select.error as e:
if e[0] == 4: # interrupted system call
continue
else:
raise
if time.time() > next:
next += self.options.interval
# clears the list to send all addresses again
for k,v in self.data.items():
self.data[k] = None
sent = False
elif sent:
# data already sent in this cycle
continue
m = self.cre.search(line)
if m:
# line matched pattern
addr = m.group('addr')
temp = float(m.group('temp'))
if addr not in self.data or self.data[addr] is None:
# address not yet collected in this cycle
self.data[addr] = temp
else:
# repeating address reached - send out data
print "sending", addr, temp
self.zabbix_sender()
sent = True
else:
print "invalid line received"
self.cleanup()
def cleanup(self):
self.ser.close()
if __name__ == "__main__":
thermalmonitor = ThermalMonitor()
thermalmonitor.start()<|fim▁end|>
|
#
|
<|file_name|>jquery.jvectormap.init.js<|end_file_name|><|fim▁begin|>/**
* Theme: Simple Admin Template
* Author: Coderthemes
* VectorMap
*/
! function($) {
"use strict";
var VectorMap = function() {
};
VectorMap.prototype.init = function() {
//various examples
$('#world-map-markers').vectorMap({
map : 'world_mill_en',
scaleColors : ['#4bd396', '#4bd396'],
normalizeFunction : 'polynomial',
hoverOpacity : 0.7,
hoverColor : false,
regionStyle : {
initial : {
fill : '#ddd'
}
},
markerStyle: {
initial: {
r: 9,
'fill': '#4bd396',
'fill-opacity': 0.9,
'stroke': '#fff',
'stroke-width' : 7,
'stroke-opacity': 0.4
},
hover: {
'stroke': '#fff',
'fill-opacity': 1,
'stroke-width': 1.5
}
},
backgroundColor : 'transparent',
markers : [{
latLng : [41.90, 12.45],
name : 'Vatican City'
}, {
latLng : [43.73, 7.41],
name : 'Monaco'
}, {
latLng : [-0.52, 166.93],
name : 'Nauru'
}, {
latLng : [-8.51, 179.21],
name : 'Tuvalu'
}, {
latLng : [43.93, 12.46],
name : 'San Marino'
}, {
latLng : [47.14, 9.52],
name : 'Liechtenstein'
}, {
latLng : [7.11, 171.06],
name : 'Marshall Islands'
}, {
latLng : [17.3, -62.73],
name : 'Saint Kitts and Nevis'
}, {
latLng : [3.2, 73.22],
name : 'Maldives'
}, {
latLng : [35.88, 14.5],
name : 'Malta'
}, {
latLng : [12.05, -61.75],
name : 'Grenada'
}, {
latLng : [13.16, -61.23],
name : 'Saint Vincent and the Grenadines'
}, {
latLng : [13.16, -59.55],
name : 'Barbados'
}, {
latLng : [17.11, -61.85],<|fim▁hole|> latLng : [-4.61, 55.45],
name : 'Seychelles'
}, {
latLng : [7.35, 134.46],
name : 'Palau'
}, {
latLng : [42.5, 1.51],
name : 'Andorra'
}, {
latLng : [14.01, -60.98],
name : 'Saint Lucia'
}, {
latLng : [6.91, 158.18],
name : 'Federated States of Micronesia'
}, {
latLng : [1.3, 103.8],
name : 'Singapore'
}, {
latLng : [1.46, 173.03],
name : 'Kiribati'
}, {
latLng : [-21.13, -175.2],
name : 'Tonga'
}, {
latLng : [15.3, -61.38],
name : 'Dominica'
}, {
latLng : [-20.2, 57.5],
name : 'Mauritius'
}, {
latLng : [26.02, 50.55],
name : 'Bahrain'
}, {
latLng : [0.33, 6.73],
name : 'São Tomé and Príncipe'
}]
});
$('#usa').vectorMap({
map : 'us_aea_en',
backgroundColor : 'transparent',
regionStyle : {
initial : {
fill : '#ddd'
}
}
});
$('#india').vectorMap({
map : 'in_mill',
backgroundColor : 'transparent',
regionStyle : {
initial : {
fill : '#ddd'
}
}
});
$('#uk').vectorMap({
map : 'uk_mill_en',
backgroundColor : 'transparent',
regionStyle : {
initial : {
fill : '#ddd'
}
}
});
$('#chicago').vectorMap({
map : 'us-il-chicago_mill_en',
backgroundColor : 'transparent',
regionStyle : {
initial : {
fill : '#ddd'
}
}
});
$('#australia').vectorMap({
map : 'au_mill',
backgroundColor : 'transparent',
regionStyle : {
initial : {
fill : '#ddd'
}
}
});
$('#canada').vectorMap({
map : 'ca_lcc',
backgroundColor : 'transparent',
regionStyle : {
initial : {
fill : '#ddd'
}
}
});
$('#germany').vectorMap({
map : 'de_mill',
backgroundColor : 'transparent',
regionStyle : {
initial : {
fill : '#ddd'
}
}
});
$('#asia').vectorMap({
map : 'asia_mill',
backgroundColor : 'transparent',
regionStyle : {
initial : {
fill : '#ddd'
}
}
});
},
//init
$.VectorMap = new VectorMap, $.VectorMap.Constructor =
VectorMap
}(window.jQuery),
//initializing
function($) {
"use strict";
$.VectorMap.init()
}(window.jQuery);<|fim▁end|>
|
name : 'Antigua and Barbuda'
}, {
|
<|file_name|>uu_cases_regular.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from tools.factories import generator_factory
import ctypes
basic_cases = [
[b'%U\n', ctypes.c_long(0)],
[b'% U\n', ctypes.c_long(0)],
[b'%+U\n', ctypes.c_long(0)],
[b'%-U\n', ctypes.c_long(0)],
[b'%0U\n', ctypes.c_long(0)],
[b'%#U\n', ctypes.c_long(0)],
[b'%10U\n', ctypes.c_long(0)],
[b'%.6U\n', ctypes.c_long(0)],
[b'%hhU\n', ctypes.c_long(0)],
[b'%llU\n', ctypes.c_long(0)],
[b'%hU\n', ctypes.c_long(0)],
[b'%lU\n', ctypes.c_long(0)],
[b'%jU\n', ctypes.c_long(0)],
[b'%zU\n', ctypes.c_long(0)],
]
mixed_cases = [
[b'%-02U\n', ctypes.c_short(0)],
[b'% 0+-#10.5llU\n', ctypes.c_long(42)],
]
test_sets = [
{
'name': 'U tests - basics.',
'cases': basic_cases
},<|fim▁hole|> }
]
cases_generator = generator_factory(test_sets)<|fim▁end|>
|
{
'name': 'U tests - basics.',
'cases': mixed_cases
|
<|file_name|>QuantileTransform.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Transform;
import Objects.TagList;
import java.util.ArrayList;
import java.util.Collections;
/**
*
* @author SHKim12
*/
public class QuantileTransform implements Transform {
public QuantileTransform( TagList l ) {
if( l.size() == 0 ) m_QuantileVector = null;
ArrayList<Float> totalList = new ArrayList<>();
totalList.addAll(l);
Collections.sort(totalList);
int qvSize = QUANTILE_RESOLUTION<totalList.size()?QUANTILE_RESOLUTION:totalList.size();
m_QuantileVector = new ArrayList<>();
for( int i = 0; i < qvSize; ++i ) {
m_QuantileVector.add( totalList.get( i * totalList.size() / qvSize ) );
}
}
@Override
public float transform( float v ) {
int s = 0;
int e = m_QuantileVector.size();
int m = s;
while( s < e - 1) {
m = (s+e)/2;
Float mv = m_QuantileVector.get(m);
if( mv < v ) s = m;
else if( v < mv ) e = m;
else break;
}
<|fim▁hole|> }
public final static int QUANTILE_RESOLUTION = 1000;
ArrayList<Float> m_QuantileVector;
}<|fim▁end|>
|
return m / (float)(m_QuantileVector.size() - 1);
|
<|file_name|>SvgViewBoxFilter.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* Requirements
* @ignore
*/
const Filter = require('./Filter.js').Filter;
const PathesConfiguration = require('../../model/configuration/PathesConfiguration.js').PathesConfiguration;
const assertParameter = require('../../utils/assert.js').assertParameter;
const pathes = require('../../utils/pathes.js');
const fs = require('fs');
const templateString = require('es6-template-strings');
/**
* @memberOf nunjucks.filter
*/
class SvgViewBoxFilter extends Filter
{
/**
* @inheritDocs
*/
constructor(pathesConfiguration, basePath)
{
super();
this._name = 'svgViewBox';
// Check params
assertParameter(this, 'pathesConfiguration', pathesConfiguration, true, PathesConfiguration);
// Assign options
this._basePath = basePath || '/';
this._pathesConfiguration = pathesConfiguration;
}
/**
* @inheritDocs
*/
static get className()
{
return 'nunjucks.filter/SvgViewBoxFilter';
}
/**
* @inheritDocs
*/
static get injections()
{
return { 'parameters': [PathesConfiguration, 'nunjucks.filter/SvgViewBoxFilter.basePath'] };
}
/**
* @type {String}
*/
get basePath()
{
return this._basePath;
}
/**
* @returns {String}
*/
getBasePath(globals)
{
let result = this._basePath;
if (this.environment &&
this.environment.buildConfiguration)
{
result = this.environment.buildConfiguration.get('filters.svgPath', this._basePath);
}
return templateString(result, globals.location || {});
}
/**
* @type {model.configuration.PathesConfiguration}
*/
get pathesConfiguration()
{
return this._pathesConfiguration;
}
/**
* @inheritDocs
*/
filter(value)
{
const scope = this;
return function(value)
{
let result = '0 0 0 0';
const globals = scope.getGlobals(this);
const filename = pathes.concat(scope.pathesConfiguration.sites, scope.getBasePath(globals), value + '.svg');
if (fs.existsSync(filename))
{
const icon = fs.readFileSync(filename, { encoding: 'utf8' });
const viewbox = icon.match(/viewBox="([^"]*)"/i);
if (viewbox && viewbox[1])
{
result = viewbox[1];
}
}
else
{
scope.logger.warn('Could not locate svg <' + filename + '>');
}<|fim▁hole|> };
}
}
/**
* Exports
* @ignore
*/
module.exports.SvgViewBoxFilter = SvgViewBoxFilter;<|fim▁end|>
|
return scope.applyCallbacks(result, arguments);
|
<|file_name|>reshape.d.ts<|end_file_name|><|fim▁begin|>import { NDArrayMath } from '../../math/math';<|fim▁hole|>import { SummedTensorArrayMap, TensorArrayMap } from '../tensor_array_map';
import { Operation } from './op';
export declare class Reshape<T1 extends NDArray, T2 extends NDArray> extends Operation {
private xTensor;
private yTensor;
constructor(xTensor: Tensor, yTensor: Tensor);
feedForward(math: NDArrayMath, inferenceArrays: TensorArrayMap): void;
backProp(math: NDArrayMath, inferenceArrays: TensorArrayMap, gradientArrays: SummedTensorArrayMap): void;
}<|fim▁end|>
|
import { NDArray } from '../../math/ndarray';
import { Tensor } from '../graph';
|
<|file_name|>resize.rs<|end_file_name|><|fim▁begin|>use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use crossbeam_channel::Sender;
use signal_hook::iterator::Signals;
/// This starts a new thread to listen for SIGWINCH signals
#[allow(unused)]
pub fn start_resize_thread(
resize_sender: Sender<()>,
resize_running: Arc<AtomicBool>,
) {
let signals = Signals::new(&[libc::SIGWINCH]).unwrap();
thread::spawn(move || {
// This thread will listen to SIGWINCH events and report them.
while resize_running.load(Ordering::Relaxed) {
// We know it will only contain SIGWINCH signals, so no need to check.<|fim▁hole|> if signals.wait().count() > 0 {
resize_sender.send(()).unwrap();
}
}
});
}<|fim▁end|>
| |
<|file_name|>allcall.py<|end_file_name|><|fim▁begin|>"""
Decode all-call reply messages, with downlink format 11
"""
from pyModeS import common
def _checkdf(func):
"""Ensure downlink format is 11."""
def wrapper(msg):
df = common.df(msg)
if df != 11:
raise RuntimeError(
"Incorrect downlink format, expect 11, got {}".format(df)
)
return func(msg)
return wrapper
@_checkdf
def icao(msg):
"""Decode transponder code (ICAO address).
Args:
msg (str): 14 hexdigits string
Returns:
string: ICAO address
"""
return common.icao(msg)
@_checkdf
def interrogator(msg):
"""Decode interrogator identifier code.
Args:
msg (str): 14 hexdigits string
Returns:
int: interrogator identifier code
"""
# the CRC remainder contains the CL and IC field. top three bits are CL field and last four bits are IC field.
remainder = common.crc(msg)
if remainder > 79:
IC = "corrupt IC"
elif remainder < 16:
IC="II"+str(remainder)
else:
IC="SI"+str(remainder-16)<|fim▁hole|>def capability(msg):
"""Decode transponder capability.
Args:
msg (str): 14 hexdigits string
Returns:
int, str: transponder capability, description
"""
msgbin = common.hex2bin(msg)
ca = common.bin2int(msgbin[5:8])
if ca == 0:
text = "level 1 transponder"
elif ca == 4:
text = "level 2 transponder, ability to set CA to 7, on ground"
elif ca == 5:
text = "level 2 transponder, ability to set CA to 7, airborne"
elif ca == 6:
text = "evel 2 transponder, ability to set CA to 7, either airborne or ground"
elif ca == 7:
text = "Downlink Request value is 0,or the Flight Status is 2, 3, 4 or 5, either airborne or on the ground"
else:
text = None
return ca, text<|fim▁end|>
|
return IC
@_checkdf
|
<|file_name|>Cert_5_6_01_NetworkDataRegisterBeforeAttachLeader.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import time
import unittest
import node
LEADER = 1
ROUTER = 2
ED1 = 3
SED1 = 4
class Cert_5_6_1_NetworkDataLeaderAsBr(unittest.TestCase):
def setUp(self):
self.nodes = {}
for i in range(1,5):
self.nodes[i] = node.Node(i)
self.nodes[LEADER].set_panid(0xface)
self.nodes[LEADER].set_mode('rsdn')
self.nodes[LEADER].add_whitelist(self.nodes[ROUTER].get_addr64())
self.nodes[LEADER].enable_whitelist()
self.nodes[ROUTER].set_panid(0xface)
self.nodes[ROUTER].set_mode('rsdn')
self.nodes[ROUTER].add_whitelist(self.nodes[LEADER].get_addr64())
self.nodes[ROUTER].add_whitelist(self.nodes[ED1].get_addr64())
self.nodes[ROUTER].add_whitelist(self.nodes[SED1].get_addr64())
self.nodes[ROUTER].enable_whitelist()
self.nodes[ROUTER].set_router_selection_jitter(1)
self.nodes[ED1].set_panid(0xface)
self.nodes[ED1].set_mode('rsn')
self.nodes[ED1].add_whitelist(self.nodes[ROUTER].get_addr64())
self.nodes[ED1].enable_whitelist()
self.nodes[SED1].set_panid(0xface)
self.nodes[SED1].set_mode('s')
self.nodes[SED1].add_whitelist(self.nodes[ROUTER].get_addr64())
self.nodes[SED1].enable_whitelist()
self.nodes[SED1].set_timeout(3)
def tearDown(self):
for node in list(self.nodes.values()):
node.stop()
del self.nodes
def test(self):
self.nodes[LEADER].start()
self.nodes[LEADER].set_state('leader')
self.assertEqual(self.nodes[LEADER].get_state(), 'leader')
self.nodes[LEADER].add_prefix('2001:2:0:1::/64', 'paros')
self.nodes[LEADER].add_prefix('2001:2:0:2::/64', 'paro')
self.nodes[LEADER].register_netdata()
self.nodes[ROUTER].start()
time.sleep(5)
self.assertEqual(self.nodes[ROUTER].get_state(), 'router')
self.nodes[ED1].start()
time.sleep(5)
self.assertEqual(self.nodes[ED1].get_state(), 'child')
self.nodes[SED1].start()
time.sleep(5)
self.assertEqual(self.nodes[SED1].get_state(), 'child')
addrs = self.nodes[ED1].get_addrs()
self.assertTrue(any('2001:2:0:1' in addr[0:10] for addr in addrs))
self.assertTrue(any('2001:2:0:2' in addr[0:10] for addr in addrs))
for addr in addrs:
if addr[0:10] == '2001:2:0:1' or addr[0:10] == '2001:2:0:2':
self.assertTrue(self.nodes[LEADER].ping(addr))
addrs = self.nodes[SED1].get_addrs()
self.assertTrue(any('2001:2:0:1' in addr[0:10] for addr in addrs))
self.assertFalse(any('2001:2:0:2' in addr[0:10] for addr in addrs))
for addr in addrs:
if addr[0:10] == '2001:2:0:1' or addr[0:10] == '2001:2:0:2':
self.assertTrue(self.nodes[LEADER].ping(addr))
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
#!/usr/bin/env python
#
|
<|file_name|>test_drive.py<|end_file_name|><|fim▁begin|>import os, sys
import tempfile
import unittest as unittest0
try:
unittest0.skipUnless
unittest0.skip
except AttributeError:
import unittest2 as unittest
else:
unittest = unittest0<|fim▁hole|>
from winsys import fs
class TestDrive (unittest.TestCase):
#
# The name of the drive should be normalised:
# lowercase-letter;colon;backslash
#
def test_name (self):
names = ["C", "C:", "C:/", "C:\\"]
for name in names:
self.assertEquals (fs.drive (name).name, "c:\\")
self.assertEquals (fs.drive (name.lower ()).name, "c:\\")
def test_DriveType (self):
self.assertEquals (fs.drive ("C:").type, win32file.GetDriveTypeW ("C:"))
def test_DriveRoot (self):
self.assertEquals (fs.drive ("C:").root, fs.dir ("C:\\"))
def test_volume (self):
self.assertEquals (fs.drive ("C:").volume.name, win32file.GetVolumeNameForVolumeMountPoint ("C:\\"))
@unittest.skip ("Skip destructive test")
def test_mount (self):
#
# Difficult to test because it's not possible
# to mount a volume on two drive letters simultaneously.
# Try to find something unimportant, like a CDROM, and
# dismount it before remounting it.
#
pass
@unittest.skip ("Skip destructive test")
def test_dismount (self):
#
# Likewise difficult to test because destructive
#
pass
if __name__ == "__main__":
unittest.main ()
if sys.stdout.isatty (): raw_input ("Press enter...")<|fim▁end|>
|
del unittest0
import win32file
|
<|file_name|>celery.py<|end_file_name|><|fim▁begin|>import os
from celery import Celery
from django.apps import apps, AppConfig
from django.conf import settings
if not settings.configured:
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local') # pragma: no cover
app = Celery('lipame')
class CeleryConfig(AppConfig):
name = 'lipame.taskapp'
verbose_name = 'Celery Config'
def ready(self):
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
installed_apps = [app_config.name for app_config in apps.get_app_configs()]
app.autodiscover_tasks(lambda: installed_apps, force=True)
<|fim▁hole|>def debug_task(self):
print('Request: {0!r}'.format(self.request)) # pragma: no cover<|fim▁end|>
|
@app.task(bind=True)
|
<|file_name|>marked.js<|end_file_name|><|fim▁begin|>import { RocketChat } from 'meteor/rocketchat:lib';
import { Random } from 'meteor/random';
import _ from 'underscore';
import s from 'underscore.string';
import hljs from 'highlight.js';
import _marked from 'marked';
const renderer = new _marked.Renderer();
let msg = null;
renderer.code = function(code, lang, escaped) {
if (this.options.highlight) {
const out = this.options.highlight(code, lang);
if (out != null && out !== code) {
escaped = true;
code = out;
}
}
let text = null;
if (!lang) {
text = `<pre><code class="code-colors hljs">${ (escaped ? code : s.escapeHTML(code, true)) }</code></pre>`;
} else {
text = `<pre><code class="code-colors hljs ${ escape(lang, true) }">${ (escaped ? code : s.escapeHTML(code, true)) }</code></pre>`;
}
if (_.isString(msg)) {
return text;
}
const token = `=!=${ Random.id() }=!=`;
msg.tokens.push({
highlight: true,
token,
text<|fim▁hole|>};
renderer.codespan = function(text) {
text = `<code class="code-colors inline">${ text }</code>`;
if (_.isString(msg)) {
return text;
}
const token = `=!=${ Random.id() }=!=`;
msg.tokens.push({
token,
text
});
return token;
};
renderer.blockquote = function(quote) {
return `<blockquote class="background-transparent-darker-before">${ quote }</blockquote>`;
};
const highlight = function(code, lang) {
if (!lang) {
return code;
}
try {
return hljs.highlight(lang, code).value;
} catch (e) {
// Unknown language
return code;
}
};
let gfm = null;
let tables = null;
let breaks = null;
let pedantic = null;
let smartLists = null;
let smartypants = null;
export const marked = (message) => {
msg = message;
if (!msg.tokens) {
msg.tokens = [];
}
if (gfm == null) { gfm = RocketChat.settings.get('Markdown_Marked_GFM'); }
if (tables == null) { tables = RocketChat.settings.get('Markdown_Marked_Tables'); }
if (breaks == null) { breaks = RocketChat.settings.get('Markdown_Marked_Breaks'); }
if (pedantic == null) { pedantic = RocketChat.settings.get('Markdown_Marked_Pedantic'); }
if (smartLists == null) { smartLists = RocketChat.settings.get('Markdown_Marked_SmartLists'); }
if (smartypants == null) { smartypants = RocketChat.settings.get('Markdown_Marked_Smartypants'); }
msg.html = _marked(s.unescapeHTML(msg.html), {
gfm,
tables,
breaks,
pedantic,
smartLists,
smartypants,
renderer,
sanitize: true,
highlight
});
return msg;
};<|fim▁end|>
|
});
return token;
|
<|file_name|>install_py_dlls.hpp<|end_file_name|><|fim▁begin|>/*
This source file is part of KBEngine
For the latest info, see http://www.kbengine.org/
Copyright (c) 2008-2012 KBEngine.
KBEngine is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
KBEngine is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with KBEngine. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef KBE_INSTALL_PY_DLLS_HPP
#define KBE_INSTALL_PY_DLLS_HPP
namespace KBEngine{ namespace script{
bool install_py_dlls(void);
bool uninstall_py_dlls(void);
}
}
<|fim▁hole|><|fim▁end|>
|
#endif // KBE_INSTALL_PY_DLLS_HPP
|
<|file_name|>connectionSizeScript.js<|end_file_name|><|fim▁begin|>$(function() {
$("#childGrid").jqGrid(
{
url : "TBCcnsizePrm.html?getGridData",datatype : "json",mtype : "GET",colNames : [ 'Conn Id',getLocalMessage('water.connsize.frmDt'), getLocalMessage('water.connsize.toDt'), getLocalMessage('water.connsize.frm'),getLocalMessage('water.connsize.to'), getLocalMessage('edit.msg'), getLocalMessage('master.view')],
colModel : [ {name : "cnsId",width : 10,sortable : false,searchoptions: { "sopt": [ "eq"] }},
{name : "cnsFrmdt",width : 20,sortable : true,searchoptions: { "sopt": ["bw", "eq"] },formatter : dateTemplate},
{name : "cnsTodt",width : 20,sortable : true, searchoptions: { "sopt": ["bw", "eq"] },formatter : dateTemplate},
{name : "cnsFrom",width : 20,sortable : false,searchoptions: { "sopt": [ "eq"] }},
{name : "cnsTo",width : 20,sortable : false,searchoptions: { "sopt": [ "eq"] }},
{name : 'cnsId',index : 'cnsId',width : 20,align : 'center',formatter : returnEditUrl,editoptions : {value : "Yes:No"},formatoptions : {disabled : false},search:false },
{name : 'cnsId',index : 'cnsId',width : 20,align : 'center',formatter : returnViewUrl,editoptions : {value : "Yes:No"},formatoptions : {disabled : false},search:false}
],
pager : "#pagered",
rowNum : 30,
rowList : [ 5, 10, 20, 30 ],
sortname : "dsgid",
sortorder : "desc",
height : 'auto',
viewrecords : true,
gridview : true,
loadonce : true,
jsonReader : {
root : "rows",
page : "page",
total : "total",
records : "records",
repeatitems : false,
},
autoencode : true,
caption : getLocalMessage('water.connsize.gridTtl')
});
jQuery("#grid").jqGrid('navGrid','#pagered',{edit:false,add:false,del:false,search:true,refresh:false});
$("#pagered_left").css("width", "");
});
function returnEditUrl(cellValue, options, rowdata, action) {
return "<a href='#' return false; class='editClass' value='"+rowdata.cnsId+"' ><img src='css/images/edit.png' width='20px' alt='Edit Charge Master' title='Edit Scrutiny Data' /></a>";
}
function returnViewUrl(cellValue, options, rowdata, action) {
return "<a href='#' return false; class='viewConnectionClass' value='"+rowdata.cnsId+"'><img src='css/images/grid/view-icon.png' width='20px' alt='View Master' title='View Master' /></a>";
}
function returnisdeletedUrl(cellValue, options, rowdata, action) {
if (rowdata.isdeleted == '0') {
return "<a href='#' class='fa fa-check-circle fa-2x green ' value='"+rowdata.isdeleted+"' alt='Designation is Active' title='Designation is Active'></a>";
} else {
return "<a href='#' class='fa fa-times-circle fa-2x red ' value='"+rowdata.isdeleted+"' alt='Designation is INActive' title='Designation is InActive'></a>";
}
}
$(function() {
$(document)
.on('click','.addConnectionClass',function() {
var $link = $(this);
var cnsId = $link.closest('tr').find('td:eq(0)').text();
var url = "TBCcnsizePrm.html?formForUpdate";
var requestData = "cnsId=" + cnsId + "&MODE1=" + "EDIT";
var returnData =__doAjaxRequest(url,'post',requestData,false);
$('.content').html(returnData);
prepareDateTag();
});
});
$(function() {
$(document).on('click', '.editClass', function() {
var $link = $(this);
var cnsId = $link.closest('tr').find('td:eq(0)').text();
var url = "TBCcnsizePrm.html?formForUpdate";
var requestData = "cnsId=" + cnsId + "&MODE1=" + "EDIT";
var returnData =__doAjaxRequest(url,'post',requestData,false);
$('.content').html(returnData);
prepareDateTag();
});
});
$(function() {
$(document).on('click', '.viewConnectionClass', function() {
var $link = $(this);
var cnsId = $link.closest('tr').find('td:eq(0)').text();
<|fim▁hole|> var url = "TBCcnsizePrm.html?formForUpdate";
var requestData = "cnsId=" + cnsId + "&MODE1=" + "VIEW";
var returnData =__doAjaxRequest(url,'post',requestData,false);
$('.content').html(returnData);
prepareDateTag();
});
});
/*ADD Form*/
function saveConnectionSizeDetails(obj){
return saveOrUpdateForm(obj, 'Saved Successfully', 'TBCcnsizePrm.html', 'create');
}
function updateConnectionSizeDetails(obj){
return saveOrUpdateForm(obj, 'Saved Successfully', 'TBCcnsizePrm.html', 'update');
}
function showConfirmBox(){
var errMsgDiv = '.msg-dialog-box';
var message='';
var cls = 'Yes';
message +='<p>Record Saved Successfully..</p>';
message +='<p style=\'text-align:center;margin: 5px;\'>'+
'<br/><input type=\'button\' value=\''+cls+'\' id=\'btnNo\' class=\'css_btn \' '+
' onclick="ShowView()"/>'+
'</p>';
$(errMsgDiv).addClass('ok-msg').removeClass('warn-msg');
$(errMsgDiv).html(message);
$(errMsgDiv).show();
$('#btnNo').focus();
showModalBox(errMsgDiv);
}
function ShowView(){
window.location.href='TBCcnsizePrm.html';
}
$(".datepicker").datepicker({
dateFormat: 'dd/mm/yy',
changeMonth: true,
changeYear: true
});
$(".warning-div ul").each(function () {
var lines = $(this).html().split("<br>");
$(this).html('<li>' + lines.join("</li><li><i class='fa fa-exclamation-circle'></i> ") + '</li>');
});
$('html,body').animate({ scrollTop: 0 }, 'slow');<|fim▁end|>
| |
<|file_name|>instr_extractps.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn extractps_1() {
run_test(&Instruction { mnemonic: Mnemonic::EXTRACTPS, operand1: Some(Direct(EBP)), operand2: Some(Direct(XMM6)), operand3: Some(Literal8(70)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 58, 23, 245, 70], OperandSize::Dword)
}
#[test]
fn extractps_2() {
run_test(&Instruction { mnemonic: Mnemonic::EXTRACTPS, operand1: Some(IndirectScaledDisplaced(EAX, Four, 630659303, Some(OperandSize::Dword), None)), operand2: Some(Direct(XMM7)), operand3: Some(Literal8(106)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 58, 23, 60, 133, 231, 24, 151, 37, 106], OperandSize::Dword)
}
#[test]
fn extractps_3() {
run_test(&Instruction { mnemonic: Mnemonic::EXTRACTPS, operand1: Some(Direct(EDI)), operand2: Some(Direct(XMM3)), operand3: Some(Literal8(85)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 58, 23, 223, 85], OperandSize::Qword)
}<|fim▁hole|>#[test]
fn extractps_4() {
run_test(&Instruction { mnemonic: Mnemonic::EXTRACTPS, operand1: Some(Indirect(RDX, Some(OperandSize::Dword), None)), operand2: Some(Direct(XMM1)), operand3: Some(Literal8(62)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 58, 23, 10, 62], OperandSize::Qword)
}<|fim▁end|>
| |
<|file_name|>core.go<|end_file_name|><|fim▁begin|>/*
Package core implements the IpfsNode object and related methods.
Packages underneath core/ provide a (relatively) stable, low-level API
to carry out most IPFS-related tasks. For more details on the other
interfaces and how core/... fits into the bigger IPFS picture, see:
$ godoc github.com/ipfs/go-ipfs
*/
package core
import (
"bytes"
"context"
"errors"
"fmt"
"io"
"io/ioutil"
"net"
"os"
"strings"
"time"
bstore "github.com/ipfs/go-ipfs/blocks/blockstore"
bserv "github.com/ipfs/go-ipfs/blockservice"
exchange "github.com/ipfs/go-ipfs/exchange"
bitswap "github.com/ipfs/go-ipfs/exchange/bitswap"
bsnet "github.com/ipfs/go-ipfs/exchange/bitswap/network"
rp "github.com/ipfs/go-ipfs/exchange/reprovide"
filestore "github.com/ipfs/go-ipfs/filestore"
mount "github.com/ipfs/go-ipfs/fuse/mount"
merkledag "github.com/ipfs/go-ipfs/merkledag"
mfs "github.com/ipfs/go-ipfs/mfs"
namesys "github.com/ipfs/go-ipfs/namesys"
ipnsrp "github.com/ipfs/go-ipfs/namesys/republisher"
p2p "github.com/ipfs/go-ipfs/p2p"
path "github.com/ipfs/go-ipfs/path"
pin "github.com/ipfs/go-ipfs/pin"
repo "github.com/ipfs/go-ipfs/repo"
config "github.com/ipfs/go-ipfs/repo/config"
nilrouting "github.com/ipfs/go-ipfs/routing/none"
offroute "github.com/ipfs/go-ipfs/routing/offline"
ft "github.com/ipfs/go-ipfs/unixfs"
pnet "gx/ipfs/QmP4wThjSLaCanG1SstjuDXZ8yRyWT2APx5APCEa4YPPvB/go-libp2p-pnet"
pstore "gx/ipfs/QmPgDWmTmuzvP7QE5zwo1TmjbJme9pmZHNujB2453jkCTr/go-libp2p-peerstore"
routing "gx/ipfs/QmPjTrrSfE6TzLv6ya6VWhGcCgPrUAdcgrDcQyRDX2VyW1/go-libp2p-routing"
mplex "gx/ipfs/QmQ3UABWTgK78utKeiVXaH9BrjC7Ydn1pRuwqnWHT3p4zh/go-smux-multiplex"
ipnet "gx/ipfs/QmQq9YzmdFdWNTDdArueGyD7L5yyiRQigrRHJnTGkxcEjT/go-libp2p-interface-pnet"
addrutil "gx/ipfs/QmR1fAHJvEyYFdEGn5jVmU4NL5kNSVJ48cduXB2whWbJq2/go-addr-util"
mssmux "gx/ipfs/QmRVYfZ7tWNHPBzWiG6KWGzvT2hcGems8srihsQE29x1U5/go-smux-multistream"
goprocess "gx/ipfs/QmSF8fPo3jgVBAy8fpdjjYqgG87dkJgUprRBHRd2tmfgpP/goprocess"
mamask "gx/ipfs/QmSMZwvs3n4GBikZ7hKzT17c3bk65FmyZo2JqtJ16swqCv/multiaddr-filter"
u "gx/ipfs/QmSU6eubNdhXjFBJBSksTp8kv8YRub8mGAPv8tVJHmL2EU/go-ipfs-util"
logging "gx/ipfs/QmSpJByNKFX1sCsHBEp3R73FL4NF6FnQTEGyNAXHm2GS52/go-log"
b58 "gx/ipfs/QmT8rehPR3F6bmwL6zjUN8XpiDBFFpMP2myPdC6ApsWfJf/go-base58"
dht "gx/ipfs/QmTHyAbD9KzGrseLNzmEoNkVxA8F2h7LQG2iV6uhBqs6kX/go-libp2p-kad-dht"
cid "gx/ipfs/QmTprEaAA2A9bst5XH7exuyi5KzNMK3SEDNN8rBDnKWcUS/go-cid"
ds "gx/ipfs/QmVSase1JP7cq9QkPT46oNwdp9pT6kBkG3oqS14y3QcZjG/go-datastore"
metrics "gx/ipfs/QmVjRAPfRtResCMCE4eBqr4Beoa6A89P1YweG9wUS6RqUL/go-libp2p-metrics"
ma "gx/ipfs/QmXY77cVe7rVRQXZZQRioukUM7aRW3BTcAgJe12MCtb3Ji/go-multiaddr"
peer "gx/ipfs/QmXYjuNuxVzXKJCfWasQk1RqkhVLDM9jtUKhqc2WPQmFSB/go-libp2p-peer"
floodsub "gx/ipfs/QmZdsQf8BiCpAj61nz9NgqVeRUkw9vATvCs7UHFTxoUMDb/floodsub"
p2phost "gx/ipfs/QmZy7c24mmkEHpNJndwgsEE3wcVxHd8yB969yTnAJFVw7f/go-libp2p-host"
ic "gx/ipfs/QmaPbCnUMBohSGo3KnxEa2bHqyJVVeEEcwtqJAYxerieBo/go-libp2p-crypto"
swarm "gx/ipfs/QmaijwHnbD4SabGA8C2fN9gchptLvRe2RxqTU5XkjAGBw5/go-libp2p-swarm"
discovery "gx/ipfs/QmapADMpK4e5kFGBxC2aHreaDqKP9vmMng5f91MA14Ces9/go-libp2p/p2p/discovery"
p2pbhost "gx/ipfs/QmapADMpK4e5kFGBxC2aHreaDqKP9vmMng5f91MA14Ces9/go-libp2p/p2p/host/basic"
rhost "gx/ipfs/QmapADMpK4e5kFGBxC2aHreaDqKP9vmMng5f91MA14Ces9/go-libp2p/p2p/host/routed"
identify "gx/ipfs/QmapADMpK4e5kFGBxC2aHreaDqKP9vmMng5f91MA14Ces9/go-libp2p/p2p/protocol/identify"
ping "gx/ipfs/QmapADMpK4e5kFGBxC2aHreaDqKP9vmMng5f91MA14Ces9/go-libp2p/p2p/protocol/ping"
yamux "gx/ipfs/Qmbn7RYyWzBVXiUp9jZ1dA4VADHy9DtS7iZLwfhEUQvm3U/go-smux-yamux"
smux "gx/ipfs/QmeZBgYBHvxMukGK5ojg28BCNLB9SeXqT7XXg6o7r2GbJy/go-stream-muxer"
)
const IpnsValidatorTag = "ipns"
const kReprovideFrequency = time.Hour * 12
const discoveryConnTimeout = time.Second * 30
var log = logging.Logger("core")
type mode int
const (
// zero value is not a valid mode, must be explicitly set
localMode mode = iota
offlineMode
onlineMode
)
func init() {
identify.ClientVersion = "go-ipfs/" + config.CurrentVersionNumber + "/" + config.CurrentCommit
}
// IpfsNode is IPFS Core module. It represents an IPFS instance.
type IpfsNode struct {

	// Self
	Identity peer.ID // the local node's identity

	Repo repo.Repo

	// Local node
	Pinning        pin.Pinner // the pinning manager
	Mounts         Mounts     // current mount state, if any.
	PrivateKey     ic.PrivKey // the local node's private Key
	PNetFingerpint []byte     // fingerprint of private network

	// Services
	// NOTE: Peerstore restored here — startOnlineServices, LoadPrivateKey
	// and others reference n.Peerstore; the field had been dropped by a
	// mangled edit (stray fill-in-the-middle token in this comment block).
	Peerstore  pstore.Peerstore     // storage for other Peer instances
	Blockstore bstore.GCBlockstore  // the block store (lower level)
	Filestore  *filestore.Filestore // the filestore blockstore
	BaseBlocks bstore.Blockstore    // the raw blockstore, no filestore wrapping
	GCLocker   bstore.GCLocker      // the locker used to protect the blockstore during gc
	Blocks     bserv.BlockService   // the block service, get/add blocks.
	DAG        merkledag.DAGService // the merkle dag service, get/add objects.
	Resolver   *path.Resolver       // the path resolution system
	Reporter   metrics.Reporter     // bandwidth metrics, nil when disabled
	Discovery  discovery.Service    // local (mdns) discovery, if enabled
	FilesRoot  *mfs.Root            // root of the files API (MFS) tree

	// Online
	PeerHost     p2phost.Host        // the network host (server+client)
	Bootstrapper io.Closer           // the periodic bootstrapper
	Routing      routing.IpfsRouting // the routing system. recommend ipfs-dht
	Exchange     exchange.Interface  // the block exchange + strategy (bitswap)
	Namesys      namesys.NameSystem  // the name system, resolves paths to hashes
	Ping         *ping.PingService
	Reprovider   *rp.Reprovider // the value reprovider system
	IpnsRepub    *ipnsrp.Republisher

	Floodsub *floodsub.PubSub
	P2P      *p2p.P2P

	proc goprocess.Process
	ctx  context.Context

	mode         mode // see localMode/offlineMode/onlineMode
	localModeSet bool // whether SetLocal has been called
}
// Mounts defines what the node's mount state is. This should
// perhaps be moved to the daemon or mount. It's here because
// it needs to be accessible across daemon requests.
type Mounts struct {
	Ipfs mount.Mount // mount handle for the ipfs namespace; nil when not mounted
	Ipns mount.Mount // mount handle for the ipns namespace; nil when not mounted
}
// startOnlineServices brings the node online: it loads the private key,
// applies swarm address filters, optionally enables bandwidth metrics and
// private-network protection, constructs the peer host, wires routing and
// listening, launches the reprovider, optional pubsub, p2p and local
// discovery services, and finally bootstraps.
func (n *IpfsNode) startOnlineServices(ctx context.Context, routingOption RoutingOption, hostOption HostOption, do DiscoveryOption, pubsub, mplex bool) error {

	if n.PeerHost != nil { // already online.
		return errors.New("node already online")
	}

	// load private key
	if err := n.LoadPrivateKey(); err != nil {
		return err
	}

	// get undialable addrs from config
	cfg, err := n.Repo.Config()
	if err != nil {
		return err
	}

	var addrfilter []*net.IPNet
	for _, s := range cfg.Swarm.AddrFilters {
		f, err := mamask.NewMask(s)
		if err != nil {
			return fmt.Errorf("incorrectly formatted address filter in config: %s", s)
		}
		addrfilter = append(addrfilter, f)
	}

	if !cfg.Swarm.DisableBandwidthMetrics {
		// Set reporter
		n.Reporter = metrics.NewBandwidthCounter()
	}

	tpt := makeSmuxTransport(mplex)

	swarmkey, err := n.Repo.SwarmKey()
	if err != nil {
		return err
	}

	var protec ipnet.Protector
	if swarmkey != nil {
		protec, err = pnet.NewProtector(bytes.NewReader(swarmkey))
		if err != nil {
			return err
		}
		n.PNetFingerpint = protec.Fingerprint()
		// Periodically warn while on a private network with zero peers —
		// this usually indicates a swarm-key misconfiguration.
		go func() {
			t := time.NewTicker(30 * time.Second)
			<-t.C // swallow one tick
			for {
				select {
				case <-t.C:
					if ph := n.PeerHost; ph != nil {
						if len(ph.Network().Peers()) == 0 {
							log.Warning("We are in private network and have no peers.")
							log.Warning("This might be configuration mistake.")
						}
					}
				case <-n.Process().Closing():
					t.Stop()
					return
				}
			}
		}()
	}

	peerhost, err := hostOption(ctx, n.Identity, n.Peerstore, n.Reporter,
		addrfilter, tpt, protec, &ConstructPeerHostOpts{DisableNatPortMap: cfg.Swarm.DisableNatPortMap})
	if err != nil {
		return err
	}

	if err := n.startOnlineServicesWithHost(ctx, peerhost, routingOption); err != nil {
		return err
	}

	// Ok, now we're ready to listen.
	if err := startListening(ctx, n.PeerHost, cfg); err != nil {
		return err
	}

	n.Reprovider = rp.NewReprovider(n.Routing, n.Blockstore)

	// A configured interval of "0" disables reproviding entirely; an empty
	// string selects the default kReprovideFrequency.
	if cfg.Reprovider.Interval != "0" {
		interval := kReprovideFrequency
		if cfg.Reprovider.Interval != "" {
			dur, err := time.ParseDuration(cfg.Reprovider.Interval)
			if err != nil {
				return err
			}

			interval = dur
		}

		go n.Reprovider.ProvideEvery(ctx, interval)
	}

	if pubsub {
		n.Floodsub = floodsub.NewFloodSub(ctx, peerhost)
	}

	n.P2P = p2p.NewP2P(n.Identity, n.PeerHost, n.Peerstore)

	// setup local discovery
	if do != nil {
		service, err := do(ctx, n.PeerHost)
		if err != nil {
			// Discovery failure is non-fatal: the node stays online without it.
			log.Error("mdns error: ", err)
		} else {
			service.RegisterNotifee(n)
			n.Discovery = service
		}
	}

	return n.Bootstrap(DefaultBootstrapConfig)
}
// makeSmuxTransport builds the stream-muxer transport: yamux is always
// registered, mplex only when the experiment flag is on. Environment
// variables provide debug output (YAMUX_DEBUG) and muxer preference
// ordering (LIBP2P_MUX_PREFS) overrides.
func makeSmuxTransport(mplexExp bool) smux.Transport {
	mstpt := mssmux.NewBlankTransport()

	ymxtpt := &yamux.Transport{
		AcceptBacklog:          8192,
		ConnectionWriteTimeout: time.Second * 10,
		KeepAliveInterval:      time.Second * 30,
		EnableKeepAlive:        true,
		MaxStreamWindowSize:    uint32(1024 * 512),
		LogOutput:              ioutil.Discard,
	}

	if os.Getenv("YAMUX_DEBUG") != "" {
		ymxtpt.LogOutput = os.Stderr
	}

	mstpt.AddTransport("/yamux/1.0.0", ymxtpt)

	if mplexExp {
		mstpt.AddTransport("/mplex/6.7.0", mplex.DefaultTransport)
	}

	// Allow muxer preference order overriding
	if prefs := os.Getenv("LIBP2P_MUX_PREFS"); prefs != "" {
		mstpt.OrderPreference = strings.Fields(prefs)
	}

	return mstpt
}
// setupDiscoveryOption translates the Discovery section of the config into
// a DiscoveryOption. When mDNS is disabled it returns nil (no discovery);
// otherwise the returned option starts an mDNS service, defaulting the
// broadcast interval to 5 seconds when unset.
func setupDiscoveryOption(d config.Discovery) DiscoveryOption {
	if !d.MDNS.Enabled {
		return nil
	}
	return func(ctx context.Context, h p2phost.Host) (discovery.Service, error) {
		interval := d.MDNS.Interval
		if interval == 0 {
			interval = 5
		}
		return discovery.NewMdnsService(ctx, h, time.Duration(interval)*time.Second)
	}
}
// HandlePeerFound is invoked by the discovery service for each newly
// discovered local peer; it dials the peer with a bounded timeout and only
// logs (does not propagate) connection failures.
func (n *IpfsNode) HandlePeerFound(p pstore.PeerInfo) {
	log.Warning("trying peer info: ", p)
	ctx, cancel := context.WithTimeout(n.Context(), discoveryConnTimeout)
	defer cancel()
	if err := n.PeerHost.Connect(ctx, p); err != nil {
		log.Warning("Failed to connect to peer found by discovery: ", err)
	}
}
// startOnlineServicesWithHost is the set of services which need to be
// initialized with the host and _before_ we start listening: ping, routing,
// the routed host wrapper, bitswap exchange, the name system, and IPNS
// republishing.
func (n *IpfsNode) startOnlineServicesWithHost(ctx context.Context, host p2phost.Host, routingOption RoutingOption) error {
	// setup diagnostics service
	n.Ping = ping.NewPingService(host)

	// setup routing service
	r, err := routingOption(ctx, host, n.Repo.Datastore())
	if err != nil {
		return err
	}
	n.Routing = r

	// Wrap standard peer host with routing system to allow unknown peer lookups
	n.PeerHost = rhost.Wrap(host, n.Routing)

	// setup exchange service
	const alwaysSendToPeer = true // use YesManStrategy
	bitswapNetwork := bsnet.NewFromIpfsHost(n.PeerHost, n.Routing)
	n.Exchange = bitswap.New(ctx, n.Identity, bitswapNetwork, n.Blockstore, alwaysSendToPeer)

	// size is the IPNS resolve cache capacity (see getCacheSize).
	size, err := n.getCacheSize()
	if err != nil {
		return err
	}

	// setup name system
	n.Namesys = namesys.NewNameSystem(n.Routing, n.Repo.Datastore(), size)

	// setup ipns republishing
	return n.setupIpnsRepublisher()
}
// getCacheSize returns the configured IPNS resolve cache capacity,
// substituting a default of 128 entries when the setting is absent (zero)
// and rejecting negative values.
func (n *IpfsNode) getCacheSize() (int, error) {
	cfg, err := n.Repo.Config()
	if err != nil {
		return 0, err
	}

	switch size := cfg.Ipns.ResolveCacheSize; {
	case size < 0:
		return 0, fmt.Errorf("cannot specify negative resolve cache size")
	case size == 0:
		return 128, nil
	default:
		return size, nil
	}
}
// setupIpnsRepublisher constructs the IPNS republisher, applies the
// optional IPNS.RepublishPeriod and IPNS.RecordLifetime overrides from
// config, and starts the republish loop on the node's process.
func (n *IpfsNode) setupIpnsRepublisher() error {
	cfg, err := n.Repo.Config()
	if err != nil {
		return err
	}

	n.IpnsRepub = ipnsrp.NewRepublisher(n.Routing, n.Repo.Datastore(), n.PrivateKey, n.Repo.Keystore())

	if cfg.Ipns.RepublishPeriod != "" {
		d, err := time.ParseDuration(cfg.Ipns.RepublishPeriod)
		if err != nil {
			return fmt.Errorf("failure to parse config setting IPNS.RepublishPeriod: %s", err)
		}

		// Outside debug builds, keep the period within a sane window.
		if !u.Debug && (d < time.Minute || d > (time.Hour*24)) {
			return fmt.Errorf("config setting IPNS.RepublishPeriod is not between 1min and 1day: %s", d)
		}

		n.IpnsRepub.Interval = d
	}

	if cfg.Ipns.RecordLifetime != "" {
		// BUG FIX: this previously parsed cfg.Ipns.RepublishPeriod (a
		// copy-paste error), so the RecordLifetime setting was never
		// actually honored.
		d, err := time.ParseDuration(cfg.Ipns.RecordLifetime)
		if err != nil {
			return fmt.Errorf("failure to parse config setting IPNS.RecordLifetime: %s", err)
		}

		n.IpnsRepub.RecordLifetime = d
	}

	n.Process().Go(n.IpnsRepub.Run)

	return nil
}
// Process returns the Process object — the goprocess that owns this node's
// background goroutines and whose Close triggers shutdown.
func (n *IpfsNode) Process() goprocess.Process {
	return n.proc
}
// Close calls Close() on the Process object, shutting the node down.
func (n *IpfsNode) Close() error {
	return n.proc.Close()
}
// Context returns the IpfsNode context, lazily substituting a placeholder
// context when none was set by the constructor.
func (n *IpfsNode) Context() context.Context {
	if n.ctx == nil {
		n.ctx = context.TODO()
	}
	return n.ctx
}
// teardown closes owned children. If any errors occur, this function returns
// the first error.
func (n *IpfsNode) teardown() error {
	log.Debug("core is shutting down...")
	// owned objects are closed in this teardown to ensure that they're closed
	// regardless of which constructor was used to add them to the node.
	var closers []io.Closer

	// NOTE: The order that objects are added(closed) matters, if an object
	// needs to use another during its shutdown/cleanup process, it should be
	// closed before that other object
	if n.FilesRoot != nil {
		closers = append(closers, n.FilesRoot)
	}

	if n.Exchange != nil {
		closers = append(closers, n.Exchange)
	}

	// NOTE(review): actively mounted filesystems are skipped here —
	// presumably their unmount is handled elsewhere; confirm.
	if n.Mounts.Ipfs != nil && !n.Mounts.Ipfs.IsActive() {
		closers = append(closers, mount.Closer(n.Mounts.Ipfs))
	}
	if n.Mounts.Ipns != nil && !n.Mounts.Ipns.IsActive() {
		closers = append(closers, mount.Closer(n.Mounts.Ipns))
	}

	if dht, ok := n.Routing.(*dht.IpfsDHT); ok {
		closers = append(closers, dht.Process())
	}

	if n.Blocks != nil {
		closers = append(closers, n.Blocks)
	}

	if n.Bootstrapper != nil {
		closers = append(closers, n.Bootstrapper)
	}

	if n.PeerHost != nil {
		closers = append(closers, n.PeerHost)
	}

	// Repo closed last, most things need to preserve state here
	closers = append(closers, n.Repo)

	// Close everything, remembering only the first failure.
	var errs []error
	for _, closer := range closers {
		if err := closer.Close(); err != nil {
			errs = append(errs, err)
		}
	}
	if len(errs) > 0 {
		return errs[0]
	}
	return nil
}
// OnlineMode reports whether the node was started with networking enabled.
func (n *IpfsNode) OnlineMode() bool {
	return n.mode == onlineMode
}
// SetLocal records an explicit local-mode decision. Note that localModeSet
// is flipped even when isLocal is false: that marks the choice as having
// been made without changing n.mode (LocalMode panics if it was never made).
func (n *IpfsNode) SetLocal(isLocal bool) {
	if isLocal {
		n.mode = localMode
	}
	n.localModeSet = true
}
// LocalMode reports whether the node is running in local mode. It panics
// when SetLocal was never called, since that is a programmer error.
func (n *IpfsNode) LocalMode() bool {
	if !n.localModeSet {
		// programmer error should not happen
		panic("local mode not set")
	}
	return n.mode == localMode
}
// Bootstrap (re)starts the periodic bootstrap process using cfg, stopping
// any previous bootstrapper first. When no peer source is supplied, peers
// are re-read from the repo config on each round so live config edits take
// effect.
func (n *IpfsNode) Bootstrap(cfg BootstrapConfig) error {
	// TODO what should return value be when in offlineMode?
	if n.Routing == nil {
		return nil
	}

	if n.Bootstrapper != nil {
		n.Bootstrapper.Close() // stop previous bootstrap process.
	}

	// if the caller did not specify a bootstrap peer function, get the
	// freshest bootstrap peers from config. this responds to live changes.
	if cfg.BootstrapPeers == nil {
		cfg.BootstrapPeers = func() []pstore.PeerInfo {
			ps, err := n.loadBootstrapPeers()
			if err != nil {
				log.Warning("failed to parse bootstrap peers from config")
				return nil
			}
			return ps
		}
	}

	var err error
	n.Bootstrapper, err = Bootstrap(n, cfg)
	return err
}
// loadID reads the peer ID from the repo config and installs it on the
// node. It fails when an identity was already loaded or when the config
// has no peer ID (i.e. 'ipfs init' was never run).
func (n *IpfsNode) loadID() error {
	if n.Identity != "" {
		return errors.New("identity already loaded")
	}

	cfg, err := n.Repo.Config()
	if err != nil {
		return err
	}

	// Named pid (not cid) so the imported cid package is not shadowed.
	pid := cfg.Identity.PeerID
	if pid == "" {
		// The previous len(pid) == 0 re-check was unreachable and has been
		// removed.
		return errors.New("identity was not set in config (was 'ipfs init' run?)")
	}

	n.Identity = peer.ID(b58.Decode(pid))
	return nil
}
// GetKey returns the node's own private key when name is "self"; any other
// name is looked up in the repo keystore.
func (n *IpfsNode) GetKey(name string) (ic.PrivKey, error) {
	if name == "self" {
		return n.PrivateKey, nil
	}
	return n.Repo.Keystore().Get(name)
}
// LoadPrivateKey decodes the node's private key from config and registers
// it (and the derived public key) with the peerstore. It requires the
// identity and peerstore to already be in place, and refuses to load twice.
func (n *IpfsNode) LoadPrivateKey() error {
	if n.Identity == "" || n.Peerstore == nil {
		return errors.New("loaded private key out of order.")
	}

	if n.PrivateKey != nil {
		return errors.New("private key already loaded")
	}

	cfg, err := n.Repo.Config()
	if err != nil {
		return err
	}

	sk, err := loadPrivateKey(&cfg.Identity, n.Identity)
	if err != nil {
		return err
	}

	n.PrivateKey = sk
	n.Peerstore.AddPrivKey(n.Identity, n.PrivateKey)
	n.Peerstore.AddPubKey(n.Identity, sk.GetPublic())
	return nil
}
// loadBootstrapPeers reads the bootstrap peer list from the repo config and
// converts the parsed addresses into PeerInfos.
func (n *IpfsNode) loadBootstrapPeers() ([]pstore.PeerInfo, error) {
	cfg, err := n.Repo.Config()
	if err != nil {
		return nil, err
	}

	parsed, err := cfg.BootstrapPeers()
	if err != nil {
		return nil, err
	}
	return toPeerInfos(parsed), nil
}
// loadFilesRoot initializes the files-API (MFS) root: it restores the root
// node from the cid persisted in the datastore when present, or creates a
// fresh empty directory on first run. Subsequent root changes are persisted
// back through the pf callback.
func (n *IpfsNode) loadFilesRoot() error {
	dsk := ds.NewKey("/local/filesroot")
	// pf persists the new root cid each time the files root changes.
	pf := func(ctx context.Context, c *cid.Cid) error {
		return n.Repo.Datastore().Put(dsk, c.Bytes())
	}

	var nd *merkledag.ProtoNode
	val, err := n.Repo.Datastore().Get(dsk)

	switch {
	case err == ds.ErrNotFound || val == nil:
		// First run: start from an empty directory node.
		nd = ft.EmptyDirNode()
		_, err := n.DAG.Add(nd)
		if err != nil {
			return fmt.Errorf("failure writing to dagstore: %s", err)
		}
	case err == nil:
		c, err := cid.Cast(val.([]byte))
		if err != nil {
			return err
		}

		rnd, err := n.DAG.Get(n.Context(), c)
		if err != nil {
			return fmt.Errorf("error loading filesroot from DAG: %s", err)
		}

		pbnd, ok := rnd.(*merkledag.ProtoNode)
		if !ok {
			return merkledag.ErrNotProtobuf
		}

		nd = pbnd
	default:
		return err
	}

	mr, err := mfs.NewRoot(n.Context(), n.DAG, nd, pf)
	if err != nil {
		return err
	}

	n.FilesRoot = mr
	return nil
}
// SetupOfflineRouting loads the local nodes private key and
// uses it to instantiate a routing system in offline mode.
// This is primarily used for offline ipns modifications.
// It is a no-op when routing was already configured.
func (n *IpfsNode) SetupOfflineRouting() error {
	if n.Routing != nil {
		// Routing was already set up
		return nil
	}
	err := n.LoadPrivateKey()
	if err != nil {
		return err
	}

	n.Routing = offroute.NewOfflineRouter(n.Repo.Datastore(), n.PrivateKey)

	size, err := n.getCacheSize()
	if err != nil {
		return err
	}

	// Mirror the online path: name resolution backed by the offline router.
	n.Namesys = namesys.NewNameSystem(n.Routing, n.Repo.Datastore(), size)

	return nil
}
// loadPrivateKey decodes the private key stored in the identity config and
// verifies that the peer ID it derives matches the expected id.
func loadPrivateKey(cfg *config.Identity, id peer.ID) (ic.PrivKey, error) {
	sk, err := cfg.DecodePrivateKey("passphrase todo!")
	if err != nil {
		return nil, err
	}

	derived, err := peer.IDFromPrivateKey(sk)
	if err != nil {
		return nil, err
	}

	if derived != id {
		return nil, fmt.Errorf("private key in config does not match id: %s != %s", id, derived)
	}

	return sk, nil
}
// listenAddresses parses the configured swarm addresses into multiaddrs.
func listenAddresses(cfg *config.Config) ([]ma.Multiaddr, error) {
	var listen []ma.Multiaddr
	for _, addr := range cfg.Addresses.Swarm {
		maddr, err := ma.NewMultiaddr(addr)
		if err != nil {
			// Report the specific offending address and the underlying
			// parse error instead of dumping the whole list with no cause.
			return nil, fmt.Errorf("failure to parse config.Addresses.Swarm entry %q: %s", addr, err)
		}
		listen = append(listen, maddr)
	}

	return listen, nil
}
// ConstructPeerHostOpts carries optional knobs for host construction.
type ConstructPeerHostOpts struct {
	DisableNatPortMap bool // when true, skip NAT port-mapping on the host
}

// HostOption builds a libp2p host from an identity, peerstore, bandwidth
// reporter, dial filters, muxer transport and optional private-network
// protector; DefaultHostOption is used unless a caller overrides it.
type HostOption func(ctx context.Context, id peer.ID, ps pstore.Peerstore, bwr metrics.Reporter, fs []*net.IPNet, tpt smux.Transport, protc ipnet.Protector, opts *ConstructPeerHostOpts) (p2phost.Host, error)

var DefaultHostOption HostOption = constructPeerHost
// isolates the complex initialization steps
// constructPeerHost builds the libp2p swarm (with optional private-network
// protection), installs the dial filters, and wraps the network in a basic
// host with NAT port-mapping unless disabled.
func constructPeerHost(ctx context.Context, id peer.ID, ps pstore.Peerstore, bwr metrics.Reporter, fs []*net.IPNet, tpt smux.Transport, protec ipnet.Protector, opts *ConstructPeerHostOpts) (p2phost.Host, error) {

	// no addresses to begin with. we'll start later.
	swrm, err := swarm.NewSwarmWithProtector(ctx, nil, id, ps, protec, tpt, bwr)
	if err != nil {
		return nil, err
	}

	network := (*swarm.Network)(swrm)

	for _, f := range fs {
		network.Swarm().Filters.AddDialFilter(f)
	}

	hostOpts := []interface{}{bwr}
	if !opts.DisableNatPortMap {
		hostOpts = append(hostOpts, p2pbhost.NATPortMap)
	}

	host := p2pbhost.New(network, hostOpts...)

	return host, nil
}
// startListening on the network addresses:
// resolves the configured swarm addresses, drops the unusable ones, errors
// out when nothing remains, and tells the host's network to listen on the
// rest.
func startListening(ctx context.Context, host p2phost.Host, cfg *config.Config) error {
	listenAddrs, err := listenAddresses(cfg)
	if err != nil {
		return err
	}

	// make sure we error out if our config does not have addresses we can use
	log.Debugf("Config.Addresses.Swarm:%s", listenAddrs)
	filteredAddrs := addrutil.FilterUsableAddrs(listenAddrs)
	log.Debugf("Config.Addresses.Swarm:%s (filtered)", filteredAddrs)
	if len(filteredAddrs) < 1 {
		return fmt.Errorf("addresses in config not usable: %s", listenAddrs)
	}

	// Actually start listening:
	if err := host.Network().Listen(filteredAddrs...); err != nil {
		return err
	}

	// list out our addresses
	addrs, err := host.Network().InterfaceListenAddresses()
	if err != nil {
		return err
	}
	log.Infof("Swarm listening at: %s", addrs)
	return nil
}
// constructDHTRouting builds a full (client+server) DHT and registers the
// IPNS record validator and selector under the IpnsValidatorTag namespace.
func constructDHTRouting(ctx context.Context, host p2phost.Host, dstore repo.Datastore) (routing.IpfsRouting, error) {
	r := dht.NewDHT(ctx, host, dstore)
	r.Validator[IpnsValidatorTag] = namesys.IpnsRecordValidator
	r.Selector[IpnsValidatorTag] = namesys.IpnsSelectorFunc
	return r, nil
}
// constructClientDHTRouting builds a client-only DHT and registers the
// IPNS record validator and selector under the IpnsValidatorTag namespace.
func constructClientDHTRouting(ctx context.Context, host p2phost.Host, dstore repo.Datastore) (routing.IpfsRouting, error) {
	r := dht.NewDHTClient(ctx, host, dstore)
	r.Validator[IpnsValidatorTag] = namesys.IpnsRecordValidator
	r.Selector[IpnsValidatorTag] = namesys.IpnsSelectorFunc
	return r, nil
}
type RoutingOption func(context.Context, p2phost.Host, repo.Datastore) (routing.IpfsRouting, error)
type DiscoveryOption func(context.Context, p2phost.Host) (discovery.Service, error)
var DHTOption RoutingOption = constructDHTRouting
var DHTClientOption RoutingOption = constructClientDHTRouting
var NilRouterOption RoutingOption = nilrouting.ConstructNilRouting<|fim▁end|>
|
Peerstore pstore.Peerstore // storage for other Peer instances
|
<|file_name|>remote_test.go<|end_file_name|><|fim▁begin|>package selenium
import (
"flag"
"fmt"
"net/http"
"strings"
"testing"
)
// Command-line knobs for the test suite.
var grid = flag.Bool("test.grid", false, "skip tests that fail on Selenium Grid")
var executor = flag.String("test.executor", defaultExecutor, "executor URL")
var browserName = flag.String("test.browserName", "firefox", "browser to run tests on")
// init parses the test flags and records the requested browser in the
// shared desired-capabilities map used by every session.
func init() {
	flag.BoolVar(&Trace, "trace", false, "trace HTTP requests and responses")
	flag.Parse()
	caps["browserName"] = *browserName
}
// caps holds the desired capabilities sent when creating each session.
var caps = make(Capabilities)

var runOnSauce = flag.Bool("saucelabs", false, "run on sauce")
// newRemote starts a fresh WebDriver session for the named test, failing
// the test immediately when the session cannot be created.
func newRemote(testName string, t *testing.T) WebDriver {
	wd, err := NewRemote(caps, *executor)
	if err != nil {
		t.Fatalf("can't start session for test %s: %s", testName, err)
	}
	return wd
}
// TestStatus checks that the server status reports a non-empty OS name.
func TestStatus(t *testing.T) {
	if *grid {
		t.Skip()
	}
	t.Parallel()
	wd := newRemote("TestStatus", t)
	defer wd.Quit()

	status, err := wd.Status()
	if err != nil {
		t.Fatal(err)
	}

	if status.OS.Name == "" {
		t.Fatal("No OS")
	}
}
// TestSessions checks that listing the server's sessions does not error.
func TestSessions(t *testing.T) {
	if *grid {
		t.Skip()
	}
	t.Parallel()
	wd := newRemote("TestSessions", t)
	defer wd.Quit()

	if _, err := wd.Sessions(); err != nil {
		t.Fatal(err)
	}
}
// TestNewSession creates a session directly on a raw remoteWebDriver and
// verifies the returned session id is non-empty and stored on the driver.
func TestNewSession(t *testing.T) {
	t.Parallel()
	// skipped under the hosted (Sauce Labs) configuration
	if *runOnSauce {
		return
	}
	wd := &remoteWebDriver{capabilities: caps, executor: defaultExecutor}
	sid, err := wd.NewSession()
	defer wd.Quit()

	if err != nil {
		t.Fatalf("error in new session - %s", err)
	}

	if sid == "" {
		t.Fatal("Empty session id")
	}

	if wd.id != sid {
		t.Fatal("Session id mismatch")
	}
}
// TestCapabilities verifies the session reports the browser we asked for.
func TestCapabilities(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestCapabilities", t)
	defer wd.Quit()

	c, err := wd.Capabilities()
	if err != nil {
		t.Fatal(err)
	}

	if c["browserName"] != caps["browserName"] {
		t.Fatalf("bad browser name - %s", c["browserName"])
	}
}
// TestSetTimeouts smoke-tests the global timeout setter. No explicit
// assertion here; presumably the .T(t) wrapper reports driver errors to
// t — confirm against the wrapper's implementation.
func TestSetTimeouts(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestSetTimeouts", t).T(t)
	defer wd.Quit()

	wd.SetTimeouts(200)
}
// TestSetAsyncScriptTimeout smoke-tests the async-script timeout setter.
func TestSetAsyncScriptTimeout(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestSetAsyncScriptTimeout", t).T(t)
	defer wd.Quit()

	wd.SetAsyncScriptTimeout(200)
}
// TestSetImplicitWaitTimeout smoke-tests the implicit-wait timeout setter.
func TestSetImplicitWaitTimeout(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestSetImplicitWaitTimeout", t).T(t)
	defer wd.Quit()

	wd.SetImplicitWaitTimeout(200)
}
// TestCurrentWindowHandle checks the current window has a non-empty handle.
func TestCurrentWindowHandle(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestCurrentWindowHandle", t).T(t)
	defer wd.Quit()

	handle := wd.CurrentWindowHandle()

	if handle == "" {
		t.Fatal("Empty handle")
	}
}
// TestWindowHandles checks that a window handle is available.
// NOTE(review): despite the plural name this calls CurrentWindowHandle
// (singular); it likely should exercise a WindowHandles() API — confirm
// against the driver interface before changing.
func TestWindowHandles(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestWindowHandles", t).T(t)
	defer wd.Quit()

	handles := wd.CurrentWindowHandle()
	if handles == "" {
		t.Fatal("No handles")
	}
}
// TestWindowSize checks the current window reports a non-zero size.
func TestWindowSize(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestWindowSize", t).T(t)
	defer wd.Quit()

	size := wd.WindowSize(wd.CurrentWindowHandle())
	if size == nil || size.Height == 0 || size.Width == 0 {
		// t.Fatal does not interpret format verbs; Fatalf is required
		// for the %+v to render.
		t.Fatalf("Window size failed with size: %+v", size)
	}
}
// TestWindowPosition checks the current window reports a position.
func TestWindowPosition(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestWindowPosition", t).T(t)
	defer wd.Quit()

	pos := wd.WindowPosition(wd.CurrentWindowHandle())

	if pos == nil {
		t.Fatal("Window position failed")
	}
}
// TestResizeWindow resizes the current window to 400x400 and verifies the
// reported size matches.
func TestResizeWindow(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestResizeWindow", t).T(t)
	defer wd.Quit()

	wd.ResizeWindow(wd.CurrentWindowHandle(), Size{400, 400})

	sz := wd.WindowSize(wd.CurrentWindowHandle())

	if int(sz.Width) != 400 {
		t.Fatalf("got width %f, want 400", sz.Width)
	}
	if int(sz.Height) != 400 {
		t.Fatalf("got height %f, want 400", sz.Height)
	}
}
// TestGet navigates to the fixture server and checks the current URL.
func TestGet(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestGet", t).T(t)
	defer wd.Quit()

	wd.Get(serverURL)
	newURL := wd.CurrentURL()
	if newURL != serverURL {
		t.Fatalf("%s != %s", newURL, serverURL)
	}
}
func TestNavigation(t *testing.T) {
t.Parallel()
wd := newRemote("TestNavigation", t).T(t)
defer wd.Quit()
<|fim▁hole|> url2 := serverURL + "other"
wd.Get(url2)
wd.Back()
url := wd.CurrentURL()
if url != url1 {
t.Fatalf("back got me to %s (expected %s)", url, url1)
}
wd.Forward()
url = wd.CurrentURL()
if url != url2 {
t.Fatalf("forward got me to %s (expected %s)", url, url2)
}
wd.Refresh()
url = wd.CurrentURL()
if url != url2 {
t.Fatalf("refresh got me to %s (expected %s)", url, url2)
}
}
// TestTitle checks the home-page title matches the fixture.
func TestTitle(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestTitle", t).T(t)
	defer wd.Quit()

	wd.Get(serverURL)
	title := wd.Title()
	expectedTitle := "Go Selenium Test Suite"
	if title != expectedTitle {
		// t.Fatal ignores format verbs; Fatalf is needed here.
		t.Fatalf("Bad title %s, should be %s", title, expectedTitle)
	}
}
// TestPageSource checks the home page source contains the fixture text.
func TestPageSource(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestPageSource", t).T(t)
	defer wd.Quit()

	wd.Get(serverURL)
	source := wd.PageSource()

	if !strings.Contains(source, "The home page.") {
		t.Fatalf("Bad source\n%s", source)
	}
}
// elementFinder abstracts the element-lookup methods shared by the driver
// and element wrapper types so the find helpers below can test both
// top-level and child lookups.
type elementFinder interface {
	FindElement(by, value string) WebElementT
	FindElements(by, value string) []WebElementT
}
// TestFindElement finds a list item by CSS selector from the driver root.
func TestFindElement(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestFindElement", t).T(t)
	defer wd.Quit()
	wd.Get(serverURL)
	testFindElement(t, wd, ByCSSSelector, "ol.list li", "foo")
}
// TestFindChildElement finds a list item relative to the body element.
func TestFindChildElement(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestFindChildElement", t).T(t)
	defer wd.Quit()
	wd.Get(serverURL)
	testFindElement(t, wd.FindElement(ByTagName, "body"), ByCSSSelector, "ol.list li", "foo")
}
// testFindElement asserts the first element matched by (by, value) on the
// given finder has the expected text.
func testFindElement(t *testing.T, ef elementFinder, by, value string, txt string) {
	elem := ef.FindElement(by, value)
	if want, got := txt, elem.Text(); want != got {
		t.Errorf("Elem for %q %q: want text %q, got %q", by, value, want, got)
	}
}
// TestFindElements finds multiple list items from the driver root.
func TestFindElements(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestFindElements", t).T(t)
	defer wd.Quit()
	wd.Get(serverURL)
	testFindElements(t, wd, ByCSSSelector, "ol.list li", []string{"foo", "bar"})
}
// TestFindChildElements finds multiple list items relative to a parent.
func TestFindChildElements(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestFindChildElements", t).T(t)
	defer wd.Quit()
	wd.Get(serverURL)
	testFindElements(t, wd.FindElement(ByCSSSelector, "ol.list"), ByCSSSelector, "li", []string{"foo", "bar"})
}
// testFindElements asserts the finder returns exactly the elements whose
// text matches elemsTxt, in order.
func testFindElements(t *testing.T, ef elementFinder, by, value string, elemsTxt []string) {
	elems := ef.FindElements(by, value)
	if len(elems) != len(elemsTxt) {
		// t.Fatal does not format its arguments; Fatalf is needed for the
		// counts to render.
		t.Fatalf("Wrong number of elements %d (should be %d)", len(elems), len(elemsTxt))
	}
	t.Logf("Found %d elements for %q %q", len(elems), by, value)
	for i, txt := range elemsTxt {
		elem := elems[i]
		if want, got := txt, elem.Text(); want != got {
			t.Errorf("Elem %d for %q %q: want text %q, got %q", i, by, value, want, got)
		}
	}
}
// TestSendKeys types a query (with trailing newline to submit) into the
// search box and checks the result page.
func TestSendKeys(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestSendKeys", t).T(t)
	defer wd.Quit()

	wd.Get(serverURL)
	input := wd.FindElement(ByName, "q")
	input.SendKeys("golang\n")
	source := wd.PageSource()

	if !strings.Contains(source, "The Go Programming Language") {
		t.Fatal("Can't find Go")
	}

	if !strings.Contains(source, "golang") {
		t.Fatal("Can't find search query in source")
	}
}
// TestClick submits the search form via the submit button.
func TestClick(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestClick", t).T(t)
	defer wd.Quit()

	wd.Get(serverURL)
	input := wd.FindElement(ByName, "q")
	input.SendKeys("golang")
	button := wd.FindElement(ByID, "submit")
	button.Click()

	if !strings.Contains(wd.PageSource(), "The Go Programming Language") {
		t.Fatal("Can't find Go")
	}
}
// TestGetCookies checks the fixture server's cookies are visible and named.
func TestGetCookies(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestGetCookies", t).T(t)
	defer wd.Quit()

	wd.Get(serverURL)
	cookies := wd.GetCookies()

	if len(cookies) == 0 {
		t.Fatal("No cookies")
	}

	if cookies[0].Name == "" {
		t.Fatal("Empty cookie")
	}
}
// TestAddCookie adds a cookie and verifies it appears in the cookie list.
func TestAddCookie(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestAddCookie", t).T(t)
	defer wd.Quit()

	wd.Get(serverURL)
	cookie := &Cookie{Name: "the nameless cookie", Value: "I have nothing"}
	wd.AddCookie(cookie)

	cookies := wd.GetCookies()
	for _, c := range cookies {
		if (c.Name == cookie.Name) && (c.Value == cookie.Value) {
			return
		}
	}

	t.Fatal("Can't find new cookie")
}
// TestDeleteCookie deletes the first cookie and verifies both the count
// drops by one and the deleted name is gone.
func TestDeleteCookie(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestDeleteCookie", t).T(t)
	defer wd.Quit()

	wd.Get(serverURL)
	cookies := wd.GetCookies()
	if len(cookies) == 0 {
		t.Fatal("No cookies")
	}
	wd.DeleteCookie(cookies[0].Name)
	newCookies := wd.GetCookies()
	if len(newCookies) != len(cookies)-1 {
		t.Fatal("Cookie not deleted")
	}

	for _, c := range newCookies {
		if c.Name == cookies[0].Name {
			t.Fatal("Deleted cookie found")
		}
	}
}
// TestLocation checks the submit button reports a non-origin location.
func TestLocation(t *testing.T) {
	// Run in parallel like every other test in this file (was missing).
	t.Parallel()
	wd := newRemote("TestLocation", t).T(t)
	defer wd.Quit()

	wd.Get(serverURL)
	button := wd.FindElement(ByID, "submit")

	loc := button.Location()

	if (loc.X == 0) || (loc.Y == 0) {
		t.Fatalf("Bad location: %v\n", loc)
	}
}
// TestLocationInView checks the submit button's in-view location is set.
func TestLocationInView(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestLocationInView", t).T(t)
	defer wd.Quit()

	wd.Get(serverURL)
	button := wd.FindElement(ByID, "submit")

	loc := button.LocationInView()

	if (loc.X == 0) || (loc.Y == 0) {
		t.Fatalf("Bad location: %v\n", loc)
	}
}
// TestSize checks the submit button reports non-zero dimensions.
func TestSize(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestSize", t).T(t)
	defer wd.Quit()

	wd.Get(serverURL)
	button := wd.FindElement(ByID, "submit")

	size := button.Size()

	if (size.Width == 0) || (size.Height == 0) {
		t.Fatalf("Bad size: %v\n", size)
	}
}
// TestExecuteScript runs a trivial JS addition and checks the numeric
// reply (JSON numbers come back as float64).
func TestExecuteScript(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestExecuteScript", t).T(t)
	defer wd.Quit()

	script := "return arguments[0] + arguments[1]"
	args := []interface{}{1, 2}
	reply := wd.ExecuteScript(script, args)

	result, ok := reply.(float64)
	if !ok {
		t.Fatal("Not an int reply")
	}

	if result != 3 {
		// Was t.Fatal with a %d verb: t.Fatal does not format, and %d is
		// the wrong verb for a float64 anyway — use Fatalf with %v.
		t.Fatalf("Bad result %v (expected 3)", result)
	}
}
// TestScreenshot checks a screenshot returns non-empty data.
func TestScreenshot(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestScreenshot", t).T(t)
	defer wd.Quit()

	wd.Get(serverURL)
	data := wd.Screenshot()

	if len(data) == 0 {
		t.Fatal("Empty reply")
	}
}
// TestIsSelected toggles the fixture checkbox and verifies IsSelected
// reflects the change.
func TestIsSelected(t *testing.T) {
	t.Parallel()
	wd := newRemote("TestIsSelected", t).T(t)
	defer wd.Quit()

	wd.Get(serverURL)
	elem := wd.FindElement(ByID, "chuk")
	selected := elem.IsSelected()

	if selected {
		t.Fatal("Already selected")
	}

	elem.Click()
	selected = elem.IsSelected()

	if !selected {
		t.Fatal("Not selected")
	}
}
// Test server

// homePage is the fixture served at "/": a search form, a checkbox, and
// two lists exercised by the find-element tests.
var homePage = `
<html>
<head>
<title>Go Selenium Test Suite</title>
</head>
<body>
	The home page. <br />
	<form action="/search">
		<input name="q" /> <input type="submit" id="submit"/> <br />
		<input id="chuk" type="checkbox" /> A checkbox.
	</form>
	<ol class="list">
	  <li>foo</li>
	  <li>bar</li>
	</ol>
	<ol class="otherlist">
	  <li>baz</li>
	  <li>qux</li>
	</ol>
</body>
</html>
`
// otherPage is the fixture served at "/other", used by navigation tests.
var otherPage = `
<html>
<head>
<title>Go Selenium Test Suite - Other Page</title>
</head>
<body>
	The other page.
</body>
</html>
`
// searchPage is the fixture served at "/search"; its %s is filled with the
// submitted query by the handler.
var searchPage = `
<html>
<head>
<title>Go Selenium Test Suite - Search Page</title>
</head>
<body>
	You searched for "%s". I'll pretend I've found:
	<p>
	"The Go Programming Language"
	</p>
</body>
</html>
`
// pages maps request paths to their HTML fixtures.
var pages = map[string]string{
	"/":       homePage,
	"/other":  otherPage,
	"/search": searchPage,
}
// handler serves the static test pages, substitutes the search query into
// the search fixture, and sets a few cookies for the cookie tests.
func handler(w http.ResponseWriter, r *http.Request) {
	path := r.URL.Path
	page, ok := pages[path]
	if !ok {
		http.NotFound(w, r)
		return
	}

	if path == "/search" {
		// FormValue parses the form itself and is safe when "q" is
		// absent, unlike r.Form["q"][0] which panics on a missing key.
		page = fmt.Sprintf(page, r.FormValue("q"))
	}
	// Some cookies for the tests
	for i := 0; i < 3; i++ {
		name := fmt.Sprintf("cookie-%d", i)
		value := fmt.Sprintf("value-%d", i)
		http.SetCookie(w, &http.Cookie{Name: name, Value: value})
	}
	// Fprint, not Fprintf: page is data, not a format string, and a
	// literal '%' in a fixture would be misinterpreted by Fprintf.
	fmt.Fprint(w, page)
}
// Local fixture-server address used by all page-driving tests.
var serverPort = ":4793"
var serverURL = "http://localhost" + serverPort + "/"
// init starts the fixture HTTP server in the background for the duration
// of the test run.
func init() {
	go func() {
		http.HandleFunc("/", handler)
		http.ListenAndServe(serverPort, nil)
	}()
}
|
url1 := serverURL
wd.Get(url1)
|
<|file_name|>test_artificial_128_None_MovingMedian_12_12_100.py<|end_file_name|><|fim▁begin|>import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
<|fim▁hole|><|fim▁end|>
|
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 12, transform = "None", sigma = 0.0, exog_count = 100, ar_order = 12);
|
<|file_name|>win_find.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
# Metadata consumed by Ansible's documentation tooling; not used at runtime.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_find
version_added: "2.3"
short_description: Return a list of files based on specific criteria
description:
- Return a list of files based on specified criteria.
- Multiple criteria are AND'd together.
- For non-Windows targets, use the M(find) module instead.
options:
age:
description:
- Select files or folders whose age is equal to or greater than
the specified time.
- Use a negative age to find files equal to or less than
the specified time.
- You can choose seconds, minutes, hours, days or weeks
by specifying the first letter of an of
those words (e.g., "2s", "10d", 1w").
type: str
age_stamp:
description:
- Choose the file property against which we compare C(age).
- The default attribute we compare with is the last modification time.
type: str
choices: [ atime, ctime, mtime ]
default: mtime
checksum_algorithm:
description:
- Algorithm to determine the checksum of a file.
- Will throw an error if the host is unable to use specified algorithm.
type: str
choices: [ md5, sha1, sha256, sha384, sha512 ]
default: sha1
file_type:
description: Type of file to search for.
type: str
choices: [ directory, file ]
default: file
follow:
description:
- Set this to C(yes) to follow symlinks in the path.
- This needs to be used in conjunction with C(recurse).
type: bool
default: no
get_checksum:
description:
- Whether to return a checksum of the file in the return info (default sha1),
use C(checksum_algorithm) to change from the default.
type: bool
default: yes
hidden:
description: Set this to include hidden files or folders.
type: bool
default: no
paths:
description:
- List of paths of directories to search for files or folders in.
- This can be supplied as a single path or a list of paths.
type: list
required: yes
patterns:
description:
- One or more (powershell or regex) patterns to compare filenames with.
- The type of pattern matching is controlled by C(use_regex) option.
- The patterns retrict the list of files or folders to be returned based on the filenames.
- For a file to be matched it only has to match with one pattern in a list provided.
type: list
recurse:
description:
- Will recursively descend into the directory looking for files or folders.
type: bool
default: no
size:
description:
- Select files or folders whose size is equal to or greater than the specified size.
- Use a negative value to find files equal to or less than the specified size.
- You can specify the size with a suffix of the byte type i.e. kilo = k, mega = m...
- Size is not evaluated for symbolic links.
type: str
use_regex:
description:
- Will set patterns to run as a regex check if set to C(yes).
type: bool
default: no
author:
- Jordan Borean (@jborean93)
'''
# Usage examples shown by ansible-doc. The SHA256 example was corrupted by a
# stray extraction token mid-string; reconstructed here from the stray fill
# fragment found at the end of the chunk (paths/get_checksum/checksum_algorithm).
EXAMPLES = r'''
- name: Find files in path
  win_find:
    paths: D:\Temp

- name: Find hidden files in path
  win_find:
    paths: D:\Temp
    hidden: yes

- name: Find files in multiple paths
  win_find:
    paths:
    - C:\Temp
    - D:\Temp

- name: Find files in directory while searching recursively
  win_find:
    paths: D:\Temp
    recurse: yes

- name: Find files in directory while following symlinks
  win_find:
    paths: D:\Temp
    recurse: yes
    follow: yes

- name: Find files with .log and .out extension using powershell wildcards
  win_find:
    paths: D:\Temp
    patterns: [ '*.log', '*.out' ]

- name: Find files in path based on regex pattern
  win_find:
    paths: D:\Temp
    patterns: out_\d{8}-\d{6}.log

- name: Find files older than 1 day
  win_find:
    paths: D:\Temp
    age: 86400

- name: Find files older than 1 day based on create time
  win_find:
    paths: D:\Temp
    age: 86400
    age_stamp: ctime

- name: Find files older than 1 day with unit syntax
  win_find:
    paths: D:\Temp
    age: 1d

- name: Find files newer than 1 hour
  win_find:
    paths: D:\Temp
    age: -3600

- name: Find files newer than 1 hour with unit syntax
  win_find:
    paths: D:\Temp
    age: -1h

- name: Find files larger than 1MB
  win_find:
    paths: D:\Temp
    size: 1048576

- name: Find files larger than 1GB with unit syntax
  win_find:
    paths: D:\Temp
    size: 1g

- name: Find files smaller than 1MB
  win_find:
    paths: D:\Temp
    size: -1048576

- name: Find files smaller than 1GB with unit syntax
  win_find:
    paths: D:\Temp
    size: -1g

- name: Find folders/symlinks in multiple paths
  win_find:
    paths:
    - C:\Temp
    - D:\Temp
    file_type: directory

- name: Find files and return SHA256 checksum of files found
  win_find:
    paths: C:\Temp
    get_checksum: yes
    checksum_algorithm: sha256

- name: Find files and do not return the checksum
  win_find:
    paths: C:\Temp
    get_checksum: no
'''
# Return-value documentation consumed by ansible-doc. Fixes: `lnk_source`
# used the invalid key `return:` instead of `returned:`, the `path` sample
# duplicated the `owner` sample instead of showing a path, and a stray
# extraction token was fused to the closing quotes.
RETURN = r'''
examined:
    description: The number of files/folders that was checked.
    returned: always
    type: int
    sample: 10
matched:
    description: The number of files/folders that match the criteria.
    returned: always
    type: int
    sample: 2
files:
    description: Information on the files/folders that match the criteria returned as a list of dictionary elements for each file matched.
    returned: success
    type: complex
    contains:
        attributes:
            description: attributes of the file at path in raw form.
            returned: success, path exists
            type: str
            sample: "Archive, Hidden"
        checksum:
            description: The checksum of a file based on checksum_algorithm specified.
            returned: success, path exists, path is a file, get_checksum == True
            type: str
            sample: 09cb79e8fc7453c84a07f644e441fd81623b7f98
        creationtime:
            description: The create time of the file represented in seconds since epoch.
            returned: success, path exists
            type: float
            sample: 1477984205.15
        extension:
            description: The extension of the file at path.
            returned: success, path exists, path is a file
            type: str
            sample: ".ps1"
        isarchive:
            description: If the path is ready for archiving or not.
            returned: success, path exists
            type: bool
            sample: true
        isdir:
            description: If the path is a directory or not.
            returned: success, path exists
            type: bool
            sample: true
        ishidden:
            description: If the path is hidden or not.
            returned: success, path exists
            type: bool
            sample: true
        islnk:
            description: If the path is a symbolic link or junction or not.
            returned: success, path exists
            type: bool
            sample: true
        isreadonly:
            description: If the path is read only or not.
            returned: success, path exists
            type: bool
            sample: true
        isshared:
            description: If the path is shared or not.
            returned: success, path exists
            type: bool
            sample: true
        lastaccesstime:
            description: The last access time of the file represented in seconds since epoch.
            returned: success, path exists
            type: float
            sample: 1477984205.15
        lastwritetime:
            description: The last modification time of the file represented in seconds since epoch.
            returned: success, path exists
            type: float
            sample: 1477984205.15
        lnk_source:
            description: The target of the symbolic link, will return null if not a link or the link is broken.
            returned: success, path exists, path is a symbolic link
            type: str
            sample: C:\temp
        owner:
            description: The owner of the file.
            returned: success, path exists
            type: str
            sample: BUILTIN\Administrators
        path:
            description: The full absolute path to the file.
            returned: success, path exists
            type: str
            sample: C:\Temp\foo.txt
        sharename:
            description: The name of share if folder is shared.
            returned: success, path exists, path is a directory and isshared == True
            type: str
            sample: file-share
        size:
            description: The size in bytes of a file or folder.
            returned: success, path exists, path is not a link
            type: int
            sample: 1024
'''
|
paths: C:\Temp
get_checksum: yes
checksum_algorithm: sha256
|
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use libc;
use option::{Some, None};
use cell::Cell;
use clone::Clone;
use container::Container;
use iter::{Iterator, range};
use super::io::net::ip::{SocketAddr, Ipv4Addr, Ipv6Addr};
use vec::{OwnedVector, MutableVector, ImmutableVector};
use path::GenericPath;
use rt::sched::Scheduler;
use unstable::{run_in_bare_thread};
use rt::thread::Thread;
use rt::task::Task;
use rt::uv::uvio::UvEventLoop;
use rt::work_queue::WorkQueue;
use rt::sleeper_list::SleeperList;
use rt::comm::oneshot;
use result::{Result, Ok, Err};
/// Construct a single-threaded test `Scheduler` driven by a libuv event
/// loop. `no_sleep` is set so the scheduler exits when it runs out of work
/// instead of waiting for an explicit `Shutdown` message.
pub fn new_test_uv_sched() -> Scheduler {
    let queue = WorkQueue::new();
    let queues = ~[queue.clone()];
    let mut sched = Scheduler::new(~UvEventLoop::new(),
                                   queue,
                                   queues,
                                   SleeperList::new());
    // Don't wait for the Shutdown message
    sched.no_sleep = true;
    return sched;
}
/// Run `f` inside a brand-new scheduler hosted on a fresh bare thread,
/// blocking until the task and its scheduler have fully shut down.
pub fn run_in_newsched_task(f: ~fn()) {
    let f = Cell::new(f);
    do run_in_bare_thread {
        run_in_newsched_task_core(f.take());
    }
}
/// Body of `run_in_newsched_task`: boot a fresh single-threaded scheduler
/// on the current thread with `f` as its root task.
pub fn run_in_newsched_task_core(f: ~fn()) {
    use rt::sched::Shutdown;
    let mut sched = ~new_test_uv_sched();
    let exit_handle = Cell::new(sched.make_handle());
    // When the root task dies, tell the scheduler to shut down and assert
    // that the task exited successfully.
    let on_exit: ~fn(bool) = |exit_status| {
        exit_handle.take().send(Shutdown);
        rtassert!(exit_status);
    };
    let mut task = ~Task::new_root(&mut sched.stack_pool, None, f);
    task.death.on_exit = Some(on_exit);
    sched.bootstrap(task);
}
#[cfg(target_os="macos")]
#[allow(non_camel_case_types)]
mod darwin_fd_limit {
    /*!
     * darwin_fd_limit exists to work around an issue where launchctl on Mac OS X defaults the
     * rlimit maxfiles to 256/unlimited. The default soft limit of 256 ends up being far too low
     * for our multithreaded scheduler testing, depending on the number of cores available.
     *
     * This fixes issue #7772.
     */
    use libc;
    type rlim_t = libc::uint64_t;
    // Mirror of the C `struct rlimit` (soft and hard limits).
    struct rlimit {
        rlim_cur: rlim_t,
        rlim_max: rlim_t
    }
    #[nolink]
    extern {
        // name probably doesn't need to be mut, but the C function doesn't specify const
        fn sysctl(name: *mut libc::c_int, namelen: libc::c_uint,
                  oldp: *mut libc::c_void, oldlenp: *mut libc::size_t,
                  newp: *mut libc::c_void, newlen: libc::size_t) -> libc::c_int;
        fn getrlimit(resource: libc::c_int, rlp: *mut rlimit) -> libc::c_int;
        fn setrlimit(resource: libc::c_int, rlp: *rlimit) -> libc::c_int;
    }
    // sysctl MIB constants for the kern.maxfilesperproc query, and the
    // RLIMIT_NOFILE resource id, as defined by the Darwin headers.
    static CTL_KERN: libc::c_int = 1;
    static KERN_MAXFILESPERPROC: libc::c_int = 29;
    static RLIMIT_NOFILE: libc::c_int = 8;
    pub unsafe fn raise_fd_limit() {
        #[fixed_stack_segment]; #[inline(never)];
        // The strategy here is to fetch the current resource limits, read the kern.maxfilesperproc
        // sysctl value, and bump the soft resource limit for maxfiles up to the sysctl value.
        use ptr::{to_unsafe_ptr, to_mut_unsafe_ptr, mut_null};
        use sys::size_of_val;
        use os::last_os_error;
        // Fetch the kern.maxfilesperproc value
        let mut mib: [libc::c_int, ..2] = [CTL_KERN, KERN_MAXFILESPERPROC];
        let mut maxfiles: libc::c_int = 0;
        let mut size: libc::size_t = size_of_val(&maxfiles) as libc::size_t;
        // Any failure below is logged and ignored: raising the limit is
        // best-effort and tests can still run with the default limit.
        if sysctl(to_mut_unsafe_ptr(&mut mib[0]), 2,
                  to_mut_unsafe_ptr(&mut maxfiles) as *mut libc::c_void,
                  to_mut_unsafe_ptr(&mut size),
                  mut_null(), 0) != 0 {
            let err = last_os_error();
            error2!("raise_fd_limit: error calling sysctl: {}", err);
            return;
        }
        // Fetch the current resource limits
        let mut rlim = rlimit{rlim_cur: 0, rlim_max: 0};
        if getrlimit(RLIMIT_NOFILE, to_mut_unsafe_ptr(&mut rlim)) != 0 {
            let err = last_os_error();
            error2!("raise_fd_limit: error calling getrlimit: {}", err);
            return;
        }
        // Bump the soft limit to the smaller of kern.maxfilesperproc and the hard limit
        rlim.rlim_cur = ::cmp::min(maxfiles as rlim_t, rlim.rlim_max);
        // Set our newly-increased resource limit
        if setrlimit(RLIMIT_NOFILE, to_unsafe_ptr(&rlim)) != 0 {
            let err = last_os_error();
            error2!("raise_fd_limit: error calling setrlimit: {}", err);
            return;
        }
    }
}
#[cfg(not(target_os="macos"))]
mod darwin_fd_limit {
    /// No-op stand-in on platforms that don't need the Mac OS X workaround.
    pub unsafe fn raise_fd_limit() {}
}
/// One-time process setup for test runs that spawn many schedulers/sockets.
#[doc(hidden)]
pub fn prepare_for_lots_of_tests() {
    // Bump the fd limit on OS X. See darwin_fd_limit for an explanation.
    unsafe { darwin_fd_limit::raise_fd_limit() }
}
/// Create more than one scheduler and run a function in a task
/// in one of the schedulers. The schedulers will stay alive
/// until the function `f` returns.
pub fn run_in_mt_newsched_task(f: ~fn()) {
    use os;
    use from_str::FromStr;
    use rt::sched::Shutdown;
    use rt::util;
    // see comment in other function (raising fd limits)
    prepare_for_lots_of_tests();
    let f = Cell::new(f);
    do run_in_bare_thread {
        // Thread count comes from RUST_RT_TEST_THREADS when set; otherwise
        // oversubscribe the cores (unless on OSX/valgrind, where thread
        // creation is deliberately limited).
        let nthreads = match os::getenv("RUST_RT_TEST_THREADS") {
            Some(nstr) => FromStr::from_str(nstr).unwrap(),
            None => {
                if util::limit_thread_creation_due_to_osx_and_valgrind() {
                    1
                } else {
                    // Using more threads than cores in test code
                    // to force the OS to preempt them frequently.
                    // Assuming that this help stress test concurrent types.
                    util::num_cpus() * 2
                }
            }
        };
        let sleepers = SleeperList::new();
        let mut handles = ~[];
        let mut scheds = ~[];
        let mut work_queues = ~[];
        // One work queue per scheduler, created up front so that every
        // scheduler is constructed with the full list of peer queues.
        for _ in range(0u, nthreads) {
            let work_queue = WorkQueue::new();
            work_queues.push(work_queue);
        }
        for i in range(0u, nthreads) {
            let loop_ = ~UvEventLoop::new();
            let mut sched = ~Scheduler::new(loop_,
                                            work_queues[i].clone(),
                                            work_queues.clone(),
                                            sleepers.clone());
            let handle = sched.make_handle();
            handles.push(handle);
            scheds.push(sched);
        }
        let handles = Cell::new(handles);
        // When the main task exits, ask every scheduler to shut down.
        let on_exit: ~fn(bool) = |exit_status| {
            let mut handles = handles.take();
            // Tell schedulers to exit
            for handle in handles.mut_iter() {
                handle.send(Shutdown);
            }
            rtassert!(exit_status);
        };
        // `f` becomes the root task of the first scheduler.
        let mut main_task = ~Task::new_root(&mut scheds[0].stack_pool, None, f.take());
        main_task.death.on_exit = Some(on_exit);
        let mut threads = ~[];
        let main_task = Cell::new(main_task);
        let main_thread = {
            let sched = scheds.pop();
            let sched_cell = Cell::new(sched);
            do Thread::start {
                let sched = sched_cell.take();
                sched.bootstrap(main_task.take());
            }
        };
        threads.push(main_thread);
        // Remaining schedulers are bootstrapped with trivial tasks; they
        // participate via work stealing until told to shut down.
        while !scheds.is_empty() {
            let mut sched = scheds.pop();
            let bootstrap_task = ~do Task::new_root(&mut sched.stack_pool, None) || {
                rtdebug!("bootstrapping non-primary scheduler");
            };
            let bootstrap_task_cell = Cell::new(bootstrap_task);
            let sched_cell = Cell::new(sched);
            let thread = do Thread::start {
                let sched = sched_cell.take();
                sched.bootstrap(bootstrap_task_cell.take());
            };
            threads.push(thread);
        }
        // Wait for schedulers
        for thread in threads.move_iter() {
            thread.join();
        }
    }
}
/// Test tasks will abort on failure instead of unwinding
/// Run `f` as a child task on the current scheduler, immediately.
pub fn spawntask(f: ~fn()) {
    Scheduler::run_task(Task::build_child(None, f));
}
/// Create a new child task and schedule it to run later (it calls
/// `run_task_later`, not `run_task`). Aborts on failure.
pub fn spawntask_later(f: ~fn()) {
    Scheduler::run_task_later(Task::build_child(None, f));
}
/// Run `f` either immediately or later, chosen at random, to shake out
/// scheduling-order assumptions in tests.
pub fn spawntask_random(f: ~fn()) {
    use rand::{Rand, rng};
    let mut rng = rng();
    let run_now: bool = Rand::rand(&mut rng);
    if run_now {
        spawntask(f)
    } else {
        spawntask_later(f)
    }
}
/// Run `f` as a root task and report whether it exited successfully
/// (`Ok`) or failed (`Err`), instead of aborting the process.
pub fn spawntask_try(f: ~fn()) -> Result<(),()> {
    let (port, chan) = oneshot();
    let chan = Cell::new(chan);
    // The exit status is forwarded out of the task's death callback over a
    // oneshot channel.
    let on_exit: ~fn(bool) = |exit_status| chan.take().send(exit_status);
    let mut new_task = Task::build_root(None, f);
    new_task.death.on_exit = Some(on_exit);
    Scheduler::run_task(new_task);
    let exit_status = port.recv();
    if exit_status { Ok(()) } else { Err(()) }
}
/// Spawn a new task in a new scheduler and return a thread handle.
pub fn spawntask_thread(f: ~fn()) -> Thread {
    let f = Cell::new(f);
    // The returned thread owns a fresh single-threaded scheduler running `f`.
    let thread = do Thread::start {
        run_in_newsched_task_core(f.take());
    };
    return thread;
}
/// Get a ~Task for testing purposes other than actually scheduling it.
pub fn with_test_task(blk: ~fn(~Task) -> ~Task) {
    do run_in_bare_thread {
        let mut sched = ~new_test_uv_sched();
        let task = blk(~Task::new_root(&mut sched.stack_pool, None, ||{}));
        // The task was never scheduled, so it has to be torn down by hand.
        cleanup_task(task);
    }
}
/// Use to cleanup tasks created for testing but not "run".
pub fn cleanup_task(mut task: ~Task) {
    // NOTE(review): presumably this keeps the Task destructor from
    // complaining about an un-run task -- confirm against rt::task.
    task.destroyed = true;
}
/// Get a port number, starting at 9600, for use in tests
#[fixed_stack_segment] #[inline(never)]
pub fn next_test_port() -> u16 {
    unsafe {
        // Debug helper implemented in C; hands out ports above `base_port()`.
        return rust_dbg_next_port(base_port() as libc::uintptr_t) as u16;
    }
    extern {
        fn rust_dbg_next_port(base: libc::uintptr_t) -> libc::uintptr_t;
    }
}
/// Get a unique IPv4 localhost:port pair starting at 9600
pub fn next_test_ip4() -> SocketAddr {
    SocketAddr { ip: Ipv4Addr(127, 0, 0, 1), port: next_test_port() }
}
/// Get a unique IPv6 localhost:port pair starting at 9600
pub fn next_test_ip6() -> SocketAddr {
    // ::1 is the IPv6 loopback address.
    SocketAddr { ip: Ipv6Addr(0, 0, 0, 0, 0, 0, 0, 1), port: next_test_port() }
}
/*
XXX: Welcome to MegaHack City.
The bots run multiple builds at the same time, and these builds
all want to use ports. This function figures out which workspace
it is running in and assigns a port range based on it.
*/
// NOTE: an extraction artifact had fused the "snap3" and "dist" entries of
// `bases` onto one line with a stray token; split back into two entries.
fn base_port() -> uint {
    use os;
    use str::StrSlice;
    use vec::ImmutableVector;
    // Each known buildbot workspace gets its own 1000-port window above 9600.
    let base = 9600u;
    let range = 1000;
    let bases = [
        ("32-opt", base + range * 1),
        ("32-noopt", base + range * 2),
        ("64-opt", base + range * 3),
        ("64-noopt", base + range * 4),
        ("64-opt-vg", base + range * 5),
        ("all-opt", base + range * 6),
        ("snap3", base + range * 7),
        ("dist", base + range * 8)
    ];
    // FIXME (#9639): This needs to handle non-utf8 paths
    let path = os::getcwd();
    let path_s = path.as_str().unwrap();
    let mut final_base = base;
    // Use the first workspace whose name appears in the current directory.
    for &(dir, base) in bases.iter() {
        if path_s.contains(dir) {
            final_base = base;
            break;
        }
    }
    return final_base;
}
/// Get a constant that represents the number of times to repeat
/// stress tests. Default 1.
pub fn stress_factor() -> uint {
use os::getenv;
use from_str::from_str;
match getenv("RUST_RT_STRESS") {
Some(val) => from_str::<uint>(val).unwrap(),
None => 1
}
}<|fim▁end|>
| |
<|file_name|>gecko.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// `data` comes from components/style/properties.mako.rs; see build.rs for more details.
<%!
from data import to_rust_ident, to_camel_case, to_camel_case_lower
from data import Keyword
%>
<%namespace name="helpers" file="/helpers.mako.rs" />
use app_units::Au;
use custom_properties::CustomPropertiesMap;
use gecko_bindings::bindings;
% for style_struct in data.style_structs:
use gecko_bindings::structs::${style_struct.gecko_ffi_name};
use gecko_bindings::bindings::Gecko_Construct_Default_${style_struct.gecko_ffi_name};
use gecko_bindings::bindings::Gecko_CopyConstruct_${style_struct.gecko_ffi_name};
use gecko_bindings::bindings::Gecko_Destroy_${style_struct.gecko_ffi_name};
% endfor
use gecko_bindings::bindings::Gecko_CopyCounterStyle;
use gecko_bindings::bindings::Gecko_CopyCursorArrayFrom;
use gecko_bindings::bindings::Gecko_CopyFontFamilyFrom;
use gecko_bindings::bindings::Gecko_CopyImageValueFrom;
use gecko_bindings::bindings::Gecko_CopyListStyleImageFrom;
use gecko_bindings::bindings::Gecko_EnsureImageLayersLength;
use gecko_bindings::bindings::Gecko_FontFamilyList_AppendGeneric;
use gecko_bindings::bindings::Gecko_FontFamilyList_AppendNamed;
use gecko_bindings::bindings::Gecko_FontFamilyList_Clear;
use gecko_bindings::bindings::Gecko_SetCursorArrayLength;
use gecko_bindings::bindings::Gecko_SetCursorImageValue;
use gecko_bindings::bindings::Gecko_StyleTransition_SetUnsupportedProperty;
use gecko_bindings::bindings::Gecko_NewCSSShadowArray;
use gecko_bindings::bindings::Gecko_nsStyleFont_SetLang;
use gecko_bindings::bindings::Gecko_nsStyleFont_CopyLangFrom;
use gecko_bindings::bindings::Gecko_SetListStyleImageNone;
use gecko_bindings::bindings::Gecko_SetListStyleImageImageValue;
use gecko_bindings::bindings::Gecko_SetNullImageValue;
use gecko_bindings::bindings::{Gecko_ResetFilters, Gecko_CopyFiltersFrom};
use gecko_bindings::bindings::RawGeckoPresContextBorrowed;
use gecko_bindings::structs;
use gecko_bindings::structs::nsCSSPropertyID;
use gecko_bindings::structs::mozilla::CSSPseudoElementType;
use gecko_bindings::structs::mozilla::CSSPseudoElementType_InheritingAnonBox;
use gecko_bindings::structs::root::NS_STYLE_CONTEXT_TYPE_SHIFT;
use gecko_bindings::sugar::ns_style_coord::{CoordDataValue, CoordData, CoordDataMut};
use gecko::values::convert_nscolor_to_rgba;
use gecko::values::convert_rgba_to_nscolor;
use gecko::values::GeckoStyleCoordConvertible;
use gecko::values::round_border_to_device_pixels;
use logical_geometry::WritingMode;
use media_queries::Device;
use properties::animated_properties::TransitionProperty;
use properties::computed_value_flags::*;
use properties::{default_font_size_keyword, longhands, FontComputationData, Importance, LonghandId};
use properties::{PropertyDeclaration, PropertyDeclarationBlock, PropertyDeclarationId};
use rule_tree::StrongRuleNode;
use selector_parser::PseudoElement;
use servo_arc::{Arc, RawOffsetArc};
use std::mem::{forget, uninitialized, transmute, zeroed};
use std::{cmp, ops, ptr};
use values::{self, Auto, CustomIdent, Either, KeyframesName, None_};
use values::computed::{NonNegativeLength, ToComputedValue, Percentage};
use values::computed::effects::{BoxShadow, Filter, SimpleShadow};
use computed_values::border_style;
/// Re-export each mako-generated per-struct wrapper under the short name the
/// rest of the style system expects (e.g. `style_structs::Font`).
pub mod style_structs {
    % for style_struct in data.style_structs:
    pub use super::${style_struct.gecko_struct_name} as ${style_struct.name};
    % endfor
}
/// FIXME(emilio): This is completely duplicated with the other properties code.
/// The inner, FFI-shared computed-style data.
pub type ComputedValuesInner = ::gecko_bindings::structs::ServoComputedData;
/// `repr(C)` newtype over Gecko's `ServoStyleContext`, so it can be passed
/// across the FFI boundary by reference.
#[derive(Debug)]
#[repr(C)]
pub struct ComputedValues(::gecko_bindings::structs::mozilla::ServoStyleContext);
impl ComputedValues {
    /// Assemble the already-computed per-struct values (plus bookkeeping)
    /// into a Gecko-allocated style context for `pseudo` or the element.
    pub fn new(
        device: &Device,
        parent: Option<<&ComputedValues>,
        pseudo: Option<<&PseudoElement>,
        custom_properties: Option<Arc<CustomPropertiesMap>>,
        writing_mode: WritingMode,
        font_size_keyword: FontComputationData,
        flags: ComputedValueFlags,
        rules: Option<StrongRuleNode>,
        visited_style: Option<Arc<ComputedValues>>,
        % for style_struct in data.style_structs:
        ${style_struct.ident}: Arc<style_structs::${style_struct.name}>,
        % endfor
    ) -> Arc<Self> {
        ComputedValuesInner::new(
            custom_properties,
            writing_mode,
            font_size_keyword,
            flags,
            rules,
            visited_style,
            % for style_struct in data.style_structs:
            ${style_struct.ident},
            % endfor
        ).to_outer(
            device.pres_context(),
            parent,
            pseudo.map(|p| p.pseudo_info())
        )
    }
    /// Style holding the initial value of every property, unattached to any
    /// element or rule node.
    pub fn default_values(pres_context: RawGeckoPresContextBorrowed) -> Arc<Self> {
        ComputedValuesInner::new(
            /* custom_properties = */ None,
            /* writing_mode = */ WritingMode::empty(), // FIXME(bz): This seems dubious
            default_font_size_keyword(),
            ComputedValueFlags::empty(),
            /* rules = */ None,
            /* visited_style = */ None,
            % for style_struct in data.style_structs:
            style_structs::${style_struct.name}::default(pres_context),
            % endfor
        ).to_outer(pres_context, None, None)
    }
    /// The pseudo-element this style is for, parsed back from the
    /// Gecko-side pseudo tag atom. A null tag means "not a pseudo".
    pub fn pseudo(&self) -> Option<PseudoElement> {
        use string_cache::Atom;
        let atom = (self.0)._base.mPseudoTag.raw::<structs::nsIAtom>();
        if atom.is_null() {
            return None;
        }
        let atom = Atom::from(atom);
        PseudoElement::from_atom(&atom)
    }
    // The pseudo type is stored in the high bits of the context's mBits.
    fn get_pseudo_type(&self) -> CSSPseudoElementType {
        let bits = (self.0)._base.mBits;
        let our_type = bits >> NS_STYLE_CONTEXT_TYPE_SHIFT;
        unsafe { transmute(our_type as u8) }
    }
    /// Whether this style belongs to an anonymous box (either kind).
    pub fn is_anon_box(&self) -> bool {
        let our_type = self.get_pseudo_type();
        return our_type == CSSPseudoElementType_InheritingAnonBox ||
               our_type == CSSPseudoElementType::NonInheritingAnonBox;
    }
}
impl Drop for ComputedValues {
    fn drop(&mut self) {
        // The context owns C++-side resources, so destruction is delegated
        // to Gecko over FFI.
        unsafe {
            bindings::Gecko_ServoStyleContext_Destroy(&mut self.0);
        }
    }
}
// NOTE(review): Sync/Send are asserted here, not derived; they rely on
// Gecko-side guarantees about `ServoStyleContext` -- confirm before relying
// on them in new code.
unsafe impl Sync for ComputedValues {}
unsafe impl Send for ComputedValues {}
impl Clone for ComputedValues {
    // Outer style contexts are shared via Arc and never cloned by value;
    // reaching this is a bug.
    fn clone(&self) -> Self {
        unreachable!()
    }
}
impl Clone for ComputedValuesInner {
    // Field-by-field clone: the per-struct fields are mako-generated, and
    // the type itself is a Gecko FFI struct, so Clone can't be derived.
    fn clone(&self) -> Self {
        ComputedValuesInner {
            % for style_struct in data.style_structs:
            ${style_struct.gecko_name}: self.${style_struct.gecko_name}.clone(),
            % endfor
            custom_properties: self.custom_properties.clone(),
            writing_mode: self.writing_mode.clone(),
            font_computation_data: self.font_computation_data.clone(),
            flags: self.flags.clone(),
            rules: self.rules.clone(),
            visited_style: self.visited_style.clone(),
        }
    }
}
/// (pseudo tag atom, pseudo type) pair handed to Gecko on context creation.
type PseudoInfo = (*mut structs::nsIAtom, structs::CSSPseudoElementType);
/// The parent style context, if any, borrowed for the FFI call.
type ParentStyleContextInfo<'a> = Option< &'a ComputedValues>;
impl ComputedValuesInner {
    /// Bundle the per-struct style values and bookkeeping into the inner
    /// (Gecko `ServoComputedData`) representation.
    pub fn new(custom_properties: Option<Arc<CustomPropertiesMap>>,
               writing_mode: WritingMode,
               font_size_keyword: FontComputationData,
               flags: ComputedValueFlags,
               rules: Option<StrongRuleNode>,
               visited_style: Option<Arc<ComputedValues>>,
               % for style_struct in data.style_structs:
               ${style_struct.ident}: Arc<style_structs::${style_struct.name}>,
               % endfor
    ) -> Self {
        ComputedValuesInner {
            custom_properties: custom_properties,
            writing_mode: writing_mode,
            font_computation_data: font_size_keyword,
            rules: rules,
            // Arcs are stored as RawOffsetArc (see servo_arc) in this struct.
            visited_style: visited_style.map(|x| Arc::into_raw_offset(x)),
            flags: flags,
            % for style_struct in data.style_structs:
            ${style_struct.gecko_name}: Arc::into_raw_offset(${style_struct.ident}),
            % endfor
        }
    }
    /// Move `self` into a Gecko-allocated `ServoStyleContext`, resolving the
    /// optional pseudo info to a (tag, type) pair first.
    fn to_outer(
        self,
        pres_context: RawGeckoPresContextBorrowed,
        parent: ParentStyleContextInfo,
        info: Option<PseudoInfo>
    ) -> Arc<ComputedValues> {
        let (tag, ty) = if let Some(info) = info {
            info
        } else {
            (ptr::null_mut(), structs::CSSPseudoElementType::NotPseudo)
        };
        unsafe { self.to_outer_helper(pres_context, parent, ty, tag) }
    }
    unsafe fn to_outer_helper(
        self,
        pres_context: bindings::RawGeckoPresContextBorrowed,
        parent: ParentStyleContextInfo,
        pseudo_ty: structs::CSSPseudoElementType,
        pseudo_tag: *mut structs::nsIAtom
    ) -> Arc<ComputedValues> {
        let arc = unsafe {
            // The Arc payload starts uninitialized; the C++ init call below
            // fills it in from `self`.
            let arc: Arc<ComputedValues> = Arc::new(uninitialized());
            bindings::Gecko_ServoStyleContext_Init(&arc.0 as *const _ as *mut _,
                                                   parent, pres_context,
                                                   &self, pseudo_ty, pseudo_tag);
            // We're simulating a move by having C++ do a memcpy and then forgetting
            // it on this end.
            forget(self);
            arc
        };
        arc
    }
}
impl ops::Deref for ComputedValues {
    type Target = ComputedValuesInner;
    // The inner data is embedded in the Gecko context as `mSource`.
    fn deref(&self) -> &ComputedValuesInner {
        &self.0.mSource
    }
}
impl ops::DerefMut for ComputedValues {
    fn deref_mut(&mut self) -> &mut ComputedValuesInner {
        &mut self.0.mSource
    }
}
impl ComputedValuesInner {
    /// Whether we're a visited style.
    pub fn is_style_if_visited(&self) -> bool {
        self.flags.contains(IS_STYLE_IF_VISITED)
    }
    #[inline]
    pub fn is_display_contents(&self) -> bool {
        self.get_box().clone_display() == longhands::display::computed_value::T::contents
    }
    /// Returns true if the value of the `content` property would make a
    /// pseudo-element not rendered.
    #[inline]
    pub fn ineffective_content_property(&self) -> bool {
        self.get_counters().ineffective_content_property()
    }
    ## Per-struct accessors: clone_*/get_*/*_arc/mutate_* for each style
    ## struct (Font, Box, ...), expanded by mako.
    % for style_struct in data.style_structs:
    #[inline]
    pub fn clone_${style_struct.name_lower}(&self) -> Arc<style_structs::${style_struct.name}> {
        Arc::from_raw_offset(self.${style_struct.gecko_name}.clone())
    }
    #[inline]
    pub fn get_${style_struct.name_lower}(&self) -> &style_structs::${style_struct.name} {
        &self.${style_struct.gecko_name}
    }
    pub fn ${style_struct.name_lower}_arc(&self) -> &RawOffsetArc<style_structs::${style_struct.name}> {
        &self.${style_struct.gecko_name}
    }
    #[inline]
    pub fn mutate_${style_struct.name_lower}(&mut self) -> &mut style_structs::${style_struct.name} {
        RawOffsetArc::make_mut(&mut self.${style_struct.gecko_name})
    }
    % endfor
    /// Gets a reference to the rule node. Panic if no rule node exists.
    pub fn rules(&self) -> &StrongRuleNode {
        self.rules.as_ref().unwrap()
    }
    /// Whether there is a visited style.
    pub fn has_visited_style(&self) -> bool {
        self.visited_style.is_some()
    }
    /// Gets a reference to the visited style, if any.
    pub fn get_visited_style(&self) -> Option< & ComputedValues> {
        self.visited_style.as_ref().map(|x| &**x)
    }
    /// Gets the raw visited style. Useful for memory reporting.
    pub fn get_raw_visited_style(&self) -> &Option<RawOffsetArc<ComputedValues>> {
        &self.visited_style
    }
    /// Gets a reference to the visited style. Panic if no visited style exists.
    pub fn visited_style(&self) -> &ComputedValues {
        self.get_visited_style().unwrap()
    }
    /// Clone the visited style. Used for inheriting parent styles in
    /// StyleBuilder::for_inheritance.
    pub fn clone_visited_style(&self) -> Option<Arc<ComputedValues>> {
        self.visited_style.as_ref().map(|x| x.clone_arc())
    }
    /// Gets a reference to the custom properties map (if one exists).
    pub fn get_custom_properties(&self) -> Option<<&::custom_properties::CustomPropertiesMap> {
        self.custom_properties.as_ref().map(|x| &**x)
    }
    pub fn custom_properties(&self) -> Option<Arc<CustomPropertiesMap>> {
        self.custom_properties.clone()
    }
    #[allow(non_snake_case)]
    pub fn has_moz_binding(&self) -> bool {
        !self.get_box().gecko.mBinding.mPtr.mRawPtr.is_null()
    }
    // FIXME(bholley): Implement this properly.
    #[inline]
    pub fn is_multicol(&self) -> bool { false }
    /// Turn one animatable computed property back into a single-declaration
    /// block; custom and non-animatable properties are unimplemented.
    pub fn to_declaration_block(&self, property: PropertyDeclarationId) -> PropertyDeclarationBlock {
        let value = match property {
            % for prop in data.longhands:
            % if prop.animatable:
            PropertyDeclarationId::Longhand(LonghandId::${prop.camel_case}) => {
                PropertyDeclaration::${prop.camel_case}(
                    % if prop.boxed:
                    Box::new(
                    % endif
                    longhands::${prop.ident}::SpecifiedValue::from_computed_value(
                      &self.get_${prop.style_struct.ident.strip("_")}().clone_${prop.ident}())
                    % if prop.boxed:
                    )
                    % endif
                )
            },
            % endif
            % endfor
            PropertyDeclarationId::Custom(_name) => unimplemented!(),
            _ => unimplemented!()
        };
        PropertyDeclarationBlock::with_one(value, Importance::Normal)
    }
}
## Emits the Rust-side alias for one Gecko style-struct wrapper plus raw
## accessors to the underlying FFI struct.
<%def name="declare_style_struct(style_struct)">
pub use ::gecko_bindings::structs::mozilla::Gecko${style_struct.gecko_name} as ${style_struct.gecko_struct_name};
impl ${style_struct.gecko_struct_name} {
    pub fn gecko(&self) -> &${style_struct.gecko_ffi_name} {
        &self.gecko
    }
    pub fn gecko_mut(&mut self) -> &mut ${style_struct.gecko_ffi_name} {
        &mut self.gecko
    }
}
</%def>
## set_<ident>: store a computed value into the FFI field via From/Into.
<%def name="impl_simple_setter(ident, gecko_ffi_name)">
    #[allow(non_snake_case)]
    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        ${set_gecko_property(gecko_ffi_name, "From::from(v)")}
    }
</%def>
## clone_<ident>: read the FFI field back as the computed value type.
<%def name="impl_simple_clone(ident, gecko_ffi_name)">
    #[allow(non_snake_case)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        From::from(self.gecko.${gecko_ffi_name})
    }
</%def>
## copy_/reset_<ident>: plain assignment copy of the FFI field from another
## struct, with an optional post-set hook.
## NOTE(review): the signature uses `*kwargs` (positional catch-all), not
## `**kwargs`; extra keyword arguments from callers would fail -- confirm
## intended.
<%def name="impl_simple_copy(ident, gecko_ffi_name, on_set=None, *kwargs)">
    #[allow(non_snake_case)]
    pub fn copy_${ident}_from(&mut self, other: &Self) {
        self.gecko.${gecko_ffi_name} = other.gecko.${gecko_ffi_name};
        % if on_set:
        self.${on_set}();
        % endif
    }
    #[allow(non_snake_case)]
    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }
</%def>
## Same idea for fields copied through their own copy_from helper.
<%def name="impl_coord_copy(ident, gecko_ffi_name)">
    #[allow(non_snake_case)]
    pub fn copy_${ident}_from(&mut self, other: &Self) {
        self.gecko.${gecko_ffi_name}.copy_from(&other.gecko.${gecko_ffi_name});
    }
    #[allow(non_snake_case)]
    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }
</%def>
<%!
def get_gecko_property(ffi_name, self_param = "self"):
    # mBorderColor lives inside an anonymous union, so reads must go through
    # the bindgen-generated __bindgen_anon_1 accessor (which is unsafe).
    if "mBorderColor" in ffi_name:
        return ffi_name.replace("mBorderColor",
                                "unsafe { *%s.gecko.__bindgen_anon_1.mBorderColor.as_ref() }"
                                % self_param)
    return "%s.gecko.%s" % (self_param, ffi_name)
def set_gecko_property(ffi_name, expr):
    # Writes to the union field likewise need the unsafe mutable accessor.
    if "mBorderColor" in ffi_name:
        ffi_name = ffi_name.replace("mBorderColor",
                                    "*self.gecko.__bindgen_anon_1.mBorderColor.as_mut()")
        return "unsafe { %s = %s };" % (ffi_name, expr)
    return "self.gecko.%s = %s;" % (ffi_name, expr)
%>
## set_<ident> for keyword properties: map each Servo keyword variant onto
## the matching Gecko constant, optionally casting to the field's type.
<%def name="impl_keyword_setter(ident, gecko_ffi_name, keyword, cast_type='u8', on_set=None)">
    #[allow(non_snake_case)]
    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        use properties::longhands::${ident}::computed_value::T as Keyword;
        // FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts
        let result = match v {
            % for value in keyword.values_for('gecko'):
                Keyword::${to_rust_ident(value)} =>
                    structs::${keyword.gecko_constant(value)} ${keyword.maybe_cast(cast_type)},
            % endfor
        };
        ${set_gecko_property(gecko_ffi_name, "result")}
        % if on_set:
        self.${on_set}();
        % endif
    }
</%def>
## clone_<ident> for keyword properties: the reverse mapping, matching the
## Gecko constant back to the Servo keyword.
<%def name="impl_keyword_clone(ident, gecko_ffi_name, keyword, cast_type='u8')">
    // FIXME: We introduced non_upper_case_globals for -moz-appearance only
    // since the prefix of Gecko value starts with ThemeWidgetType_NS_THEME.
    // We should remove this after fix bug 1371809.
    #[allow(non_snake_case, non_upper_case_globals)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        use properties::longhands::${ident}::computed_value::T as Keyword;
        // FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts
        // Some constant macros in the gecko are defined as negative integer(e.g. font-stretch).
        // And they are convert to signed integer in Rust bindings. We need to cast then
        // as signed type when we have both signed/unsigned integer in order to use them
        // as match's arms.
        // Also, to use same implementation here we use casted constant if we have only singed values.
        % if keyword.gecko_enum_prefix is None:
        % for value in keyword.values_for('gecko'):
        const ${keyword.casted_constant_name(value, cast_type)} : ${cast_type} =
            structs::${keyword.gecko_constant(value)} as ${cast_type};
        % endfor
        match ${get_gecko_property(gecko_ffi_name)} as ${cast_type} {
            % for value in keyword.values_for('gecko'):
            ${keyword.casted_constant_name(value, cast_type)} => Keyword::${to_rust_ident(value)},
            % endfor
            % if keyword.gecko_inexhaustive:
            x => panic!("Found unexpected value in style struct for ${ident} property: {:?}", x),
            % endif
        }
        % else:
        match ${get_gecko_property(gecko_ffi_name)} {
            % for value in keyword.values_for('gecko'):
            structs::${keyword.gecko_constant(value)} => Keyword::${to_rust_ident(value)},
            % endfor
            % if keyword.gecko_inexhaustive:
            x => panic!("Found unexpected value in style struct for ${ident} property: {:?}", x),
            % endif
        }
        % endif
    }
</%def>
<%def name="impl_color_setter(ident, gecko_ffi_name)">
#[allow(unreachable_code)]
#[allow(non_snake_case)]
pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
${set_gecko_property(gecko_ffi_name, "v.into()")}
}
</%def>
<%def name="impl_color_copy(ident, gecko_ffi_name)">
    ## Generates copy/reset for a color-valued longhand: reads the raw Gecko
    ## value out of `other` and stores it into `self`. `reset_` is the same
    ## operation (resetting to the parent/initial struct is just a copy).
    #[allow(non_snake_case)]
    pub fn copy_${ident}_from(&mut self, other: &Self) {
        let color = ${get_gecko_property(gecko_ffi_name, self_param = "other")};
        ${set_gecko_property(gecko_ffi_name, "color")};
    }

    #[allow(non_snake_case)]
    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }
</%def>
<%def name="impl_color_clone(ident, gecko_ffi_name)">
    ## Generates the clone accessor for a color-valued longhand; converts the
    ## stored Gecko value back to the computed value via `Into`.
    #[allow(non_snake_case)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        ${get_gecko_property(gecko_ffi_name)}.into()
    }
</%def>
<%def name="impl_keyword(ident, gecko_ffi_name, keyword, cast_type='u8', **kwargs)">
    ## Convenience wrapper: emits the full accessor set (set/copy/reset/clone)
    ## for a keyword-valued longhand by composing the individual helpers.
    <%call expr="impl_keyword_setter(ident, gecko_ffi_name, keyword, cast_type, **kwargs)"></%call>
    <%call expr="impl_simple_copy(ident, gecko_ffi_name, **kwargs)"></%call>
    <%call expr="impl_keyword_clone(ident, gecko_ffi_name, keyword, cast_type)"></%call>
</%def>
<%def name="impl_simple(ident, gecko_ffi_name)">
    ## Convenience wrapper: emits the full accessor set for a longhand whose
    ## computed value is stored directly in a plain Gecko field.
    <%call expr="impl_simple_setter(ident, gecko_ffi_name)"></%call>
    <%call expr="impl_simple_copy(ident, gecko_ffi_name)"></%call>
    <%call expr="impl_simple_clone(ident, gecko_ffi_name)"></%call>
</%def>
<%def name="impl_absolute_length(ident, gecko_ffi_name)">
    ## Accessors for a property stored as an absolute length in app units
    ## (an i32 on the Gecko side, converted through `Au`).
    #[allow(non_snake_case)]
    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        ${set_gecko_property(gecko_ffi_name, "v.to_i32_au()")}
    }

    <%call expr="impl_simple_copy(ident, gecko_ffi_name)"></%call>

    #[allow(non_snake_case)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        Au(self.gecko.${gecko_ffi_name}).into()
    }
</%def>
<%def name="impl_position(ident, gecko_ffi_name)">
    ## Accessors for a position-valued property stored in Gecko as a struct
    ## with mXPosition/mYPosition halves, mapped to horizontal/vertical.
    #[allow(non_snake_case)]
    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        ${set_gecko_property("%s.mXPosition" % gecko_ffi_name, "v.horizontal.into()")}
        ${set_gecko_property("%s.mYPosition" % gecko_ffi_name, "v.vertical.into()")}
    }

    <%call expr="impl_simple_copy(ident, gecko_ffi_name)"></%call>

    #[allow(non_snake_case)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        longhands::${ident}::computed_value::T {
            horizontal: self.gecko.${gecko_ffi_name}.mXPosition.into(),
            vertical: self.gecko.${gecko_ffi_name}.mYPosition.into(),
        }
    }
</%def>
<%def name="impl_color(ident, gecko_ffi_name)">
    ## Convenience wrapper: full accessor set for a color-valued longhand.
    <%call expr="impl_color_setter(ident, gecko_ffi_name)"></%call>
    <%call expr="impl_color_copy(ident, gecko_ffi_name)"></%call>
    <%call expr="impl_color_clone(ident, gecko_ffi_name)"></%call>
</%def>
<%def name="impl_rgba_color(ident, gecko_ffi_name)">
    ## Accessors for a property stored as a raw nscolor; converted to/from the
    ## computed RGBA value with the convert_* helpers.
    #[allow(non_snake_case)]
    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        ${set_gecko_property(gecko_ffi_name, "convert_rgba_to_nscolor(&v)")}
    }

    <%call expr="impl_simple_copy(ident, gecko_ffi_name)"></%call>

    #[allow(non_snake_case)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        convert_nscolor_to_rgba(${get_gecko_property(gecko_ffi_name)})
    }
</%def>
<%def name="impl_svg_length(ident, gecko_ffi_name)">
    ## Accessors for SVG length properties that may take `context-value`.
    // When context-value is used on an SVG length, the corresponding flag is
    // set on mContextFlags, and the length field is set to the initial value.

    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        use values::generics::svg::{SVGLength, SvgLengthOrPercentageOrNumber};
        use gecko_bindings::structs::nsStyleSVG_${ident.upper()}_CONTEXT as CONTEXT_VALUE;
        let length = match v {
            SVGLength::Length(length) => {
                self.gecko.mContextFlags &= !CONTEXT_VALUE;
                length
            }
            SVGLength::ContextValue => {
                self.gecko.mContextFlags |= CONTEXT_VALUE;
                // Store the initial value in the length field so it still
                // holds something sensible while the flag is set.
                match longhands::${ident}::get_initial_value() {
                    SVGLength::Length(length) => length,
                    _ => unreachable!("Initial value should not be context-value"),
                }
            }
        };
        match length {
            SvgLengthOrPercentageOrNumber::LengthOrPercentage(lop) =>
                self.gecko.${gecko_ffi_name}.set(lop),
            SvgLengthOrPercentageOrNumber::Number(num) =>
                self.gecko.${gecko_ffi_name}.set_value(CoordDataValue::Factor(num.into())),
        }
    }

    pub fn copy_${ident}_from(&mut self, other: &Self) {
        use gecko_bindings::structs::nsStyleSVG_${ident.upper()}_CONTEXT as CONTEXT_VALUE;
        self.gecko.${gecko_ffi_name}.copy_from(&other.gecko.${gecko_ffi_name});
        // Copy only this property's context bit; mContextFlags is shared with
        // other properties, so the remaining bits must be preserved.
        self.gecko.mContextFlags =
            (self.gecko.mContextFlags & !CONTEXT_VALUE) |
            (other.gecko.mContextFlags & CONTEXT_VALUE);
    }

    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }

    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        use values::generics::svg::{SVGLength, SvgLengthOrPercentageOrNumber};
        use values::computed::LengthOrPercentage;
        use gecko_bindings::structs::nsStyleSVG_${ident.upper()}_CONTEXT as CONTEXT_VALUE;
        // The context bit wins over whatever is stored in the length field.
        if (self.gecko.mContextFlags & CONTEXT_VALUE) != 0 {
            return SVGLength::ContextValue;
        }
        let length = match self.gecko.${gecko_ffi_name}.as_value() {
            CoordDataValue::Factor(number) =>
                SvgLengthOrPercentageOrNumber::Number(number),
            CoordDataValue::Coord(coord) =>
                SvgLengthOrPercentageOrNumber::LengthOrPercentage(
                    LengthOrPercentage::Length(Au(coord).into())),
            CoordDataValue::Percent(p) =>
                SvgLengthOrPercentageOrNumber::LengthOrPercentage(
                    LengthOrPercentage::Percentage(Percentage(p))),
            CoordDataValue::Calc(calc) =>
                SvgLengthOrPercentageOrNumber::LengthOrPercentage(
                    LengthOrPercentage::Calc(calc.into())),
            _ => unreachable!("Unexpected coordinate {:?} in ${ident}",
                              self.gecko.${gecko_ffi_name}.as_value()),
        };
        SVGLength::Length(length.into())
    }
</%def>
<%def name="impl_svg_opacity(ident, gecko_ffi_name)">
    <% source_prefix = ident.split("_")[0].upper() + "_OPACITY_SOURCE" %>
    ## Accessors for SVG opacity properties. The opacity "source" (a literal
    ## value vs. context-fill-opacity / context-stroke-opacity) is packed into
    ## mContextFlags via per-property MASK/SHIFT constants; the float field
    ## holds the numeric opacity (forced to 1.0 when a context source is set).

    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        use gecko_bindings::structs::nsStyleSVG_${source_prefix}_MASK as MASK;
        use gecko_bindings::structs::nsStyleSVG_${source_prefix}_SHIFT as SHIFT;
        use gecko_bindings::structs::nsStyleSVGOpacitySource::*;
        use values::generics::svg::SVGOpacity;
        // Clear this property's bits before OR-ing the new source in.
        self.gecko.mContextFlags &= !MASK;
        match v {
            SVGOpacity::Opacity(opacity) => {
                self.gecko.mContextFlags |=
                    (eStyleSVGOpacitySource_Normal as u8) << SHIFT;
                self.gecko.${gecko_ffi_name} = opacity;
            }
            SVGOpacity::ContextFillOpacity => {
                self.gecko.mContextFlags |=
                    (eStyleSVGOpacitySource_ContextFillOpacity as u8) << SHIFT;
                self.gecko.${gecko_ffi_name} = 1.;
            }
            SVGOpacity::ContextStrokeOpacity => {
                self.gecko.mContextFlags |=
                    (eStyleSVGOpacitySource_ContextStrokeOpacity as u8) << SHIFT;
                self.gecko.${gecko_ffi_name} = 1.;
            }
        }
    }

    pub fn copy_${ident}_from(&mut self, other: &Self) {
        use gecko_bindings::structs::nsStyleSVG_${source_prefix}_MASK as MASK;
        self.gecko.${gecko_ffi_name} = other.gecko.${gecko_ffi_name};
        // Preserve the bits of mContextFlags that belong to other properties.
        self.gecko.mContextFlags =
            (self.gecko.mContextFlags & !MASK) |
            (other.gecko.mContextFlags & MASK);
    }

    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }

    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        use gecko_bindings::structs::nsStyleSVG_${source_prefix}_MASK as MASK;
        use gecko_bindings::structs::nsStyleSVG_${source_prefix}_SHIFT as SHIFT;
        use gecko_bindings::structs::nsStyleSVGOpacitySource::*;
        use values::generics::svg::SVGOpacity;

        let source = (self.gecko.mContextFlags & MASK) >> SHIFT;
        if source == eStyleSVGOpacitySource_Normal as u8 {
            return SVGOpacity::Opacity(self.gecko.${gecko_ffi_name});
        } else {
            // A context source always stores 1.0 in the float field (see the
            // setter above), so the stored value carries no information here.
            debug_assert_eq!(self.gecko.${gecko_ffi_name}, 1.0);
            if source == eStyleSVGOpacitySource_ContextFillOpacity as u8 {
                SVGOpacity::ContextFillOpacity
            } else {
                debug_assert_eq!(source, eStyleSVGOpacitySource_ContextStrokeOpacity as u8);
                SVGOpacity::ContextStrokeOpacity
            }
        }
    }
</%def>
<%def name="impl_svg_paint(ident, gecko_ffi_name)">
    ## Accessors for SVG paint values: a paint kind (none / color /
    ## paint-server URL / context-fill / context-stroke) plus an optional
    ## fallback, mirrored to and from Gecko's nsStyleSVGPaint.
    #[allow(non_snake_case)]
    pub fn set_${ident}(&mut self, mut v: longhands::${ident}::computed_value::T) {
        use values::generics::svg::SVGPaintKind;
        use self::structs::nsStyleSVGPaintType;
        use self::structs::nsStyleSVGFallbackType;
        let ref mut paint = ${get_gecko_property(gecko_ffi_name)};
        unsafe {
            bindings::Gecko_nsStyleSVGPaint_Reset(paint);
        }
        let fallback = v.fallback.take();
        match v.kind {
            // NOTE(review): the early return means a reset paint is taken to
            // already represent `none`, and any fallback is intentionally
            // dropped for this case.
            SVGPaintKind::None => return,
            SVGPaintKind::ContextFill => {
                paint.mType = nsStyleSVGPaintType::eStyleSVGPaintType_ContextFill;
            }
            SVGPaintKind::ContextStroke => {
                paint.mType = nsStyleSVGPaintType::eStyleSVGPaintType_ContextStroke;
            }
            SVGPaintKind::PaintServer(url) => {
                unsafe {
                    bindings::Gecko_nsStyleSVGPaint_SetURLValue(paint, url.for_ffi());
                }
            }
            SVGPaintKind::Color(color) => {
                paint.mType = nsStyleSVGPaintType::eStyleSVGPaintType_Color;
                unsafe {
                    *paint.mPaint.mColor.as_mut() = convert_rgba_to_nscolor(&color);
                }
            }
        }

        paint.mFallbackType = match fallback {
            Some(Either::First(color)) => {
                paint.mFallbackColor = convert_rgba_to_nscolor(&color);
                nsStyleSVGFallbackType::eStyleSVGFallbackType_Color
            },
            Some(Either::Second(_)) => {
                nsStyleSVGFallbackType::eStyleSVGFallbackType_None
            },
            None => nsStyleSVGFallbackType::eStyleSVGFallbackType_NotSet
        };
    }

    #[allow(non_snake_case)]
    pub fn copy_${ident}_from(&mut self, other: &Self) {
        // Deep copy via FFI so Gecko-owned members (e.g. URL refs) are
        // duplicated correctly.
        unsafe {
            bindings::Gecko_nsStyleSVGPaint_CopyFrom(
                &mut ${get_gecko_property(gecko_ffi_name)},
                & ${get_gecko_property(gecko_ffi_name, "other")}
            );
        }
    }

    #[allow(non_snake_case)]
    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }

    #[allow(non_snake_case)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        use values::generics::svg::{SVGPaint, SVGPaintKind};
        use values::specified::url::SpecifiedUrl;
        use self::structs::nsStyleSVGPaintType;
        use self::structs::nsStyleSVGFallbackType;
        let ref paint = ${get_gecko_property(gecko_ffi_name)};

        let fallback = match paint.mFallbackType {
            nsStyleSVGFallbackType::eStyleSVGFallbackType_Color => {
                Some(Either::First(convert_nscolor_to_rgba(paint.mFallbackColor)))
            },
            nsStyleSVGFallbackType::eStyleSVGFallbackType_None => {
                Some(Either::Second(None_))
            },
            nsStyleSVGFallbackType::eStyleSVGFallbackType_NotSet => None,
        };

        let kind = match paint.mType {
            nsStyleSVGPaintType::eStyleSVGPaintType_None => SVGPaintKind::None,
            nsStyleSVGPaintType::eStyleSVGPaintType_ContextFill => SVGPaintKind::ContextFill,
            nsStyleSVGPaintType::eStyleSVGPaintType_ContextStroke => SVGPaintKind::ContextStroke,
            nsStyleSVGPaintType::eStyleSVGPaintType_Server => {
                unsafe {
                    SVGPaintKind::PaintServer(
                        SpecifiedUrl::from_url_value_data(
                            &(**paint.mPaint.mPaintServer.as_ref())._base
                        ).unwrap()
                    )
                }
            }
            nsStyleSVGPaintType::eStyleSVGPaintType_Color => {
                unsafe { SVGPaintKind::Color(convert_nscolor_to_rgba(*paint.mPaint.mColor.as_ref())) }
            }
        };
        SVGPaint {
            kind: kind,
            fallback: fallback,
        }
    }
</%def>
<%def name="impl_non_negative_length(ident, gecko_ffi_name, inherit_from=None,
                                     round_to_pixels=False)">
    ## Accessors for a non-negative length stored in app units. When
    ## `round_to_pixels` is set the value is snapped to whole device pixels;
    ## when `inherit_from` is given the value is also mirrored into a second
    ## "specified" field on the Gecko struct.
    #[allow(non_snake_case)]
    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        let value = {
            % if round_to_pixels:
            let au_per_device_px = Au(self.gecko.mTwipsPerPixel);
            round_border_to_device_pixels(Au::from(v), au_per_device_px).0
            % else:
            v.0.to_i32_au()
            % endif
        };

        % if inherit_from:
        self.gecko.${inherit_from} = value;
        % endif
        self.gecko.${gecko_ffi_name} = value;
    }

    #[allow(non_snake_case)]
    pub fn copy_${ident}_from(&mut self, other: &Self) {
        % if inherit_from:
        self.gecko.${inherit_from} = other.gecko.${inherit_from};
        // NOTE: This is needed to easily handle the `unset` and `initial`
        // keywords, which are implemented calling this function.
        //
        // In practice, this means that we may have an incorrect value here, but
        // we'll adjust that properly in the style fixup phase.
        //
        // FIXME(emilio): We could clean this up a bit special-casing the reset_
        // function below.
        self.gecko.${gecko_ffi_name} = other.gecko.${inherit_from};
        % else:
        self.gecko.${gecko_ffi_name} = other.gecko.${gecko_ffi_name};
        % endif
    }

    #[allow(non_snake_case)]
    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }

    #[allow(non_snake_case)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        Au(self.gecko.${gecko_ffi_name}).into()
    }
</%def>
<%def name="impl_split_style_coord(ident, gecko_ffi_name, index)">
    ## Accessors for one entry (at `index`) of a multi-coord Gecko field
    ## (e.g. one side of an nsStyleSides), via the style-coord conversion
    ## traits.
    #[allow(non_snake_case)]
    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        v.to_gecko_style_coord(&mut self.gecko.${gecko_ffi_name}.data_at_mut(${index}));
    }

    #[allow(non_snake_case)]
    pub fn copy_${ident}_from(&mut self, other: &Self) {
        self.gecko.${gecko_ffi_name}.data_at_mut(${index}).copy_from(&other.gecko.${gecko_ffi_name}.data_at(${index}));
    }

    #[allow(non_snake_case)]
    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }

    #[allow(non_snake_case)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        use properties::longhands::${ident}::computed_value::T;
        T::from_gecko_style_coord(&self.gecko.${gecko_ffi_name}.data_at(${index}))
            .expect("clone for ${ident} failed")
    }
</%def>
<%def name="impl_style_coord(ident, gecko_ffi_name)">
    ## Accessors for a property stored in a single Gecko style coord, via the
    ## GeckoStyleCoordConvertible conversion traits.
    #[allow(non_snake_case)]
    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        v.to_gecko_style_coord(&mut self.gecko.${gecko_ffi_name});
    }

    #[allow(non_snake_case)]
    pub fn copy_${ident}_from(&mut self, other: &Self) {
        self.gecko.${gecko_ffi_name}.copy_from(&other.gecko.${gecko_ffi_name});
    }

    #[allow(non_snake_case)]
    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }

    #[allow(non_snake_case)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        use properties::longhands::${ident}::computed_value::T;
        T::from_gecko_style_coord(&self.gecko.${gecko_ffi_name})
            .expect("clone for ${ident} failed")
    }
</%def>
<%def name="impl_style_sides(ident)">
    ## Accessors for a property stored as a four-sided Gecko rect; the field
    ## name is derived from the ident (m + CamelCase).
    <% gecko_ffi_name = "m" + to_camel_case(ident) %>

    #[allow(non_snake_case)]
    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        v.to_gecko_rect(&mut self.gecko.${gecko_ffi_name});
    }

    <%self:copy_sides_style_coord ident="${ident}"></%self:copy_sides_style_coord>

    #[allow(non_snake_case)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        longhands::${ident}::computed_value::T::from_gecko_rect(&self.gecko.${gecko_ffi_name})
            .expect("clone for ${ident} failed")
    }
</%def>
<%def name="copy_sides_style_coord(ident)">
    ## Emits copy/reset for a four-sided rect property, copying each side's
    ## coord individually. The caller's body (if any) is spliced into the end
    ## of copy_*, letting callers copy extra associated fields.
    <% gecko_ffi_name = "m" + to_camel_case(ident) %>
    #[allow(non_snake_case)]
    pub fn copy_${ident}_from(&mut self, other: &Self) {
        % for side in SIDES:
            self.gecko.${gecko_ffi_name}.data_at_mut(${side.index})
                .copy_from(&other.gecko.${gecko_ffi_name}.data_at(${side.index}));
        % endfor
        ${ caller.body() }
    }

    #[allow(non_snake_case)]
    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }
</%def>
<%def name="impl_corner_style_coord(ident, gecko_ffi_name)">
    ## Accessors for a border-corner radius: the horizontal and vertical radii
    ## live at two indices (`x_index`, `y_index`) of the same Gecko field.
    #[allow(non_snake_case)]
    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        v.0.width().to_gecko_style_coord(&mut self.gecko.${gecko_ffi_name}.data_at_mut(${x_index}));
        v.0.height().to_gecko_style_coord(&mut self.gecko.${gecko_ffi_name}.data_at_mut(${y_index}));
    }

    #[allow(non_snake_case)]
    pub fn copy_${ident}_from(&mut self, other: &Self) {
        self.gecko.${gecko_ffi_name}.data_at_mut(${x_index})
            .copy_from(&other.gecko.${gecko_ffi_name}.data_at(${x_index}));
        self.gecko.${gecko_ffi_name}.data_at_mut(${y_index})
            .copy_from(&other.gecko.${gecko_ffi_name}.data_at(${y_index}));
    }

    #[allow(non_snake_case)]
    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }

    #[allow(non_snake_case)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        use values::computed::border::BorderCornerRadius;
        let width = GeckoStyleCoordConvertible::from_gecko_style_coord(
                        &self.gecko.${gecko_ffi_name}.data_at(${x_index}))
                        .expect("Failed to clone ${ident}");
        let height = GeckoStyleCoordConvertible::from_gecko_style_coord(
                        &self.gecko.${gecko_ffi_name}.data_at(${y_index}))
                        .expect("Failed to clone ${ident}");
        BorderCornerRadius::new(width, height)
    }
</%def>
<%def name="impl_css_url(ident, gecko_ffi_name)">
    ## Accessors for url-or-none properties. The Gecko field is a refcounted
    ## URLValue pointer; `none` is represented by a null pointer.
    #[allow(non_snake_case)]
    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        use gecko_bindings::sugar::refptr::RefPtr;
        match v {
            Either::First(url) => {
                let refptr = unsafe {
                    let ptr = bindings::Gecko_NewURLValue(url.for_ffi());
                    // Allocation failure on the Gecko side: fall back to none.
                    if ptr.is_null() {
                        self.gecko.${gecko_ffi_name}.clear();
                        return;
                    }
                    RefPtr::from_addrefed(ptr)
                };
                self.gecko.${gecko_ffi_name}.set_move(refptr)
            }
            Either::Second(_none) => {
                unsafe {
                    self.gecko.${gecko_ffi_name}.clear();
                }
            }
        }
    }

    #[allow(non_snake_case)]
    pub fn copy_${ident}_from(&mut self, other: &Self) {
        unsafe {
            self.gecko.${gecko_ffi_name}.set(&other.gecko.${gecko_ffi_name});
        }
    }

    #[allow(non_snake_case)]
    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }

    #[allow(non_snake_case)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        use values::specified::url::SpecifiedUrl;
        use values::None_;

        if self.gecko.${gecko_ffi_name}.mRawPtr.is_null() {
            Either::Second(None_)
        } else {
            unsafe {
                let ref gecko_url_value = *self.gecko.${gecko_ffi_name}.mRawPtr;
                Either::First(SpecifiedUrl::from_url_value_data(&gecko_url_value._base)
                              .expect("${gecko_ffi_name} could not convert to SpecifiedUrl"))
            }
        }
    }
</%def>
<%def name="impl_logical(name, **kwargs)">
    ## Logical longhands delegate to the shared logical_setter helper.
    ${helpers.logical_setter(name)}
</%def>
<%def name="impl_style_struct(style_struct)">
    ## Construction, destruction and cloning for the Rust wrapper around one
    ## Gecko style struct. All three delegate to Gecko FFI helpers so that
    ## Gecko-managed members are initialized, copied and freed correctly.
impl ${style_struct.gecko_struct_name} {
    #[allow(dead_code, unused_variables)]
    pub fn default(pres_context: RawGeckoPresContextBorrowed) -> Arc<Self> {
        // Start from zeroed memory and let Gecko's constructor fill it in.
        let mut result = Arc::new(${style_struct.gecko_struct_name} { gecko: unsafe { zeroed() } });
        unsafe {
            Gecko_Construct_Default_${style_struct.gecko_ffi_name}(&mut Arc::get_mut(&mut result).unwrap().gecko,
                                                                   pres_context);
        }
        result
    }

    /// Borrow the underlying Gecko struct.
    pub fn get_gecko(&self) -> &${style_struct.gecko_ffi_name} {
        &self.gecko
    }
}
impl Drop for ${style_struct.gecko_struct_name} {
    fn drop(&mut self) {
        unsafe {
            Gecko_Destroy_${style_struct.gecko_ffi_name}(&mut self.gecko);
        }
    }
}
impl Clone for ${style_struct.gecko_struct_name} {
    fn clone(&self) -> Self {
        unsafe {
            let mut result = ${style_struct.gecko_struct_name} { gecko: zeroed() };
            Gecko_CopyConstruct_${style_struct.gecko_ffi_name}(&mut result.gecko, &self.gecko);
            result
        }
    }
}
</%def>
<%def name="impl_simple_type_with_conversion(ident, gecko_ffi_name=None)">
    ## Accessors for a property whose computed value and Gecko field convert
    ## into each other via `From`. The field name defaults to m + CamelCase of
    ## the ident.
    <%
    if gecko_ffi_name is None:
        gecko_ffi_name = "m" + to_camel_case(ident)
    %>

    #[allow(non_snake_case)]
    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        self.gecko.${gecko_ffi_name} = From::from(v)
    }

    <% impl_simple_copy(ident, gecko_ffi_name) %>

    #[allow(non_snake_case)]
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        From::from(self.gecko.${gecko_ffi_name})
    }
</%def>
<%def name="impl_font_settings(ident, tag_type)">
    ## Accessors for font feature/variation settings: a POD tag/value array on
    ## gecko.mFont, where an empty array means `normal`. `tag_type` is the
    ## generic value wrapper type used on the Servo side.
    <%
    gecko_ffi_name = to_camel_case_lower(ident)
    %>

    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        use values::generics::FontSettings;

        let current_settings = &mut self.gecko.mFont.${gecko_ffi_name};
        current_settings.clear_pod();

        match v {
            FontSettings::Normal => (), // do nothing, length is already 0
            FontSettings::Tag(other_settings) => {
                unsafe { current_settings.set_len_pod(other_settings.len() as u32) };

                for (current, other) in current_settings.iter_mut().zip(other_settings) {
                    current.mTag = other.tag;
                    current.mValue = other.value.0;
                }
            }
        };
    }

    pub fn copy_${ident}_from(&mut self, other: &Self) {
        let current_settings = &mut self.gecko.mFont.${gecko_ffi_name};
        let other_settings = &other.gecko.mFont.${gecko_ffi_name};
        let settings_length = other_settings.len() as u32;

        current_settings.clear_pod();
        unsafe { current_settings.set_len_pod(settings_length) };

        for (current, other) in current_settings.iter_mut().zip(other_settings.iter()) {
            current.mTag = other.mTag;
            current.mValue = other.mValue;
        }
    }

    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }

    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        use values::generics::{FontSettings, FontSettingTag, ${tag_type}};

        if self.gecko.mFont.${gecko_ffi_name}.len() == 0 {
            FontSettings::Normal
        } else {
            FontSettings::Tag(
                self.gecko.mFont.${gecko_ffi_name}.iter().map(|gecko_font_setting| {
                    FontSettingTag {
                        tag: gecko_font_setting.mTag,
                        value: ${tag_type}(gecko_font_setting.mValue),
                    }
                }).collect()
            )
        }
    }
</%def>
<%def name="impl_trait(style_struct_name, skip_longhands='')">
<%
    # Emits `impl` for one style struct: the manual methods supplied in the
    # caller's body, plus auto-generated set/copy/reset/clone methods for
    # every longhand whose type we know how to map (see predefined_types).
    # Longhands named in `skip_longhands` (or all of them, when it is "*")
    # are expected to be implemented manually by the caller.
    style_struct = next(x for x in data.style_structs if x.name == style_struct_name)
    # Split the skip list once, instead of re-splitting it for every longhand.
    skipped = skip_longhands.split()
    longhands = [x for x in style_struct.longhands
                 if not (skip_longhands == "*" or x.name in skipped)]

    # Types used with predefined_type()-defined properties that we can auto-generate.
    predefined_types = {
        "length::LengthOrAuto": impl_style_coord,
        "length::LengthOrNormal": impl_style_coord,
        "length::NonNegativeLengthOrAuto": impl_style_coord,
        "length::NonNegativeLengthOrNormal": impl_style_coord,
        "GreaterThanOrEqualToOneNumber": impl_simple,
        "Length": impl_absolute_length,
        "Position": impl_position,
        "LengthOrPercentage": impl_style_coord,
        "LengthOrPercentageOrAuto": impl_style_coord,
        "LengthOrPercentageOrNone": impl_style_coord,
        "LengthOrNone": impl_style_coord,
        "LengthOrNormal": impl_style_coord,
        "MaxLength": impl_style_coord,
        "MozLength": impl_style_coord,
        "NonNegativeLengthOrPercentage": impl_style_coord,
        "NonNegativeNumber": impl_simple,
        "Number": impl_simple,
        "Integer": impl_simple,
        "Opacity": impl_simple,
        "Color": impl_color,
        "RGBAColor": impl_rgba_color,
        "SVGLength": impl_svg_length,
        "SVGOpacity": impl_svg_opacity,
        "SVGPaint": impl_svg_paint,
        "SVGWidth": impl_svg_length,
        "UrlOrNone": impl_css_url,
    }

    def longhand_method(longhand):
        args = dict(ident=longhand.ident, gecko_ffi_name=longhand.gecko_ffi_name)

        # get the method and pass additional keyword or type-specific arguments
        if longhand.logical:
            method = impl_logical
            args.update(name=longhand.name)
        elif longhand.keyword:
            method = impl_keyword
            args.update(keyword=longhand.keyword)
            if "font" in longhand.ident:
                args.update(cast_type=longhand.cast_type)
        else:
            method = predefined_types[longhand.predefined_type]

        method(**args)

    # Keep only the longhands we can actually generate an implementation for.
    picked_longhands = [x for x in longhands
                        if x.keyword or x.predefined_type in predefined_types or x.logical]
%>
impl ${style_struct.gecko_struct_name} {
    /*
     * Manually-Implemented Methods.
     */
    ${caller.body().strip()}

    /*
     * Auto-Generated Methods.
     */
    <%
    for longhand in picked_longhands:
        longhand_method(longhand)
    %>
}
</%def>
<%!
# Helper descriptors used by the templates below to iterate over the four
# physical sides, the four border corners, and the grid line properties.

class Side(object):
    """One physical side (Top/Right/Bottom/Left) and its Gecko index."""
    def __init__(self, name, index):
        self.name = name
        self.ident = name.lower()
        self.index = index

class Corner(object):
    """One border corner: the Gecko HalfCorner enum names for its X/Y radii
    and their flattened indices into the 8-entry radius array."""
    def __init__(self, vert, horiz, index):
        self.x_name = "HalfCorner::eCorner" + vert + horiz + "X"
        self.y_name = "HalfCorner::eCorner" + vert + horiz + "Y"
        self.ident = (vert + "_" + horiz).lower()
        self.x_index = 2 * index
        self.y_index = 2 * index + 1

class GridLine(object):
    """Naming helper for one grid-{row,column}-{start,end} longhand."""
    def __init__(self, name):
        self.ident = "grid-" + name.lower()
        self.name = self.ident.replace('-', '_')
        self.gecko = "m" + to_camel_case(self.ident)

SIDES = [Side("Top", 0), Side("Right", 1), Side("Bottom", 2), Side("Left", 3)]
CORNERS = [Corner("Top", "Left", 0), Corner("Top", "Right", 1),
           Corner("Bottom", "Right", 2), Corner("Bottom", "Left", 3)]
# Use a list comprehension rather than map() so that GRID_LINES is a real
# list under Python 3 as well: there, map() returns a one-shot iterator,
# which would break list concatenation like `SIDES + GRID_LINES` below.
GRID_LINES = [GridLine(name)
              for name in ["row-start", "row-end", "column-start", "column-end"]]
%>
// Compile-time sanity checks that the hard-coded side and corner indices used
// by the templates above agree with Gecko's enum values.
#[allow(dead_code)]
fn static_assert() {
    // Each transmute only compiles when the two array lengths match, i.e.
    // when the Gecko half-corner constant equals the expected index.
    unsafe {
        % for corner in CORNERS:
        transmute::<_, [u32; ${corner.x_index}]>([1; structs::${corner.x_name} as usize]);
        transmute::<_, [u32; ${corner.y_index}]>([1; structs::${corner.y_name} as usize]);
        % endfor
    }
    // Note: using the above technique with an enum hits a rust bug when |structs| is in a different crate.
    // Indexing a one-element array with 1 fails to compile, so each check
    // passes only when the side enum value equals the expected index.
    % for side in SIDES:
    { const DETAIL: u32 = [0][(structs::Side::eSide${side.name} as usize != ${side.index}) as usize]; let _ = DETAIL; }
    % endfor
}
## Keyword set shared by the four border-*-style longhands below.
<% border_style_keyword = Keyword("border-style",
                                  "none solid double dotted dashed hidden groove ridge inset outset") %>

## Longhands that get hand-written implementations in the Border block below
## instead of the auto-generated ones from impl_trait.
<% skip_border_longhands = " ".join(["border-{0}-{1}".format(x.ident, y)
                                     for x in SIDES
                                     for y in ["color", "style", "width"]] +
                                    ["border-{0}-radius".format(x.ident.replace("_", "-"))
                                     for x in CORNERS]) %>

<% skip_moz_border_color_longhands = " ".join("-moz-border-{0}-colors".format(x.ident)
                                              for x in SIDES) %>
<%self:impl_trait style_struct_name="Border"
                  skip_longhands="${skip_border_longhands} border-image-source border-image-outset
                                  border-image-repeat border-image-width border-image-slice
                                  ${skip_moz_border_color_longhands}">
    ## Shared helper for the -moz-border-*-colors setters: `None` clears the
    ## stored color list for `side`; `Some` (re)allocates the Gecko structure
    ## via FFI and copies the colors in, converting RGBA -> nscolor.
    fn set_moz_border_colors(&mut self, side: structs::Side, v: Option<Vec<::cssparser::RGBA>>) {
        match v {
            None => {
                let ptr = self.gecko.mBorderColors.mPtr;
                if let Some(colors) = unsafe { ptr.as_mut() } {
                    unsafe { colors.mColors[side as usize].clear() };
                }
            }
            Some(ref colors) => {
                unsafe { bindings::Gecko_EnsureMozBorderColors(&mut self.gecko) };
                let border_colors = unsafe { self.gecko.mBorderColors.mPtr.as_mut().unwrap() };
                let dest_colors = &mut border_colors.mColors[side as usize];
                unsafe { dest_colors.set_len_pod(colors.len() as u32) };
                for (dst, src) in dest_colors.iter_mut().zip(colors.into_iter()) {
                    *dst = convert_rgba_to_nscolor(src);
                }
            }
        }
    }

    ## Inverse of the above; `None` when no per-side color list is stored.
    fn clone_moz_border_colors(&self, side: structs::Side) -> Option<Vec<::cssparser::RGBA>> {
        unsafe { self.gecko.mBorderColors.mPtr.as_ref() }.map(|colors| {
            colors.mColors[side as usize].iter()
                .map(|color| convert_nscolor_to_rgba(*color))
                .collect()
        })
    }

    % for side in SIDES:
    <% impl_keyword("border_%s_style" % side.ident,
                    "mBorderStyle[%s]" % side.index,
                    border_style_keyword,
                    on_set="update_border_%s" % side.ident) %>

    // This is needed because the initial mComputedBorder value is set to zero.
    //
    // In order to compute stuff, we start from the initial struct, and keep
    // going down the tree applying properties.
    //
    // That means, effectively, that when we set border-style to something
    // non-hidden, we should use the initial border instead.
    //
    // Servo stores the initial border-width in the initial struct, and then
    // adjusts as needed in the fixup phase. This means that the initial struct
    // is technically not valid without fixups, and that you lose pretty much
    // any sharing of the initial struct, which is kind of unfortunate.
    //
    // Gecko has two fields for this, one that stores the "specified" border,
    // and other that stores the actual computed one. That means that when we
    // set border-style, border-width may change and we need to sync back to the
    // specified one. This is what this function does.
    //
    // Note that this doesn't impose any dependency in the order of computation
    // of the properties. This is only relevant if border-style is specified,
    // but border-width isn't. If border-width is specified at some point, the
    // two mBorder and mComputedBorder fields would be the same already.
    //
    // Once we're here, we know that we'll run style fixups, so it's fine to
    // just copy the specified border here, we'll adjust it if it's incorrect
    // later.
    fn update_border_${side.ident}(&mut self) {
        self.gecko.mComputedBorder.${side.ident} = self.gecko.mBorder.${side.ident};
    }

    <% impl_color("border_%s_color" % side.ident, "(mBorderColor)[%s]" % side.index) %>

    <% impl_non_negative_length("border_%s_width" % side.ident,
                                "mComputedBorder.%s" % side.ident,
                                inherit_from="mBorder.%s" % side.ident,
                                round_to_pixels=True) %>

    pub fn border_${side.ident}_has_nonzero_width(&self) -> bool {
        self.gecko.mComputedBorder.${side.ident} != 0
    }

    #[allow(non_snake_case)]
    pub fn set__moz_border_${side.ident}_colors(&mut self,
                                                v: longhands::_moz_border_${side.ident}_colors::computed_value::T) {
        self.set_moz_border_colors(structs::Side::eSide${to_camel_case(side.ident)}, v.0);
    }

    #[allow(non_snake_case)]
    pub fn copy__moz_border_${side.ident}_colors_from(&mut self, other: &Self) {
        unsafe {
            bindings::Gecko_CopyMozBorderColors(&mut self.gecko, &other.gecko,
                                                structs::Side::eSide${to_camel_case(side.ident)});
        }
    }

    #[allow(non_snake_case)]
    pub fn reset__moz_border_${side.ident}_colors(&mut self, other: &Self) {
        self.copy__moz_border_${side.ident}_colors_from(other)
    }

    #[allow(non_snake_case)]
    pub fn clone__moz_border_${side.ident}_colors(&self)
                                                  -> longhands::_moz_border_${side.ident}_colors::computed_value::T {
        use self::longhands::_moz_border_${side.ident}_colors::computed_value::T;
        T(self.clone_moz_border_colors(structs::Side::eSide${to_camel_case(side.ident)}))
    }
    % endfor

    % for corner in CORNERS:
    <% impl_corner_style_coord("border_%s_radius" % corner.ident,
                               "mBorderRadius",
                               corner.x_index,
                               corner.y_index) %>
    % endfor

    pub fn set_border_image_source(&mut self, image: longhands::border_image_source::computed_value::T) {
        unsafe {
            // Prevent leaking of the last elements we did set
            Gecko_SetNullImageValue(&mut self.gecko.mBorderImageSource);
        }

        if let Either::Second(image) = image {
            self.gecko.mBorderImageSource.set(image);
        }
    }

    pub fn copy_border_image_source_from(&mut self, other: &Self) {
        unsafe {
            Gecko_CopyImageValueFrom(&mut self.gecko.mBorderImageSource,
                                     &other.gecko.mBorderImageSource);
        }
    }

    pub fn reset_border_image_source(&mut self, other: &Self) {
        self.copy_border_image_source_from(other)
    }

    pub fn clone_border_image_source(&self) -> longhands::border_image_source::computed_value::T {
        use values::None_;

        match unsafe { self.gecko.mBorderImageSource.into_image() } {
            Some(image) => Either::Second(image),
            None => Either::First(None_),
        }
    }

    <% impl_style_sides("border_image_outset") %>

    <%
    border_image_repeat_keywords = ["Stretch", "Repeat", "Round", "Space"]
    %>

    pub fn set_border_image_repeat(&mut self, v: longhands::border_image_repeat::computed_value::T) {
        use properties::longhands::border_image_repeat::computed_value::RepeatKeyword;
        use gecko_bindings::structs;

        ## v.0 / v.1 are the horizontal and vertical repeat keywords.
        % for i, side in enumerate(["H", "V"]):
            let k = match v.${i} {
                % for keyword in border_image_repeat_keywords:
                RepeatKeyword::${keyword} => structs::NS_STYLE_BORDER_IMAGE_REPEAT_${keyword.upper()},
                % endfor
            };

            self.gecko.mBorderImageRepeat${side} = k as u8;
        % endfor
    }

    pub fn copy_border_image_repeat_from(&mut self, other: &Self) {
        self.gecko.mBorderImageRepeatH = other.gecko.mBorderImageRepeatH;
        self.gecko.mBorderImageRepeatV = other.gecko.mBorderImageRepeatV;
    }

    pub fn reset_border_image_repeat(&mut self, other: &Self) {
        self.copy_border_image_repeat_from(other)
    }

    pub fn clone_border_image_repeat(&self) -> longhands::border_image_repeat::computed_value::T {
        use properties::longhands::border_image_repeat::computed_value::RepeatKeyword;
        use gecko_bindings::structs;

        % for side in ["H", "V"]:
        let servo_${side.lower()} = match self.gecko.mBorderImageRepeat${side} as u32 {
            % for keyword in border_image_repeat_keywords:
            structs::NS_STYLE_BORDER_IMAGE_REPEAT_${keyword.upper()} => RepeatKeyword::${keyword},
            % endfor
            x => panic!("Found unexpected value in mBorderImageRepeat${side}: {:?}", x),
        };
        % endfor
        longhands::border_image_repeat::computed_value::T(servo_h, servo_v)
    }

    <% impl_style_sides("border_image_width") %>

    pub fn set_border_image_slice(&mut self, v: longhands::border_image_slice::computed_value::T) {
        use gecko_bindings::structs::{NS_STYLE_BORDER_IMAGE_SLICE_NOFILL, NS_STYLE_BORDER_IMAGE_SLICE_FILL};

        v.offsets.to_gecko_rect(&mut self.gecko.mBorderImageSlice);

        let fill = if v.fill {
            NS_STYLE_BORDER_IMAGE_SLICE_FILL
        } else {
            NS_STYLE_BORDER_IMAGE_SLICE_NOFILL
        };
        self.gecko.mBorderImageFill = fill as u8;
    }

    ## The fill flag travels with the slice rect, so it is copied alongside it.
    <%self:copy_sides_style_coord ident="border_image_slice">
        self.gecko.mBorderImageFill = other.gecko.mBorderImageFill;
    </%self:copy_sides_style_coord>

    pub fn clone_border_image_slice(&self) -> longhands::border_image_slice::computed_value::T {
        use gecko_bindings::structs::NS_STYLE_BORDER_IMAGE_SLICE_FILL;
        use values::computed::{BorderImageSlice, NumberOrPercentage};

        type NumberOrPercentageRect = ::values::generics::rect::Rect<NumberOrPercentage>;

        ## NOTE(review): the "${side}" in the expect message below picks up
        ## the leftover loop variable from the template loop above ("V"), which
        ## looks unintentional — confirm before relying on the message text.
        BorderImageSlice {
            offsets:
                NumberOrPercentageRect::from_gecko_rect(&self.gecko.mBorderImageSlice)
                    .expect("mBorderImageSlice[${side}] could not convert to NumberOrPercentageRect"),
            fill: self.gecko.mBorderImageFill as u32 == NS_STYLE_BORDER_IMAGE_SLICE_FILL
        }
    }
</%self:impl_trait>
<% skip_margin_longhands = " ".join(["margin-%s" % x.ident for x in SIDES]) %>
## Margin: each of the four margin-* longhands is one entry of the mMargin
## rect, generated via the impl_split_style_coord helper.
<%self:impl_trait style_struct_name="Margin"
                  skip_longhands="${skip_margin_longhands}">
    % for side in SIDES:
    <% impl_split_style_coord("margin_%s" % side.ident,
                              "mMargin",
                              side.index) %>
    % endfor
</%self:impl_trait>
<% skip_padding_longhands = " ".join(["padding-%s" % x.ident for x in SIDES]) %>
## Padding mirrors Margin: four padding-* longhands backed by the mPadding
## rect, generated via impl_split_style_coord.
<%self:impl_trait style_struct_name="Padding"
                  skip_longhands="${skip_padding_longhands}">
    % for side in SIDES:
    <% impl_split_style_coord("padding_%s" % side.ident,
                              "mPadding",
                              side.index) %>
    % endfor
</%self:impl_trait>
<% skip_position_longhands = " ".join(x.ident for x in SIDES + GRID_LINES) %>
<%self:impl_trait style_struct_name="Position"
skip_longhands="${skip_position_longhands} z-index order align-content
justify-content align-self justify-self align-items
justify-items grid-auto-rows grid-auto-columns grid-auto-flow
grid-template-areas grid-template-rows grid-template-columns">
## The physical offsets (top/right/bottom/left) each map to one entry of
## the mOffset rect.
% for side in SIDES:
<% impl_split_style_coord("%s" % side.ident,
                          "mOffset",
                          side.index) %>
% endfor
/// Set z-index: either an integer or `auto`, stored in the
/// nsStyleCoord-backed mZIndex.
pub fn set_z_index(&mut self, v: longhands::z_index::computed_value::T) {
    match v {
        Either::First(n) => self.gecko.mZIndex.set_value(CoordDataValue::Integer(n)),
        Either::Second(Auto) => self.gecko.mZIndex.set_value(CoordDataValue::Auto),
    }
}
/// Copy mZIndex without the usual calc() refcount bookkeeping, which is
/// safe because z-index is never a calc() (asserted below).
pub fn copy_z_index_from(&mut self, other: &Self) {
    use gecko_bindings::structs::nsStyleUnit;
    // z-index is never a calc(). If it were, we'd be leaking here, so
    // assert that it isn't.
    debug_assert!(self.gecko.mZIndex.unit() != nsStyleUnit::eStyleUnit_Calc);
    unsafe {
        self.gecko.mZIndex.copy_from_unchecked(&other.gecko.mZIndex);
    }
}
/// Reset z-index: identical to copying from the reset style.
pub fn reset_z_index(&mut self, other: &Self) {
    self.copy_z_index_from(other)
}
/// Read back z-index as either an integer or `auto`.
pub fn clone_z_index(&self) -> longhands::z_index::computed_value::T {
    return match self.gecko.mZIndex.as_value() {
        CoordDataValue::Integer(n) => Either::First(n),
        CoordDataValue::Auto => Either::Second(Auto),
        _ => {
            // Unreachable per set_z_index/copy_z_index_from; fall back to 0
            // in release builds rather than crashing.
            debug_assert!(false);
            Either::First(0)
        }
    }
}
## align-content, justify-content, align-self, justify-self and align-items
## are all plain bitfield conversions generated by a shared helper.
% for kind in ["align", "justify"]:
${impl_simple_type_with_conversion(kind + "_content")}
${impl_simple_type_with_conversion(kind + "_self")}
% endfor
${impl_simple_type_with_conversion("align_items")}
/// Set justify-items: stores the author-specified value and the resolved
/// computed value separately (gecko keeps both).
pub fn set_justify_items(&mut self, v: longhands::justify_items::computed_value::T) {
    self.gecko.mSpecifiedJustifyItems = v.specified.into();
    self.set_computed_justify_items(v.computed);
}
/// Set only the computed justify-items value. `auto` must already have
/// been resolved by the caller (asserted below).
pub fn set_computed_justify_items(&mut self, v: values::specified::JustifyItems) {
    debug_assert!(v.0 != ::values::specified::align::ALIGN_AUTO);
    self.gecko.mJustifyItems = v.into();
}
/// Reset justify-items: restores both the computed and the specified slots
/// from the reset style.
pub fn reset_justify_items(&mut self, reset_style: &Self) {
    self.gecko.mJustifyItems = reset_style.gecko.mJustifyItems;
    self.gecko.mSpecifiedJustifyItems = reset_style.gecko.mSpecifiedJustifyItems;
}
/// Copy justify-items: both the computed and the specified slot must come
/// from the corresponding slot in `other`.
///
/// BUG FIX: the specified slot was previously filled from `other`'s
/// *computed* value (`mJustifyItems`), which disagreed with
/// `reset_justify_items` above and made `clone_justify_items` report a
/// wrong `specified` field after inheritance.
pub fn copy_justify_items_from(&mut self, other: &Self) {
    self.gecko.mJustifyItems = other.gecko.mJustifyItems;
    self.gecko.mSpecifiedJustifyItems = other.gecko.mSpecifiedJustifyItems;
}
/// Read back justify-items as a (computed, specified) pair.
pub fn clone_justify_items(&self) -> longhands::justify_items::computed_value::T {
    longhands::justify_items::computed_value::T {
        computed: self.gecko.mJustifyItems.into(),
        specified: self.gecko.mSpecifiedJustifyItems.into(),
    }
}
/// Set the `order` property (stored directly as an integer).
pub fn set_order(&mut self, v: longhands::order::computed_value::T) {
    self.gecko.mOrder = v;
}
/// Read back the `order` property.
pub fn clone_order(&self) -> longhands::order::computed_value::T {
    self.gecko.mOrder
}
${impl_simple_copy('order', 'mOrder')}
% for value in GRID_LINES:
/// Set a grid line longhand: stores the optional line name, the `span`
/// flag, and the line number (clamped to gecko's legal range) into the
/// nsStyleGridLine struct.
pub fn set_${value.name}(&mut self, v: longhands::${value.name}::computed_value::T) {
    use gecko_bindings::structs::{nsStyleGridLine_kMinLine, nsStyleGridLine_kMaxLine};
    let ident = v.ident.as_ref().map_or(&[] as &[_], |ident| ident.0.as_slice());
    self.gecko.${value.gecko}.mLineName.assign(ident);
    self.gecko.${value.gecko}.mHasSpan = v.is_span;
    if let Some(integer) = v.line_num {
        // clamping the integer between a range
        self.gecko.${value.gecko}.mInteger = cmp::max(nsStyleGridLine_kMinLine,
            cmp::min(integer, nsStyleGridLine_kMaxLine));
    }
}
/// Copy a grid line longhand field-by-field (span flag, number, name).
pub fn copy_${value.name}_from(&mut self, other: &Self) {
    self.gecko.${value.gecko}.mHasSpan = other.gecko.${value.gecko}.mHasSpan;
    self.gecko.${value.gecko}.mInteger = other.gecko.${value.gecko}.mInteger;
    self.gecko.${value.gecko}.mLineName.assign(&*other.gecko.${value.gecko}.mLineName);
}
/// Reset a grid line longhand: identical to copying from the reset style.
pub fn reset_${value.name}(&mut self, other: &Self) {
    self.copy_${value.name}_from(other)
}
/// Read back a grid line longhand. An empty name maps to `None`; a line
/// number of 0 (the unset sentinel) maps to `None`.
pub fn clone_${value.name}(&self) -> longhands::${value.name}::computed_value::T {
    use gecko_bindings::structs::{nsStyleGridLine_kMinLine, nsStyleGridLine_kMaxLine};
    use string_cache::Atom;
    longhands::${value.name}::computed_value::T {
        is_span: self.gecko.${value.gecko}.mHasSpan,
        ident: {
            let name = self.gecko.${value.gecko}.mLineName.to_string();
            if name.len() == 0 {
                None
            } else {
                Some(CustomIdent(Atom::from(name)))
            }
        },
        line_num:
            if self.gecko.${value.gecko}.mInteger == 0 {
                None
            } else {
                // Stored values were clamped on the way in; verify.
                debug_assert!(nsStyleGridLine_kMinLine <= self.gecko.${value.gecko}.mInteger);
                debug_assert!(self.gecko.${value.gecko}.mInteger <= nsStyleGridLine_kMaxLine);
                Some(self.gecko.${value.gecko}.mInteger)
            },
    }
}
% endfor
% for kind in ["rows", "columns"]:
/// Set grid-auto-rows/columns: a single track size split into its
/// min and max sizing functions on the gecko side.
pub fn set_grid_auto_${kind}(&mut self, v: longhands::grid_auto_${kind}::computed_value::T) {
    v.to_gecko_style_coords(&mut self.gecko.mGridAuto${kind.title()}Min,
                            &mut self.gecko.mGridAuto${kind.title()}Max)
}
/// Copy grid-auto-rows/columns: both the min and max sizing coords.
pub fn copy_grid_auto_${kind}_from(&mut self, other: &Self) {
    self.gecko.mGridAuto${kind.title()}Min.copy_from(&other.gecko.mGridAuto${kind.title()}Min);
    self.gecko.mGridAuto${kind.title()}Max.copy_from(&other.gecko.mGridAuto${kind.title()}Max);
}
/// Reset grid-auto-rows/columns: identical to copying from the reset style.
pub fn reset_grid_auto_${kind}(&mut self, other: &Self) {
    self.copy_grid_auto_${kind}_from(other)
}
/// Read back grid-auto-rows/columns by recombining the min/max coords.
pub fn clone_grid_auto_${kind}(&self) -> longhands::grid_auto_${kind}::computed_value::T {
    ::values::generics::grid::TrackSize::from_gecko_style_coords(&self.gecko.mGridAuto${kind.title()}Min,
                                                                 &self.gecko.mGridAuto${kind.title()}Max)
}
/// Set grid-template-rows/columns. Allocates a gecko nsStyleGridTemplate
/// (or null for `none`) and fills it from the Servo value:
/// - TrackList: min/max sizing functions plus per-slot line-name lists,
///   with special handling for the single allowed <auto-repeat> slot;
/// - Subgrid: line-name lists only, with the auto-fill slot pulled out.
/// Track counts are capped at gecko's line limit.
pub fn set_grid_template_${kind}(&mut self, v: longhands::grid_template_${kind}::computed_value::T) {
    <% self_grid = "self.gecko.mGridTemplate%s" % kind.title() %>
    use gecko_bindings::structs::{nsTArray, nsStyleGridLine_kMaxLine};
    use nsstring::nsStringRepr;
    use std::usize;
    use values::CustomIdent;
    use values::generics::grid::TrackListType::Auto;
    use values::generics::grid::{GridTemplateComponent, RepeatCount};
    // Resize the gecko string array and copy each Servo ident into it.
    #[inline]
    fn set_line_names(servo_names: &[CustomIdent], gecko_names: &mut nsTArray<nsStringRepr>) {
        unsafe {
            bindings::Gecko_ResizeTArrayForStrings(gecko_names, servo_names.len() as u32);
        }
        for (servo_name, gecko_name) in servo_names.iter().zip(gecko_names.iter_mut()) {
            gecko_name.assign(servo_name.0.as_slice());
        }
    }
    let max_lines = nsStyleGridLine_kMaxLine as usize - 1;      // for accounting the final <line-names>
    let result = match v {
        GridTemplateComponent::None => ptr::null_mut(),
        GridTemplateComponent::TrackList(track) => {
            let mut num_values = track.values.len();
            if let Auto(_) = track.list_type {
                // The auto-repeat slot occupies one extra track position.
                num_values += 1;
            }
            num_values = cmp::min(num_values, max_lines);
            // num_values tracks need num_values + 1 line-name lists.
            let value = unsafe {
                bindings::Gecko_CreateStyleGridTemplate(num_values as u32,
                                                        (num_values + 1) as u32).as_mut().unwrap()
            };
            let mut auto_idx = usize::MAX;
            let mut auto_track_size = None;
            if let Auto(idx) = track.list_type {
                auto_idx = idx as usize;
                let auto_repeat = track.auto_repeat.as_ref().expect("expected <auto-track-repeat> value");
                if auto_repeat.count == RepeatCount::AutoFill {
                    value.set_mIsAutoFill(true);
                }
                value.mRepeatAutoIndex = idx as i16;
                // NOTE: Gecko supports only one set of values in <auto-repeat>
                // i.e., it can only take repeat(auto-fill, [a] 10px [b]), and no more.
                set_line_names(&auto_repeat.line_names[0], &mut value.mRepeatAutoLineNameListBefore);
                set_line_names(&auto_repeat.line_names[1], &mut value.mRepeatAutoLineNameListAfter);
                auto_track_size = Some(auto_repeat.track_sizes.get(0).unwrap().clone());
            } else {
                // No auto-repeat: clear out the before/after name lists.
                unsafe {
                    bindings::Gecko_ResizeTArrayForStrings(
                        &mut value.mRepeatAutoLineNameListBefore, 0);
                    bindings::Gecko_ResizeTArrayForStrings(
                        &mut value.mRepeatAutoLineNameListAfter, 0);
                }
            }
            let mut line_names = track.line_names.into_iter();
            let mut values_iter = track.values.into_iter();
            {
                // Walk the min/max pairs in lockstep; the auto-repeat slot
                // consumes the saved auto track size instead of the next value.
                let min_max_iter = value.mMinTrackSizingFunctions.iter_mut()
                                        .zip(value.mMaxTrackSizingFunctions.iter_mut());
                for (i, (gecko_min, gecko_max)) in min_max_iter.enumerate().take(max_lines) {
                    let name_list = line_names.next().expect("expected line-names");
                    set_line_names(&name_list, &mut value.mLineNameLists[i]);
                    if i == auto_idx {
                        let track_size = auto_track_size.take()
                            .expect("expected <track-size> for <auto-track-repeat>");
                        track_size.to_gecko_style_coords(gecko_min, gecko_max);
                        continue
                    }
                    let track_size = values_iter.next().expect("expected <track-size> value");
                    track_size.to_gecko_style_coords(gecko_min, gecko_max);
                }
            }
            // The trailing <line-names> after the last track.
            let final_names = line_names.next().unwrap();
            set_line_names(&final_names, value.mLineNameLists.last_mut().unwrap());
            value
        },
        GridTemplateComponent::Subgrid(list) => {
            // The auto-fill slot (if any) is stored separately, not in the
            // regular name-list array.
            let names_length = match list.fill_idx {
                Some(_) => list.names.len() - 1,
                None => list.names.len(),
            };
            let num_values = cmp::min(names_length, max_lines + 1);
            let value = unsafe {
                bindings::Gecko_CreateStyleGridTemplate(0, num_values as u32).as_mut().unwrap()
            };
            value.set_mIsSubgrid(true);
            let mut names = list.names.into_vec();
            if let Some(idx) = list.fill_idx {
                value.set_mIsAutoFill(true);
                value.mRepeatAutoIndex = idx as i16;
                set_line_names(&names.swap_remove(idx as usize),
                               &mut value.mRepeatAutoLineNameListBefore);
            }
            for (servo_names, gecko_names) in names.iter().zip(value.mLineNameLists.iter_mut()) {
                set_line_names(servo_names, gecko_names);
            }
            value
        },
    };
    // Transfers ownership of `result` to the style struct.
    unsafe { bindings::Gecko_SetStyleGridTemplate(&mut ${self_grid}, result); }
}
/// Copy grid-template-rows/columns via the dedicated FFI deep-copy helper.
pub fn copy_grid_template_${kind}_from(&mut self, other: &Self) {
    unsafe {
        bindings::Gecko_CopyStyleGridTemplateValues(&mut ${self_grid},
                                                    other.gecko.mGridTemplate${kind.title()}.mPtr);
    }
}
/// Reset grid-template-rows/columns: identical to copying from the reset style.
pub fn reset_grid_template_${kind}(&mut self, other: &Self) {
    self.copy_grid_template_${kind}_from(other)
}
/// Read back grid-template-rows/columns by reversing set_grid_template:
/// a null pointer is `none`; subgrid and track-list forms rebuild their
/// line-name lists (re-inserting the auto-repeat slot where needed).
pub fn clone_grid_template_${kind}(&self) -> longhands::grid_template_${kind}::computed_value::T {
    <% self_grid = "self.gecko.mGridTemplate%s" % kind.title() %>
    use Atom;
    use gecko_bindings::structs::nsTArray;
    use nsstring::nsStringRepr;
    use values::CustomIdent;
    use values::generics::grid::{GridTemplateComponent, LineNameList, RepeatCount};
    use values::generics::grid::{TrackList, TrackListType, TrackListValue, TrackRepeat, TrackSize};
    let value = match unsafe { ${self_grid}.mPtr.as_ref() } {
        None => return GridTemplateComponent::None,
        Some(value) => value,
    };
    // One gecko string array -> one boxed slice of idents.
    #[inline]
    fn to_boxed_customident_slice(gecko_names: &nsTArray<nsStringRepr>) -> Box<[CustomIdent]> {
        let idents: Vec<CustomIdent> = gecko_names.iter().map(|gecko_name| {
            CustomIdent(Atom::from(gecko_name.to_string()))
        }).collect();
        idents.into_boxed_slice()
    }
    // Array-of-arrays version of the above.
    #[inline]
    fn to_line_names_vec(gecko_line_names: &nsTArray<nsTArray<nsStringRepr>>)
                         -> Vec<Box<[CustomIdent]>> {
        gecko_line_names.iter().map(|gecko_names| {
            to_boxed_customident_slice(gecko_names)
        }).collect()
    }
    let repeat_auto_index = value.mRepeatAutoIndex as usize;
    if value.mIsSubgrid() {
        // Subgrid: splice the auto-fill name list back into position.
        let mut names_vec = to_line_names_vec(&value.mLineNameLists);
        let fill_idx = if value.mIsAutoFill() {
            names_vec.insert(
                repeat_auto_index,
                to_boxed_customident_slice(&value.mRepeatAutoLineNameListBefore));
            Some(repeat_auto_index as u32)
        } else {
            None
        };
        let names = names_vec.into_boxed_slice();
        GridTemplateComponent::Subgrid(LineNameList{names, fill_idx})
    } else {
        let mut auto_repeat = None;
        let mut list_type = TrackListType::Normal;
        let line_names = to_line_names_vec(&value.mLineNameLists).into_boxed_slice();
        let mut values = Vec::with_capacity(value.mMinTrackSizingFunctions.len());
        let min_max_iter = value.mMinTrackSizingFunctions.iter()
                                .zip(value.mMaxTrackSizingFunctions.iter());
        for (i, (gecko_min, gecko_max)) in min_max_iter.enumerate() {
            let track_size = TrackSize::from_gecko_style_coords(gecko_min, gecko_max);
            if i == repeat_auto_index {
                // This slot is the single <auto-repeat>; rebuild it with
                // its own before/after name lists and one track size.
                list_type = TrackListType::Auto(repeat_auto_index as u16);
                let count = if value.mIsAutoFill() {
                    RepeatCount::AutoFill
                } else {
                    RepeatCount::AutoFit
                };
                let line_names = {
                    let mut vec: Vec<Box<[CustomIdent]>> = Vec::with_capacity(2);
                    vec.push(to_boxed_customident_slice(
                        &value.mRepeatAutoLineNameListBefore));
                    vec.push(to_boxed_customident_slice(
                        &value.mRepeatAutoLineNameListAfter));
                    vec.into_boxed_slice()
                };
                let track_sizes = vec!(track_size);
                auto_repeat = Some(TrackRepeat{count, line_names, track_sizes});
            } else {
                values.push(TrackListValue::TrackSize(track_size));
            }
        }
        GridTemplateComponent::TrackList(TrackList{list_type, values, line_names, auto_repeat})
    }
}
% endfor
${impl_simple_type_with_conversion("grid_auto_flow")}
/// Set grid-template-areas: `none` clears the refcounted gecko value;
/// otherwise allocate a GridTemplateAreasValue and fill in the named
/// areas (name + row/column ranges) and the raw template strings.
pub fn set_grid_template_areas(&mut self, v: longhands::grid_template_areas::computed_value::T) {
    use gecko_bindings::bindings::Gecko_NewGridTemplateAreasValue;
    use gecko_bindings::sugar::refptr::UniqueRefPtr;
    let v = match v {
        Either::First(areas) => areas,
        Either::Second(_) => {
            unsafe { self.gecko.mGridTemplateAreas.clear() }
            return;
        },
    };
    let mut refptr = unsafe {
        UniqueRefPtr::from_addrefed(
            Gecko_NewGridTemplateAreasValue(v.areas.len() as u32, v.strings.len() as u32, v.width))
    };
    for (servo, gecko) in v.areas.into_iter().zip(refptr.mNamedAreas.iter_mut()) {
        gecko.mName.assign_utf8(&*servo.name);
        gecko.mColumnStart = servo.columns.start;
        gecko.mColumnEnd = servo.columns.end;
        gecko.mRowStart = servo.rows.start;
        gecko.mRowEnd = servo.rows.end;
    }
    for (servo, gecko) in v.strings.into_iter().zip(refptr.mTemplates.iter_mut()) {
        gecko.assign_utf8(&*servo);
    }
    // set_move consumes the reference held by the UniqueRefPtr.
    unsafe { self.gecko.mGridTemplateAreas.set_move(refptr.get()) }
}
/// Copy grid-template-areas by sharing the refcounted gecko value.
pub fn copy_grid_template_areas_from(&mut self, other: &Self) {
    unsafe { self.gecko.mGridTemplateAreas.set(&other.gecko.mGridTemplateAreas) }
}
/// Reset grid-template-areas: identical to copying from the reset style.
pub fn reset_grid_template_areas(&mut self, other: &Self) {
    self.copy_grid_template_areas_from(other)
}
/// Read back grid-template-areas: a null pointer means `none`; otherwise
/// rebuild the named areas, raw template strings, and column count.
pub fn clone_grid_template_areas(&self) -> longhands::grid_template_areas::computed_value::T {
    use properties::longhands::grid_template_areas::{NamedArea, TemplateAreas};
    use std::ops::Range;
    use values::None_;
    if self.gecko.mGridTemplateAreas.mRawPtr.is_null() {
        return Either::Second(None_);
    }
    let gecko_grid_template_areas = self.gecko.mGridTemplateAreas.mRawPtr;
    let areas = unsafe {
        let vec: Vec<NamedArea> =
            (*gecko_grid_template_areas).mNamedAreas.iter().map(|gecko_name_area| {
                let name = gecko_name_area.mName.to_string().into_boxed_str();
                let rows = Range {
                    start: gecko_name_area.mRowStart,
                    end: gecko_name_area.mRowEnd
                };
                let columns = Range {
                    start: gecko_name_area.mColumnStart,
                    end: gecko_name_area.mColumnEnd
                };
                NamedArea{ name, rows, columns }
            }).collect();
        vec.into_boxed_slice()
    };
    let strings = unsafe {
        let vec: Vec<Box<str>> =
            (*gecko_grid_template_areas).mTemplates.iter().map(|gecko_template| {
                gecko_template.to_string().into_boxed_str()
            }).collect();
        vec.into_boxed_slice()
    };
    let width = unsafe {
        (*gecko_grid_template_areas).mNColumns
    };
    Either::First(TemplateAreas{ areas, strings, width })
}
</%self:impl_trait>
<% skip_outline_longhands = " ".join("outline-style outline-width".split() +
["-moz-outline-radius-{0}".format(x.ident.replace("_", ""))
for x in CORNERS]) %>
<%self:impl_trait style_struct_name="Outline"
skip_longhands="${skip_outline_longhands}">
/// Set outline-style: `auto` or one of the border-style keywords, mapped
/// to gecko's numeric constants.
#[allow(non_snake_case)]
pub fn set_outline_style(&mut self, v: longhands::outline_style::computed_value::T) {
    // FIXME(bholley): Align binary representations and ditch |match| for
    // cast + static_asserts
    let result = match v {
        % for value in border_style_keyword.values_for('gecko'):
            Either::Second(border_style::T::${to_rust_ident(value)}) =>
                structs::${border_style_keyword.gecko_constant(value)} ${border_style_keyword.maybe_cast("u8")},
        % endfor
        Either::First(Auto) =>
            structs::${border_style_keyword.gecko_constant('auto')} ${border_style_keyword.maybe_cast("u8")},
    };
    ${set_gecko_property("mOutlineStyle", "result")}

    // NB: This is needed to correctly handling the initial value of
    // outline-width when outline-style changes; the border sides'
    // update_border_* helpers have the analogous logic.
    self.gecko.mActualOutlineWidth = self.gecko.mOutlineWidth;
}
/// Copy outline-style verbatim.
#[allow(non_snake_case)]
pub fn copy_outline_style_from(&mut self, other: &Self) {
    self.gecko.mOutlineStyle = other.gecko.mOutlineStyle;
}
/// Reset outline-style: identical to copying from the reset style.
#[allow(non_snake_case)]
pub fn reset_outline_style(&mut self, other: &Self) {
    self.copy_outline_style_from(other)
}
/// Read back outline-style by reversing the keyword-constant mapping.
#[allow(non_snake_case)]
pub fn clone_outline_style(&self) -> longhands::outline_style::computed_value::T {
    // FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts
    match ${get_gecko_property("mOutlineStyle")} ${border_style_keyword.maybe_cast("u32")} {
        % for value in border_style_keyword.values_for('gecko'):
        structs::${border_style_keyword.gecko_constant(value)} => Either::Second(border_style::T::${value}),
        % endfor
        structs::${border_style_keyword.gecko_constant('auto')} => Either::First(Auto),
        % if border_style_keyword.gecko_inexhaustive:
        x => panic!("Found unexpected value in style struct for outline_style property: {:?}", x),
        % endif
    }
}
<% impl_non_negative_length("outline_width", "mActualOutlineWidth",
inherit_from="mOutlineWidth",
round_to_pixels=True) %>
% for corner in CORNERS:
<% impl_corner_style_coord("_moz_outline_radius_%s" % corner.ident.replace("_", ""),
"mOutlineRadius",
corner.x_index,
corner.y_index) %>
% endfor
/// True when the used outline width (after style-based zeroing) is nonzero.
pub fn outline_has_nonzero_width(&self) -> bool {
    self.gecko.mActualOutlineWidth != 0
}
</%self:impl_trait>
<%
skip_font_longhands = """font-family font-size font-size-adjust font-weight
font-synthesis -x-lang font-variant-alternates
font-variant-east-asian font-variant-ligatures
font-variant-numeric font-language-override
font-feature-settings font-variation-settings
-moz-min-font-size-ratio -x-text-zoom"""
%>
<%self:impl_trait style_struct_name="Font"
skip_longhands="${skip_font_longhands}">
<% impl_font_settings("font_feature_settings", "FontSettingTagInt") %>
<% impl_font_settings("font_variation_settings", "FontSettingTagFloat") %>
/// Mark the font as non-system and let gecko substitute a concrete
/// family for a `none` generic.
pub fn fixup_none_generic(&mut self, device: &Device) {
    self.gecko.mFont.systemFont = false;
    unsafe {
        bindings::Gecko_nsStyleFont_FixupNoneGeneric(&mut self.gecko, device.pres_context())
    }
}
/// Mark the font as a system font, clearing the generic id and recording
/// the default font type to fall back to.
pub fn fixup_system(&mut self, default_font_type: structs::FontFamilyType) {
    self.gecko.mFont.systemFont = true;
    self.gecko.mGenericID = structs::kGenericFont_NONE;
    self.gecko.mFont.fontlist.mDefaultFontType = default_font_type;
}
/// Set font-family: rebuilds the gecko font list, appending each named
/// or generic family; the generic id is only recorded when the list is a
/// single generic family.
pub fn set_font_family(&mut self, v: longhands::font_family::computed_value::T) {
    use properties::longhands::font_family::computed_value::{FontFamily, FamilyNameSyntax};
    let list = &mut self.gecko.mFont.fontlist;
    unsafe { Gecko_FontFamilyList_Clear(list); }
    self.gecko.mGenericID = structs::kGenericFont_NONE;
    for family in &v.0 {
        match *family {
            FontFamily::FamilyName(ref f) => {
                // Preserve whether the author quoted the name.
                let quoted = matches!(f.syntax, FamilyNameSyntax::Quoted);
                unsafe { Gecko_FontFamilyList_AppendNamed(list, f.name.as_ptr(), quoted); }
            }
            FontFamily::Generic(ref name) => {
                let (family_type, generic) = FontFamily::generic(name);
                if v.0.len() == 1 {
                    self.gecko.mGenericID = generic;
                }
                unsafe { Gecko_FontFamilyList_AppendGeneric(list, family_type); }
            }
        }
    }
}
/// Stub: gecko glue does not expose per-item font-family access, so the
/// count is reported as 0.
pub fn font_family_count(&self) -> usize {
    0
}
/// Stub companion to font_family_count; intentionally unimplemented.
pub fn font_family_at(&self, _: usize) -> longhands::font_family::computed_value::FontFamily {
    unimplemented!()
}
/// Copy font-family: the font list via FFI, plus the generic id and the
/// system-font flag which live outside the list.
pub fn copy_font_family_from(&mut self, other: &Self) {
    unsafe { Gecko_CopyFontFamilyFrom(&mut self.gecko.mFont, &other.gecko.mFont); }
    self.gecko.mGenericID = other.gecko.mGenericID;
    self.gecko.mFont.systemFont = other.gecko.mFont.systemFont;
}
/// Reset font-family: identical to copying from the reset style.
pub fn reset_font_family(&mut self, other: &Self) {
    self.copy_font_family_from(other)
}
/// Read back font-family. An empty gecko list falls back to the recorded
/// default font type (which must be serif or sans-serif); otherwise each
/// gecko FontFamilyType maps back to a Servo generic or named family.
pub fn clone_font_family(&self) -> longhands::font_family::computed_value::T {
    use cssparser::serialize_identifier;
    use properties::longhands::font_family::computed_value::{FontFamily, FamilyName, FamilyNameSyntax};
    use gecko_bindings::structs::FontFamilyType;
    use gecko_string_cache::Atom;
    if self.gecko.mFont.fontlist.mFontlist.is_empty() {
        let default = match self.gecko.mFont.fontlist.mDefaultFontType {
            FontFamilyType::eFamily_serif => FontFamily::Generic(atom!("serif")),
            FontFamilyType::eFamily_sans_serif => FontFamily::Generic(atom!("sans-serif")),
            _ => panic!("Default generic must be serif or sans-serif"),
        };
        return longhands::font_family::computed_value::T(vec![default]);
    }
    longhands::font_family::computed_value::T(
        self.gecko.mFont.fontlist.mFontlist.iter().map(|gecko_font_family_name| {
            match gecko_font_family_name.mType {
                FontFamilyType::eFamily_serif => FontFamily::Generic(atom!("serif")),
                FontFamilyType::eFamily_sans_serif => FontFamily::Generic(atom!("sans-serif")),
                FontFamilyType::eFamily_monospace => FontFamily::Generic(atom!("monospace")),
                FontFamilyType::eFamily_cursive => FontFamily::Generic(atom!("cursive")),
                FontFamilyType::eFamily_fantasy => FontFamily::Generic(atom!("fantasy")),
                FontFamilyType::eFamily_moz_fixed => FontFamily::Generic(Atom::from("-moz-fixed")),
                FontFamilyType::eFamily_named => {
                    // Unquoted names round-trip through identifier
                    // serialization so escaping is preserved.
                    let name = Atom::from(&*gecko_font_family_name.mName);
                    let mut serialization = String::new();
                    serialize_identifier(&name.to_string(), &mut serialization).unwrap();
                    FontFamily::FamilyName(FamilyName {
                        name: name.clone(),
                        syntax: FamilyNameSyntax::Identifiers(serialization),
                    })
                },
                FontFamilyType::eFamily_named_quoted => FontFamily::FamilyName(FamilyName {
                    name: (&*gecko_font_family_name.mName).into(),
                    syntax: FamilyNameSyntax::Quoted,
                }),
                x => panic!("Found unexpected font FontFamilyType: {:?}", x),
            }
        }).collect()
    )
}
/// Remove text zoom from all three stored font sizes (computed size,
/// script-unconstrained size, and the actual used font size).
pub fn unzoom_fonts(&mut self, device: &Device) {
    self.gecko.mSize = device.unzoom_text(Au(self.gecko.mSize)).0;
    self.gecko.mScriptUnconstrainedSize = device.unzoom_text(Au(self.gecko.mScriptUnconstrainedSize)).0;
    self.gecko.mFont.size = device.unzoom_text(Au(self.gecko.mFont.size)).0;
}
/// Set font-size; an explicitly-set size also resets the script
/// unconstrained size to the same value (scriptminsize does not apply).
pub fn set_font_size(&mut self, v: longhands::font_size::computed_value::T) {
    self.gecko.mSize = v.0.to_i32_au();
    self.gecko.mScriptUnconstrainedSize = v.0.to_i32_au();
}
/// Set font size, taking into account scriptminsize and scriptlevel
/// Returns Some(size) if we have to recompute the script unconstrained size
pub fn apply_font_size(&mut self, v: longhands::font_size::computed_value::T,
                       parent: &Self,
                       device: &Device) -> Option<NonNegativeLength> {
    let (adjusted_size, adjusted_unconstrained_size) =
        self.calculate_script_level_size(parent, device);
    // In this case, we have been unaffected by scriptminsize, ignore it
    if parent.gecko.mSize == parent.gecko.mScriptUnconstrainedSize &&
       adjusted_size == adjusted_unconstrained_size {
        self.set_font_size(v);
        self.fixup_font_min_size(device);
        None
    } else {
        // Scriptminsize has kicked in somewhere along the ancestor chain;
        // set only the constrained size here and ask the caller to
        // recompute the unconstrained size (returned as Some).
        self.gecko.mSize = v.0.to_i32_au();
        self.fixup_font_min_size(device);
        Some(Au(parent.gecko.mScriptUnconstrainedSize).into())
    }
}
/// Re-apply the device's minimum font size constraint via gecko.
pub fn fixup_font_min_size(&mut self, device: &Device) {
    unsafe { bindings::Gecko_nsStyleFont_FixupMinFontSize(&mut self.gecko, device.pres_context()) }
}
/// Overwrite only the script-unconstrained font size.
pub fn apply_unconstrained_font_size(&mut self, v: NonNegativeLength) {
    self.gecko.mScriptUnconstrainedSize = v.0.to_i32_au();
}
/// Calculates the constrained and unconstrained font sizes to be inherited
/// from the parent.
///
/// See ComputeScriptLevelSize in Gecko's nsRuleNode.cpp
///
/// scriptlevel is a property that affects how font-size is inherited. If scriptlevel is
/// +1, for example, it will inherit as the script size multiplier times
/// the parent font. This does not affect cases where the font-size is
/// explicitly set.
///
/// However, this transformation is not allowed to reduce the size below
/// scriptminsize. If this inheritance will reduce it to below
/// scriptminsize, it will be set to scriptminsize or the parent size,
/// whichever is smaller (the parent size could be smaller than the min size
/// because it was explicitly specified).
///
/// Now, within a node that has inherited a font-size which was
/// crossing scriptminsize once the scriptlevel was applied, a negative
/// scriptlevel may be used to increase the size again.
///
/// This should work, however if we have already been capped by the
/// scriptminsize multiple times, this can lead to a jump in the size.
///
/// For example, if we have text of the form:
///
/// huge large medium small tiny reallytiny tiny small medium huge
///
/// which is represented by progressive nesting and scriptlevel values of
/// +1 till the center after which the scriptlevel is -1, the "tiny"s should
/// be the same size, as should be the "small"s and "medium"s, etc.
///
/// However, if scriptminsize kicked it at around "medium", then
/// medium/tiny/reallytiny will all be the same size (the min size).
/// A -1 scriptlevel change after this will increase the min size by the
/// multiplier, making the second tiny larger than medium.
///
/// Instead, we wish for the second "tiny" to still be capped by the script
/// level, and when we reach the second "large", it should be the same size
/// as the original one.
///
/// We do this by cascading two separate font sizes. The font size (mSize)
/// is the actual displayed font size. The unconstrained font size
/// (mScriptUnconstrainedSize) is the font size in the situation where
/// scriptminsize never applied.
///
/// We calculate the proposed inherited font size based on scriptlevel and
/// the parent unconstrained size, instead of using the parent font size.
/// This is stored in the node's unconstrained size and will also be stored
/// in the font size provided that it is above the min size.
///
/// All of this only applies when inheriting. When the font size is
/// manually set, scriptminsize does not apply, and both the real and
/// unconstrained size are set to the explicit value. However, if the font
/// size is manually set to an em or percent unit, the unconstrained size
/// will be set to the value of that unit computed against the parent
/// unconstrained size, whereas the font size will be set computing against
/// the parent font size.
pub fn calculate_script_level_size(&self, parent: &Self, device: &Device) -> (Au, Au) {
    use std::cmp;

    // Relative script level between this node and its parent.
    let delta = self.gecko.mScriptLevel.saturating_sub(parent.gecko.mScriptLevel);

    let parent_size = Au(parent.gecko.mSize);
    let parent_unconstrained_size = Au(parent.gecko.mScriptUnconstrainedSize);

    if delta == 0 {
        return (parent_size, parent_unconstrained_size)
    }

    let mut min = Au(parent.gecko.mScriptMinSize);
    if self.gecko.mAllowZoom {
        min = device.zoom_text(min);
    }

    // Exponential scaling by the script size multiplier.
    let scale = (parent.gecko.mScriptSizeMultiplier as f32).powi(delta as i32);

    let new_size = parent_size.scale_by(scale);
    let new_unconstrained_size = parent_unconstrained_size.scale_by(scale);

    if scale < 1. {
        // The parent size can be smaller than scriptminsize,
        // e.g. if it was specified explicitly. Don't scale
        // in this case, but we don't want to set it to scriptminsize
        // either since that will make it larger.
        if parent_size < min {
            (parent_size, new_unconstrained_size)
        } else {
            (cmp::max(min, new_size), new_unconstrained_size)
        }
    } else {
        // If the new unconstrained size is larger than the min size,
        // this means we have escaped the grasp of scriptminsize
        // and can revert to using the unconstrained size.
        // However, if the new size is even larger (perhaps due to usage
        // of em units), use that instead.
        (cmp::min(new_size, cmp::max(new_unconstrained_size, min)),
         new_unconstrained_size)
    }
}
/// This function will also handle scriptminsize and scriptlevel
/// so should not be called when you just want the font sizes to be copied.
/// Hence the different name.
///
/// Returns true if the inherited keyword size was actually used
pub fn inherit_font_size_from(&mut self, parent: &Self,
                              kw_inherited_size: Option<NonNegativeLength>,
                              device: &Device) -> bool {
    let (adjusted_size, adjusted_unconstrained_size)
        = self.calculate_script_level_size(parent, device);
    if adjusted_size.0 != parent.gecko.mSize ||
       adjusted_unconstrained_size.0 != parent.gecko.mScriptUnconstrainedSize {
        // This is incorrect. When there is both a keyword size being inherited
        // and a scriptlevel change, we must handle the keyword size the same
        // way we handle em units. This complicates things because we now have
        // to keep track of the adjusted and unadjusted ratios in the kw font size.
        // This only affects the use case of a generic font being used in MathML.
        //
        // If we were to fix this I would prefer doing it by removing the
        // ruletree walk on the Gecko side in nsRuleNode::SetGenericFont
        // and instead using extra bookkeeping in the mSize and mScriptUnconstrainedSize
        // values, and reusing those instead of font_size_keyword.

        // In the case that MathML has given us an adjusted size, apply it.
        // Keep track of the unconstrained adjusted size.
        self.gecko.mSize = adjusted_size.0;
        self.gecko.mScriptUnconstrainedSize = adjusted_unconstrained_size.0;
        self.fixup_font_min_size(device);
        false
    } else if let Some(size) = kw_inherited_size {
        // Parent element was a keyword-derived size.
        self.gecko.mSize = size.0.to_i32_au();
        // MathML constraints didn't apply here, so we can ignore this.
        self.gecko.mScriptUnconstrainedSize = size.0.to_i32_au();
        self.fixup_font_min_size(device);
        true
    } else {
        // MathML isn't affecting us, and our parent element does not
        // have a keyword-derived size. Set things normally.
        self.gecko.mSize = parent.gecko.mSize;
        self.gecko.mScriptUnconstrainedSize = parent.gecko.mScriptUnconstrainedSize;
        self.fixup_font_min_size(device);
        false
    }
}
/// Read back the computed font-size (app units -> computed length).
pub fn clone_font_size(&self) -> longhands::font_size::computed_value::T {
    Au(self.gecko.mSize).into()
}
/// Set font-weight directly (numeric weight).
pub fn set_font_weight(&mut self, v: longhands::font_weight::computed_value::T) {
    self.gecko.mFont.weight = v.0;
}
${impl_simple_copy('font_weight', 'mFont.weight')}
/// Read back font-weight.
pub fn clone_font_weight(&self) -> longhands::font_weight::computed_value::T {
    // NOTE(review): this assert looks tautological if mFont.weight is
    // already a u16 — confirm the field's type before relying on it.
    debug_assert!(self.gecko.mFont.weight <= ::std::u16::MAX);
    longhands::font_weight::computed_value::T(self.gecko.mFont.weight)
}
${impl_simple_type_with_conversion("font_synthesis", "mFont.synthesis")}
/// Set font-size-adjust; `none` is encoded as the sentinel -1.0 on the
/// gecko side.
pub fn set_font_size_adjust(&mut self, v: longhands::font_size_adjust::computed_value::T) {
    use properties::longhands::font_size_adjust::computed_value::T;
    match v {
        T::None => self.gecko.mFont.sizeAdjust = -1.0 as f32,
        T::Number(n) => self.gecko.mFont.sizeAdjust = n,
    }
}
/// Copy font-size-adjust verbatim (including the -1.0 `none` sentinel).
pub fn copy_font_size_adjust_from(&mut self, other: &Self) {
    self.gecko.mFont.sizeAdjust = other.gecko.mFont.sizeAdjust;
}
/// Reset font-size-adjust: identical to copying from the reset style.
pub fn reset_font_size_adjust(&mut self, other: &Self) {
    self.copy_font_size_adjust_from(other)
}
/// Read back font-size-adjust, decoding the `none` sentinel.
pub fn clone_font_size_adjust(&self) -> longhands::font_size_adjust::computed_value::T {
    use properties::longhands::font_size_adjust::computed_value::T;
    T::from_gecko_adjust(self.gecko.mFont.sizeAdjust)
}
/// Set the internal -x-lang property. `forget(v)` hands the atom's
/// reference count over to gecko, which takes ownership via the raw ptr.
#[allow(non_snake_case)]
pub fn set__x_lang(&mut self, v: longhands::_x_lang::computed_value::T) {
    let ptr = v.0.as_ptr();
    forget(v);
    unsafe {
        Gecko_nsStyleFont_SetLang(&mut self.gecko, ptr);
    }
}
/// Copy -x-lang via the FFI helper (handles the atom refcount).
#[allow(non_snake_case)]
pub fn copy__x_lang_from(&mut self, other: &Self) {
    unsafe {
        Gecko_nsStyleFont_CopyLangFrom(&mut self.gecko, &other.gecko);
    }
}
/// Set the internal -x-text-zoom flag (whether text zoom applies).
#[allow(non_snake_case)]
pub fn set__x_text_zoom(&mut self, v: longhands::_x_text_zoom::computed_value::T) {
    self.gecko.mAllowZoom = v.0;
}
/// Copy -x-text-zoom verbatim.
#[allow(non_snake_case)]
pub fn copy__x_text_zoom_from(&mut self, other: &Self) {
    self.gecko.mAllowZoom = other.gecko.mAllowZoom;
}
/// Reset -x-text-zoom: identical to copying from the reset style.
#[allow(non_snake_case)]
pub fn reset__x_text_zoom(&mut self, other: &Self) {
    self.copy__x_text_zoom_from(other)
}
/// Reset -x-lang: identical to copying from the reset style.
#[allow(non_snake_case)]
pub fn reset__x_lang(&mut self, other: &Self) {
    self.copy__x_lang_from(other)
}
<% impl_simple_type_with_conversion("font_language_override", "mFont.languageOverride") %>
/// Set font-variant-alternates: accumulates the keyword bits into
/// mFont.variantAlternates and appends each functional value's idents
/// via FFI, then (re)binds the @font-feature-values lookup.
pub fn set_font_variant_alternates(&mut self,
                                   v: longhands::font_variant_alternates::computed_value::T,
                                   device: &Device) {
    use gecko_bindings::bindings::{Gecko_ClearAlternateValues, Gecko_AppendAlternateValues};
    use gecko_bindings::bindings::Gecko_nsFont_ResetFontFeatureValuesLookup;
    use gecko_bindings::bindings::Gecko_nsFont_SetFontFeatureValuesLookup;
    % for value in "normal swash stylistic ornaments annotation styleset character_variant historical".split():
        use gecko_bindings::structs::NS_FONT_VARIANT_ALTERNATES_${value.upper()};
    % endfor
    use self::longhands::font_variant_alternates::VariantAlternates;
    unsafe {
        Gecko_ClearAlternateValues(&mut self.gecko.mFont, v.len());
    }
    if v.0.is_empty() {
        // `normal`: clear the bits and drop the feature-values lookup.
        self.gecko.mFont.variantAlternates = NS_FONT_VARIANT_ALTERNATES_NORMAL as u16;
        unsafe { Gecko_nsFont_ResetFontFeatureValuesLookup(&mut self.gecko.mFont); }
        return;
    }
    for val in v.0.iter() {
        match *val {
            % for value in "Swash Stylistic Ornaments Annotation".split():
            // Single-ident functional values.
            VariantAlternates::${value}(ref ident) => {
                self.gecko.mFont.variantAlternates |= NS_FONT_VARIANT_ALTERNATES_${value.upper()} as u16;
                unsafe {
                    Gecko_AppendAlternateValues(&mut self.gecko.mFont,
                                                NS_FONT_VARIANT_ALTERNATES_${value.upper()},
                                                ident.0.as_ptr());
                }
            },
            % endfor
            % for value in "styleset character_variant".split():
            // Multi-ident functional values.
            VariantAlternates::${to_camel_case(value)}(ref slice) => {
                self.gecko.mFont.variantAlternates |= NS_FONT_VARIANT_ALTERNATES_${value.upper()} as u16;
                for ident in slice.iter() {
                    unsafe {
                        Gecko_AppendAlternateValues(&mut self.gecko.mFont,
                                                    NS_FONT_VARIANT_ALTERNATES_${value.upper()},
                                                    ident.0.as_ptr());
                    }
                }
            },
            % endfor
            // Keyword-only value: just sets its bit.
            VariantAlternates::HistoricalForms => {
                self.gecko.mFont.variantAlternates |= NS_FONT_VARIANT_ALTERNATES_HISTORICAL as u16;
            }
        }
    }
    unsafe {
        Gecko_nsFont_SetFontFeatureValuesLookup(&mut self.gecko.mFont, device.pres_context());
    }
}
    /// Copy `font-variant-alternates` (both the bitfield and the associated
    /// alternate-value list) from `other`.
    #[allow(non_snake_case)]
    pub fn copy_font_variant_alternates_from(&mut self, other: &Self) {
        use gecko_bindings::bindings::Gecko_CopyAlternateValuesFrom;
        self.gecko.mFont.variantAlternates = other.gecko.mFont.variantAlternates;
        unsafe {
            Gecko_CopyAlternateValuesFrom(&mut self.gecko.mFont, &other.gecko.mFont);
        }
    }
pub fn reset_font_variant_alternates(&mut self, other: &Self) {
self.copy_font_variant_alternates_from(other)
}
pub fn clone_font_variant_alternates(&self) -> longhands::font_variant_alternates::computed_value::T {
use Atom;
% for value in "normal swash stylistic ornaments annotation styleset character_variant historical".split():
use gecko_bindings::structs::NS_FONT_VARIANT_ALTERNATES_${value.upper()};
% endfor
use properties::longhands::font_variant_alternates::VariantAlternates;
use properties::longhands::font_variant_alternates::VariantAlternatesList;
use values::CustomIdent;
if self.gecko.mFont.variantAlternates == NS_FONT_VARIANT_ALTERNATES_NORMAL as u16 {
return VariantAlternatesList(vec![].into_boxed_slice());
}
let mut alternates = Vec::with_capacity(self.gecko.mFont.alternateValues.len());
if self.gecko.mFont.variantAlternates & (NS_FONT_VARIANT_ALTERNATES_HISTORICAL as u16) != 0 {
alternates.push(VariantAlternates::HistoricalForms);
}
<%
property_need_ident_list = "styleset character_variant".split()
%>
% for value in property_need_ident_list:
let mut ${value}_list = Vec::new();
% endfor
for gecko_alternate_value in self.gecko.mFont.alternateValues.iter() {
let ident = Atom::from(gecko_alternate_value.value.to_string());
match gecko_alternate_value.alternate {
% for value in "Swash Stylistic Ornaments Annotation".split():
NS_FONT_VARIANT_ALTERNATES_${value.upper()} => {
alternates.push(VariantAlternates::${value}(CustomIdent(ident)));
},
% endfor
% for value in property_need_ident_list:
NS_FONT_VARIANT_ALTERNATES_${value.upper()} => {
${value}_list.push(CustomIdent(ident));
},
% endfor
x => {
panic!("Found unexpected value for font-variant-alternates: {:?}", x);
}
}
}
% for value in property_need_ident_list:
if !${value}_list.is_empty() {
alternates.push(VariantAlternates::${to_camel_case(value)}(${value}_list.into_boxed_slice()));
}
% endfor
VariantAlternatesList(alternates.into_boxed_slice())
}
${impl_simple_type_with_conversion("font_variant_ligatures", "mFont.variantLigatures")}
${impl_simple_type_with_conversion("font_variant_east_asian", "mFont.variantEastAsian")}
${impl_simple_type_with_conversion("font_variant_numeric", "mFont.variantNumeric")}
#[allow(non_snake_case)]
pub fn set__moz_min_font_size_ratio(&mut self, v: longhands::_moz_min_font_size_ratio::computed_value::T) {
let scaled = v.0 * 100.;
let percentage = if scaled > 255. {
255.
} else if scaled < 0. {
0.
} else {
scaled
};
self.gecko.mMinFontSizeRatio = percentage as u8;
}
${impl_simple_copy('_moz_min_font_size_ratio', 'mMinFontSizeRatio')}
</%self:impl_trait>
## Generates the `copy_{animation,transition}_<ident>_from` and
## `reset_{animation,transition}_<ident>` methods for one member of the
## Gecko mAnimations / mTransitions arrays.
<%def name="impl_copy_animation_or_transition_value(type, ident, gecko_ffi_name)">
    #[allow(non_snake_case)]
    pub fn copy_${type}_${ident}_from(&mut self, other: &Self) {
        unsafe { self.gecko.m${type.capitalize()}s.ensure_len(other.gecko.m${type.capitalize()}s.len()) };
        let count = other.gecko.m${type.capitalize()}${gecko_ffi_name}Count;
        self.gecko.m${type.capitalize()}${gecko_ffi_name}Count = count;
        // The length of mTransitions or mAnimations is often greater than m{Transition|Animation}XXCount,
        // don't copy values over the count.
        for (index, gecko) in self.gecko.m${type.capitalize()}s.iter_mut().enumerate().take(count as usize) {
            gecko.m${gecko_ffi_name} = other.gecko.m${type.capitalize()}s[index].m${gecko_ffi_name};
        }
    }
    #[allow(non_snake_case)]
    pub fn reset_${type}_${ident}(&mut self, other: &Self) {
        self.copy_${type}_${ident}_from(other)
    }
</%def>
## Generates the `{animation,transition}_<ident>_count` accessor for the
## corresponding Gecko count field.
<%def name="impl_animation_or_transition_count(type, ident, gecko_ffi_name)">
    #[allow(non_snake_case)]
    pub fn ${type}_${ident}_count(&self) -> usize {
        self.gecko.m${type.capitalize()}${gecko_ffi_name}Count as usize
    }
</%def>
## Generates setter/getter (plus count/copy/reset) for a time-valued member
## (delay, duration). Gecko stores these times in milliseconds, while the
## Servo computed value is in seconds.
<%def name="impl_animation_or_transition_time_value(type, ident, gecko_ffi_name)">
    #[allow(non_snake_case)]
    pub fn set_${type}_${ident}<I>(&mut self, v: I)
        where I: IntoIterator<Item = longhands::${type}_${ident}::computed_value::single_value::T>,
              I::IntoIter: ExactSizeIterator + Clone
    {
        let v = v.into_iter();
        debug_assert!(v.len() != 0);
        let input_len = v.len();
        unsafe { self.gecko.m${type.capitalize()}s.ensure_len(input_len) };
        self.gecko.m${type.capitalize()}${gecko_ffi_name}Count = input_len as u32;
        // Shorter input lists are repeated cyclically to fill the array.
        for (gecko, servo) in self.gecko.m${type.capitalize()}s.iter_mut().zip(v.cycle()) {
            gecko.m${gecko_ffi_name} = servo.seconds() * 1000.;
        }
    }
    #[allow(non_snake_case)]
    pub fn ${type}_${ident}_at(&self, index: usize)
        -> longhands::${type}_${ident}::computed_value::SingleComputedValue {
        use values::computed::Time;
        Time::from_seconds(self.gecko.m${type.capitalize()}s[index].m${gecko_ffi_name} / 1000.)
    }
    ${impl_animation_or_transition_count(type, ident, gecko_ffi_name)}
    ${impl_copy_animation_or_transition_value(type, ident, gecko_ffi_name)}
</%def>
## Generates setter/getter/count/copy for the timing-function member of the
## mAnimations / mTransitions arrays.
<%def name="impl_animation_or_transition_timing_function(type)">
    pub fn set_${type}_timing_function<I>(&mut self, v: I)
        where I: IntoIterator<Item = longhands::${type}_timing_function::computed_value::single_value::T>,
              I::IntoIter: ExactSizeIterator + Clone
    {
        let v = v.into_iter();
        debug_assert!(v.len() != 0);
        let input_len = v.len();
        unsafe { self.gecko.m${type.capitalize()}s.ensure_len(input_len) };
        self.gecko.m${type.capitalize()}TimingFunctionCount = input_len as u32;
        // Shorter input lists are repeated cyclically to fill the array.
        for (gecko, servo) in self.gecko.m${type.capitalize()}s.iter_mut().zip(v.cycle()) {
            gecko.mTimingFunction = servo.into();
        }
    }
    ${impl_animation_or_transition_count(type, 'timing_function', 'TimingFunction')}
    ${impl_copy_animation_or_transition_value(type, 'timing_function', 'TimingFunction')}
    pub fn ${type}_timing_function_at(&self, index: usize)
        -> longhands::${type}_timing_function::computed_value::SingleComputedValue {
        self.gecko.m${type.capitalize()}s[index].mTimingFunction.into()
    }
</%def>
## Thin wrappers specializing the generic animation/transition templates
## above for the concrete `transition-*` and `animation-*` properties.
<%def name="impl_transition_time_value(ident, gecko_ffi_name)">
    ${impl_animation_or_transition_time_value('transition', ident, gecko_ffi_name)}
</%def>
<%def name="impl_transition_count(ident, gecko_ffi_name)">
    ${impl_animation_or_transition_count('transition', ident, gecko_ffi_name)}
</%def>
<%def name="impl_copy_animation_value(ident, gecko_ffi_name)">
    ${impl_copy_animation_or_transition_value('animation', ident, gecko_ffi_name)}
</%def>
<%def name="impl_transition_timing_function()">
    ${impl_animation_or_transition_timing_function('transition')}
</%def>
<%def name="impl_animation_count(ident, gecko_ffi_name)">
    ${impl_animation_or_transition_count('animation', ident, gecko_ffi_name)}
</%def>
<%def name="impl_animation_time_value(ident, gecko_ffi_name)">
    ${impl_animation_or_transition_time_value('animation', ident, gecko_ffi_name)}
</%def>
<%def name="impl_animation_timing_function()">
    ${impl_animation_or_transition_timing_function('animation')}
</%def>
## Generates setter/getter/count/copy for a keyword-valued `animation-*`
## member, translating between the Servo keyword enum and the corresponding
## Gecko constants.
<%def name="impl_animation_keyword(ident, gecko_ffi_name, keyword, cast_type='u8')">
    #[allow(non_snake_case)]
    pub fn set_animation_${ident}<I>(&mut self, v: I)
        where I: IntoIterator<Item = longhands::animation_${ident}::computed_value::single_value::T>,
              I::IntoIter: ExactSizeIterator + Clone
    {
        use properties::longhands::animation_${ident}::single_value::computed_value::T as Keyword;
        use gecko_bindings::structs;
        let v = v.into_iter();
        debug_assert!(v.len() != 0);
        let input_len = v.len();
        unsafe { self.gecko.mAnimations.ensure_len(input_len) };
        self.gecko.mAnimation${gecko_ffi_name}Count = input_len as u32;
        // Shorter input lists are repeated cyclically to fill the array.
        for (gecko, servo) in self.gecko.mAnimations.iter_mut().zip(v.cycle()) {
            let result = match servo {
                % for value in keyword.gecko_values():
                    Keyword::${to_rust_ident(value)} =>
                        structs::${keyword.gecko_constant(value)} ${keyword.maybe_cast(cast_type)},
                % endfor
            };
            gecko.m${gecko_ffi_name} = result;
        }
    }
    #[allow(non_snake_case)]
    pub fn animation_${ident}_at(&self, index: usize)
        -> longhands::animation_${ident}::computed_value::SingleComputedValue {
        use properties::longhands::animation_${ident}::single_value::computed_value::T as Keyword;
        // Panics on a stored value outside the known keyword set.
        match self.gecko.mAnimations[index].m${gecko_ffi_name} ${keyword.maybe_cast("u32")} {
            % for value in keyword.gecko_values():
                structs::${keyword.gecko_constant(value)} => Keyword::${to_rust_ident(value)},
            % endfor
            x => panic!("Found unexpected value for animation-${ident}: {:?}", x),
        }
    }
    ${impl_animation_count(ident, gecko_ffi_name)}
    ${impl_copy_animation_value(ident, gecko_ffi_name)}
</%def>
<% skip_box_longhands= """display overflow-y vertical-align
animation-name animation-delay animation-duration
animation-direction animation-fill-mode animation-play-state
animation-iteration-count animation-timing-function
transition-duration transition-delay
transition-timing-function transition-property
page-break-before page-break-after
scroll-snap-points-x scroll-snap-points-y transform
scroll-snap-type-y scroll-snap-coordinate
perspective-origin transform-origin -moz-binding will-change
shape-outside contain touch-action""" %>
<%self:impl_trait style_struct_name="Box" skip_longhands="${skip_box_longhands}">
// We manually-implement the |display| property until we get general
// infrastructure for preffing certain values.
<% display_keyword = Keyword("display", "inline block inline-block table inline-table table-row-group " +
"table-header-group table-footer-group table-row table-column-group " +
"table-column table-cell table-caption list-item flex none " +
"inline-flex grid inline-grid ruby ruby-base ruby-base-container " +
"ruby-text ruby-text-container contents flow-root -webkit-box " +
"-webkit-inline-box -moz-box -moz-inline-box -moz-grid -moz-inline-grid " +
"-moz-grid-group -moz-grid-line -moz-stack -moz-inline-stack -moz-deck " +
"-moz-popup -moz-groupbox",
gecko_enum_prefix="StyleDisplay",
gecko_strip_moz_prefix=False) %>
    /// Set `display`, keeping `mOriginalDisplay` (the value before any style
    /// adjustment) in sync; compare `set_adjusted_display`.
    pub fn set_display(&mut self, v: longhands::display::computed_value::T) {
        use properties::longhands::display::computed_value::T as Keyword;
        // FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts
        let result = match v {
            % for value in display_keyword.values_for('gecko'):
                Keyword::${to_rust_ident(value)} =>
                    structs::${display_keyword.gecko_constant(value)},
            % endfor
        };
        self.gecko.mDisplay = result;
        self.gecko.mOriginalDisplay = result;
    }
/// Set the display value from the style adjustment code. This is pretty
/// much like set_display, but without touching the mOriginalDisplay field,
/// which we want to keep.
pub fn set_adjusted_display(&mut self,
v: longhands::display::computed_value::T,
_is_item_or_root: bool) {
use properties::longhands::display::computed_value::T as Keyword;
let result = match v {
% for value in display_keyword.values_for('gecko'):
Keyword::${to_rust_ident(value)} =>
structs::${display_keyword.gecko_constant(value)},
% endfor
};
self.gecko.mDisplay = result;
}
    /// Copy `display` from another style struct.
    pub fn copy_display_from(&mut self, other: &Self) {
        self.gecko.mDisplay = other.gecko.mDisplay;
        // NOTE(review): mOriginalDisplay is deliberately taken from the
        // other struct's (possibly adjusted) mDisplay, not from
        // other.mOriginalDisplay -- confirm this matches the intended
        // inheritance behavior.
        self.gecko.mOriginalDisplay = other.gecko.mDisplay;
    }
pub fn reset_display(&mut self, other: &Self) {
self.copy_display_from(other)
}
<%call expr="impl_keyword_clone('display', 'mDisplay', display_keyword)"></%call>
<% overflow_x = data.longhands_by_name["overflow-x"] %>
    /// Set `overflow-y`; the keyword type is shared with `overflow-x`.
    pub fn set_overflow_y(&mut self, v: longhands::overflow_y::computed_value::T) {
        use properties::longhands::overflow_x::computed_value::T as BaseType;
        // FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts
        self.gecko.mOverflowY = match v {
            % for value in overflow_x.keyword.values_for('gecko'):
                BaseType::${to_rust_ident(value)} => structs::${overflow_x.keyword.gecko_constant(value)} as u8,
            % endfor
        };
    }
${impl_simple_copy('overflow_y', 'mOverflowY')}
    /// Read `overflow-y` back; panics on a value outside the known keywords.
    pub fn clone_overflow_y(&self) -> longhands::overflow_y::computed_value::T {
        use properties::longhands::overflow_x::computed_value::T as BaseType;
        // FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts
        match self.gecko.mOverflowY as u32 {
            % for value in overflow_x.keyword.values_for('gecko'):
                structs::${overflow_x.keyword.gecko_constant(value)} => BaseType::${to_rust_ident(value)},
            % endfor
            x => panic!("Found unexpected value in style struct for overflow_y property: {}", x),
        }
    }
    /// Set `vertical-align`. Keywords are stored as an enumerated coord
    /// value; `<length-percentage>` values are stored directly in the coord.
    pub fn set_vertical_align(&mut self, v: longhands::vertical_align::computed_value::T) {
        use values::generics::box_::VerticalAlign;
        let value = match v {
            VerticalAlign::Baseline => structs::NS_STYLE_VERTICAL_ALIGN_BASELINE,
            VerticalAlign::Sub => structs::NS_STYLE_VERTICAL_ALIGN_SUB,
            VerticalAlign::Super => structs::NS_STYLE_VERTICAL_ALIGN_SUPER,
            VerticalAlign::Top => structs::NS_STYLE_VERTICAL_ALIGN_TOP,
            VerticalAlign::TextTop => structs::NS_STYLE_VERTICAL_ALIGN_TEXT_TOP,
            VerticalAlign::Middle => structs::NS_STYLE_VERTICAL_ALIGN_MIDDLE,
            VerticalAlign::Bottom => structs::NS_STYLE_VERTICAL_ALIGN_BOTTOM,
            VerticalAlign::TextBottom => structs::NS_STYLE_VERTICAL_ALIGN_TEXT_BOTTOM,
            VerticalAlign::MozMiddleWithBaseline => {
                structs::NS_STYLE_VERTICAL_ALIGN_MIDDLE_WITH_BASELINE
            },
            VerticalAlign::Length(length) => {
                // Length/percentage values bypass the enumerated path.
                self.gecko.mVerticalAlign.set(length);
                return;
            },
        };
        self.gecko.mVerticalAlign.set_value(CoordDataValue::Enumerated(value));
    }
    /// Read `vertical-align` back: enumerated coords map to keywords, any
    /// other coord is expected to hold a `<length-percentage>`.
    pub fn clone_vertical_align(&self) -> longhands::vertical_align::computed_value::T {
        use values::computed::LengthOrPercentage;
        use values::generics::box_::VerticalAlign;
        let gecko = &self.gecko.mVerticalAlign;
        match gecko.as_value() {
            CoordDataValue::Enumerated(value) => VerticalAlign::from_gecko_keyword(value),
            _ => {
                VerticalAlign::Length(
                    LengthOrPercentage::from_gecko_style_coord(gecko).expect(
                        "expected <length-percentage> for vertical-align",
                    ),
                )
            },
        }
    }
<%call expr="impl_coord_copy('vertical_align', 'mVerticalAlign')"></%call>
    ## Generates set/copy/clone for page-break-before and page-break-after,
    ## which Gecko stores as a single boolean per side.
    % for kind in ["before", "after"]:
    // Temp fix for Bugzilla bug 24000.
    // Map 'auto' and 'avoid' to false, and 'always', 'left', and 'right' to true.
    // "A conforming user agent may interpret the values 'left' and 'right'
    // as 'always'." - CSS2.1, section 13.3.1
    pub fn set_page_break_${kind}(&mut self, v: longhands::page_break_${kind}::computed_value::T) {
        use computed_values::page_break_${kind}::T;
        let result = match v {
            T::auto => false,
            T::always => true,
            T::avoid => false,
            T::left => true,
            T::right => true
        };
        self.gecko.mBreak${kind.title()} = result;
    }
    ${impl_simple_copy('page_break_' + kind, 'mBreak' + kind.title())}
    // Temp fix for Bugzilla bug 24000.
    // See set_page_break_before/after for detail.
    pub fn clone_page_break_${kind}(&self) -> longhands::page_break_${kind}::computed_value::T {
        use computed_values::page_break_${kind}::T;
        // NOTE(review): the boolean storage is lossy -- 'always', 'left' and
        // 'right' all read back as 'always'.
        match self.gecko.mBreak${kind.title()} {
            true => T::always,
            false => T::auto,
        }
    }
    % endfor
${impl_style_coord("scroll_snap_points_x", "mScrollSnapPointsX")}
${impl_style_coord("scroll_snap_points_y", "mScrollSnapPointsY")}
pub fn set_scroll_snap_coordinate<I>(&mut self, v: I)
where I: IntoIterator<Item = longhands::scroll_snap_coordinate::computed_value::single_value::T>,
I::IntoIter: ExactSizeIterator
{
let v = v.into_iter();
unsafe { self.gecko.mScrollSnapCoordinate.set_len_pod(v.len() as u32); }
for (gecko, servo) in self.gecko.mScrollSnapCoordinate
.iter_mut()
.zip(v) {
gecko.mXPosition = servo.horizontal.into();
gecko.mYPosition = servo.vertical.into();
}
}
pub fn copy_scroll_snap_coordinate_from(&mut self, other: &Self) {
unsafe {
self.gecko.mScrollSnapCoordinate
.set_len_pod(other.gecko.mScrollSnapCoordinate.len() as u32);
}
for (this, that) in self.gecko.mScrollSnapCoordinate
.iter_mut()
.zip(other.gecko.mScrollSnapCoordinate.iter()) {
*this = *that;
}
}
pub fn reset_scroll_snap_coordinate(&mut self, other: &Self) {
self.copy_scroll_snap_coordinate_from(other)
}
pub fn clone_scroll_snap_coordinate(&self) -> longhands::scroll_snap_coordinate::computed_value::T {
let vec = self.gecko.mScrollSnapCoordinate.iter().map(|f| f.into()).collect();
longhands::scroll_snap_coordinate::computed_value::T(vec)
}
${impl_css_url('_moz_binding', 'mBinding.mPtr')}
<%def name="transform_function_arm(name, keyword, items)">
<%
pattern = None
if keyword == "matrix3d":
# m11: number1, m12: number2, ..
single_patterns = ["m%s: %s" % (str(a / 4 + 1) + str(a % 4 + 1), b + str(a + 1)) for (a, b)
in enumerate(items)]
if name == "Matrix":
pattern = "(ComputedMatrix { %s })" % ", ".join(single_patterns)
else:
pattern = "(ComputedMatrixWithPercents { %s })" % ", ".join(single_patterns)
elif keyword == "interpolatematrix":
pattern = " { from_list: ref list1, to_list: ref list2, progress: percentage3 }"
elif keyword == "accumulatematrix":
pattern = " { from_list: ref list1, to_list: ref list2, count: integer_to_percentage3 }"
else:
# Generate contents of pattern from items
pattern = "(%s)" % ", ".join([b + str(a+1) for (a,b) in enumerate(items)])
# First %s substituted with the call to GetArrayItem, the second
# %s substituted with the corresponding variable
css_value_setters = {
"length" : "bindings::Gecko_CSSValue_SetPixelLength(%s, %s.px())",
"percentage" : "bindings::Gecko_CSSValue_SetPercentage(%s, %s.0)",
# Note: This is an integer type, but we use it as a percentage value in Gecko, so
# need to cast it to f32.
"integer_to_percentage" : "bindings::Gecko_CSSValue_SetPercentage(%s, %s as f32)",
"lop" : "%s.set_lop(%s)",
"angle" : "%s.set_angle(%s)",
"number" : "bindings::Gecko_CSSValue_SetNumber(%s, %s)",
# Note: We use nsCSSValueSharedList here, instead of nsCSSValueList_heap
# because this function is not called on the main thread and
# nsCSSValueList_heap is not thread safe.
"list" : "%s.set_shared_list(%s.0.as_ref().unwrap().into_iter().map(&convert_to_ns_css_value));",
}
%>
longhands::transform::computed_value::ComputedOperation::${name}${pattern} => {
bindings::Gecko_CSSValue_SetFunction(gecko_value, ${len(items) + 1});
bindings::Gecko_CSSValue_SetKeyword(
bindings::Gecko_CSSValue_GetArrayItem(gecko_value, 0),
structs::nsCSSKeyword::eCSSKeyword_${keyword}
);
% for index, item in enumerate(items):
% if item == "list":
debug_assert!(${item}${index + 1}.0.is_some());
% endif
${css_value_setters[item] % (
"bindings::Gecko_CSSValue_GetArrayItem(gecko_value, %d)" % (index + 1),
item + str(index + 1)
)};
% endfor
}
</%def>
    /// Convert a single Servo transform operation into the Gecko nsCSSValue
    /// function representation (match arms generated by
    /// `transform_function_arm` above).
    fn set_single_transform_function(servo_value: &longhands::transform::computed_value::ComputedOperation,
                                     gecko_value: &mut structs::nsCSSValue /* output */) {
        use properties::longhands::transform::computed_value::ComputedMatrix;
        use properties::longhands::transform::computed_value::ComputedMatrixWithPercents;
        use properties::longhands::transform::computed_value::ComputedOperation;
        // Used by the "list" arms to recursively convert nested transform
        // lists (interpolatematrix / accumulatematrix operands).
        let convert_to_ns_css_value = |item: &ComputedOperation| -> structs::nsCSSValue {
            let mut value = structs::nsCSSValue::null();
            Self::set_single_transform_function(item, &mut value);
            value
        };
        unsafe {
            match *servo_value {
                ${transform_function_arm("Matrix", "matrix3d", ["number"] * 16)}
                ${transform_function_arm("MatrixWithPercents", "matrix3d", ["number"] * 12 + ["lop"] * 2
                                         + ["length"] + ["number"])}
                ${transform_function_arm("Skew", "skew", ["angle"] * 2)}
                ${transform_function_arm("Translate", "translate3d", ["lop", "lop", "length"])}
                ${transform_function_arm("Scale", "scale3d", ["number"] * 3)}
                ${transform_function_arm("Rotate", "rotate3d", ["number"] * 3 + ["angle"])}
                ${transform_function_arm("Perspective", "perspective", ["length"])}
                ${transform_function_arm("InterpolateMatrix", "interpolatematrix",
                                         ["list"] * 2 + ["percentage"])}
                ${transform_function_arm("AccumulateMatrix", "accumulatematrix",
                                         ["list"] * 2 + ["integer_to_percentage"])}
            }
        }
    }
    /// Convert a whole Servo transform operation list into a Gecko
    /// nsCSSValueSharedList, replacing the previous contents of `output`.
    pub fn convert_transform(input: &[longhands::transform::computed_value::ComputedOperation],
                             output: &mut structs::root::RefPtr<structs::root::nsCSSValueSharedList>) {
        use gecko_bindings::sugar::refptr::RefPtr;
        unsafe { output.clear() };
        // Take ownership of the freshly allocated, already-addrefed list.
        let list = unsafe {
            RefPtr::from_addrefed(bindings::Gecko_NewCSSValueSharedList(input.len() as u32))
        };
        let value_list = unsafe { list.mHead.as_mut() };
        if let Some(value_list) = value_list {
            for (gecko, servo) in value_list.into_iter().zip(input.into_iter()) {
                Self::set_single_transform_function(servo, gecko);
            }
        }
        unsafe { output.set_move(list) };
    }
pub fn set_transform(&mut self, other: longhands::transform::computed_value::T) {
let vec = if let Some(v) = other.0 {
v
} else {
unsafe {
self.gecko.mSpecifiedTransform.clear();
}
return;
};
Self::convert_transform(&vec, &mut self.gecko.mSpecifiedTransform);
}
pub fn copy_transform_from(&mut self, other: &Self) {
unsafe { self.gecko.mSpecifiedTransform.set(&other.gecko.mSpecifiedTransform); }
}
pub fn reset_transform(&mut self, other: &Self) {
self.copy_transform_from(other)
}
<%def name="computed_operation_arm(name, keyword, items)">
<%
# %s is substituted with the call to GetArrayItem.
css_value_getters = {
"length" : "Length::new(bindings::Gecko_CSSValue_GetNumber(%s))",
"lop" : "%s.get_lop()",
"angle" : "%s.get_angle()",
"number" : "bindings::Gecko_CSSValue_GetNumber(%s)",
"percentage" : "Percentage(bindings::Gecko_CSSValue_GetPercentage(%s))",
"percentage_to_integer" : "bindings::Gecko_CSSValue_GetPercentage(%s) as i32",
"list" : "TransformList(Some(convert_shared_list_to_operations(%s)))",
}
pre_symbols = "("
post_symbols = ")"
if keyword == "interpolatematrix" or keyword == "accumulatematrix":
# We generate this like: "ComputedOperation::InterpolateMatrix {", so the space is
# between "InterpolateMatrix"/"AccumulateMatrix" and '{'
pre_symbols = " {"
post_symbols = "}"
elif keyword == "matrix3d":
pre_symbols = "(ComputedMatrix {"
post_symbols = "})"
field_names = None
if keyword == "interpolatematrix":
field_names = ["from_list", "to_list", "progress"]
elif keyword == "accumulatematrix":
field_names = ["from_list", "to_list", "count"]
%>
structs::nsCSSKeyword::eCSSKeyword_${keyword} => {
ComputedOperation::${name}${pre_symbols}
% for index, item in enumerate(items):
% if keyword == "matrix3d":
m${index / 4 + 1}${index % 4 + 1}:
% elif keyword == "interpolatematrix" or keyword == "accumulatematrix":
${field_names[index]}:
% endif
${css_value_getters[item] % (
"bindings::Gecko_CSSValue_GetArrayItemConst(gecko_value, %d)" % (index + 1)
)},
% endfor
${post_symbols}
},
</%def>
    /// Convert one Gecko nsCSSValue transform function back into a Servo
    /// ComputedOperation (inverse of `set_single_transform_function`; match
    /// arms generated by `computed_operation_arm` above).
    fn clone_single_transform_function(gecko_value: &structs::nsCSSValue)
                                       -> longhands::transform::computed_value::ComputedOperation {
        use properties::longhands::transform::computed_value::ComputedMatrix;
        use properties::longhands::transform::computed_value::ComputedOperation;
        use properties::longhands::transform::computed_value::T as TransformList;
        use values::computed::{Length, Percentage};
        // Used by the "list" arms to recursively convert nested shared lists.
        let convert_shared_list_to_operations = |value: &structs::nsCSSValue|
                                                -> Vec<ComputedOperation> {
            debug_assert!(value.mUnit == structs::nsCSSUnit::eCSSUnit_SharedList);
            let value_list = unsafe {
                value.mValue.mSharedList.as_ref()
                     .as_mut().expect("List pointer should be non-null").mHead.as_ref()
            };
            debug_assert!(value_list.is_some(), "An empty shared list is not allowed");
            value_list.unwrap().into_iter()
                      .map(|item| Self::clone_single_transform_function(item))
                      .collect()
        };
        // The function keyword is stored as array item 0.
        let transform_function = unsafe {
            bindings::Gecko_CSSValue_GetKeyword(bindings::Gecko_CSSValue_GetArrayItemConst(gecko_value, 0))
        };
        unsafe {
            match transform_function {
                ${computed_operation_arm("Matrix", "matrix3d", ["number"] * 16)}
                ${computed_operation_arm("Skew", "skew", ["angle"] * 2)}
                ${computed_operation_arm("Translate", "translate3d", ["lop", "lop", "length"])}
                ${computed_operation_arm("Scale", "scale3d", ["number"] * 3)}
                ${computed_operation_arm("Rotate", "rotate3d", ["number"] * 3 + ["angle"])}
                ${computed_operation_arm("Perspective", "perspective", ["length"])}
                ${computed_operation_arm("InterpolateMatrix", "interpolatematrix",
                                         ["list"] * 2 + ["percentage"])}
                ${computed_operation_arm("AccumulateMatrix", "accumulatematrix",
                                         ["list"] * 2 + ["percentage_to_integer"])}
                _ => panic!("We shouldn't set any other transform function types"),
            }
        }
    }
    /// Read the transform list back out of the Gecko struct; a null shared
    /// list pointer means no transform (`None`).
    pub fn clone_transform(&self) -> longhands::transform::computed_value::T {
        use properties::longhands::transform::computed_value;
        if self.gecko.mSpecifiedTransform.mRawPtr.is_null() {
            return computed_value::T(None);
        }
        let list = unsafe { (*self.gecko.mSpecifiedTransform.to_safe().get()).mHead.as_ref() };
        let result = list.map(|list| {
            list.into_iter()
                .map(|value| Self::clone_single_transform_function(value))
                .collect()
        });
        computed_value::T(result)
    }
${impl_transition_time_value('delay', 'Delay')}
${impl_transition_time_value('duration', 'Duration')}
${impl_transition_timing_function()}
    /// Combined duration (duration clamped to non-negative, plus delay) of
    /// the transition at `index`, in milliseconds (the unit both members
    /// are stored in -- see the time-value setter above).
    pub fn transition_combined_duration_at(&self, index: usize) -> f32 {
        // https://drafts.csswg.org/css-transitions/#transition-combined-duration
        self.gecko.mTransitions[index].mDuration.max(0.0) + self.gecko.mTransitions[index].mDelay
    }
    /// Set `transition-property`. An empty iterator means `none`, which
    /// Gecko represents as a single eCSSPropertyExtra_no_properties entry.
    pub fn set_transition_property<I>(&mut self, v: I)
        where I: IntoIterator<Item = longhands::transition_property::computed_value::single_value::T>,
              I::IntoIter: ExactSizeIterator
    {
        use gecko_bindings::structs::nsCSSPropertyID::eCSSPropertyExtra_no_properties;
        let v = v.into_iter();
        if v.len() != 0 {
            unsafe { self.gecko.mTransitions.ensure_len(v.len()) };
            self.gecko.mTransitionPropertyCount = v.len() as u32;
            for (servo, gecko) in v.zip(self.gecko.mTransitions.iter_mut()) {
                match servo {
                    // Unknown/custom properties keep their name as an atom
                    // on the Gecko side.
                    TransitionProperty::Unsupported(ref ident) => unsafe {
                        Gecko_StyleTransition_SetUnsupportedProperty(gecko, ident.0.as_ptr())
                    },
                    _ => gecko.mProperty = (&servo).into(),
                }
            }
        } else {
            // In gecko |none| is represented by eCSSPropertyExtra_no_properties.
            self.gecko.mTransitionPropertyCount = 1;
            self.gecko.mTransitions[0].mProperty = eCSSPropertyExtra_no_properties;
        }
    }
    /// Returns whether there are any transitions specified.
    pub fn specifies_transitions(&self) -> bool {
        use gecko_bindings::structs::nsCSSPropertyID::eCSSPropertyExtra_all_properties;
        // Special case: a single 'all' entry whose combined duration
        // (clamped duration + delay) is non-positive is treated as if
        // nothing were specified.
        if self.gecko.mTransitionPropertyCount == 1 &&
            self.gecko.mTransitions[0].mProperty == eCSSPropertyExtra_all_properties &&
            self.gecko.mTransitions[0].mDuration.max(0.0) + self.gecko.mTransitions[0].mDelay <= 0.0f32 {
            return false;
        }
        self.gecko.mTransitionPropertyCount > 0
    }
    /// Read back `transition-property` at `index`. Unknown/custom
    /// properties come back as `Unsupported` with the stored atom; the
    /// `none` sentinel comes back as `Unsupported("none")`.
    pub fn transition_property_at(&self, index: usize)
        -> longhands::transition_property::computed_value::SingleComputedValue {
        use gecko_bindings::structs::nsCSSPropertyID::eCSSPropertyExtra_no_properties;
        use gecko_bindings::structs::nsCSSPropertyID::eCSSPropertyExtra_variable;
        use gecko_bindings::structs::nsCSSPropertyID::eCSSProperty_UNKNOWN;
        use gecko_bindings::structs::nsIAtom;
        let property = self.gecko.mTransitions[index].mProperty;
        if property == eCSSProperty_UNKNOWN || property == eCSSPropertyExtra_variable {
            let atom = self.gecko.mTransitions[index].mUnknownProperty.raw::<nsIAtom>();
            debug_assert!(!atom.is_null());
            TransitionProperty::Unsupported(CustomIdent(atom.into()))
        } else if property == eCSSPropertyExtra_no_properties {
            // Actually, we don't expect TransitionProperty::Unsupported also represents "none",
            // but if the caller wants to convert it, it is fine. Please use it carefully.
            TransitionProperty::Unsupported(CustomIdent(atom!("none")))
        } else {
            property.into()
        }
    }
pub fn transition_nscsspropertyid_at(&self, index: usize) -> nsCSSPropertyID {
self.gecko.mTransitions[index].mProperty
}
    /// Copy `transition-property`, re-registering the unknown-property atom
    /// with Gecko for entries that store a custom/unknown property name.
    pub fn copy_transition_property_from(&mut self, other: &Self) {
        use gecko_bindings::structs::nsCSSPropertyID::eCSSPropertyExtra_variable;
        use gecko_bindings::structs::nsCSSPropertyID::eCSSProperty_UNKNOWN;
        use gecko_bindings::structs::nsIAtom;
        unsafe { self.gecko.mTransitions.ensure_len(other.gecko.mTransitions.len()) };
        let count = other.gecko.mTransitionPropertyCount;
        self.gecko.mTransitionPropertyCount = count;
        // Only copy up to the count; entries past it are left untouched.
        for (index, transition) in self.gecko.mTransitions.iter_mut().enumerate().take(count as usize) {
            transition.mProperty = other.gecko.mTransitions[index].mProperty;
            if transition.mProperty == eCSSProperty_UNKNOWN ||
               transition.mProperty == eCSSPropertyExtra_variable {
                let atom = other.gecko.mTransitions[index].mUnknownProperty.raw::<nsIAtom>();
                debug_assert!(!atom.is_null());
                unsafe { Gecko_StyleTransition_SetUnsupportedProperty(transition, atom) };
            }
        }
    }
pub fn reset_transition_property(&mut self, other: &Self) {
self.copy_transition_property_from(other)
}
${impl_transition_count('property', 'Property')}
pub fn animations_equals(&self, other: &Self) -> bool {
unsafe { bindings::Gecko_StyleAnimationsEquals(&self.gecko.mAnimations, &other.gecko.mAnimations) }
}
    /// Set `animation-name`. The caller must supply at least one value;
    /// `none` is represented by an empty name string.
    pub fn set_animation_name<I>(&mut self, v: I)
        where I: IntoIterator<Item = longhands::animation_name::computed_value::single_value::T>,
              I::IntoIter: ExactSizeIterator
    {
        let v = v.into_iter();
        debug_assert!(v.len() != 0);
        unsafe { self.gecko.mAnimations.ensure_len(v.len()) };
        self.gecko.mAnimationNameCount = v.len() as u32;
        for (servo, gecko) in v.zip(self.gecko.mAnimations.iter_mut()) {
            // TODO This is inefficient. We should fix this in bug 1329169.
            gecko.mName.assign(match servo.0 {
                Some(ref name) => name.as_atom().as_slice(),
                None => &[], // Empty string for 'none'
            });
        }
    }
    /// Read back `animation-name` at `index`; an empty stored name maps to
    /// `None` (i.e. `animation-name: none`).
    pub fn animation_name_at(&self, index: usize)
        -> longhands::animation_name::computed_value::SingleComputedValue {
        use properties::longhands::animation_name::single_value::SpecifiedValue as AnimationName;
        // XXX: Is there any effective ways?
        let atom = &self.gecko.mAnimations[index].mName;
        if atom.is_empty() {
            AnimationName(None)
        } else {
            AnimationName(Some(KeyframesName::from_ident(&atom.to_string())))
        }
    }
    /// Copy `animation-name`, honoring the count field (entries past the
    /// count are left untouched).
    pub fn copy_animation_name_from(&mut self, other: &Self) {
        unsafe { self.gecko.mAnimations.ensure_len(other.gecko.mAnimations.len()) };
        let count = other.gecko.mAnimationNameCount;
        self.gecko.mAnimationNameCount = count;
        // The length of mAnimations is often greater than mAnimationXXCount,
        // don't copy values over the count.
        for (index, animation) in self.gecko.mAnimations.iter_mut().enumerate().take(count as usize) {
            animation.mName.assign(&*other.gecko.mAnimations[index].mName);
        }
    }
pub fn reset_animation_name(&mut self, other: &Self) {
self.copy_animation_name_from(other)
}
${impl_animation_count('name', 'Name')}
${impl_animation_time_value('delay', 'Delay')}
${impl_animation_time_value('duration', 'Duration')}
${impl_animation_keyword('direction', 'Direction',
data.longhands_by_name["animation-direction"].keyword)}
${impl_animation_keyword('fill_mode', 'FillMode',
data.longhands_by_name["animation-fill-mode"].keyword)}
${impl_animation_keyword('play_state', 'PlayState',
data.longhands_by_name["animation-play-state"].keyword)}
    /// Set `animation-iteration-count`; the `infinite` keyword is stored as
    /// an f32 infinity in `mIterationCount`.
    pub fn set_animation_iteration_count<I>(&mut self, v: I)
        where I: IntoIterator<Item = longhands::animation_iteration_count::computed_value::single_value::T>,
              I::IntoIter: ExactSizeIterator + Clone
    {
        use std::f32;
        use properties::longhands::animation_iteration_count::single_value::SpecifiedValue as AnimationIterationCount;
        let v = v.into_iter();
        debug_assert!(v.len() != 0);
        let input_len = v.len();
        unsafe { self.gecko.mAnimations.ensure_len(input_len) };
        self.gecko.mAnimationIterationCountCount = input_len as u32;
        // Shorter input lists are repeated cyclically to fill the array.
        for (gecko, servo) in self.gecko.mAnimations.iter_mut().zip(v.cycle()) {
            match servo {
                AnimationIterationCount::Number(n) => gecko.mIterationCount = n,
                AnimationIterationCount::Infinite => gecko.mIterationCount = f32::INFINITY,
            }
        }
    }
pub fn animation_iteration_count_at(&self, index: usize)
-> longhands::animation_iteration_count::computed_value::SingleComputedValue {
use properties::longhands::animation_iteration_count::single_value::computed_value::T
as AnimationIterationCount;<|fim▁hole|> AnimationIterationCount::Number(self.gecko.mAnimations[index].mIterationCount)
}
}
// Iteration-count count/copy accessors and the timing-function plumbing
// are generated by Mako.
${impl_animation_count('iteration_count', 'IterationCount')}
${impl_copy_animation_value('iteration_count', 'IterationCount')}
${impl_animation_timing_function()}

<% scroll_snap_type_keyword = Keyword("scroll-snap-type", "none mandatory proximity") %>
// scroll-snap-type-y maps straight onto Gecko's mScrollSnapTypeY keyword field.
${impl_keyword('scroll_snap_type_y', 'mScrollSnapTypeY', scroll_snap_type_keyword)}
/// Writes both components of `perspective-origin` into Gecko's
/// two-element coord array (index 0 = horizontal, 1 = vertical).
pub fn set_perspective_origin(&mut self, v: longhands::perspective_origin::computed_value::T) {
    self.gecko.mPerspectiveOrigin[0].set(v.horizontal);
    self.gecko.mPerspectiveOrigin[1].set(v.vertical);
}

pub fn copy_perspective_origin_from(&mut self, other: &Self) {
    self.gecko.mPerspectiveOrigin[0].copy_from(&other.gecko.mPerspectiveOrigin[0]);
    self.gecko.mPerspectiveOrigin[1].copy_from(&other.gecko.mPerspectiveOrigin[1]);
}

pub fn reset_perspective_origin(&mut self, other: &Self) {
    self.copy_perspective_origin_from(other)
}

/// Reads `perspective-origin` back out of the Gecko struct.
/// Panics if the stored coords are not length-or-percentage, which the
/// setter above guarantees.
pub fn clone_perspective_origin(&self) -> longhands::perspective_origin::computed_value::T {
    use properties::longhands::perspective_origin::computed_value::T;
    use values::computed::LengthOrPercentage;
    T {
        horizontal: LengthOrPercentage::from_gecko_style_coord(&self.gecko.mPerspectiveOrigin[0])
            .expect("Expected length or percentage for horizontal value of perspective-origin"),
        vertical: LengthOrPercentage::from_gecko_style_coord(&self.gecko.mPerspectiveOrigin[1])
            .expect("Expected length or percentage for vertical value of perspective-origin"),
    }
}
/// Writes the three components of `transform-origin` into Gecko's coord
/// array (0 = horizontal, 1 = vertical, 2 = depth).
pub fn set_transform_origin(&mut self, v: longhands::transform_origin::computed_value::T) {
    self.gecko.mTransformOrigin[0].set(v.horizontal);
    self.gecko.mTransformOrigin[1].set(v.vertical);
    self.gecko.mTransformOrigin[2].set(v.depth);
}

pub fn copy_transform_origin_from(&mut self, other: &Self) {
    self.gecko.mTransformOrigin[0].copy_from(&other.gecko.mTransformOrigin[0]);
    self.gecko.mTransformOrigin[1].copy_from(&other.gecko.mTransformOrigin[1]);
    self.gecko.mTransformOrigin[2].copy_from(&other.gecko.mTransformOrigin[2]);
}

pub fn reset_transform_origin(&mut self, other: &Self) {
    self.copy_transform_origin_from(other)
}

/// Reads `transform-origin` back out; horizontal/vertical are
/// length-or-percentage, depth is a plain length.
pub fn clone_transform_origin(&self) -> longhands::transform_origin::computed_value::T {
    use properties::longhands::transform_origin::computed_value::T;
    use values::computed::{Length, LengthOrPercentage};
    T {
        horizontal: LengthOrPercentage::from_gecko_style_coord(&self.gecko.mTransformOrigin[0])
            .expect("clone for LengthOrPercentage failed"),
        vertical: LengthOrPercentage::from_gecko_style_coord(&self.gecko.mTransformOrigin[1])
            .expect("clone for LengthOrPercentage failed"),
        depth: Length::from_gecko_style_coord(&self.gecko.mTransformOrigin[2])
            .expect("clone for Length failed"),
    }
}
/// Sets `will-change`, updating both the atom list Gecko stores for the
/// property and the derived `mWillChangeBitField` optimization hints.
pub fn set_will_change(&mut self, v: longhands::will_change::computed_value::T) {
    use gecko_bindings::bindings::{Gecko_AppendWillChange, Gecko_ClearWillChange};
    use gecko_bindings::structs::NS_STYLE_WILL_CHANGE_OPACITY;
    use gecko_bindings::structs::NS_STYLE_WILL_CHANGE_SCROLL;
    use gecko_bindings::structs::NS_STYLE_WILL_CHANGE_TRANSFORM;
    use properties::PropertyId;
    use properties::longhands::will_change::computed_value::T;

    // Translate a longhand's servo property flags into the matching
    // Gecko will-change hint bits.
    fn will_change_bitfield_from_prop_flags(prop: &LonghandId) -> u8 {
        use properties::{ABSPOS_CB, CREATES_STACKING_CONTEXT, FIXPOS_CB};
        use gecko_bindings::structs::NS_STYLE_WILL_CHANGE_ABSPOS_CB;
        use gecko_bindings::structs::NS_STYLE_WILL_CHANGE_FIXPOS_CB;
        use gecko_bindings::structs::NS_STYLE_WILL_CHANGE_STACKING_CONTEXT;
        let servo_flags = prop.flags();
        let mut bitfield = 0;
        if servo_flags.contains(CREATES_STACKING_CONTEXT) {
            bitfield |= NS_STYLE_WILL_CHANGE_STACKING_CONTEXT;
        }
        if servo_flags.contains(FIXPOS_CB) {
            bitfield |= NS_STYLE_WILL_CHANGE_FIXPOS_CB;
        }
        if servo_flags.contains(ABSPOS_CB) {
            bitfield |= NS_STYLE_WILL_CHANGE_ABSPOS_CB;
        }
        bitfield as u8
    }

    self.gecko.mWillChangeBitField = 0;

    match v {
        T::AnimateableFeatures(features) => {
            unsafe {
                Gecko_ClearWillChange(&mut self.gecko, features.len());
            }
            for feature in features.iter() {
                // A few well-known feature names map to dedicated hint bits.
                if feature.0 == atom!("scroll-position") {
                    self.gecko.mWillChangeBitField |= NS_STYLE_WILL_CHANGE_SCROLL as u8;
                } else if feature.0 == atom!("opacity") {
                    self.gecko.mWillChangeBitField |= NS_STYLE_WILL_CHANGE_OPACITY as u8;
                } else if feature.0 == atom!("transform") {
                    self.gecko.mWillChangeBitField |= NS_STYLE_WILL_CHANGE_TRANSFORM as u8;
                }

                unsafe {
                    Gecko_AppendWillChange(&mut self.gecko, feature.0.as_ptr());
                }

                // A feature naming a known CSS property contributes that
                // property's (or, for shorthands, all its longhands') bits.
                if let Ok(prop_id) = PropertyId::parse(&feature.0.to_string(), None) {
                    match prop_id.as_shorthand() {
                        Ok(shorthand) => {
                            for longhand in shorthand.longhands() {
                                self.gecko.mWillChangeBitField |=
                                    will_change_bitfield_from_prop_flags(longhand);
                            }
                        },
                        Err(longhand_or_custom) => {
                            if let PropertyDeclarationId::Longhand(longhand)
                                = longhand_or_custom {
                                self.gecko.mWillChangeBitField |=
                                    will_change_bitfield_from_prop_flags(&longhand);
                            }
                        },
                    }
                }
            }
        },
        T::Auto => {
            unsafe {
                Gecko_ClearWillChange(&mut self.gecko, 0);
            }
        },
    };
}
pub fn copy_will_change_from(&mut self, other: &Self) {
    use gecko_bindings::bindings::Gecko_CopyWillChangeFrom;
    self.gecko.mWillChangeBitField = other.gecko.mWillChangeBitField;
    unsafe {
        // NOTE(review): the binding takes a mutable pointer, so we cast
        // away constness; presumably the C++ side only reads from its
        // second argument — confirm against the Gecko implementation.
        Gecko_CopyWillChangeFrom(&mut self.gecko, &other.gecko as *const _ as *mut _);
    }
}

pub fn reset_will_change(&mut self, other: &Self) {
    self.copy_will_change_from(other)
}

/// Reads `will-change` back out: an empty atom buffer means `auto`,
/// otherwise each stored atom becomes a custom-ident feature.
pub fn clone_will_change(&self) -> longhands::will_change::computed_value::T {
    use properties::longhands::will_change::computed_value::T;
    use gecko_bindings::structs::nsIAtom;
    use gecko_string_cache::Atom;
    use values::CustomIdent;

    if self.gecko.mWillChange.mBuffer.len() == 0 {
        T::Auto
    } else {
        T::AnimateableFeatures(
            self.gecko.mWillChange.mBuffer.iter().map(|gecko_atom| {
                CustomIdent(
                    // NOTE(review): from_addrefed takes over one reference;
                    // presumably the buffer entries are addrefed on our
                    // behalf here — confirm against the binding contract.
                    unsafe { Atom::from_addrefed(*gecko_atom as *mut nsIAtom) }
                )
            }).collect()
        )
    }
}
// shape-outside accessors are generated from the shared shape-source helper.
<% impl_shape_source("shape_outside", "mShapeOutside") %>

/// Sets `contain`, packing the servo flag set into Gecko's bitfield.
pub fn set_contain(&mut self, v: longhands::contain::computed_value::T) {
    use gecko_bindings::structs::NS_STYLE_CONTAIN_NONE;
    use gecko_bindings::structs::NS_STYLE_CONTAIN_STRICT;
    use gecko_bindings::structs::NS_STYLE_CONTAIN_LAYOUT;
    use gecko_bindings::structs::NS_STYLE_CONTAIN_STYLE;
    use gecko_bindings::structs::NS_STYLE_CONTAIN_PAINT;
    use gecko_bindings::structs::NS_STYLE_CONTAIN_ALL_BITS;
    use properties::longhands::contain;

    if v.is_empty() {
        self.gecko.mContain = NS_STYLE_CONTAIN_NONE as u8;
        return;
    }

    if v.contains(contain::STRICT) {
        // `strict` is stored as its own bit plus every individual
        // containment bit, so single-bit tests work uniformly.
        self.gecko.mContain = (NS_STYLE_CONTAIN_STRICT | NS_STYLE_CONTAIN_ALL_BITS) as u8;
        return;
    }

    let mut bitfield = 0;
    if v.contains(contain::LAYOUT) {
        bitfield |= NS_STYLE_CONTAIN_LAYOUT;
    }
    if v.contains(contain::STYLE) {
        bitfield |= NS_STYLE_CONTAIN_STYLE;
    }
    if v.contains(contain::PAINT) {
        bitfield |= NS_STYLE_CONTAIN_PAINT;
    }

    self.gecko.mContain = bitfield as u8;
}
/// Maps Gecko's `contain` bitfield back onto the servo flag set.
pub fn clone_contain(&self) -> longhands::contain::computed_value::T {
    use gecko_bindings::structs::NS_STYLE_CONTAIN_STRICT;
    use gecko_bindings::structs::NS_STYLE_CONTAIN_LAYOUT;
    use gecko_bindings::structs::NS_STYLE_CONTAIN_STYLE;
    use gecko_bindings::structs::NS_STYLE_CONTAIN_PAINT;
    use gecko_bindings::structs::NS_STYLE_CONTAIN_ALL_BITS;
    use properties::longhands::contain;

    let bits = self.gecko.mContain;
    let mut flags = contain::computed_value::T::empty();

    // `strict` was stored as the strict bit plus every individual bit
    // (see set_contain); recover it as a whole.
    if bits & (NS_STYLE_CONTAIN_STRICT as u8) != 0 &&
       bits & (NS_STYLE_CONTAIN_ALL_BITS as u8) != 0 {
        flags.insert(contain::STRICT | contain::STRICT_BITS);
        return flags;
    }

    for &(gecko_bit, servo_flag) in &[
        (NS_STYLE_CONTAIN_LAYOUT, contain::LAYOUT),
        (NS_STYLE_CONTAIN_STYLE, contain::STYLE),
        (NS_STYLE_CONTAIN_PAINT, contain::PAINT),
    ] {
        if bits & (gecko_bit as u8) != 0 {
            flags.insert(servo_flag);
        }
    }

    flags
}
// Plain-field copy/reset for contain, and the conversion-based accessors
// for touch-action, are Mako-generated.
${impl_simple_copy("contain", "mContain")}
${impl_simple_type_with_conversion("touch_action")}
</%self:impl_trait>
## Emits a setter for one per-layer field of the background/mask image
## layers. The caller's body converts a single servo value (bound as
## `servo`) into the Gecko field value. Copy/reset accessors are emitted
## as a side effect via copy_simple_image_array_property.
<%def name="simple_image_array_property(name, shorthand, field_name)">
<%
    image_layers_field = "mImage" if shorthand == "background" else "mMask"
    copy_simple_image_array_property(name, shorthand, image_layers_field, field_name)
%>

pub fn set_${shorthand}_${name}<I>(&mut self, v: I)
    where I: IntoIterator<Item=longhands::${shorthand}_${name}::computed_value::single_value::T>,
          I::IntoIter: ExactSizeIterator
{
    use gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;
    let v = v.into_iter();

    unsafe {
        // Grow (never shrink) the shared layer array to fit the new list.
        Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field}, v.len(),
                                      LayerType::${shorthand.title()});
    }

    self.gecko.${image_layers_field}.${field_name}Count = v.len() as u32;
    for (servo, geckolayer) in v.zip(self.gecko.${image_layers_field}.mLayers.iter_mut()) {
        geckolayer.${field_name} = {
            ${caller.body()}
        };
    }
}
</%def>
## Emits copy_*/reset_* accessors for one per-layer field, copying only
## the first `count` layers (the shared array may be longer).
<%def name="copy_simple_image_array_property(name, shorthand, layers_field_name, field_name)">
pub fn copy_${shorthand}_${name}_from(&mut self, other: &Self) {
    use gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;

    let count = other.gecko.${layers_field_name}.${field_name}Count;
    unsafe {
        Gecko_EnsureImageLayersLength(&mut self.gecko.${layers_field_name},
                                      count as usize,
                                      LayerType::${shorthand.title()});
    }

    for (layer, other) in self.gecko.${layers_field_name}.mLayers.iter_mut()
                              .zip(other.gecko.${layers_field_name}.mLayers.iter())
                              .take(count as usize) {
        layer.${field_name} = other.${field_name};
    }
    self.gecko.${layers_field_name}.${field_name}Count = count;
}

pub fn reset_${shorthand}_${name}(&mut self, other: &Self) {
    self.copy_${shorthand}_${name}_from(other)
}
</%def>
## Emits set/copy/reset/clone accessors for a keyword-valued per-layer
## field, mapping servo keyword variants to Gecko constants and back.
<%def name="impl_simple_image_array_property(name, shorthand, layer_field_name, field_name, struct_name)">
<%
    ident = "%s_%s" % (shorthand, name)
    style_struct = next(x for x in data.style_structs if x.name == struct_name)
    longhand = next(x for x in style_struct.longhands if x.ident == ident)
    keyword = longhand.keyword
%>

<% copy_simple_image_array_property(name, shorthand, layer_field_name, field_name) %>

pub fn set_${ident}<I>(&mut self, v: I)
    where I: IntoIterator<Item=longhands::${ident}::computed_value::single_value::T>,
          I::IntoIter: ExactSizeIterator
{
    use properties::longhands::${ident}::single_value::computed_value::T as Keyword;
    use gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;

    let v = v.into_iter();

    unsafe {
        Gecko_EnsureImageLayersLength(&mut self.gecko.${layer_field_name}, v.len(),
                                      LayerType::${shorthand.title()});
    }

    self.gecko.${layer_field_name}.${field_name}Count = v.len() as u32;
    for (servo, geckolayer) in v.zip(self.gecko.${layer_field_name}.mLayers.iter_mut()) {
        geckolayer.${field_name} = {
            // Map each servo keyword variant onto its Gecko constant.
            match servo {
                % for value in keyword.values_for("gecko"):
                Keyword::${to_rust_ident(value)} =>
                    structs::${keyword.gecko_constant(value)} ${keyword.maybe_cast('u8')},
                % endfor
            }
        };
    }
}

pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
    use properties::longhands::${ident}::single_value::computed_value::T as Keyword;

    ## match arms must be constants, so pre-cast the Gecko values when needed.
    % if keyword.needs_cast():
    % for value in keyword.values_for('gecko'):
    const ${keyword.casted_constant_name(value, "u8")} : u8 =
        structs::${keyword.gecko_constant(value)} as u8;
    % endfor
    % endif

    longhands::${ident}::computed_value::T (
        self.gecko.${layer_field_name}.mLayers.iter()
            .take(self.gecko.${layer_field_name}.${field_name}Count as usize)
            .map(|ref layer| {
                match layer.${field_name} {
                    % for value in longhand.keyword.values_for("gecko"):
                    % if keyword.needs_cast():
                    ${keyword.casted_constant_name(value, "u8")}
                    % else:
                    structs::${keyword.gecko_constant(value)}
                    % endif
                        => Keyword::${to_rust_ident(value)},
                    % endfor
                    x => panic!("Found unexpected value in style struct for ${ident} property: {:?}", x),
                }
            }).collect()
    )
}
</%def>
## Emits the accessors shared by the background and mask shorthands:
## repeat, clip, origin, position, size and image.
<%def name="impl_common_image_layer_properties(shorthand)">
<%
    if shorthand == "background":
        image_layers_field = "mImage"
        struct_name = "Background"
    else:
        image_layers_field = "mMask"
        struct_name = "SVG"
%>

<%self:simple_image_array_property name="repeat" shorthand="${shorthand}" field_name="mRepeat">
    use properties::longhands::${shorthand}_repeat::single_value::computed_value::RepeatKeyword;
    use gecko_bindings::structs::nsStyleImageLayers_Repeat;
    use gecko_bindings::structs::StyleImageLayerRepeat;

    // Convert one servo repeat keyword to the Gecko enum.
    fn to_ns(repeat: RepeatKeyword) -> StyleImageLayerRepeat {
        match repeat {
            RepeatKeyword::Repeat => StyleImageLayerRepeat::Repeat,
            RepeatKeyword::Space => StyleImageLayerRepeat::Space,
            RepeatKeyword::Round => StyleImageLayerRepeat::Round,
            RepeatKeyword::NoRepeat => StyleImageLayerRepeat::NoRepeat,
        }
    }

    // `servo` is the (x, y) repeat pair bound by the template caller.
    let repeat_x = to_ns(servo.0);
    let repeat_y = to_ns(servo.1);
    nsStyleImageLayers_Repeat {
        mXRepeat: repeat_x,
        mYRepeat: repeat_y,
    }
</%self:simple_image_array_property>

pub fn clone_${shorthand}_repeat(&self) -> longhands::${shorthand}_repeat::computed_value::T {
    use properties::longhands::${shorthand}_repeat::single_value::computed_value::T;
    use properties::longhands::${shorthand}_repeat::single_value::computed_value::RepeatKeyword;
    use gecko_bindings::structs::StyleImageLayerRepeat;

    fn to_servo(repeat: StyleImageLayerRepeat) -> RepeatKeyword {
        match repeat {
            StyleImageLayerRepeat::Repeat => RepeatKeyword::Repeat,
            StyleImageLayerRepeat::Space => RepeatKeyword::Space,
            StyleImageLayerRepeat::Round => RepeatKeyword::Round,
            StyleImageLayerRepeat::NoRepeat => RepeatKeyword::NoRepeat,
            x => panic!("Found unexpected value in style struct for ${shorthand}_repeat property: {:?}", x),
        }
    }

    longhands::${shorthand}_repeat::computed_value::T (
        self.gecko.${image_layers_field}.mLayers.iter()
            .take(self.gecko.${image_layers_field}.mRepeatCount as usize)
            .map(|ref layer| {
                T(to_servo(layer.mRepeat.mXRepeat), to_servo(layer.mRepeat.mYRepeat))
            }).collect()
    )
}
<% impl_simple_image_array_property("clip", shorthand, image_layers_field, "mClip", struct_name) %>
<% impl_simple_image_array_property("origin", shorthand, image_layers_field, "mOrigin", struct_name) %>

## Position is split into two independent longhands (-x and -y), each
## with its own count on the shared layer array.
% for orientation in ["x", "y"]:
pub fn copy_${shorthand}_position_${orientation}_from(&mut self, other: &Self) {
    use gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;

    let count = other.gecko.${image_layers_field}.mPosition${orientation.upper()}Count;

    unsafe {
        Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field},
                                      count as usize,
                                      LayerType::${shorthand.capitalize()});
    }

    for (layer, other) in self.gecko.${image_layers_field}.mLayers.iter_mut()
                              .zip(other.gecko.${image_layers_field}.mLayers.iter())
                              .take(count as usize) {
        layer.mPosition.m${orientation.upper()}Position
            = other.mPosition.m${orientation.upper()}Position;
    }
    self.gecko.${image_layers_field}.mPosition${orientation.upper()}Count = count;
}

pub fn reset_${shorthand}_position_${orientation}(&mut self, other: &Self) {
    self.copy_${shorthand}_position_${orientation}_from(other)
}

pub fn clone_${shorthand}_position_${orientation}(&self)
    -> longhands::${shorthand}_position_${orientation}::computed_value::T {
    longhands::${shorthand}_position_${orientation}::computed_value::T(
        self.gecko.${image_layers_field}.mLayers.iter()
            .take(self.gecko.${image_layers_field}.mPosition${orientation.upper()}Count as usize)
            .map(|position| position.mPosition.m${orientation.upper()}Position.into())
            .collect()
    )
}

pub fn set_${shorthand}_position_${orientation[0]}<I>(&mut self,
                                                      v: I)
    where I: IntoIterator<Item = longhands::${shorthand}_position_${orientation[0]}
                                          ::computed_value::single_value::T>,
          I::IntoIter: ExactSizeIterator
{
    use gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;

    let v = v.into_iter();

    unsafe {
        Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field}, v.len(),
                                      LayerType::${shorthand.capitalize()});
    }

    self.gecko.${image_layers_field}.mPosition${orientation[0].upper()}Count = v.len() as u32;
    for (servo, geckolayer) in v.zip(self.gecko.${image_layers_field}
                                         .mLayers.iter_mut()) {
        geckolayer.mPosition.m${orientation[0].upper()}Position = servo.into();
    }
}
% endfor
## Size setter body: converts one servo BackgroundSize (bound as `servo`)
## into Gecko's per-dimension value + type-tag representation.
<%self:simple_image_array_property name="size" shorthand="${shorthand}" field_name="mSize">
    use gecko_bindings::structs::nsStyleImageLayers_Size_Dimension;
    use gecko_bindings::structs::nsStyleImageLayers_Size_DimensionType;
    use gecko_bindings::structs::{nsStyleCoord_CalcValue, nsStyleImageLayers_Size};
    use values::generics::background::BackgroundSize;

    let mut width = nsStyleCoord_CalcValue::new();
    let mut height = nsStyleCoord_CalcValue::new();

    let (w_type, h_type) = match servo {
        BackgroundSize::Explicit { width: explicit_width, height: explicit_height } => {
            // Each explicit dimension is either a calc value or `auto`.
            let mut w_type = nsStyleImageLayers_Size_DimensionType::eAuto;
            let mut h_type = nsStyleImageLayers_Size_DimensionType::eAuto;
            if let Some(w) = explicit_width.to_calc_value() {
                width = w;
                w_type = nsStyleImageLayers_Size_DimensionType::eLengthPercentage;
            }
            if let Some(h) = explicit_height.to_calc_value() {
                height = h;
                h_type = nsStyleImageLayers_Size_DimensionType::eLengthPercentage;
            }
            (w_type, h_type)
        }
        BackgroundSize::Cover => {
            (
                nsStyleImageLayers_Size_DimensionType::eCover,
                nsStyleImageLayers_Size_DimensionType::eCover,
            )
        },
        BackgroundSize::Contain => {
            (
                nsStyleImageLayers_Size_DimensionType::eContain,
                nsStyleImageLayers_Size_DimensionType::eContain,
            )
        },
    };

    nsStyleImageLayers_Size {
        mWidth: nsStyleImageLayers_Size_Dimension { _base: width },
        mHeight: nsStyleImageLayers_Size_Dimension { _base: height },
        mWidthType: w_type as u8,
        mHeightType: h_type as u8,
    }
</%self:simple_image_array_property>
/// Reads the `${shorthand}-size` list back out of the Gecko struct.
pub fn clone_${shorthand}_size(&self) -> longhands::background_size::computed_value::T {
    use gecko_bindings::structs::nsStyleCoord_CalcValue as CalcValue;
    use gecko_bindings::structs::nsStyleImageLayers_Size_DimensionType as DimensionType;
    use values::computed::LengthOrPercentageOrAuto;
    use values::generics::background::BackgroundSize;

    // Convert one Gecko dimension (value + type tag) back to a computed value.
    fn to_servo(value: CalcValue, ty: u8) -> LengthOrPercentageOrAuto {
        if ty == DimensionType::eAuto as u8 {
            LengthOrPercentageOrAuto::Auto
        } else {
            debug_assert!(ty == DimensionType::eLengthPercentage as u8);
            value.into()
        }
    }

    longhands::background_size::computed_value::T(
        // Only the first mSizeCount layers carry meaningful size data: the
        // shared layer array may be longer (it backs every longhand of the
        // shorthand). Truncating matches clone_${shorthand}_repeat et al.
        self.gecko.${image_layers_field}.mLayers.iter()
            .take(self.gecko.${image_layers_field}.mSizeCount as usize)
            .map(|ref layer| {
                if DimensionType::eCover as u8 == layer.mSize.mWidthType {
                    debug_assert!(layer.mSize.mHeightType == DimensionType::eCover as u8);
                    return BackgroundSize::Cover
                }
                if DimensionType::eContain as u8 == layer.mSize.mWidthType {
                    debug_assert!(layer.mSize.mHeightType == DimensionType::eContain as u8);
                    return BackgroundSize::Contain
                }
                BackgroundSize::Explicit {
                    width: to_servo(layer.mSize.mWidth._base, layer.mSize.mWidthType),
                    height: to_servo(layer.mSize.mHeight._base, layer.mSize.mHeightType),
                }
            }).collect()
    )
}
/// Copies the `${shorthand}-image` layer list from `other`, cloning each
/// image value through the Gecko copy helper.
pub fn copy_${shorthand}_image_from(&mut self, other: &Self) {
    use gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;

    let count = other.gecko.${image_layers_field}.mImageCount;
    // Original wrapped this whole body (including the pure-Rust parts) in
    // one `unsafe` with a second, redundant nested `unsafe` inside it
    // (an `unused_unsafe` warning); scope unsafety to the FFI calls only.
    unsafe {
        Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field},
                                      count as usize,
                                      LayerType::${shorthand.capitalize()});
    }

    for (layer, other) in self.gecko.${image_layers_field}.mLayers.iter_mut()
                              .zip(other.gecko.${image_layers_field}.mLayers.iter())
                              .take(count as usize) {
        unsafe { Gecko_CopyImageValueFrom(&mut layer.mImage, &other.mImage); }
    }
    self.gecko.${image_layers_field}.mImageCount = count;
}
pub fn reset_${shorthand}_image(&mut self, other: &Self) {
    self.copy_${shorthand}_image_from(other)
}

/// Sets the `${shorthand}-image` layer list. `none` entries leave the
/// corresponding layer's image null.
#[allow(unused_variables)]
pub fn set_${shorthand}_image<I>(&mut self, images: I)
    where I: IntoIterator<Item = longhands::${shorthand}_image::computed_value::single_value::T>,
          I::IntoIter: ExactSizeIterator
{
    use gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;

    let images = images.into_iter();

    unsafe {
        // Prevent leaking of the last elements we did set
        for image in &mut self.gecko.${image_layers_field}.mLayers {
            Gecko_SetNullImageValue(&mut image.mImage)
        }
        // XXXManishearth clear mSourceURI for masks
        Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field}, images.len(),
                                      LayerType::${shorthand.title()});
    }

    self.gecko.${image_layers_field}.mImageCount = images.len() as u32;

    for (image, geckoimage) in images.zip(self.gecko.${image_layers_field}
                                              .mLayers.iter_mut()) {
        if let Either::Second(image) = image {
            geckoimage.mImage.set(image)
        }
    }
}

/// Reads the image list back out; a null Gecko image becomes `none`.
pub fn clone_${shorthand}_image(&self) -> longhands::${shorthand}_image::computed_value::T {
    use values::None_;

    longhands::${shorthand}_image::computed_value::T(
        self.gecko.${image_layers_field}.mLayers.iter()
            .take(self.gecko.${image_layers_field}.mImageCount as usize)
            .map(|ref layer| {
                match unsafe { layer.mImage.into_image() } {
                    Some(image) => Either::Second(image),
                    None => Either::First(None_),
                }
            }).collect()
    )
}
<%
    fill_fields = "mRepeat mClip mOrigin mPositionX mPositionY mImage mSize"
    if shorthand == "background":
        fill_fields += " mAttachment mBlendMode"
    else:
        # mSourceURI uses mImageCount
        fill_fields += " mMaskMode mComposite"
%>

/// Pads every per-longhand list in the image-layers struct out to the
/// length of the longest one so the lists line up (presumably by
/// repeating values, per the CSS layer-matching rules — the actual
/// filling is delegated to Gecko below).
pub fn fill_arrays(&mut self) {
    use gecko_bindings::bindings::Gecko_FillAll${shorthand.title()}Lists;
    use std::cmp;
    let mut max_len = 1;
    % for member in fill_fields.split():
    max_len = cmp::max(max_len, self.gecko.${image_layers_field}.${member}Count);
    % endfor
    unsafe {
        // While we could do this manually, we'd need to also manually
        // run all the copy constructors, so we just delegate to gecko
        Gecko_FillAll${shorthand.title()}Lists(&mut self.gecko.${image_layers_field}, max_len);
    }
}
</%def>
// TODO: Gecko accepts lists in most background-related properties. We just use
// the first element (which is the common case), but at some point we want to
// add support for parsing these lists in servo and pushing to nsTArray's.
<% skip_background_longhands = """background-repeat
                                  background-image background-clip
                                  background-origin background-attachment
                                  background-size background-position
                                  background-blend-mode
                                  background-position-x
                                  background-position-y""" %>

<%self:impl_trait style_struct_name="Background"
                  skip_longhands="${skip_background_longhands}">
// Layer plumbing shared with mask (repeat/clip/origin/position/size/image),
// plus the two background-only per-layer keyword longhands.
<% impl_common_image_layer_properties("background") %>
<% impl_simple_image_array_property("attachment", "background", "mImage", "mAttachment", "Background") %>
<% impl_simple_image_array_property("blend_mode", "background", "mImage", "mBlendMode", "Background") %>
</%self:impl_trait>
<%self:impl_trait style_struct_name="List"
                  skip_longhands="list-style-image list-style-type quotes -moz-image-region">
/// Sets `list-style-image`: clears the Gecko image for `none`, otherwise
/// hands Gecko the image value behind the URL.
pub fn set_list_style_image(&mut self, image: longhands::list_style_image::computed_value::T) {
    use values::Either;
    match image {
        longhands::list_style_image::computed_value::T(Either::Second(_none)) => {
            unsafe {
                Gecko_SetListStyleImageNone(&mut self.gecko);
            }
        }
        longhands::list_style_image::computed_value::T(Either::First(ref url)) => {
            unsafe {
                Gecko_SetListStyleImageImageValue(&mut self.gecko,
                                                  url.image_value.clone().unwrap().get());
            }
            // We don't need to record this struct as uncacheable, like when setting
            // background-image to a url() value, since only properties in reset structs
            // are re-used from the applicable declaration cache, and the List struct
            // is an inherited struct.
        }
    }
}

pub fn copy_list_style_image_from(&mut self, other: &Self) {
    unsafe { Gecko_CopyListStyleImageFrom(&mut self.gecko, &other.gecko); }
}

pub fn reset_list_style_image(&mut self, other: &Self) {
    self.copy_list_style_image_from(other)
}
/// Reads `list-style-image` back out; a null image request means `none`.
pub fn clone_list_style_image(&self) -> longhands::list_style_image::computed_value::T {
    use values::specified::url::SpecifiedUrl;
    use values::{Either, None_};

    let request_ptr = self.gecko.mListStyleImage.mRawPtr;
    if request_ptr.is_null() {
        return longhands::list_style_image::computed_value::T(Either::Second(None_));
    }

    let url = unsafe {
        SpecifiedUrl::from_image_request(&*request_ptr)
            .expect("mListStyleImage could not convert to SpecifiedUrl")
    };
    longhands::list_style_image::computed_value::T(Either::First(url))
}
/// Sets `list-style-type`. A counter style goes through the shared
/// CounterStyleOrNone conversion; a string becomes a string-valued
/// counter style on the Gecko side.
pub fn set_list_style_type(&mut self, v: longhands::list_style_type::computed_value::T, device: &Device) {
    use gecko_bindings::bindings::Gecko_SetCounterStyleToString;
    use nsstring::{nsACString, nsCString};
    use self::longhands::list_style_type::computed_value::T;
    match v {
        T::CounterStyle(s) => s.to_gecko_value(&mut self.gecko.mCounterStyle, device),
        T::String(s) => unsafe {
            Gecko_SetCounterStyleToString(&mut self.gecko.mCounterStyle,
                                          &nsCString::from(s) as &nsACString)
        }
    }
}

pub fn copy_list_style_type_from(&mut self, other: &Self) {
    unsafe {
        Gecko_CopyCounterStyle(&mut self.gecko.mCounterStyle, &other.gecko.mCounterStyle);
    }
}

pub fn reset_list_style_type(&mut self, other: &Self) {
    self.copy_list_style_type_from(other)
}

/// Reads `list-style-type` back out via the shared conversion helper.
pub fn clone_list_style_type(&self) -> longhands::list_style_type::computed_value::T {
    use self::longhands::list_style_type::computed_value::T;
    use values::Either;
    use values::generics::CounterStyleOrNone;

    let result = CounterStyleOrNone::from_gecko_value(&self.gecko.mCounterStyle);
    match result {
        Either::First(counter_style) => T::CounterStyle(counter_style),
        Either::Second(string) => T::String(string),
    }
}
/// Sets `quotes` by allocating a fresh Gecko quote-values object and
/// copying each (open, close) string pair into it.
pub fn set_quotes(&mut self, other: longhands::quotes::computed_value::T) {
    use gecko_bindings::bindings::Gecko_NewStyleQuoteValues;
    use gecko_bindings::sugar::refptr::UniqueRefPtr;

    let mut refptr = unsafe {
        UniqueRefPtr::from_addrefed(Gecko_NewStyleQuoteValues(other.0.len() as u32))
    };

    for (servo, gecko) in other.0.into_iter().zip(refptr.mQuotePairs.iter_mut()) {
        gecko.first.assign_utf8(&servo.0);
        gecko.second.assign_utf8(&servo.1);
    }

    unsafe { self.gecko.mQuotes.set_move(refptr.get()) }
}

pub fn copy_quotes_from(&mut self, other: &Self) {
    unsafe { self.gecko.mQuotes.set(&other.gecko.mQuotes); }
}

pub fn reset_quotes(&mut self, other: &Self) {
    self.copy_quotes_from(other)
}

pub fn clone_quotes(&self) -> longhands::quotes::computed_value::T {
    unsafe {
        // NOTE(review): mRawPtr is dereferenced unconditionally;
        // presumably set_quotes/copy_quotes_from guarantee it is never
        // null when this is reachable — confirm.
        let ref gecko_quote_values = *self.gecko.mQuotes.mRawPtr;
        longhands::quotes::computed_value::T(
            gecko_quote_values.mQuotePairs.iter().map(|gecko_pair| {
                (gecko_pair.first.to_string(), gecko_pair.second.to_string())
            }).collect()
        )
    }
}
/// Sets `-moz-image-region`. `auto` is encoded as an all-zero rect
/// (see clone below); an explicit rect is converted from servo's
/// edge-based ClipRect to Gecko's x/y/width/height nsRect.
#[allow(non_snake_case)]
pub fn set__moz_image_region(&mut self, v: longhands::_moz_image_region::computed_value::T) {
    use values::Either;

    match v {
        Either::Second(_auto) => {
            self.gecko.mImageRegion.x = 0;
            self.gecko.mImageRegion.y = 0;
            self.gecko.mImageRegion.width = 0;
            self.gecko.mImageRegion.height = 0;
        }
        Either::First(rect) => {
            self.gecko.mImageRegion.x = rect.left.map(Au::from).unwrap_or(Au(0)).0;
            self.gecko.mImageRegion.y = rect.top.map(Au::from).unwrap_or(Au(0)).0;
            // width/height are derived from the right/bottom edges
            // relative to the x/y just written above.
            self.gecko.mImageRegion.height = match rect.bottom {
                Some(value) => (Au::from(value) - Au(self.gecko.mImageRegion.y)).0,
                None => 0,
            };
            self.gecko.mImageRegion.width = match rect.right {
                Some(value) => (Au::from(value) - Au(self.gecko.mImageRegion.x)).0,
                None => 0,
            };
        }
    }
}

#[allow(non_snake_case)]
pub fn clone__moz_image_region(&self) -> longhands::_moz_image_region::computed_value::T {
    use values::{Auto, Either};
    use values::computed::ClipRect;

    // There is no ideal way to detect auto type for structs::nsRect and its components, so
    // if all components are zero, we use Auto.
    if self.gecko.mImageRegion.x == 0 &&
       self.gecko.mImageRegion.y == 0 &&
       self.gecko.mImageRegion.width == 0 &&
       self.gecko.mImageRegion.height == 0 {
        return Either::Second(Auto);
    }

    Either::First(ClipRect {
        top: Some(Au(self.gecko.mImageRegion.y).into()),
        right: Some(Au(self.gecko.mImageRegion.width + self.gecko.mImageRegion.x).into()),
        bottom: Some(Au(self.gecko.mImageRegion.height + self.gecko.mImageRegion.y).into()),
        left: Some(Au(self.gecko.mImageRegion.x).into()),
    })
}

${impl_simple_copy('_moz_image_region', 'mImageRegion')}
</%self:impl_trait>
<%self:impl_trait style_struct_name="Table" skip_longhands="-x-span">
/// `-x-span` (internal table column span) maps directly onto mSpan.
#[allow(non_snake_case)]
pub fn set__x_span(&mut self, v: longhands::_x_span::computed_value::T) {
    self.gecko.mSpan = v.0
}

${impl_simple_copy('_x_span', 'mSpan')}
</%self:impl_trait>
<%self:impl_trait style_struct_name="Effects"
                  skip_longhands="box-shadow clip filter">
/// Sets `box-shadow` by reallocating Gecko's shadow array and converting
/// each servo shadow in place.
pub fn set_box_shadow<I>(&mut self, v: I)
    where I: IntoIterator<Item = BoxShadow>,
          I::IntoIter: ExactSizeIterator
{
    let v = v.into_iter();
    self.gecko.mBoxShadow.replace_with_new(v.len() as u32);
    for (servo, gecko_shadow) in v.zip(self.gecko.mBoxShadow.iter_mut()) {
        gecko_shadow.set_from_box_shadow(servo);
    }
}

pub fn copy_box_shadow_from(&mut self, other: &Self) {
    self.gecko.mBoxShadow.copy_from(&other.gecko.mBoxShadow);
}

pub fn reset_box_shadow(&mut self, other: &Self) {
    self.copy_box_shadow_from(other)
}

pub fn clone_box_shadow(&self) -> longhands::box_shadow::computed_value::T {
    let buf = self.gecko.mBoxShadow.iter().map(|v| v.to_box_shadow()).collect();
    longhands::box_shadow::computed_value::T(buf)
}
/// Sets `clip`. Gecko stores the rect as x/y/width/height plus a flags
/// byte recording which edges were `auto`; auto right/bottom edges use
/// the NS_MAXSIZE sentinel (1 << 30) for the corresponding extent.
pub fn set_clip(&mut self, v: longhands::clip::computed_value::T) {
    use gecko_bindings::structs::NS_STYLE_CLIP_AUTO;
    use gecko_bindings::structs::NS_STYLE_CLIP_RECT;
    use gecko_bindings::structs::NS_STYLE_CLIP_LEFT_AUTO;
    use gecko_bindings::structs::NS_STYLE_CLIP_TOP_AUTO;
    use gecko_bindings::structs::NS_STYLE_CLIP_RIGHT_AUTO;
    use gecko_bindings::structs::NS_STYLE_CLIP_BOTTOM_AUTO;
    use values::Either;

    match v {
        Either::First(rect) => {
            self.gecko.mClipFlags = NS_STYLE_CLIP_RECT as u8;
            if let Some(left) = rect.left {
                self.gecko.mClip.x = left.to_i32_au();
            } else {
                self.gecko.mClip.x = 0;
                self.gecko.mClipFlags |= NS_STYLE_CLIP_LEFT_AUTO as u8;
            }

            if let Some(top) = rect.top {
                self.gecko.mClip.y = top.to_i32_au();
            } else {
                self.gecko.mClip.y = 0;
                self.gecko.mClipFlags |= NS_STYLE_CLIP_TOP_AUTO as u8;
            }

            // height/width are the bottom/right edges minus the y/x
            // already written above.
            if let Some(bottom) = rect.bottom {
                self.gecko.mClip.height = (Au::from(bottom) - Au(self.gecko.mClip.y)).0;
            } else {
                self.gecko.mClip.height = 1 << 30; // NS_MAXSIZE
                self.gecko.mClipFlags |= NS_STYLE_CLIP_BOTTOM_AUTO as u8;
            }

            if let Some(right) = rect.right {
                self.gecko.mClip.width = (Au::from(right) - Au(self.gecko.mClip.x)).0;
            } else {
                self.gecko.mClip.width = 1 << 30; // NS_MAXSIZE
                self.gecko.mClipFlags |= NS_STYLE_CLIP_RIGHT_AUTO as u8;
            }
        },
        Either::Second(_auto) => {
            self.gecko.mClipFlags = NS_STYLE_CLIP_AUTO as u8;
            self.gecko.mClip.x = 0;
            self.gecko.mClip.y = 0;
            self.gecko.mClip.width = 0;
            self.gecko.mClip.height = 0;
        }
    }
}

pub fn copy_clip_from(&mut self, other: &Self) {
    self.gecko.mClip = other.gecko.mClip;
    self.gecko.mClipFlags = other.gecko.mClipFlags;
}

pub fn reset_clip(&mut self, other: &Self) {
    self.copy_clip_from(other)
}
/// Reads `clip` back out, inverting the encoding used by set_clip:
/// per-edge auto flags, zeroed x/y for auto left/top, and the
/// NS_MAXSIZE sentinel for auto right/bottom extents.
pub fn clone_clip(&self) -> longhands::clip::computed_value::T {
    use gecko_bindings::structs::NS_STYLE_CLIP_AUTO;
    use gecko_bindings::structs::NS_STYLE_CLIP_BOTTOM_AUTO;
    use gecko_bindings::structs::NS_STYLE_CLIP_LEFT_AUTO;
    use gecko_bindings::structs::NS_STYLE_CLIP_RIGHT_AUTO;
    use gecko_bindings::structs::NS_STYLE_CLIP_TOP_AUTO;
    use values::computed::{ClipRect, ClipRectOrAuto};
    use values::Either;

    if self.gecko.mClipFlags == NS_STYLE_CLIP_AUTO as u8 {
        ClipRectOrAuto::auto()
    } else {
        let left = if self.gecko.mClipFlags & NS_STYLE_CLIP_LEFT_AUTO as u8 != 0 {
            debug_assert!(self.gecko.mClip.x == 0);
            None
        } else {
            Some(Au(self.gecko.mClip.x).into())
        };

        let top = if self.gecko.mClipFlags & NS_STYLE_CLIP_TOP_AUTO as u8 != 0 {
            debug_assert!(self.gecko.mClip.y == 0);
            None
        } else {
            Some(Au(self.gecko.mClip.y).into())
        };

        let bottom = if self.gecko.mClipFlags & NS_STYLE_CLIP_BOTTOM_AUTO as u8 != 0 {
            debug_assert!(self.gecko.mClip.height == 1 << 30); // NS_MAXSIZE
            None
        } else {
            // Stored as an extent; convert back to the bottom edge.
            Some(Au(self.gecko.mClip.y + self.gecko.mClip.height).into())
        };

        let right = if self.gecko.mClipFlags & NS_STYLE_CLIP_RIGHT_AUTO as u8 != 0 {
            debug_assert!(self.gecko.mClip.width == 1 << 30); // NS_MAXSIZE
            None
        } else {
            Some(Au(self.gecko.mClip.x + self.gecko.mClip.width).into())
        };

        Either::First(ClipRect { top: top, right: right, bottom: bottom, left: left, })
    }
}
<%
    # This array lists the filter functions whose set / clone code takes a
    # single percentage or number value.
    # Other functions (e.g. Blur / HueRotate) are set / cloned differently,
    # so they are not included in this array.
FILTER_FUNCTIONS = [ 'Brightness', 'Contrast', 'Grayscale', 'Invert',
'Opacity', 'Saturate', 'Sepia' ]
%>
    /// Sets `filter` from an exact-size iterator of computed filter values,
    /// translating each Servo `Filter` variant into the matching Gecko
    /// `nsStyleFilter` entry.
    pub fn set_filter<I>(&mut self, v: I)
    where
        I: IntoIterator<Item = Filter>,
        I::IntoIter: ExactSizeIterator,
    {
        use values::generics::effects::Filter::*;
        use gecko_bindings::structs::nsCSSShadowArray;
        use gecko_bindings::structs::nsStyleFilter;
        use gecko_bindings::structs::NS_STYLE_FILTER_BLUR;
        use gecko_bindings::structs::NS_STYLE_FILTER_BRIGHTNESS;
        use gecko_bindings::structs::NS_STYLE_FILTER_CONTRAST;
        use gecko_bindings::structs::NS_STYLE_FILTER_GRAYSCALE;
        use gecko_bindings::structs::NS_STYLE_FILTER_INVERT;
        use gecko_bindings::structs::NS_STYLE_FILTER_OPACITY;
        use gecko_bindings::structs::NS_STYLE_FILTER_SATURATE;
        use gecko_bindings::structs::NS_STYLE_FILTER_SEPIA;
        use gecko_bindings::structs::NS_STYLE_FILTER_HUE_ROTATE;
        use gecko_bindings::structs::NS_STYLE_FILTER_DROP_SHADOW;
        // Helper for the simple one-parameter filter functions.
        fn fill_filter(m_type: u32, value: CoordDataValue, gecko_filter: &mut nsStyleFilter){
            gecko_filter.mType = m_type;
            gecko_filter.mFilterParameter.set_value(value);
        }
        let v = v.into_iter();
        unsafe {
            // Frees any existing filters and resizes mFilters to v.len().
            Gecko_ResetFilters(&mut self.gecko, v.len());
        }
        debug_assert_eq!(v.len(), self.gecko.mFilters.len());
        for (servo, gecko_filter) in v.zip(self.gecko.mFilters.iter_mut()) {
            match servo {
                // FILTER_FUNCTIONS covers the factor-valued functions; the
                // remaining variants need bespoke handling below.
                % for func in FILTER_FUNCTIONS:
                ${func}(factor) => fill_filter(NS_STYLE_FILTER_${func.upper()},
                                               CoordDataValue::Factor(factor.0),
                                               gecko_filter),
                % endfor
                Blur(length) => fill_filter(NS_STYLE_FILTER_BLUR,
                                            CoordDataValue::Coord(length.0.to_i32_au()),
                                            gecko_filter),
                HueRotate(angle) => fill_filter(NS_STYLE_FILTER_HUE_ROTATE,
                                                CoordDataValue::from(angle),
                                                gecko_filter),
                DropShadow(shadow) => {
                    gecko_filter.mType = NS_STYLE_FILTER_DROP_SHADOW;
                    // Allocates a fresh one-element shadow array inside the
                    // filter's union and returns a reference into it.
                    fn init_shadow(filter: &mut nsStyleFilter) -> &mut nsCSSShadowArray {
                        unsafe {
                            let ref mut union = filter.__bindgen_anon_1;
                            let shadow_array: &mut *mut nsCSSShadowArray = union.mDropShadow.as_mut();
                            *shadow_array = Gecko_NewCSSShadowArray(1);
                            &mut **shadow_array
                        }
                    }
                    let gecko_shadow = init_shadow(gecko_filter);
                    gecko_shadow.mArray[0].set_from_simple_shadow(shadow);
                },
                Url(ref url) => {
                    unsafe {
                        bindings::Gecko_nsStyleFilter_SetURLValue(gecko_filter, url.for_ffi());
                    }
                },
            }
        }
    }
    /// Copies the filter list from `other` via the Gecko binding, which
    /// handles ownership of the underlying nsStyleFilter array.
    pub fn copy_filter_from(&mut self, other: &Self) {
        unsafe {
            Gecko_CopyFiltersFrom(&other.gecko as *const _ as *mut _, &mut self.gecko);
        }
    }
    /// Resets `filter` to the value in `other`; a reset is just a copy here.
    pub fn reset_filter(&mut self, other: &Self) {
        self.copy_filter_from(other)
    }
    /// Converts Gecko's filter list back into the Servo computed value,
    /// dispatching on each entry's `mType` tag.
    pub fn clone_filter(&self) -> longhands::filter::computed_value::T {
        use values::generics::effects::Filter;
        use values::specified::url::SpecifiedUrl;
        use gecko_bindings::structs::NS_STYLE_FILTER_BLUR;
        use gecko_bindings::structs::NS_STYLE_FILTER_BRIGHTNESS;
        use gecko_bindings::structs::NS_STYLE_FILTER_CONTRAST;
        use gecko_bindings::structs::NS_STYLE_FILTER_GRAYSCALE;
        use gecko_bindings::structs::NS_STYLE_FILTER_INVERT;
        use gecko_bindings::structs::NS_STYLE_FILTER_OPACITY;
        use gecko_bindings::structs::NS_STYLE_FILTER_SATURATE;
        use gecko_bindings::structs::NS_STYLE_FILTER_SEPIA;
        use gecko_bindings::structs::NS_STYLE_FILTER_HUE_ROTATE;
        use gecko_bindings::structs::NS_STYLE_FILTER_DROP_SHADOW;
        use gecko_bindings::structs::NS_STYLE_FILTER_URL;
        let mut filters = Vec::new();
        for filter in self.gecko.mFilters.iter(){
            match filter.mType {
                % for func in FILTER_FUNCTIONS:
                NS_STYLE_FILTER_${func.upper()} => {
                    filters.push(Filter::${func}(
                        GeckoStyleCoordConvertible::from_gecko_style_coord(
                            &filter.mFilterParameter).unwrap()));
                },
                % endfor
                NS_STYLE_FILTER_BLUR => {
                    filters.push(Filter::Blur(NonNegativeLength::from_gecko_style_coord(
                        &filter.mFilterParameter).unwrap()));
                },
                NS_STYLE_FILTER_HUE_ROTATE => {
                    filters.push(Filter::HueRotate(
                        GeckoStyleCoordConvertible::from_gecko_style_coord(
                            &filter.mFilterParameter).unwrap()));
                },
                NS_STYLE_FILTER_DROP_SHADOW => {
                    filters.push(unsafe {
                        Filter::DropShadow(
                            (**filter.__bindgen_anon_1.mDropShadow.as_ref()).mArray[0].to_simple_shadow(),
                        )
                    });
                },
                NS_STYLE_FILTER_URL => {
                    filters.push(unsafe {
                        Filter::Url(
                            SpecifiedUrl::from_url_value_data(&(**filter.__bindgen_anon_1.mURL.as_ref())._base).unwrap()
                        )
                    });
                }
                // Any other mType (e.g. a "none" entry) is silently skipped.
                _ => {},
            }
        }
        longhands::filter::computed_value::T(filters)
    }
</%self:impl_trait>
<%self:impl_trait style_struct_name="InheritedBox"
                  skip_longhands="image-orientation">
    // FIXME: Gecko uses a tricky way to store computed value of image-orientation
    // within an u8. We could inline following glue codes by implementing all
    // those tricky parts for Servo as well. But, it's not done yet just for
    // convenience.
    /// Sets `image-orientation` through the Gecko glue functions, which pack
    /// the angle + flip (or from-image) state into a single byte.
    pub fn set_image_orientation(&mut self, v: longhands::image_orientation::computed_value::T) {
        use properties::longhands::image_orientation::computed_value::T;
        match v {
            T::FromImage => {
                unsafe {
                    bindings::Gecko_SetImageOrientationAsFromImage(&mut self.gecko);
                }
            },
            T::AngleWithFlipped(ref orientation, flipped) => {
                unsafe {
                    bindings::Gecko_SetImageOrientation(&mut self.gecko, *orientation as u8, flipped);
                }
            }
        }
    }
    /// Copies the packed image-orientation byte from `other`.
    pub fn copy_image_orientation_from(&mut self, other: &Self) {
        unsafe {
            bindings::Gecko_CopyImageOrientationFrom(&mut self.gecko, &other.gecko);
        }
    }
    /// Resets `image-orientation` to the value in `other` (reset == copy).
    pub fn reset_image_orientation(&mut self, other: &Self) {
        self.copy_image_orientation_from(other)
    }
    /// Unpacks Gecko's single-byte image-orientation encoding: a from-image
    /// bit, a flip bit, and a two-bit angle field.
    pub fn clone_image_orientation(&self) -> longhands::image_orientation::computed_value::T {
        use gecko_bindings::structs::{nsStyleImageOrientation_Bits, nsStyleImageOrientation_Angles};
        use properties::longhands::image_orientation::computed_value::{Orientation, T};
        let gecko_orientation = self.gecko.mImageOrientation.mOrientation;
        if gecko_orientation & nsStyleImageOrientation_Bits::FROM_IMAGE_MASK as u8 != 0 {
            T::FromImage
        } else {
            const ANGLE0: u8 = nsStyleImageOrientation_Angles::ANGLE_0 as u8;
            const ANGLE90: u8 = nsStyleImageOrientation_Angles::ANGLE_90 as u8;
            const ANGLE180: u8 = nsStyleImageOrientation_Angles::ANGLE_180 as u8;
            const ANGLE270: u8 = nsStyleImageOrientation_Angles::ANGLE_270 as u8;
            let flip = gecko_orientation & nsStyleImageOrientation_Bits::FLIP_MASK as u8 != 0;
            let orientation = match gecko_orientation & nsStyleImageOrientation_Bits::ORIENTATION_MASK as u8 {
                ANGLE0 => Orientation::Angle0,
                ANGLE90 => Orientation::Angle90,
                ANGLE180 => Orientation::Angle180,
                ANGLE270 => Orientation::Angle270,
                _ => unreachable!()
            };
            T::AngleWithFlipped(orientation, flip)
        }
    }
</%self:impl_trait>
<%self:impl_trait style_struct_name="InheritedTable"
                  skip_longhands="border-spacing">
    /// Sets `border-spacing`; Gecko stores the horizontal and vertical
    /// spacing as raw app-unit integers in two separate fields.
    pub fn set_border_spacing(&mut self, v: longhands::border_spacing::computed_value::T) {
        self.gecko.mBorderSpacingCol = v.horizontal().0;
        self.gecko.mBorderSpacingRow = v.vertical().0;
    }
    /// Copies both spacing components from `other`.
    pub fn copy_border_spacing_from(&mut self, other: &Self) {
        self.gecko.mBorderSpacingCol = other.gecko.mBorderSpacingCol;
        self.gecko.mBorderSpacingRow = other.gecko.mBorderSpacingRow;
    }
    /// Resets `border-spacing` to the value in `other` (reset == copy).
    pub fn reset_border_spacing(&mut self, other: &Self) {
        self.copy_border_spacing_from(other)
    }
    /// Rebuilds the Servo computed value from the stored app units.
    pub fn clone_border_spacing(&self) -> longhands::border_spacing::computed_value::T {
        longhands::border_spacing::computed_value::T::new(
            Au(self.gecko.mBorderSpacingCol).into(),
            Au(self.gecko.mBorderSpacingRow).into()
        )
    }
</%self:impl_trait>
<%self:impl_trait style_struct_name="InheritedText"
                  skip_longhands="text-align text-emphasis-style text-shadow line-height letter-spacing word-spacing
                                  -webkit-text-stroke-width text-emphasis-position -moz-tab-size">
    <% text_align_keyword = Keyword("text-align",
                                    "start end left right center justify -moz-center -moz-left -moz-right char",
                                    gecko_strip_moz_prefix=False) %>
    ${impl_keyword('text_align', 'mTextAlign', text_align_keyword)}
    /// Sets `text-shadow` from an exact-size iterator of simple shadows.
    pub fn set_text_shadow<I>(&mut self, v: I)
        where I: IntoIterator<Item = SimpleShadow>,
              I::IntoIter: ExactSizeIterator
    {
        let v = v.into_iter();
        // Replaces the existing shadow array with a freshly sized one.
        self.gecko.mTextShadow.replace_with_new(v.len() as u32);
        for (servo, gecko_shadow) in v.zip(self.gecko.mTextShadow.iter_mut()) {
            gecko_shadow.set_from_simple_shadow(servo);
        }
    }
    /// Copies the text-shadow array from `other`.
    pub fn copy_text_shadow_from(&mut self, other: &Self) {
        self.gecko.mTextShadow.copy_from(&other.gecko.mTextShadow);
    }
    /// Resets `text-shadow` to the value in `other` (reset == copy).
    pub fn reset_text_shadow(&mut self, other: &Self) {
        self.copy_text_shadow_from(other)
    }
    /// Rebuilds the Servo computed shadow list from the Gecko array.
    pub fn clone_text_shadow(&self) -> longhands::text_shadow::computed_value::T {
        let buf = self.gecko.mTextShadow.iter().map(|v| v.to_simple_shadow()).collect();
        longhands::text_shadow::computed_value::T(buf)
    }
    /// Sets `line-height`, mapping each Servo variant onto the nsStyleCoord
    /// unit Gecko expects (normal / coord / factor / enumerated).
    pub fn set_line_height(&mut self, v: longhands::line_height::computed_value::T) {
        use values::generics::text::LineHeight;
        // FIXME: Align binary representations and ditch |match| for cast + static_asserts
        let en = match v {
            LineHeight::Normal => CoordDataValue::Normal,
            LineHeight::Length(val) => CoordDataValue::Coord(val.0.to_i32_au()),
            LineHeight::Number(val) => CoordDataValue::Factor(val.0),
            LineHeight::MozBlockHeight =>
                CoordDataValue::Enumerated(structs::NS_STYLE_LINE_HEIGHT_BLOCK_HEIGHT),
        };
        self.gecko.mLineHeight.set_value(en);
    }
    /// Inverse of `set_line_height`; panics on any unit the setter never
    /// produces.
    pub fn clone_line_height(&self) -> longhands::line_height::computed_value::T {
        use values::generics::text::LineHeight;
        return match self.gecko.mLineHeight.as_value() {
            CoordDataValue::Normal => LineHeight::Normal,
            CoordDataValue::Coord(coord) => LineHeight::Length(Au(coord).into()),
            CoordDataValue::Factor(n) => LineHeight::Number(n.into()),
            CoordDataValue::Enumerated(val) if val == structs::NS_STYLE_LINE_HEIGHT_BLOCK_HEIGHT =>
                LineHeight::MozBlockHeight,
            _ => panic!("this should not happen"),
        }
    }
    <%call expr="impl_coord_copy('line_height', 'mLineHeight')"></%call>
    /// Sets `letter-spacing`; `normal` is stored as an explicit Normal unit.
    pub fn set_letter_spacing(&mut self, v: longhands::letter_spacing::computed_value::T) {
        use values::generics::text::Spacing;
        match v {
            Spacing::Value(value) => self.gecko.mLetterSpacing.set(value),
            Spacing::Normal => self.gecko.mLetterSpacing.set_value(CoordDataValue::Normal)
        }
    }
    /// Reads `letter-spacing` back; only Normal and Coord units are
    /// expected, anything else is a bug in the setter.
    pub fn clone_letter_spacing(&self) -> longhands::letter_spacing::computed_value::T {
        use values::computed::Length;
        use values::generics::text::Spacing;
        debug_assert!(
            matches!(self.gecko.mLetterSpacing.as_value(),
                     CoordDataValue::Normal |
                     CoordDataValue::Coord(_)),
            "Unexpected computed value for letter-spacing");
        Length::from_gecko_style_coord(&self.gecko.mLetterSpacing).map_or(Spacing::Normal, Spacing::Value)
    }
    <%call expr="impl_coord_copy('letter_spacing', 'mLetterSpacing')"></%call>
    /// Sets `word-spacing`; unlike letter-spacing, `normal` computes to a
    /// zero length per the spec, so it is stored as Coord(0).
    pub fn set_word_spacing(&mut self, v: longhands::word_spacing::computed_value::T) {
        use values::generics::text::Spacing;
        match v {
            Spacing::Value(lop) => self.gecko.mWordSpacing.set(lop),
            // https://drafts.csswg.org/css-text-3/#valdef-word-spacing-normal
            Spacing::Normal => self.gecko.mWordSpacing.set_value(CoordDataValue::Coord(0)),
        }
    }
    /// Reads `word-spacing` back; length, percentage, and calc units are all
    /// possible since the value is a LengthOrPercentage.
    pub fn clone_word_spacing(&self) -> longhands::word_spacing::computed_value::T {
        use values::computed::LengthOrPercentage;
        use values::generics::text::Spacing;
        debug_assert!(
            matches!(self.gecko.mWordSpacing.as_value(),
                     CoordDataValue::Normal |
                     CoordDataValue::Coord(_) |
                     CoordDataValue::Percent(_) |
                     CoordDataValue::Calc(_)),
            "Unexpected computed value for word-spacing");
        LengthOrPercentage::from_gecko_style_coord(&self.gecko.mWordSpacing).map_or(Spacing::Normal, Spacing::Value)
    }
    <%call expr="impl_coord_copy('word_spacing', 'mWordSpacing')"></%call>
    // Frees the emphasis string buffer when the current style is the
    // string variant, so a subsequent set/copy starts from a clean state.
    fn clear_text_emphasis_style_if_string(&mut self) {
        use nsstring::nsString;
        if self.gecko.mTextEmphasisStyle == structs::NS_STYLE_TEXT_EMPHASIS_STYLE_STRING as u8 {
            self.gecko.mTextEmphasisStyleString.assign(&nsString::new());
            self.gecko.mTextEmphasisStyle = structs::NS_STYLE_TEXT_EMPHASIS_STYLE_NONE as u8;
        }
    }
    ${impl_simple_type_with_conversion("text_emphasis_position")}
    /// Sets `text-emphasis-style`. For keyword values the fill and shape
    /// bits are OR-ed together and the equivalent character is also stored;
    /// for string values the string itself is stored.
    pub fn set_text_emphasis_style(&mut self, v: longhands::text_emphasis_style::computed_value::T) {
        use properties::longhands::text_emphasis_style::computed_value::T;
        use properties::longhands::text_emphasis_style::ShapeKeyword;
        self.clear_text_emphasis_style_if_string();
        let (te, s) = match v {
            T::None => (structs::NS_STYLE_TEXT_EMPHASIS_STYLE_NONE, ""),
            T::Keyword(ref keyword) => {
                let fill = if keyword.fill {
                    structs::NS_STYLE_TEXT_EMPHASIS_STYLE_FILLED
                } else {
                    structs::NS_STYLE_TEXT_EMPHASIS_STYLE_OPEN
                };
                let shape = match keyword.shape {
                    ShapeKeyword::Dot => structs::NS_STYLE_TEXT_EMPHASIS_STYLE_DOT,
                    ShapeKeyword::Circle => structs::NS_STYLE_TEXT_EMPHASIS_STYLE_CIRCLE,
                    ShapeKeyword::DoubleCircle => structs::NS_STYLE_TEXT_EMPHASIS_STYLE_DOUBLE_CIRCLE,
                    ShapeKeyword::Triangle => structs::NS_STYLE_TEXT_EMPHASIS_STYLE_TRIANGLE,
                    ShapeKeyword::Sesame => structs::NS_STYLE_TEXT_EMPHASIS_STYLE_SESAME,
                };
                (shape | fill, keyword.shape.char(keyword.fill))
            },
            T::String(ref s) => {
                (structs::NS_STYLE_TEXT_EMPHASIS_STYLE_STRING, &**s)
            },
        };
        self.gecko.mTextEmphasisStyleString.assign_utf8(s);
        self.gecko.mTextEmphasisStyle = te as u8;
    }
    /// Copies `text-emphasis-style` (including the string buffer when the
    /// source uses the string variant) from `other`.
    pub fn copy_text_emphasis_style_from(&mut self, other: &Self) {
        self.clear_text_emphasis_style_if_string();
        if other.gecko.mTextEmphasisStyle == structs::NS_STYLE_TEXT_EMPHASIS_STYLE_STRING as u8 {
            self.gecko.mTextEmphasisStyleString
                      .assign(&*other.gecko.mTextEmphasisStyleString)
        }
        self.gecko.mTextEmphasisStyle = other.gecko.mTextEmphasisStyle;
    }
    /// Resets `text-emphasis-style` to the value in `other` (reset == copy).
    pub fn reset_text_emphasis_style(&mut self, other: &Self) {
        self.copy_text_emphasis_style_from(other)
    }
    /// Decodes the packed emphasis-style byte: none and string are exact
    /// values; otherwise the OPEN bit gives the fill and the remaining bits
    /// select the shape keyword.
    pub fn clone_text_emphasis_style(&self) -> longhands::text_emphasis_style::computed_value::T {
        use properties::longhands::text_emphasis_style::computed_value::{T, KeywordValue};
        use properties::longhands::text_emphasis_style::ShapeKeyword;
        if self.gecko.mTextEmphasisStyle == structs::NS_STYLE_TEXT_EMPHASIS_STYLE_NONE as u8 {
            return T::None;
        } else if self.gecko.mTextEmphasisStyle == structs::NS_STYLE_TEXT_EMPHASIS_STYLE_STRING as u8 {
            return T::String(self.gecko.mTextEmphasisStyleString.to_string());
        }
        let fill = self.gecko.mTextEmphasisStyle & structs::NS_STYLE_TEXT_EMPHASIS_STYLE_OPEN as u8 == 0;
        let shape =
            match self.gecko.mTextEmphasisStyle as u32 & !structs::NS_STYLE_TEXT_EMPHASIS_STYLE_OPEN {
                structs::NS_STYLE_TEXT_EMPHASIS_STYLE_DOT => ShapeKeyword::Dot,
                structs::NS_STYLE_TEXT_EMPHASIS_STYLE_CIRCLE => ShapeKeyword::Circle,
                structs::NS_STYLE_TEXT_EMPHASIS_STYLE_DOUBLE_CIRCLE => ShapeKeyword::DoubleCircle,
                structs::NS_STYLE_TEXT_EMPHASIS_STYLE_TRIANGLE => ShapeKeyword::Triangle,
                structs::NS_STYLE_TEXT_EMPHASIS_STYLE_SESAME => ShapeKeyword::Sesame,
                x => panic!("Unexpected value in style struct for text-emphasis-style property: {:?}", x)
            };
        T::Keyword(KeywordValue {
            fill: fill,
            shape: shape
        })
    }
    ${impl_non_negative_length('_webkit_text_stroke_width',
                               'mWebkitTextStrokeWidth')}
    /// Sets `-moz-tab-size`: a plain number is stored as a Factor unit, a
    /// length as a Coord unit.
    #[allow(non_snake_case)]
    pub fn set__moz_tab_size(&mut self, v: longhands::_moz_tab_size::computed_value::T) {
        use values::Either;
        match v {
            Either::Second(non_negative_number) => {
                self.gecko.mTabSize.set_value(CoordDataValue::Factor(non_negative_number.0));
            }
            Either::First(non_negative_length) => {
                self.gecko.mTabSize.set(non_negative_length);
            }
        }
    }
    /// Inverse of `set__moz_tab_size`; only Coord and Factor units can be
    /// present.
    #[allow(non_snake_case)]
    pub fn clone__moz_tab_size(&self) -> longhands::_moz_tab_size::computed_value::T {
        use values::Either;
        match self.gecko.mTabSize.as_value() {
            CoordDataValue::Coord(coord) => Either::First(Au(coord).into()),
            CoordDataValue::Factor(number) => Either::Second(From::from(number)),
            _ => unreachable!(),
        }
    }
    <%call expr="impl_coord_copy('_moz_tab_size', 'mTabSize')"></%call>
</%self:impl_trait>
<%self:impl_trait style_struct_name="Text"
                  skip_longhands="text-decoration-line text-overflow initial-letter">
    ${impl_simple_type_with_conversion("text_decoration_line")}
    // Frees the per-side string buffers when either overflow side currently
    // holds a string value, resetting that side back to clip.
    fn clear_overflow_sides_if_string(&mut self) {
        use gecko_bindings::structs::nsStyleTextOverflowSide;
        use nsstring::nsString;
        fn clear_if_string(side: &mut nsStyleTextOverflowSide) {
            if side.mType == structs::NS_STYLE_TEXT_OVERFLOW_STRING as u8 {
                side.mString.assign(&nsString::new());
                side.mType = structs::NS_STYLE_TEXT_OVERFLOW_CLIP as u8;
            }
        }
        clear_if_string(&mut self.gecko.mTextOverflow.mLeft);
        clear_if_string(&mut self.gecko.mTextOverflow.mRight);
    }
    /// Sets `text-overflow`; each side is a (type, optional string) pair,
    /// and `mLogicalDirections` records whether the sides are logical.
    pub fn set_text_overflow(&mut self, v: longhands::text_overflow::computed_value::T) {
        use gecko_bindings::structs::nsStyleTextOverflowSide;
        use properties::longhands::text_overflow::Side;
        fn set(side: &mut nsStyleTextOverflowSide, value: &Side) {
            let ty = match *value {
                Side::Clip => structs::NS_STYLE_TEXT_OVERFLOW_CLIP,
                Side::Ellipsis => structs::NS_STYLE_TEXT_OVERFLOW_ELLIPSIS,
                Side::String(ref s) => {
                    side.mString.assign_utf8(s);
                    structs::NS_STYLE_TEXT_OVERFLOW_STRING
                }
            };
            side.mType = ty as u8;
        }
        self.clear_overflow_sides_if_string();
        self.gecko.mTextOverflow.mLogicalDirections = v.sides_are_logical;
        set(&mut self.gecko.mTextOverflow.mLeft, &v.first);
        set(&mut self.gecko.mTextOverflow.mRight, &v.second);
    }
    /// Copies `text-overflow` from `other`, duplicating the string buffer
    /// for any side that uses the string variant.
    pub fn copy_text_overflow_from(&mut self, other: &Self) {
        use gecko_bindings::structs::nsStyleTextOverflowSide;
        fn set(side: &mut nsStyleTextOverflowSide, other: &nsStyleTextOverflowSide) {
            if other.mType == structs::NS_STYLE_TEXT_OVERFLOW_STRING as u8 {
                side.mString.assign(&*other.mString)
            }
            side.mType = other.mType
        }
        self.clear_overflow_sides_if_string();
        set(&mut self.gecko.mTextOverflow.mLeft, &other.gecko.mTextOverflow.mLeft);
        set(&mut self.gecko.mTextOverflow.mRight, &other.gecko.mTextOverflow.mRight);
        self.gecko.mTextOverflow.mLogicalDirections = other.gecko.mTextOverflow.mLogicalDirections;
    }
    /// Resets `text-overflow` to the value in `other` (reset == copy).
    pub fn reset_text_overflow(&mut self, other: &Self) {
        self.copy_text_overflow_from(other)
    }
    /// Rebuilds the Servo computed value from both Gecko overflow sides.
    pub fn clone_text_overflow(&self) -> longhands::text_overflow::computed_value::T {
        use gecko_bindings::structs::nsStyleTextOverflowSide;
        use properties::longhands::text_overflow::Side;
        fn to_servo(side: &nsStyleTextOverflowSide) -> Side {
            match side.mType as u32 {
                structs::NS_STYLE_TEXT_OVERFLOW_CLIP => Side::Clip,
                structs::NS_STYLE_TEXT_OVERFLOW_ELLIPSIS => Side::Ellipsis,
                structs::NS_STYLE_TEXT_OVERFLOW_STRING => Side::String(side.mString.to_string().into_boxed_str()),
                x => panic!("Found unexpected value in style struct for text_overflow property: {:?}", x),
            }
        }
        longhands::text_overflow::computed_value::T {
            first: to_servo(&self.gecko.mTextOverflow.mLeft),
            second: to_servo(&self.gecko.mTextOverflow.mRight),
            sides_are_logical: self.gecko.mTextOverflow.mLogicalDirections
        }
    }
    /// Sets `initial-letter`. `normal` is encoded as size 0 / sink 0; an
    /// unspecified sink defaults to floor(size).
    pub fn set_initial_letter(&mut self, v: longhands::initial_letter::computed_value::T) {
        use values::generics::text::InitialLetter;
        match v {
            InitialLetter::Normal => {
                self.gecko.mInitialLetterSize = 0.;
                self.gecko.mInitialLetterSink = 0;
            },
            InitialLetter::Specified(size, sink) => {
                self.gecko.mInitialLetterSize = size;
                if let Some(sink) = sink {
                    self.gecko.mInitialLetterSink = sink;
                } else {
                    self.gecko.mInitialLetterSink = size.floor() as i32;
                }
            }
        }
    }
    /// Copies both initial-letter components from `other`.
    pub fn copy_initial_letter_from(&mut self, other: &Self) {
        self.gecko.mInitialLetterSize = other.gecko.mInitialLetterSize;
        self.gecko.mInitialLetterSink = other.gecko.mInitialLetterSink;
    }
    /// Resets `initial-letter` to the value in `other` (reset == copy).
    pub fn reset_initial_letter(&mut self, other: &Self) {
        self.copy_initial_letter_from(other)
    }
    /// Inverse of `set_initial_letter`: size 0 + sink 0 means normal, and a
    /// sink equal to floor(size) is reported as an unspecified sink.
    pub fn clone_initial_letter(&self) -> longhands::initial_letter::computed_value::T {
        use values::generics::text::InitialLetter;
        if self.gecko.mInitialLetterSize == 0. && self.gecko.mInitialLetterSink == 0 {
            InitialLetter::Normal
        } else if self.gecko.mInitialLetterSize.floor() as i32 == self.gecko.mInitialLetterSink {
            InitialLetter::Specified(self.gecko.mInitialLetterSize, None)
        } else {
            InitialLetter::Specified(self.gecko.mInitialLetterSize, Some(self.gecko.mInitialLetterSink))
        }
    }
    /// Whether the underline bit is set in `text-decoration-line`.
    #[inline]
    pub fn has_underline(&self) -> bool {
        (self.gecko.mTextDecorationLine & (structs::NS_STYLE_TEXT_DECORATION_LINE_UNDERLINE as u8)) != 0
    }
    /// Whether the overline bit is set in `text-decoration-line`.
    #[inline]
    pub fn has_overline(&self) -> bool {
        (self.gecko.mTextDecorationLine & (structs::NS_STYLE_TEXT_DECORATION_LINE_OVERLINE as u8)) != 0
    }
    /// Whether the line-through bit is set in `text-decoration-line`.
    #[inline]
    pub fn has_line_through(&self) -> bool {
        (self.gecko.mTextDecorationLine & (structs::NS_STYLE_TEXT_DECORATION_LINE_LINE_THROUGH as u8)) != 0
    }
</%self:impl_trait>
<%def name="impl_shape_source(ident, gecko_ffi_name)">
    ## Mako helper generating set/clone/copy/reset glue for a shape-source
    ## longhand (clip-path, shape-outside) backed by a Gecko StyleShapeSource.
    /// Sets the shape source, translating the Servo ShapeSource variant into
    /// Gecko's tagged StyleShapeSource representation.
    pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
        use gecko_bindings::bindings::{Gecko_NewBasicShape, Gecko_DestroyShapeSource};
        use gecko_bindings::structs::{StyleBasicShape, StyleBasicShapeType, StyleShapeSourceType};
        use gecko_bindings::structs::{StyleFillRule, StyleGeometryBox, StyleShapeSource};
        use gecko::conversions::basic_shape::set_corners_from_radius;
        use gecko::values::GeckoStyleCoordConvertible;
        use values::generics::basic_shape::{BasicShape, FillRule, ShapeSource};
        let ref mut ${ident} = self.gecko.${gecko_ffi_name};
        // clean up existing struct
        unsafe { Gecko_DestroyShapeSource(${ident}) };
        ${ident}.mType = StyleShapeSourceType::None;
        match v {
            ShapeSource::Url(ref url) => {
                unsafe {
                    bindings::Gecko_StyleShapeSource_SetURLValue(${ident}, url.for_ffi())
                }
            }
            ShapeSource::None => {} // don't change the type
            ShapeSource::Box(reference) => {
                ${ident}.mReferenceBox = reference.into();
                ${ident}.mType = StyleShapeSourceType::Box;
            }
            ShapeSource::Shape(servo_shape, maybe_box) => {
                ${ident}.mReferenceBox = maybe_box.map(Into::into)
                                                  .unwrap_or(StyleGeometryBox::NoBox);
                ${ident}.mType = StyleShapeSourceType::Shape;
                // Allocates a fresh StyleBasicShape of the requested type in
                // the source's union and returns a reference into it.
                fn init_shape(${ident}: &mut StyleShapeSource, ty: StyleBasicShapeType) -> &mut StyleBasicShape {
                    unsafe {
                        // We have to be very careful to avoid a copy here!
                        let ref mut union = ${ident}.__bindgen_anon_1;
                        let shape: &mut *mut StyleBasicShape = union.mBasicShape.as_mut();
                        *shape = Gecko_NewBasicShape(ty);
                        &mut **shape
                    }
                }
                match servo_shape {
                    BasicShape::Inset(inset) => {
                        let shape = init_shape(${ident}, StyleBasicShapeType::Inset);
                        unsafe { shape.mCoordinates.set_len(4) };
                        // set_len() can't call constructors, so the coordinates
                        // can contain any value. set_value() attempts to free
                        // allocated coordinates, so we don't want to feed it
                        // garbage values which it may misinterpret.
                        // Instead, we use leaky_set_value to blindly overwrite
                        // the garbage data without
                        // attempting to clean up.
                        shape.mCoordinates[0].leaky_set_null();
                        inset.rect.0.to_gecko_style_coord(&mut shape.mCoordinates[0]);
                        shape.mCoordinates[1].leaky_set_null();
                        inset.rect.1.to_gecko_style_coord(&mut shape.mCoordinates[1]);
                        shape.mCoordinates[2].leaky_set_null();
                        inset.rect.2.to_gecko_style_coord(&mut shape.mCoordinates[2]);
                        shape.mCoordinates[3].leaky_set_null();
                        inset.rect.3.to_gecko_style_coord(&mut shape.mCoordinates[3]);
                        set_corners_from_radius(inset.round, &mut shape.mRadius);
                    }
                    BasicShape::Circle(circ) => {
                        let shape = init_shape(${ident}, StyleBasicShapeType::Circle);
                        unsafe { shape.mCoordinates.set_len(1) };
                        shape.mCoordinates[0].leaky_set_null();
                        circ.radius.to_gecko_style_coord(&mut shape.mCoordinates[0]);
                        shape.mPosition = circ.position.into();
                    }
                    BasicShape::Ellipse(el) => {
                        let shape = init_shape(${ident}, StyleBasicShapeType::Ellipse);
                        unsafe { shape.mCoordinates.set_len(2) };
                        shape.mCoordinates[0].leaky_set_null();
                        el.semiaxis_x.to_gecko_style_coord(&mut shape.mCoordinates[0]);
                        shape.mCoordinates[1].leaky_set_null();
                        el.semiaxis_y.to_gecko_style_coord(&mut shape.mCoordinates[1]);
                        shape.mPosition = el.position.into();
                    }
                    BasicShape::Polygon(poly) => {
                        let shape = init_shape(${ident}, StyleBasicShapeType::Polygon);
                        unsafe {
                            shape.mCoordinates.set_len(poly.coordinates.len() as u32 * 2);
                        }
                        // Vertices are stored flattened: x at 2*i, y at 2*i+1.
                        for (i, coord) in poly.coordinates.iter().enumerate() {
                            shape.mCoordinates[2 * i].leaky_set_null();
                            shape.mCoordinates[2 * i + 1].leaky_set_null();
                            coord.0.to_gecko_style_coord(&mut shape.mCoordinates[2 * i]);
                            coord.1.to_gecko_style_coord(&mut shape.mCoordinates[2 * i + 1]);
                        }
                        shape.mFillRule = if poly.fill == FillRule::EvenOdd {
                            StyleFillRule::Evenodd
                        } else {
                            StyleFillRule::Nonzero
                        };
                    }
                }
            }
        }
    }
    /// Converts the Gecko shape source back to the Servo computed value via
    /// the From impl on the reference.
    pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
        (&self.gecko.${gecko_ffi_name}).into()
    }
    /// Copies the shape source from `other` through the Gecko binding, which
    /// deep-copies the owned shape data.
    pub fn copy_${ident}_from(&mut self, other: &Self) {
        use gecko_bindings::bindings::Gecko_CopyShapeSourceFrom;
        unsafe {
            Gecko_CopyShapeSourceFrom(&mut self.gecko.${gecko_ffi_name}, &other.gecko.${gecko_ffi_name});
        }
    }
    /// Resets the shape source to the value in `other` (reset == copy).
    pub fn reset_${ident}(&mut self, other: &Self) {
        self.copy_${ident}_from(other)
    }
</%def>
<% skip_svg_longhands = """
mask-mode mask-repeat mask-clip mask-origin mask-composite mask-position-x mask-position-y mask-size mask-image
clip-path
"""
%>
<%self:impl_trait style_struct_name="SVG"
                  skip_longhands="${skip_svg_longhands}">
    ## mask-* longhands share the common image-layer glue; clip-path uses the
    ## shape-source helper defined above.
    <% impl_common_image_layer_properties("mask") %>
    <% impl_simple_image_array_property("mode", "mask", "mMask", "mMaskMode", "SVG") %>
    <% impl_simple_image_array_property("composite", "mask", "mMask", "mComposite", "SVG") %>
    <% impl_shape_source("clip_path", "mClipPath") %>
</%self:impl_trait>
<%self:impl_trait style_struct_name="InheritedSVG"
                  skip_longhands="paint-order stroke-dasharray -moz-context-properties">
    /// Sets `paint-order`; each of the three layers is re-encoded into a
    /// Gecko enum value and packed into a single byte, BITWIDTH bits per
    /// position.
    pub fn set_paint_order(&mut self, v: longhands::paint_order::computed_value::T) {
        use self::longhands::paint_order;
        if v.0 == 0 {
            self.gecko.mPaintOrder = structs::NS_STYLE_PAINT_ORDER_NORMAL as u8;
        } else {
            let mut order = 0;
            for pos in 0..3 {
                let geckoval = match v.bits_at(pos) {
                    paint_order::FILL => structs::NS_STYLE_PAINT_ORDER_FILL as u8,
                    paint_order::STROKE => structs::NS_STYLE_PAINT_ORDER_STROKE as u8,
                    paint_order::MARKERS => structs::NS_STYLE_PAINT_ORDER_MARKERS as u8,
                    _ => unreachable!(),
                };
                order |= geckoval << (pos * structs::NS_STYLE_PAINT_ORDER_BITWIDTH as u8);
            }
            self.gecko.mPaintOrder = order;
        }
    }
    ${impl_simple_copy('paint_order', 'mPaintOrder')}
    /// Inverse of `set_paint_order`: unpacks each BITWIDTH-bit field and
    /// re-encodes it with the Servo bit constants.
    pub fn clone_paint_order(&self) -> longhands::paint_order::computed_value::T {
        use self::longhands::paint_order::{COUNT, FILL, MARKERS, NORMAL, SHIFT, STROKE};
        use self::longhands::paint_order::computed_value::T;
        if self.gecko.mPaintOrder == structs::NS_STYLE_PAINT_ORDER_NORMAL as u8 {
            return T(NORMAL);
        }
        const PAINT_ORDER_BITWIDTH: u8 = structs::NS_STYLE_PAINT_ORDER_BITWIDTH as u8;
        let mask = (1 << PAINT_ORDER_BITWIDTH) - 1;
        let mut order = 0;
        for pos in 0..COUNT {
            let value =
                match (self.gecko.mPaintOrder >> pos * PAINT_ORDER_BITWIDTH & mask) as u32 {
                    structs::NS_STYLE_PAINT_ORDER_FILL => FILL,
                    structs::NS_STYLE_PAINT_ORDER_STROKE => STROKE,
                    structs::NS_STYLE_PAINT_ORDER_MARKERS => MARKERS,
                    _ => unreachable!(),
                };
            order |= value << (pos * SHIFT);
        };
        T(order)
    }
    /// Sets `stroke-dasharray`. The `context-value` keyword is recorded as a
    /// flag bit with an empty array; explicit values clear the flag and fill
    /// the array with length/percentage or number entries.
    pub fn set_stroke_dasharray(&mut self, v: longhands::stroke_dasharray::computed_value::T) {
        use gecko_bindings::structs::nsStyleSVG_STROKE_DASHARRAY_CONTEXT as CONTEXT_VALUE;
        use values::generics::svg::{SVGStrokeDashArray, SvgLengthOrPercentageOrNumber};
        match v {
            SVGStrokeDashArray::Values(v) => {
                let v = v.into_iter();
                self.gecko.mContextFlags &= !CONTEXT_VALUE;
                unsafe {
                    bindings::Gecko_nsStyleSVG_SetDashArrayLength(&mut self.gecko, v.len() as u32);
                }
                for (gecko, servo) in self.gecko.mStrokeDasharray.iter_mut().zip(v) {
                    match servo {
                        SvgLengthOrPercentageOrNumber::LengthOrPercentage(lop) =>
                            gecko.set(lop),
                        SvgLengthOrPercentageOrNumber::Number(num) =>
                            gecko.set_value(CoordDataValue::Factor(num.into())),
                    }
                }
            }
            SVGStrokeDashArray::ContextValue => {
                self.gecko.mContextFlags |= CONTEXT_VALUE;
                unsafe {
                    bindings::Gecko_nsStyleSVG_SetDashArrayLength(&mut self.gecko, 0);
                }
            }
        }
    }
    /// Copies the dash array from `other` and carries over only the
    /// context-value bit of its context flags.
    pub fn copy_stroke_dasharray_from(&mut self, other: &Self) {
        use gecko_bindings::structs::nsStyleSVG_STROKE_DASHARRAY_CONTEXT as CONTEXT_VALUE;
        unsafe {
            bindings::Gecko_nsStyleSVG_CopyDashArray(&mut self.gecko, &other.gecko);
        }
        self.gecko.mContextFlags =
            (self.gecko.mContextFlags & !CONTEXT_VALUE) |
            (other.gecko.mContextFlags & CONTEXT_VALUE);
    }
    /// Resets `stroke-dasharray` to the value in `other` (reset == copy).
    pub fn reset_stroke_dasharray(&mut self, other: &Self) {
        self.copy_stroke_dasharray_from(other)
    }
    /// Inverse of `set_stroke_dasharray`: checks the context flag first,
    /// then decodes each nsStyleCoord entry by unit.
    pub fn clone_stroke_dasharray(&self) -> longhands::stroke_dasharray::computed_value::T {
        use gecko_bindings::structs::nsStyleSVG_STROKE_DASHARRAY_CONTEXT as CONTEXT_VALUE;
        use values::computed::LengthOrPercentage;
        use values::generics::svg::{SVGStrokeDashArray, SvgLengthOrPercentageOrNumber};
        if self.gecko.mContextFlags & CONTEXT_VALUE != 0 {
            debug_assert_eq!(self.gecko.mStrokeDasharray.len(), 0);
            return SVGStrokeDashArray::ContextValue;
        }
        let mut vec = vec![];
        for gecko in self.gecko.mStrokeDasharray.iter() {
            match gecko.as_value() {
                CoordDataValue::Factor(number) =>
                    vec.push(SvgLengthOrPercentageOrNumber::Number(number.into())),
                CoordDataValue::Coord(coord) =>
                    vec.push(SvgLengthOrPercentageOrNumber::LengthOrPercentage(
                        LengthOrPercentage::Length(Au(coord).into()).into())),
                CoordDataValue::Percent(p) =>
                    vec.push(SvgLengthOrPercentageOrNumber::LengthOrPercentage(
                        LengthOrPercentage::Percentage(Percentage(p)).into())),
                CoordDataValue::Calc(calc) =>
                    vec.push(SvgLengthOrPercentageOrNumber::LengthOrPercentage(
                        LengthOrPercentage::Calc(calc.into()).into())),
                _ => unreachable!(),
            }
        }
        SVGStrokeDashArray::Values(vec)
    }
    /// Sets `-moz-context-properties`: stores each identifier as an atom and
    /// additionally records the four known property names as bits in
    /// `mContextPropsBits` for fast lookup.
    #[allow(non_snake_case)]
    pub fn set__moz_context_properties<I>(&mut self, v: I)
        where I: IntoIterator<Item = longhands::_moz_context_properties::computed_value::single_value::T>,
              I::IntoIter: ExactSizeIterator
    {
        let v = v.into_iter();
        unsafe {
            bindings::Gecko_nsStyleSVG_SetContextPropertiesLength(&mut self.gecko, v.len() as u32);
        }
        self.gecko.mContextPropsBits = 0;
        for (gecko, servo) in self.gecko.mContextProps.iter_mut().zip(v) {
            if servo.0 == atom!("fill") {
                self.gecko.mContextPropsBits |= structs::NS_STYLE_CONTEXT_PROPERTY_FILL as u8;
            } else if servo.0 == atom!("stroke") {
                self.gecko.mContextPropsBits |= structs::NS_STYLE_CONTEXT_PROPERTY_STROKE as u8;
            } else if servo.0 == atom!("fill-opacity") {
                self.gecko.mContextPropsBits |= structs::NS_STYLE_CONTEXT_PROPERTY_FILL_OPACITY as u8;
            } else if servo.0 == atom!("stroke-opacity") {
                self.gecko.mContextPropsBits |= structs::NS_STYLE_CONTEXT_PROPERTY_STROKE_OPACITY as u8;
            }
            unsafe { gecko.set_raw_from_addrefed::<structs::nsIAtom>(servo.0.into_addrefed()) }
        }
    }
    /// Copies the context-properties list (atoms and bits) from `other`.
    #[allow(non_snake_case)]
    pub fn copy__moz_context_properties_from(&mut self, other: &Self) {
        unsafe {
            bindings::Gecko_nsStyleSVG_CopyContextProperties(&mut self.gecko, &other.gecko);
        }
    }
    /// Resets `-moz-context-properties` to the value in `other`.
    #[allow(non_snake_case)]
    pub fn reset__moz_context_properties(&mut self, other: &Self) {
        self.copy__moz_context_properties_from(other)
    }
</%self:impl_trait>
<%self:impl_trait style_struct_name="Color"
                  skip_longhands="*">
    /// Sets `color`, converting the computed RGBA value to Gecko's packed
    /// nscolor integer representation.
    pub fn set_color(&mut self, v: longhands::color::computed_value::T) {
        let result = convert_rgba_to_nscolor(&v);
        ${set_gecko_property("mColor", "result")}
    }
    <%call expr="impl_simple_copy('color', 'mColor')"></%call>
    /// Reads `color` back, converting the packed nscolor to RGBA.
    pub fn clone_color(&self) -> longhands::color::computed_value::T {
        let color = ${get_gecko_property("mColor")} as u32;
        convert_nscolor_to_rgba(color)
    }
</%self:impl_trait>
<%self:impl_trait style_struct_name="Pointing"
skip_longhands="cursor caret-color">
    /// Sets `cursor`: maps the keyword to the Gecko cursor constant, then
    /// stores the image list (URL plus optional hotspot per entry).
    pub fn set_cursor(&mut self, v: longhands::cursor::computed_value::T) {
        use properties::longhands::cursor::computed_value::Keyword;
        use style_traits::cursor::Cursor;
        self.gecko.mCursor = match v.keyword {
            Keyword::Auto => structs::NS_STYLE_CURSOR_AUTO,
            Keyword::Cursor(cursor) => match cursor {
                Cursor::None => structs::NS_STYLE_CURSOR_NONE,
                Cursor::Default => structs::NS_STYLE_CURSOR_DEFAULT,
                Cursor::Pointer => structs::NS_STYLE_CURSOR_POINTER,
                Cursor::ContextMenu => structs::NS_STYLE_CURSOR_CONTEXT_MENU,
                Cursor::Help => structs::NS_STYLE_CURSOR_HELP,
                Cursor::Progress => structs::NS_STYLE_CURSOR_SPINNING,
                Cursor::Wait => structs::NS_STYLE_CURSOR_WAIT,
                Cursor::Cell => structs::NS_STYLE_CURSOR_CELL,
                Cursor::Crosshair => structs::NS_STYLE_CURSOR_CROSSHAIR,
                Cursor::Text => structs::NS_STYLE_CURSOR_TEXT,
                Cursor::VerticalText => structs::NS_STYLE_CURSOR_VERTICAL_TEXT,
                Cursor::Alias => structs::NS_STYLE_CURSOR_ALIAS,
                Cursor::Copy => structs::NS_STYLE_CURSOR_COPY,
                Cursor::Move => structs::NS_STYLE_CURSOR_MOVE,
                Cursor::NoDrop => structs::NS_STYLE_CURSOR_NO_DROP,
                Cursor::NotAllowed => structs::NS_STYLE_CURSOR_NOT_ALLOWED,
                Cursor::Grab => structs::NS_STYLE_CURSOR_GRAB,
                Cursor::Grabbing => structs::NS_STYLE_CURSOR_GRABBING,
                Cursor::EResize => structs::NS_STYLE_CURSOR_E_RESIZE,
                Cursor::NResize => structs::NS_STYLE_CURSOR_N_RESIZE,
                Cursor::NeResize => structs::NS_STYLE_CURSOR_NE_RESIZE,
                Cursor::NwResize => structs::NS_STYLE_CURSOR_NW_RESIZE,
                Cursor::SResize => structs::NS_STYLE_CURSOR_S_RESIZE,
                Cursor::SeResize => structs::NS_STYLE_CURSOR_SE_RESIZE,
                Cursor::SwResize => structs::NS_STYLE_CURSOR_SW_RESIZE,
                Cursor::WResize => structs::NS_STYLE_CURSOR_W_RESIZE,
                Cursor::EwResize => structs::NS_STYLE_CURSOR_EW_RESIZE,
                Cursor::NsResize => structs::NS_STYLE_CURSOR_NS_RESIZE,
                Cursor::NeswResize => structs::NS_STYLE_CURSOR_NESW_RESIZE,
                Cursor::NwseResize => structs::NS_STYLE_CURSOR_NWSE_RESIZE,
                Cursor::ColResize => structs::NS_STYLE_CURSOR_COL_RESIZE,
                Cursor::RowResize => structs::NS_STYLE_CURSOR_ROW_RESIZE,
                Cursor::AllScroll => structs::NS_STYLE_CURSOR_ALL_SCROLL,
                Cursor::ZoomIn => structs::NS_STYLE_CURSOR_ZOOM_IN,
                Cursor::ZoomOut => structs::NS_STYLE_CURSOR_ZOOM_OUT,
                // note: the following properties are gecko-only.
                Cursor::MozGrab => structs::NS_STYLE_CURSOR_GRAB,
                Cursor::MozGrabbing => structs::NS_STYLE_CURSOR_GRABBING,
                Cursor::MozZoomIn => structs::NS_STYLE_CURSOR_ZOOM_IN,
                Cursor::MozZoomOut => structs::NS_STYLE_CURSOR_ZOOM_OUT,
            }
        } as u8;
        unsafe {
            Gecko_SetCursorArrayLength(&mut self.gecko, v.images.len());
        }
        for i in 0..v.images.len() {
            unsafe {
                Gecko_SetCursorImageValue(&mut self.gecko.mCursorImages[i],
                                          v.images[i].url.clone().image_value.unwrap().get());
            }
            // We don't need to record this struct as uncacheable, like when setting
            // background-image to a url() value, since only properties in reset structs
            // are re-used from the applicable declaration cache, and the Pointing struct
            // is an inherited struct.
            match v.images[i].hotspot {
                Some((x, y)) => {
                    self.gecko.mCursorImages[i].mHaveHotspot = true;
                    self.gecko.mCursorImages[i].mHotspotX = x;
                    self.gecko.mCursorImages[i].mHotspotY = y;
                },
                _ => {
                    self.gecko.mCursorImages[i].mHaveHotspot = false;
                }
            }
        }
    }
pub fn copy_cursor_from(&mut self, other: &Self) {
self.gecko.mCursor = other.gecko.mCursor;
unsafe {
Gecko_CopyCursorArrayFrom(&mut self.gecko, &other.gecko);
}
}
pub fn reset_cursor(&mut self, other: &Self) {
self.copy_cursor_from(other)
}
pub fn clone_cursor(&self) -> longhands::cursor::computed_value::T {
use properties::longhands::cursor::computed_value::{Keyword, Image};
use style_traits::cursor::Cursor;
use values::specified::url::SpecifiedUrl;
let keyword = match self.gecko.mCursor as u32 {
structs::NS_STYLE_CURSOR_AUTO => Keyword::Auto,
structs::NS_STYLE_CURSOR_NONE => Keyword::Cursor(Cursor::None),
structs::NS_STYLE_CURSOR_DEFAULT => Keyword::Cursor(Cursor::Default),
structs::NS_STYLE_CURSOR_POINTER => Keyword::Cursor(Cursor::Pointer),
structs::NS_STYLE_CURSOR_CONTEXT_MENU => Keyword::Cursor(Cursor::ContextMenu),
structs::NS_STYLE_CURSOR_HELP => Keyword::Cursor(Cursor::Help),
structs::NS_STYLE_CURSOR_SPINNING => Keyword::Cursor(Cursor::Progress),
structs::NS_STYLE_CURSOR_WAIT => Keyword::Cursor(Cursor::Wait),
structs::NS_STYLE_CURSOR_CELL => Keyword::Cursor(Cursor::Cell),
structs::NS_STYLE_CURSOR_CROSSHAIR => Keyword::Cursor(Cursor::Crosshair),
structs::NS_STYLE_CURSOR_TEXT => Keyword::Cursor(Cursor::Text),
structs::NS_STYLE_CURSOR_VERTICAL_TEXT => Keyword::Cursor(Cursor::VerticalText),
structs::NS_STYLE_CURSOR_ALIAS => Keyword::Cursor(Cursor::Alias),
structs::NS_STYLE_CURSOR_COPY => Keyword::Cursor(Cursor::Copy),
structs::NS_STYLE_CURSOR_MOVE => Keyword::Cursor(Cursor::Move),
structs::NS_STYLE_CURSOR_NO_DROP => Keyword::Cursor(Cursor::NoDrop),
structs::NS_STYLE_CURSOR_NOT_ALLOWED => Keyword::Cursor(Cursor::NotAllowed),
structs::NS_STYLE_CURSOR_GRAB => Keyword::Cursor(Cursor::Grab),
structs::NS_STYLE_CURSOR_GRABBING => Keyword::Cursor(Cursor::Grabbing),
structs::NS_STYLE_CURSOR_E_RESIZE => Keyword::Cursor(Cursor::EResize),
structs::NS_STYLE_CURSOR_N_RESIZE => Keyword::Cursor(Cursor::NResize),
structs::NS_STYLE_CURSOR_NE_RESIZE => Keyword::Cursor(Cursor::NeResize),
structs::NS_STYLE_CURSOR_NW_RESIZE => Keyword::Cursor(Cursor::NwResize),
structs::NS_STYLE_CURSOR_S_RESIZE => Keyword::Cursor(Cursor::SResize),
structs::NS_STYLE_CURSOR_SE_RESIZE => Keyword::Cursor(Cursor::SeResize),
structs::NS_STYLE_CURSOR_SW_RESIZE => Keyword::Cursor(Cursor::SwResize),
structs::NS_STYLE_CURSOR_W_RESIZE => Keyword::Cursor(Cursor::WResize),
structs::NS_STYLE_CURSOR_EW_RESIZE => Keyword::Cursor(Cursor::EwResize),
structs::NS_STYLE_CURSOR_NS_RESIZE => Keyword::Cursor(Cursor::NsResize),
structs::NS_STYLE_CURSOR_NESW_RESIZE => Keyword::Cursor(Cursor::NeswResize),
structs::NS_STYLE_CURSOR_NWSE_RESIZE => Keyword::Cursor(Cursor::NwseResize),
structs::NS_STYLE_CURSOR_COL_RESIZE => Keyword::Cursor(Cursor::ColResize),
structs::NS_STYLE_CURSOR_ROW_RESIZE => Keyword::Cursor(Cursor::RowResize),
structs::NS_STYLE_CURSOR_ALL_SCROLL => Keyword::Cursor(Cursor::AllScroll),
structs::NS_STYLE_CURSOR_ZOOM_IN => Keyword::Cursor(Cursor::ZoomIn),
structs::NS_STYLE_CURSOR_ZOOM_OUT => Keyword::Cursor(Cursor::ZoomOut),
x => panic!("Found unexpected value in style struct for cursor property: {:?}", x),
};
let images = self.gecko.mCursorImages.iter().map(|gecko_cursor_image| {
let url = unsafe {
let gecko_image_request = gecko_cursor_image.mImage.mRawPtr.as_ref().unwrap();
SpecifiedUrl::from_image_request(&gecko_image_request)
.expect("mCursorImages.mImage could not convert to SpecifiedUrl")
};
let hotspot =
if gecko_cursor_image.mHaveHotspot {
Some((gecko_cursor_image.mHotspotX, gecko_cursor_image.mHotspotY))
} else {
None
};
Image { url, hotspot }
}).collect();
longhands::cursor::computed_value::T { images, keyword }
}
<%call expr="impl_color('caret_color', 'mCaretColor')"></%call>
</%self:impl_trait>
<%self:impl_trait style_struct_name="Column"
skip_longhands="column-count column-rule-width">
#[allow(unused_unsafe)]
pub fn set_column_count(&mut self, v: longhands::column_count::computed_value::T) {
use gecko_bindings::structs::{NS_STYLE_COLUMN_COUNT_AUTO, nsStyleColumn_kMaxColumnCount};
self.gecko.mColumnCount = match v {
Either::First(integer) => unsafe {
cmp::min(integer.0 as u32, nsStyleColumn_kMaxColumnCount)
},
Either::Second(Auto) => NS_STYLE_COLUMN_COUNT_AUTO
};
}
${impl_simple_copy('column_count', 'mColumnCount')}
pub fn clone_column_count(&self) -> longhands::column_count::computed_value::T {
use gecko_bindings::structs::{NS_STYLE_COLUMN_COUNT_AUTO, nsStyleColumn_kMaxColumnCount};
if self.gecko.mColumnCount != NS_STYLE_COLUMN_COUNT_AUTO {
debug_assert!(self.gecko.mColumnCount >= 1 &&
self.gecko.mColumnCount <= nsStyleColumn_kMaxColumnCount);
Either::First((self.gecko.mColumnCount as i32).into())
} else {
Either::Second(Auto)
}
}
<% impl_non_negative_length("column_rule_width", "mColumnRuleWidth",
round_to_pixels=True) %>
</%self:impl_trait>
<%self:impl_trait style_struct_name="Counters"
skip_longhands="content counter-increment counter-reset">
pub fn ineffective_content_property(&self) -> bool {
self.gecko.mContents.is_empty()
}
pub fn set_content(&mut self, v: longhands::content::computed_value::T, device: &Device) {
use properties::longhands::content::computed_value::T;
use properties::longhands::content::computed_value::ContentItem;
use values::generics::CounterStyleOrNone;
use gecko_bindings::structs::nsStyleContentData;
use gecko_bindings::structs::nsStyleContentType;
use gecko_bindings::structs::nsStyleContentType::*;
use gecko_bindings::bindings::Gecko_ClearAndResizeStyleContents;
// Converts a string as utf16, and returns an owned, zero-terminated raw buffer.
fn as_utf16_and_forget(s: &str) -> *mut u16 {
use std::mem;
let mut vec = s.encode_utf16().collect::<Vec<_>>();
vec.push(0u16);
let ptr = vec.as_mut_ptr();
mem::forget(vec);
ptr
}
fn set_counter_function(data: &mut nsStyleContentData,
content_type: nsStyleContentType,
name: &str, sep: &str,
style: CounterStyleOrNone, device: &Device) {
debug_assert!(content_type == eStyleContentType_Counter ||
content_type == eStyleContentType_Counters);
let counter_func = unsafe {
bindings::Gecko_SetCounterFunction(data, content_type).as_mut().unwrap()
};
counter_func.mIdent.assign_utf8(name);
if content_type == eStyleContentType_Counters {
counter_func.mSeparator.assign_utf8(sep);
}
style.to_gecko_value(&mut counter_func.mCounterStyle, device);
}
match v {
T::None |
T::Normal => {
// Ensure destructors run, otherwise we could leak.
if !self.gecko.mContents.is_empty() {
unsafe {
Gecko_ClearAndResizeStyleContents(&mut self.gecko, 0);
}
}
},
T::MozAltContent => {
unsafe {
Gecko_ClearAndResizeStyleContents(&mut self.gecko, 1);
*self.gecko.mContents[0].mContent.mString.as_mut() = ptr::null_mut();
}
self.gecko.mContents[0].mType = eStyleContentType_AltContent;
},
T::Items(items) => {
unsafe {
Gecko_ClearAndResizeStyleContents(&mut self.gecko,
items.len() as u32);
}
for (i, item) in items.into_iter().enumerate() {
// NB: Gecko compares the mString value if type is not image
// or URI independently of whatever gets there. In the quote
// cases, they set it to null, so do the same here.
unsafe {
*self.gecko.mContents[i].mContent.mString.as_mut() = ptr::null_mut();
}
match item {
ContentItem::String(value) => {
self.gecko.mContents[i].mType = eStyleContentType_String;
unsafe {
// NB: we share allocators, so doing this is fine.
*self.gecko.mContents[i].mContent.mString.as_mut() =
as_utf16_and_forget(&value);
}
}
ContentItem::Attr(attr) => {
self.gecko.mContents[i].mType = eStyleContentType_Attr;
let s = if let Some((_, ns)) = attr.namespace {
format!("{}|{}", ns, attr.attribute)
} else {
attr.attribute.into()
};
unsafe {
// NB: we share allocators, so doing this is fine.
*self.gecko.mContents[i].mContent.mString.as_mut() =
as_utf16_and_forget(&s);
}
}
ContentItem::OpenQuote
=> self.gecko.mContents[i].mType = eStyleContentType_OpenQuote,
ContentItem::CloseQuote
=> self.gecko.mContents[i].mType = eStyleContentType_CloseQuote,
ContentItem::NoOpenQuote
=> self.gecko.mContents[i].mType = eStyleContentType_NoOpenQuote,
ContentItem::NoCloseQuote
=> self.gecko.mContents[i].mType = eStyleContentType_NoCloseQuote,
ContentItem::Counter(name, style) => {
set_counter_function(&mut self.gecko.mContents[i],
eStyleContentType_Counter, &name, "", style, device);
}
ContentItem::Counters(name, sep, style) => {
set_counter_function(&mut self.gecko.mContents[i],
eStyleContentType_Counters, &name, &sep, style, device);
}
ContentItem::Url(ref url) => {
unsafe {
bindings::Gecko_SetContentDataImageValue(&mut self.gecko.mContents[i],
url.image_value.clone().unwrap().get())
}
}
}
}
}
}
}
pub fn copy_content_from(&mut self, other: &Self) {
use gecko_bindings::bindings::Gecko_CopyStyleContentsFrom;
unsafe {
Gecko_CopyStyleContentsFrom(&mut self.gecko, &other.gecko)
}
}
pub fn reset_content(&mut self, other: &Self) {
self.copy_content_from(other)
}
pub fn clone_content(&self) -> longhands::content::computed_value::T {
use gecko::conversions::string_from_chars_pointer;
use gecko_bindings::structs::nsStyleContentType::*;
use properties::longhands::content::computed_value::{T, ContentItem};
use values::Either;
use values::generics::CounterStyleOrNone;
use values::specified::url::SpecifiedUrl;
use values::specified::Attr;
if self.gecko.mContents.is_empty() {
return T::Normal;
}
if self.gecko.mContents.len() == 1 &&
self.gecko.mContents[0].mType == eStyleContentType_AltContent {
return T::MozAltContent;
}
T::Items(
self.gecko.mContents.iter().map(|gecko_content| {
match gecko_content.mType {
eStyleContentType_OpenQuote => ContentItem::OpenQuote,
eStyleContentType_CloseQuote => ContentItem::CloseQuote,
eStyleContentType_NoOpenQuote => ContentItem::NoOpenQuote,
eStyleContentType_NoCloseQuote => ContentItem::NoCloseQuote,
eStyleContentType_String => {
let gecko_chars = unsafe { gecko_content.mContent.mString.as_ref() };
let string = unsafe { string_from_chars_pointer(*gecko_chars) };
ContentItem::String(string)
},
eStyleContentType_Attr => {
let gecko_chars = unsafe { gecko_content.mContent.mString.as_ref() };
let string = unsafe { string_from_chars_pointer(*gecko_chars) };
let (namespace, attribute) =
match string.find('|') {
None => (None, string),
Some(index) => {
let (_, val) = string.split_at(index);
// FIXME: We should give NamespaceId as well to make Attr
// struct. However, there is no field for it in Gecko.
debug_assert!(false, "Attr with namespace does not support yet");
(None, val.to_string())
}
};
ContentItem::Attr(Attr { namespace, attribute })
},
eStyleContentType_Counter | eStyleContentType_Counters => {
let gecko_function =
unsafe { &**gecko_content.mContent.mCounters.as_ref() };
let ident = gecko_function.mIdent.to_string();
let style =
CounterStyleOrNone::from_gecko_value(&gecko_function.mCounterStyle);
let style = match style {
Either::First(counter_style) => counter_style,
Either::Second(_) =>
unreachable!("counter function shouldn't have single string type"),
};
if gecko_content.mType == eStyleContentType_Counter {
ContentItem::Counter(ident, style)
} else {
let separator = gecko_function.mSeparator.to_string();
ContentItem::Counters(ident, separator, style)
}
},
eStyleContentType_Image => {
unsafe {
let gecko_image_request =
unsafe { &**gecko_content.mContent.mImage.as_ref() };
ContentItem::Url(
SpecifiedUrl::from_image_request(gecko_image_request)
.expect("mContent could not convert to SpecifiedUrl")
)
}
},
x => panic!("Found unexpected value in style struct for content property: {:?}", x),
}
}).collect()
)
}
% for counter_property in ["Increment", "Reset"]:
pub fn set_counter_${counter_property.lower()}(&mut self, v: longhands::counter_increment::computed_value::T) {
unsafe {
bindings::Gecko_ClearAndResizeCounter${counter_property}s(&mut self.gecko,
v.0.len() as u32);
for (i, (name, value)) in v.0.into_iter().enumerate() {
self.gecko.m${counter_property}s[i].mCounter.assign(name.0.as_slice());
self.gecko.m${counter_property}s[i].mValue = value;
}
}
}
pub fn copy_counter_${counter_property.lower()}_from(&mut self, other: &Self) {
unsafe {
bindings::Gecko_CopyCounter${counter_property}sFrom(&mut self.gecko, &other.gecko)
}
}
pub fn reset_counter_${counter_property.lower()}(&mut self, other: &Self) {
self.copy_counter_${counter_property.lower()}_from(other)
}
pub fn clone_counter_${counter_property.lower()}(&self) -> longhands::counter_increment::computed_value::T {
use values::CustomIdent;
use gecko_string_cache::Atom;
longhands::counter_increment::computed_value::T(
self.gecko.m${counter_property}s.iter().map(|ref gecko_counter| {
(CustomIdent(Atom::from(gecko_counter.mCounter.to_string())), gecko_counter.mValue)
}).collect()
)
}
% endfor
</%self:impl_trait>
<%self:impl_trait style_struct_name="UI" skip_longhands="-moz-force-broken-image-icon">
${impl_simple_type_with_conversion("_moz_force_broken_image_icon", "mForceBrokenImageIcon")}
</%self:impl_trait>
<%self:impl_trait style_struct_name="XUL"
skip_longhands="-moz-box-ordinal-group">
#[allow(non_snake_case)]
pub fn set__moz_box_ordinal_group(&mut self, v: i32) {
self.gecko.mBoxOrdinal = v as u32;
}
${impl_simple_copy("_moz_box_ordinal_group", "mBoxOrdinal")}
#[allow(non_snake_case)]
pub fn clone__moz_box_ordinal_group(&self) -> i32 {
self.gecko.mBoxOrdinal as i32
}
</%self:impl_trait>
% for style_struct in data.style_structs:
${declare_style_struct(style_struct)}
${impl_style_struct(style_struct)}
% endfor<|fim▁end|>
|
if self.gecko.mAnimations[index].mIterationCount.is_infinite() {
AnimationIterationCount::Infinite
} else {
|
<|file_name|>solutions.py<|end_file_name|><|fim▁begin|>__problem_title__ = "Maximum Integer Partition Product"
__problem_url___ = "https://projecteuler.net/problem=374"
__problem_description__ = "An integer partition of a number is a way of writing as a sum of " \
"positive integers. Partitions that differ only in the order of their " \
"summands are considered the same. A partition of into is a partition " \
"of in which every part occurs at most once. The partitions of 5 into " \
"distinct parts are: 5, 4+1 and 3+2. Let f( ) be the maximum product " \
"of the parts of any such partition of into distinct parts and let m( " \
") be the number of elements of any such partition of with that " \
"product. So f(5)=6 and m(5)=2. For =10 the partition with the largest " \
"product is 10=2+3+5, which gives f(10)=30 and m(10)=3. And their " \
"product, f(10)·m(10) = 30·3 = 90 It can be verified that ∑f( )·m( ) " \
"for 1 ≤ ≤ 100 = 1683550844462. Find ∑f( )·m( ) for 1 ≤ ≤ 10 . Give " \
"your answer modulo 982451653, the 50 millionth prime."
import timeit
class Solution():
@staticmethod
def solution1():
pass
@staticmethod
def time_solutions():
setup = 'from __main__ import Solution'
print('Solution 1:', timeit.timeit('Solution.solution1()', setup=setup, number=1))
if __name__ == '__main__':<|fim▁hole|> s.time_solutions()<|fim▁end|>
|
s = Solution()
print(s.solution1())
|
<|file_name|>turn.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Nathan Harmon
# https://github.com/nharmon/bogie-five
#
# Turn program
#
from Motion import *
import sys
if __name__ == '__main__':<|fim▁hole|> if len(sys.argv) < 2:
exit("Must specify target image file")
try:
drive = Drive()
drive.turn(float(sys.argv[1]))
except:
exit("Specify direction change")<|fim▁end|>
| |
<|file_name|>term_weight.rs<|end_file_name|><|fim▁begin|>use Term;
use query::Weight;
use core::SegmentReader;
use query::Scorer;
use postings::SegmentPostingsOption;<|fim▁hole|>use Result;
pub struct TermWeight {
pub num_docs: u32,
pub doc_freq: u32,
pub term: Term,
pub segment_postings_options: SegmentPostingsOption,
}
impl Weight for TermWeight {
fn scorer<'a>(&'a self, reader: &'a SegmentReader) -> Result<Box<Scorer + 'a>> {
let specialized_scorer = try!(self.specialized_scorer(reader));
Ok(box specialized_scorer)
}
}
impl TermWeight {
fn idf(&self) -> f32 {
1.0 + (self.num_docs as f32 / (self.doc_freq as f32 + 1.0)).ln()
}
pub fn specialized_scorer<'a>(&'a self,
reader: &'a SegmentReader)
-> Result<TermScorer<SegmentPostings<'a>>> {
let field = self.term.field();
let fieldnorm_reader_opt = reader.get_fieldnorms_reader(field);
Ok(reader
.read_postings(&self.term, self.segment_postings_options)
.map(|segment_postings| {
TermScorer {
idf: self.idf(),
fieldnorm_reader_opt: fieldnorm_reader_opt,
postings: segment_postings,
}
})
.unwrap_or(TermScorer {
idf: 1f32,
fieldnorm_reader_opt: None,
postings: SegmentPostings::empty(),
}))
}
}<|fim▁end|>
|
use postings::SegmentPostings;
use super::term_scorer::TermScorer;
|
<|file_name|>feed_save.js<|end_file_name|><|fim▁begin|>// Render home page and json data
<|fim▁hole|> // Import json data
var logData = require("../data/log.json");
// Set ateFoodGroup to numerical values
var ateGrains;
if(req.body.ateGrains == null){ateGrains = 0;}
else ateGrains = 1;
var ateFruit;
if(req.body.ateFruit == null){ateFruit = 0;}
else ateFruit = 1;
var ateDairy;
if(req.body.ateDairy == null){ateDairy = 0;}
else ateDairy = 1;
var ateProtein;
if(req.body.ateProtein == null){ateProtein = 0;}
else ateProtein = 1;
var ateVeggies;
if(req.body.ateVeggies == null){ateVeggies = 0;}
else ateVeggies = 1;
// Creat new JSON and append to log
var newLog = {
"id": logData.length + 1,
"name": req.body.food,
"dd": req.body.dd,
"mm": req.body.mm,
"year": req.body.yy,
"cal": parseInt(req.body.cal),
"mood": parseInt(req.body.mood),
"info": req.body.info,
"image": "public/images/food/oj.jpg",
"ateGrains": ateGrains,
"ateFruit": ateFruit,
"ateVeggies": ateVeggies,
"ateProtein": ateProtein,
"ateDairy": ateDairy
}
logData.push(newLog);
var newData = JSON.stringify(logData);
var fs = require('fs');
fs.writeFile("data/log.json",newData, function(err) {
if(err) {
return console.log(err);
}
console.log("The new log was saved!");
res.redirect('/vet');
});
};<|fim▁end|>
|
exports.view = function(req, res){
|
<|file_name|>logging.py<|end_file_name|><|fim▁begin|>import logging.handlers
import os
_pabotlog = logging.getLogger('PABot')
_pabotlog.setLevel(logging.DEBUG)
_logPath = os.path.abspath("./logging/pabot.log")
_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(name)s - %(message)s')
_consoleStreamHandler = logging.StreamHandler()
_consoleStreamHandler.setLevel(logging.DEBUG)
_consoleStreamHandler.setFormatter(_formatter)
_symLogRotFileHandler = logging.handlers.RotatingFileHandler(_logPath, maxBytes=2000000, backupCount=5)
_symLogRotFileHandler.setLevel(logging.DEBUG)
_symLogRotFileHandler.setFormatter(_formatter)
_pabotlog.addHandler(_consoleStreamHandler)
_pabotlog.addHandler(_symLogRotFileHandler)
def LogPABotMessage(message):
_pabotlog.info(message)<|fim▁hole|><|fim▁end|>
|
def LogPABotError(message):
_pabotlog.error(message)
|
<|file_name|>CUtils.java<|end_file_name|><|fim▁begin|>/*
*
*/
package org.utilities;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
// TODO: Auto-generated Javadoc
/**
* The Class CUtils.
*/
public class CUtils {<|fim▁hole|>
/**
* Work.
*
* @param task
* the task
* @return the object
*/
public static Object work(Callable<?> task) {
Future<?> futureTask = es.submit(task);
return futureTask;
}
/**
* Gets the random string.
*
* @param rndSeed the rnd seed
* @return the random string
*/
public static String getRandomString(double rndSeed) {
MessageDigest m;
try {
m = MessageDigest.getInstance("MD5");
byte[] data = ("" + rndSeed).getBytes();
m.update(data, 0, data.length);
BigInteger i = new BigInteger(1, m.digest());
return String.format("%1$032X", i);
} catch (NoSuchAlgorithmException e) {
}
return "" + rndSeed;
}
/**
* Gets the random number.
*
* @param s the s
* @return the random number
* @throws Exception the exception
*/
public static int getRandomNumber(String s) throws Exception {
MessageDigest m;
try {
m = MessageDigest.getInstance("MD5");
byte[] data = s.getBytes();
m.update(data, 0, data.length);
BigInteger i = new BigInteger(1, m.digest());
return i.intValue();
} catch (NoSuchAlgorithmException e) {
}
throw new Exception("Cannot generate random number");
}
/** The Constant es. */
private final static ExecutorService es = Executors.newCachedThreadPool();
/**
* Instantiates a new c utils.
*/
private CUtils() {
}
}<|fim▁end|>
| |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(asm)]
#![feature(const_fn)]
#![feature(proc_macro)]
#![no_std]
extern crate cast;
extern crate cortex_m;
extern crate cortex_m_rtfm as rtfm;
extern crate blue_pill;
extern crate numtoa;
use blue_pill::stm32f103xx::Interrupt;
use cortex_m::peripheral::SystClkSource;
use rtfm::{app, Threshold};
mod font5x7;
mod i2c;
mod mma8652fc;
mod ssd1306;
mod state;<|fim▁hole|>
const OLED_ADDR: u8 = 0x3c;
app! {
device: blue_pill::stm32f103xx,
resources: {
static TICKS: u32 = 0;
static STATE: StateMachine = StateMachine::new();
},
tasks: {
SYS_TICK: {
path: tick,
resources: [TICKS],
},
EXTI0: {
path: update_ui,
resources: [I2C1, STATE],
},
EXTI9_5: {
path: exti9_5,
resources: [I2C1, STATE, GPIOA, GPIOB, EXTI],
},
},
}
fn init(p: init::Peripherals, _r: init::Resources) {
// 48Mhz
p.FLASH.acr.modify(
|_, w| w.prftbe().enabled().latency().one(),
);
p.RCC.cfgr.modify(|_, w| unsafe { w.bits(0x0068840A) });
p.RCC.cr.modify(
|_, w| w.pllon().set_bit().hsion().set_bit(),
);
while p.RCC.cr.read().pllrdy().bit_is_clear() {}
while p.RCC.cr.read().hsirdy().bit_is_clear() {}
p.RCC.apb2enr.modify(|_, w| {
w.iopaen().enabled().iopben().enabled().afioen().enabled()
});
p.AFIO.mapr.modify(|_, w| unsafe {
w.swj_cfg().bits(2).i2c1_remap().clear_bit()
});
p.RCC.apb1enr.modify(|_, w| w.i2c1en().enabled());
p.I2C1.cr1.write(|w| w.pe().clear_bit());
p.GPIOA.crl.modify(|_, w| w.mode6().input());
p.GPIOA.crh.modify(|_, w| {
w.mode8().output50().cnf8().push().mode9().input()
});
p.GPIOA.bsrr.write(|w| {
w.bs6().set_bit().bs8().set_bit().bs9().set_bit()
});
p.GPIOB.crl.modify(|_, w| {
w.mode5()
.input()
.mode6()
.output50()
.cnf6()
.alt_open()
.mode7()
.output50()
.cnf7()
.alt_open()
});
p.GPIOB.bsrr.write(|w| w.bs5().set_bit());
p.AFIO.exticr2.modify(|_, w| unsafe {
w.exti5().bits(1).exti6().bits(0)
});
p.AFIO.exticr3.modify(|_, w| unsafe { w.exti9().bits(0) });
p.EXTI.imr.modify(|_, w| {
w.mr5().set_bit().mr6().set_bit().mr9().set_bit()
});
p.EXTI.rtsr.modify(|_, w| {
w.tr5().set_bit().tr6().set_bit().tr9().set_bit()
});
p.EXTI.ftsr.modify(|_, w| {
w.tr5().set_bit().tr6().set_bit().tr9().set_bit()
});
p.I2C1.cr2.modify(|_, w| unsafe { w.freq().bits(24) });
p.I2C1.cr1.modify(|_, w| w.pe().clear_bit());
p.I2C1.trise.modify(
|_, w| unsafe { w.trise().bits(24 + 1) },
);
p.I2C1.ccr.modify(|_, w| unsafe {
w.f_s().clear_bit().duty().clear_bit().ccr().bits(120)
});
p.I2C1.cr1.modify(|_, w| {
w.nostretch()
.clear_bit()
.ack()
.set_bit()
.smbus()
.clear_bit()
});
p.I2C1.cr1.write(|w| w.pe().set_bit());
p.I2C1.oar1.write(|w| unsafe {
w.addmode()
.clear_bit()
.add0()
.clear_bit()
.add7()
.bits(0)
.add10()
.bits(0)
});
let oled = SSD1306(OLED_ADDR, &p.I2C1);
oled.init();
oled.print(0, 0, " ");
oled.print(0, 1, " ");
let accel = MMA8652FC(&p.I2C1);
accel.init();
p.SYST.set_clock_source(SystClkSource::Core);
p.SYST.set_reload(48_000_000);
p.SYST.enable_interrupt();
p.SYST.enable_counter();
}
fn idle() -> ! {
rtfm::set_pending(Interrupt::EXTI0);
loop {
rtfm::wfi();
}
}
fn tick(_t: &mut Threshold, r: SYS_TICK::Resources) {
**r.TICKS += 1;
}
fn update_ui(_t: &mut Threshold, r: EXTI0::Resources) {
let i2c1 = &**r.I2C1;
let oled = SSD1306(OLED_ADDR, &i2c1);
let am = MMA8652FC(&i2c1);
r.STATE.update_accel(am.accel());
r.STATE.update_state();
oled.print(0, 0, "X ");
oled.print(8, 0, "Y ");
oled.print(0, 1, "Z ");
let accel = r.STATE.get_accel();
oled.print_number(2, 0, accel.x);
oled.print_number(10, 0, accel.y);
oled.print_number(2, 1, accel.z);
/*
match r.STATE.current_state() {
State::Idle => {
oled.print(0, 0, " IDLE ");
oled.print(0, 1, " ");
}
State::Soldering => {
oled.print(0, 0, " SOLDERING ");
oled.print(0, 1, " ");
}
State::Cooling => {
oled.print(0, 0, " COOLING ");
oled.print(0, 1, " ");
}
State::Sleep => {
oled.print(0, 0, " zZzZzZ ");
oled.print(0, 1, " ");
}
State::TemperatureControl => {
oled.print(0, 0, " < 200 C > ");
}
State::Thermometer => {
oled.print(0, 0, " 200.1 C ");
}
State::Config(page) => {
match page {
ConfigPage::Save => {
oled.print(0, 0, "Save and Reset? ");
}
}
}
}
*/
}
fn exti9_5(_t: &mut Threshold, r: EXTI9_5::Resources) {
let exti = &**r.EXTI;
let gpioa = &**r.GPIOA;
let gpiob = &**r.GPIOB;
let i2c1 = &**r.I2C1;
// Button A
if exti.pr.read().pr6().bit_is_set() {
if gpioa.idr.read().idr6().bit_is_clear() {
r.STATE.update_keys(Keys::A);
} else {
r.STATE.update_keys(Keys::None);
};
exti.pr.write(|w| w.pr6().set_bit());
// Button B
} else if exti.pr.read().pr9().bit_is_set() {
if gpioa.idr.read().idr9().bit_is_clear() {
r.STATE.update_keys(Keys::B)
} else {
r.STATE.update_keys(Keys::None)
};
exti.pr.write(|w| w.pr9().set_bit());
// Movement
// interrupt doesn't fire
} else if exti.pr.read().pr5().bit_is_set() {
if gpiob.idr.read().idr5().bit_is_clear() {
let am = MMA8652FC(&i2c1);
r.STATE.update_accel(am.accel());
}
exti.pr.write(|w| w.pr5().set_bit());
}
rtfm::set_pending(Interrupt::EXTI0);
}<|fim▁end|>
|
use mma8652fc::MMA8652FC;
use ssd1306::SSD1306;
use state::{ConfigPage, Keys, State, StateMachine};
|
<|file_name|>compare_the_triplets.py<|end_file_name|><|fim▁begin|>#!/bin/python3
import sys<|fim▁hole|> alist = [a0, a1, a2]
blist = [b0, b1, b2]
clist = zip(alist, blist)
for pair in clist:
if pair[0] > pair[1]:
score[0] += 1
elif pair[0] < pair[1]:
score[1] += 1
else:
continue
return score
a0, a1, a2 = input().strip().split(' ')
a0, a1, a2 = [int(a0), int(a1), int(a2)]
b0, b1, b2 = input().strip().split(' ')
b0, b1, b2 = [int(b0), int(b1), int(b2)]
result = solve(a0, a1, a2, b0, b1, b2)
print (" ".join(map(str, result)))<|fim▁end|>
|
def solve(a0, a1, a2, b0, b1, b2):
score = [0, 0]
|
<|file_name|>PageUnderConstruction.js<|end_file_name|><|fim▁begin|>import React from 'react';<|fim▁hole|>export default () => {return <h1>Page Under Construction</h1>}<|fim▁end|>
| |
<|file_name|>vendor_only_svd.py<|end_file_name|><|fim▁begin|># Put libraries such as Divisi in the PYTHONPATH.
import sys, pickle, os
sys.path = ['/stuff/openmind'] + sys.path
from csc.divisi.cnet import *
from csc.divisi.graphics import output_svg
from vendor_db import iter_info
from csamoa.corpus.models import *
from csamoa.conceptnet.models import *
# Load the OMCS language model
en = Language.get('en')
en_nl=get_nl('en')
# Load OMCS stopwords
sw = open('stopwords.txt', 'r')
swords = [x.strip() for x in sw.readlines()]
# Parameters
factor = 1
wsize = 2
def check_concept(concept):
try:
Concept.get(concept, 'en')
return True
except:<|fim▁hole|>def english_window(text):
windows = []
words = [x for x in text.lower().replace('&', 'and').split() if x not in swords]
for x in range(len(words)-wsize+1):
pair = " ".join(words[x:x+wsize])
if check_concept(pair): windows.append(pair)
if check_concept(words[x]): windows.append(words[x])
for c in range(wsize-1):
if check_concept(words[c]): windows.append(words[c])
return windows
if 'vendor_only.pickle' in os.listdir('.'):
print "Loading saved matrix."
matrix = pickle.load(open("vendor_only.pickle"))
else:
print "Creating New Tensor"
matrix = SparseLabeledTensor(ndim=2)
print "Adding Vendors"
for co, englist in iter_info('CFB_Cities'):
print co
for phrase in englist:
parts = english_window(phrase)
print parts
for part in parts:
matrix[co, ('sells', part)] += factor
matrix[part, ('sells_inv', co)] += factor
pickle.dump(matrix, open("vendor_only.pickle", 'w'))
print "Normalizing."
matrix = matrix.normalized()
print "Matrix constructed. Running SVD."
svd = matrix.svd(k=10)
svd.summarize()
output_svg(svd.u, "vendorplain.svg", xscale=3000, yscale=3000, min=0.03)<|fim▁end|>
|
return False
|
<|file_name|>modules.js<|end_file_name|><|fim▁begin|>/*Generated by KISSY Module Compiler*/
config({<|fim▁hole|><|fim▁end|>
|
'editor/plugin/unordered-list': {requires: ['editor','editor/plugin/list-utils/btn']}
});
|
<|file_name|>cache_test.rs<|end_file_name|><|fim▁begin|>use super::*;
use envmnt;
use std::env;
use std::path::PathBuf;
#[test]
fn load_from_path_exists() {
let cwd = env::current_dir().unwrap();
let path = cwd.join("examples/cargo-make");
let cache_data = load_from_path(path);
assert_eq!(cache_data.last_update_check.unwrap(), 1000u64);
}
#[test]
fn load_from_path_not_exists() {
let path = PathBuf::from("examples2/.cargo-make");
let cache_data = load_from_path(path);
assert!(cache_data.last_update_check.is_none());
}
#[test]
#[ignore]
fn load_with_cargo_home() {
let path = env::current_dir().unwrap();
let directory = path.join("examples/cargo-make");
envmnt::set("CARGO_MAKE_HOME", directory.to_str().unwrap());
let cache_data = load();
<|fim▁hole|> envmnt::remove("CARGO_MAKE_HOME");
assert_eq!(cache_data.last_update_check.unwrap(), 1000u64);
}
#[test]
#[ignore]
fn load_without_cargo_home() {
envmnt::remove("CARGO_MAKE_HOME");
load();
}<|fim▁end|>
| |
<|file_name|>ScheduleProvider.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.iosched.provider;
import com.google.android.apps.iosched.provider.ScheduleContract.Blocks;
import com.google.android.apps.iosched.provider.ScheduleContract.Notes;
import com.google.android.apps.iosched.provider.ScheduleContract.Rooms;
import com.google.android.apps.iosched.provider.ScheduleContract.SearchSuggest;
import com.google.android.apps.iosched.provider.ScheduleContract.Sessions;
import com.google.android.apps.iosched.provider.ScheduleContract.Speakers;
import com.google.android.apps.iosched.provider.ScheduleContract.Tracks;
import com.google.android.apps.iosched.provider.ScheduleContract.Vendors;
import com.google.android.apps.iosched.provider.ScheduleDatabase.SessionsSearchColumns;
import com.google.android.apps.iosched.provider.ScheduleDatabase.SessionsSpeakers;
import com.google.android.apps.iosched.provider.ScheduleDatabase.SessionsTracks;
import com.google.android.apps.iosched.provider.ScheduleDatabase.Tables;
import com.google.android.apps.iosched.provider.ScheduleDatabase.VendorsSearchColumns;
import com.google.android.apps.iosched.util.NotesExporter;
import com.google.android.apps.iosched.util.SelectionBuilder;
import android.app.Activity;
import android.app.SearchManager;
import android.content.ContentProvider;
import android.content.ContentProviderOperation;
import android.content.ContentProviderResult;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.OperationApplicationException;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.MatrixCursor;
import android.database.sqlite.SQLiteDatabase;
import android.net.Uri;
import android.os.ParcelFileDescriptor;
import android.provider.BaseColumns;
import android.provider.OpenableColumns;
import android.util.Log;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import net.bespokesystems.android.apps.wicsa2011.de.service.SyncService;
/**
* Provider that stores {@link ScheduleContract} data. Data is usually inserted
* by {@link SyncService}, and queried by various {@link Activity} instances.
*/
public class ScheduleProvider extends ContentProvider {
    private static final String TAG = "ScheduleProvider";
    private static final boolean LOGV = Log.isLoggable(TAG, Log.VERBOSE);
    /** Helper that owns the underlying SQLite database; created in {@link #onCreate()}. */
    private ScheduleDatabase mOpenHelper;
    /** Routes incoming {@link Uri}s to the integer codes declared below. */
    private static final UriMatcher sUriMatcher = buildUriMatcher();
    // URI codes returned by sUriMatcher. The hundreds digit groups codes by
    // entity: 1xx blocks, 2xx tracks, 3xx rooms, 4xx sessions, 5xx speakers,
    // 6xx vendors, 7xx notes, 8xx search suggestions.
    private static final int BLOCKS = 100;
    private static final int BLOCKS_BETWEEN = 101;
    private static final int BLOCKS_ID = 102;
    private static final int BLOCKS_ID_SESSIONS = 103;
    private static final int TRACKS = 200;
    private static final int TRACKS_ID = 201;
    private static final int TRACKS_ID_SESSIONS = 202;
    private static final int TRACKS_ID_VENDORS = 203;
    private static final int ROOMS = 300;
    private static final int ROOMS_ID = 301;
    private static final int ROOMS_ID_SESSIONS = 302;
    private static final int SESSIONS = 400;
    private static final int SESSIONS_STARRED = 401;
    private static final int SESSIONS_SEARCH = 402;
    private static final int SESSIONS_AT = 403;
    private static final int SESSIONS_ID = 404;
    private static final int SESSIONS_ID_SPEAKERS = 405;
    private static final int SESSIONS_ID_TRACKS = 406;
    private static final int SESSIONS_ID_NOTES = 407;
    private static final int SPEAKERS = 500;
    private static final int SPEAKERS_ID = 501;
    private static final int SPEAKERS_ID_SESSIONS = 502;
    private static final int VENDORS = 600;
    private static final int VENDORS_STARRED = 601;
    private static final int VENDORS_SEARCH = 603;
    private static final int VENDORS_ID = 604;
    private static final int NOTES = 700;
    private static final int NOTES_EXPORT = 701;
    private static final int NOTES_ID = 702;
    private static final int SEARCH_SUGGEST = 800;
    /** MIME type served for the exported-notes pseudo-file (see {@link #openFile}). */
    private static final String MIME_XML = "text/xml";
    /**
     * Build and return a {@link UriMatcher} that catches all {@link Uri}
     * variations supported by this {@link ContentProvider}.
     * <p>
     * NOTE: registration order matters — literal segments such as "between",
     * "starred", "search", "at" and "export" must be registered before the
     * wildcard "*" patterns that would otherwise swallow them.
     */
    private static UriMatcher buildUriMatcher() {
        final UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH);
        final String authority = ScheduleContract.CONTENT_AUTHORITY;
        matcher.addURI(authority, "blocks", BLOCKS);
        matcher.addURI(authority, "blocks/between/*/*", BLOCKS_BETWEEN);
        matcher.addURI(authority, "blocks/*", BLOCKS_ID);
        matcher.addURI(authority, "blocks/*/sessions", BLOCKS_ID_SESSIONS);
        matcher.addURI(authority, "tracks", TRACKS);
        matcher.addURI(authority, "tracks/*", TRACKS_ID);
        matcher.addURI(authority, "tracks/*/sessions", TRACKS_ID_SESSIONS);
        matcher.addURI(authority, "tracks/*/vendors", TRACKS_ID_VENDORS);
        matcher.addURI(authority, "rooms", ROOMS);
        matcher.addURI(authority, "rooms/*", ROOMS_ID);
        matcher.addURI(authority, "rooms/*/sessions", ROOMS_ID_SESSIONS);
        matcher.addURI(authority, "sessions", SESSIONS);
        matcher.addURI(authority, "sessions/starred", SESSIONS_STARRED);
        matcher.addURI(authority, "sessions/search/*", SESSIONS_SEARCH);
        matcher.addURI(authority, "sessions/at/*", SESSIONS_AT);
        matcher.addURI(authority, "sessions/*", SESSIONS_ID);
        matcher.addURI(authority, "sessions/*/speakers", SESSIONS_ID_SPEAKERS);
        matcher.addURI(authority, "sessions/*/tracks", SESSIONS_ID_TRACKS);
        matcher.addURI(authority, "sessions/*/notes", SESSIONS_ID_NOTES);
        matcher.addURI(authority, "speakers", SPEAKERS);
        matcher.addURI(authority, "speakers/*", SPEAKERS_ID);
        matcher.addURI(authority, "speakers/*/sessions", SPEAKERS_ID_SESSIONS);
        matcher.addURI(authority, "vendors", VENDORS);
        matcher.addURI(authority, "vendors/starred", VENDORS_STARRED);
        matcher.addURI(authority, "vendors/search/*", VENDORS_SEARCH);
        matcher.addURI(authority, "vendors/*", VENDORS_ID);
        matcher.addURI(authority, "notes", NOTES);
        matcher.addURI(authority, "notes/export", NOTES_EXPORT);
        matcher.addURI(authority, "notes/*", NOTES_ID);
        matcher.addURI(authority, "search_suggest_query", SEARCH_SUGGEST);
        return matcher;
    }
    @Override
    public boolean onCreate() {
        // Only construct the helper here; the database itself is opened
        // lazily on first getReadableDatabase()/getWritableDatabase() call.
        final Context context = getContext();
        mOpenHelper = new ScheduleDatabase(context);
        return true;
    }
    /**
     * Return the MIME type for the given {@link Uri}: collection URIs map to
     * the entity's {@code CONTENT_TYPE}, single-item URIs to
     * {@code CONTENT_ITEM_TYPE}. {@link #NOTES_EXPORT} is special-cased as a
     * plain XML file. SEARCH_SUGGEST is intentionally absent (handled by the
     * search framework).
     */
    @Override
    public String getType(Uri uri) {
        final int match = sUriMatcher.match(uri);
        switch (match) {
            case BLOCKS:
                return Blocks.CONTENT_TYPE;
            case BLOCKS_BETWEEN:
                return Blocks.CONTENT_TYPE;
            case BLOCKS_ID:
                return Blocks.CONTENT_ITEM_TYPE;
            case BLOCKS_ID_SESSIONS:
                return Sessions.CONTENT_TYPE;
            case TRACKS:
                return Tracks.CONTENT_TYPE;
            case TRACKS_ID:
                return Tracks.CONTENT_ITEM_TYPE;
            case TRACKS_ID_SESSIONS:
                return Sessions.CONTENT_TYPE;
            case TRACKS_ID_VENDORS:
                return Vendors.CONTENT_TYPE;
            case ROOMS:
                return Rooms.CONTENT_TYPE;
            case ROOMS_ID:
                return Rooms.CONTENT_ITEM_TYPE;
            case ROOMS_ID_SESSIONS:
                return Sessions.CONTENT_TYPE;
            case SESSIONS:
                return Sessions.CONTENT_TYPE;
            case SESSIONS_STARRED:
                return Sessions.CONTENT_TYPE;
            case SESSIONS_SEARCH:
                return Sessions.CONTENT_TYPE;
            case SESSIONS_AT:
                return Sessions.CONTENT_TYPE;
            case SESSIONS_ID:
                return Sessions.CONTENT_ITEM_TYPE;
            case SESSIONS_ID_SPEAKERS:
                return Speakers.CONTENT_TYPE;
            case SESSIONS_ID_TRACKS:
                return Tracks.CONTENT_TYPE;
            case SESSIONS_ID_NOTES:
                return Notes.CONTENT_TYPE;
            case SPEAKERS:
                return Speakers.CONTENT_TYPE;
            case SPEAKERS_ID:
                return Speakers.CONTENT_ITEM_TYPE;
            case SPEAKERS_ID_SESSIONS:
                return Sessions.CONTENT_TYPE;
            case VENDORS:
                return Vendors.CONTENT_TYPE;
            case VENDORS_STARRED:
                return Vendors.CONTENT_TYPE;
            case VENDORS_SEARCH:
                return Vendors.CONTENT_TYPE;
            case VENDORS_ID:
                return Vendors.CONTENT_ITEM_TYPE;
            case NOTES:
                return Notes.CONTENT_TYPE;
            case NOTES_EXPORT:
                return MIME_XML;
            case NOTES_ID:
                return Notes.CONTENT_ITEM_TYPE;
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
    }
    /**
     * Query against the requested {@link Uri}. Most URIs are served through
     * {@link #buildExpandedSelection(Uri, int)}; the notes-export pseudo-file
     * and search suggestions are handled specially.
     */
    @Override
    public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs,
            String sortOrder) {
        if (LOGV) Log.v(TAG, "query(uri=" + uri + ", proj=" + Arrays.toString(projection) + ")");
        final SQLiteDatabase db = mOpenHelper.getReadableDatabase();
        final int match = sUriMatcher.match(uri);
        switch (match) {
            default: {
                // Most cases are handled with simple SelectionBuilder
                final SelectionBuilder builder = buildExpandedSelection(uri, match);
                return builder.where(selection, selectionArgs).query(db, projection, sortOrder);
            }
            case NOTES_EXPORT: {
                // Provide query values for file attachments
                // Size is null because the file isn't generated until openFile().
                final String[] columns = { OpenableColumns.DISPLAY_NAME, OpenableColumns.SIZE };
                final MatrixCursor cursor = new MatrixCursor(columns, 1);
                cursor.addRow(new String[] { "notes.xml", null });
                return cursor;
            }
            case SEARCH_SUGGEST: {
                final SelectionBuilder builder = new SelectionBuilder();
                // Adjust incoming query to become SQL text match
                selectionArgs[0] = selectionArgs[0] + "%";
                builder.table(Tables.SEARCH_SUGGEST);
                builder.where(selection, selectionArgs);
                builder.map(SearchManager.SUGGEST_COLUMN_QUERY,
                        SearchManager.SUGGEST_COLUMN_TEXT_1);
                projection = new String[] { BaseColumns._ID, SearchManager.SUGGEST_COLUMN_TEXT_1,
                        SearchManager.SUGGEST_COLUMN_QUERY };
                final String limit = uri.getQueryParameter(SearchManager.SUGGEST_PARAMETER_LIMIT);
                return builder.query(db, projection, null, null, SearchSuggest.DEFAULT_SORT, limit);
            }
        }
    }
    /**
     * Insert {@link ContentValues} under the requested {@link Uri} and return
     * the {@link Uri} of the new row. Most entities carry their own string
     * identifier inside the values; notes use the auto-generated row id.
     */
    @Override
    public Uri insert(Uri uri, ContentValues values) {
        if (LOGV) Log.v(TAG, "insert(uri=" + uri + ", values=" + values.toString() + ")");
        final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
        final int match = sUriMatcher.match(uri);
        switch (match) {
            case BLOCKS: {
                db.insertOrThrow(Tables.BLOCKS, null, values);
                return Blocks.buildBlockUri(values.getAsString(Blocks.BLOCK_ID));
            }
            case TRACKS: {
                db.insertOrThrow(Tables.TRACKS, null, values);
                return Tracks.buildTrackUri(values.getAsString(Tracks.TRACK_ID));
            }
            case ROOMS: {
                db.insertOrThrow(Tables.ROOMS, null, values);
                return Rooms.buildRoomUri(values.getAsString(Rooms.ROOM_ID));
            }
            case SESSIONS: {
                db.insertOrThrow(Tables.SESSIONS, null, values);
                return Sessions.buildSessionUri(values.getAsString(Sessions.SESSION_ID));
            }
            case SESSIONS_ID_SPEAKERS: {
                db.insertOrThrow(Tables.SESSIONS_SPEAKERS, null, values);
                return Speakers.buildSpeakerUri(values.getAsString(SessionsSpeakers.SPEAKER_ID));
            }
            case SESSIONS_ID_TRACKS: {
                db.insertOrThrow(Tables.SESSIONS_TRACKS, null, values);
                return Tracks.buildTrackUri(values.getAsString(SessionsTracks.TRACK_ID));
            }
            case SESSIONS_ID_NOTES: {
                // Owning session comes from the URI path, not from the values.
                final String sessionId = Sessions.getSessionId(uri);
                values.put(Notes.SESSION_ID, sessionId);
                final long noteId = db.insertOrThrow(Tables.NOTES, null, values);
                return ContentUris.withAppendedId(Notes.CONTENT_URI, noteId);
            }
            case SPEAKERS: {
                db.insertOrThrow(Tables.SPEAKERS, null, values);
                return Speakers.buildSpeakerUri(values.getAsString(Speakers.SPEAKER_ID));
            }
            case VENDORS: {
                db.insertOrThrow(Tables.VENDORS, null, values);
                return Vendors.buildVendorUri(values.getAsString(Vendors.VENDOR_ID));
            }
            case NOTES: {
                final long noteId = db.insertOrThrow(Tables.NOTES, null, values);
                return ContentUris.withAppendedId(Notes.CONTENT_URI, noteId);
            }
            case SEARCH_SUGGEST: {
                db.insertOrThrow(Tables.SEARCH_SUGGEST, null, values);
                return SearchSuggest.CONTENT_URI;
            }
            default: {
                throw new UnsupportedOperationException("Unknown uri: " + uri);
            }
        }
    }
    /**
     * Update rows matched by the {@link Uri} (plus any extra selection) and
     * return the number of rows affected.
     */
    @Override
    public int update(Uri uri, ContentValues values, String selection, String[] selectionArgs) {
        if (LOGV) Log.v(TAG, "update(uri=" + uri + ", values=" + values.toString() + ")");
        final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
        final SelectionBuilder builder = buildSimpleSelection(uri);
        return builder.where(selection, selectionArgs).update(db, values);
    }
    /**
     * Delete rows matched by the {@link Uri} (plus any extra selection) and
     * return the number of rows removed.
     */
    @Override
    public int delete(Uri uri, String selection, String[] selectionArgs) {
        if (LOGV) Log.v(TAG, "delete(uri=" + uri + ")");
        final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
        final SelectionBuilder builder = buildSimpleSelection(uri);
        return builder.where(selection, selectionArgs).delete(db);
    }
    /**
     * Apply the given set of {@link ContentProviderOperation}, executing inside
     * a {@link SQLiteDatabase} transaction. All changes will be rolled back if
     * any single one fails.
     */
    @Override
    public ContentProviderResult[] applyBatch(ArrayList<ContentProviderOperation> operations)
            throws OperationApplicationException {
        final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
        db.beginTransaction();
        try {
            final int numOperations = operations.size();
            final ContentProviderResult[] results = new ContentProviderResult[numOperations];
            for (int i = 0; i < numOperations; i++) {
                // Earlier results are passed along so operations can use
                // back-references to rows created earlier in the batch.
                results[i] = operations.get(i).apply(this, results, i);
            }
            db.setTransactionSuccessful();
            return results;
        } finally {
            db.endTransaction();
        }
    }
    /**
     * Build a simple {@link SelectionBuilder} to match the requested
     * {@link Uri}. This is usually enough to support {@link #insert},
     * {@link #update}, and {@link #delete} operations.
     * <p>
     * Note: every entity is addressed by its string identifier except notes,
     * which are addressed by their numeric row id ({@code _ID}).
     */
    private SelectionBuilder buildSimpleSelection(Uri uri) {
        final SelectionBuilder builder = new SelectionBuilder();
        final int match = sUriMatcher.match(uri);
        switch (match) {
            case BLOCKS: {
                return builder.table(Tables.BLOCKS);
            }
            case BLOCKS_ID: {
                final String blockId = Blocks.getBlockId(uri);
                return builder.table(Tables.BLOCKS)
                        .where(Blocks.BLOCK_ID + "=?", blockId);
            }
            case TRACKS: {
                return builder.table(Tables.TRACKS);
            }
            case TRACKS_ID: {
                final String trackId = Tracks.getTrackId(uri);
                return builder.table(Tables.TRACKS)
                        .where(Tracks.TRACK_ID + "=?", trackId);
            }
            case ROOMS: {
                return builder.table(Tables.ROOMS);
            }
            case ROOMS_ID: {
                final String roomId = Rooms.getRoomId(uri);
                return builder.table(Tables.ROOMS)
                        .where(Rooms.ROOM_ID + "=?", roomId);
            }
            case SESSIONS: {
                return builder.table(Tables.SESSIONS);
            }
            case SESSIONS_ID: {
                final String sessionId = Sessions.getSessionId(uri);
                return builder.table(Tables.SESSIONS)
                        .where(Sessions.SESSION_ID + "=?", sessionId);
            }
            case SESSIONS_ID_SPEAKERS: {
                final String sessionId = Sessions.getSessionId(uri);
                return builder.table(Tables.SESSIONS_SPEAKERS)
                        .where(Sessions.SESSION_ID + "=?", sessionId);
            }
            case SESSIONS_ID_TRACKS: {
                final String sessionId = Sessions.getSessionId(uri);
                return builder.table(Tables.SESSIONS_TRACKS)
                        .where(Sessions.SESSION_ID + "=?", sessionId);
            }
            case SPEAKERS: {
                return builder.table(Tables.SPEAKERS);
            }
            case SPEAKERS_ID: {
                final String speakerId = Speakers.getSpeakerId(uri);
                return builder.table(Tables.SPEAKERS)
                        .where(Speakers.SPEAKER_ID + "=?", speakerId);
            }
            case VENDORS: {
                return builder.table(Tables.VENDORS);
            }
            case VENDORS_ID: {
                final String vendorId = Vendors.getVendorId(uri);
                return builder.table(Tables.VENDORS)
                        .where(Vendors.VENDOR_ID + "=?", vendorId);
            }
            case NOTES: {
                return builder.table(Tables.NOTES);
            }
            case NOTES_ID: {
                // Notes are addressed by numeric row id, taken straight from
                // the URI path.
                final String noteId = uri.getPathSegments().get(1);
                return builder.table(Tables.NOTES)
                        .where(Notes._ID + "=?", noteId);
            }
            case SEARCH_SUGGEST: {
                return builder.table(Tables.SEARCH_SUGGEST);
            }
            default: {
                throw new UnsupportedOperationException("Unknown uri: " + uri);
            }
        }
    }
/**
* Build an advanced {@link SelectionBuilder} to match the requested
* {@link Uri}. This is usually only used by {@link #query}, since it
* performs table joins useful for {@link Cursor} data.
*/
private SelectionBuilder buildExpandedSelection(Uri uri, int match) {
final SelectionBuilder builder = new SelectionBuilder();
switch (match) {
case BLOCKS: {
return builder.table(Tables.BLOCKS);
}
case BLOCKS_BETWEEN: {
final List<String> segments = uri.getPathSegments();
final String startTime = segments.get(2);
final String endTime = segments.get(3);
return builder.table(Tables.BLOCKS)
.map(Blocks.SESSIONS_COUNT, Subquery.BLOCK_SESSIONS_COUNT)
.map(Blocks.CONTAINS_STARRED, Subquery.BLOCK_CONTAINS_STARRED)
.where(Blocks.BLOCK_START + ">=?", startTime)
.where(Blocks.BLOCK_START + "<=?", endTime);
}
case BLOCKS_ID: {
final String blockId = Blocks.getBlockId(uri);
return builder.table(Tables.BLOCKS)
.map(Blocks.SESSIONS_COUNT, Subquery.BLOCK_SESSIONS_COUNT)
.map(Blocks.CONTAINS_STARRED, Subquery.BLOCK_CONTAINS_STARRED)
.where(Blocks.BLOCK_ID + "=?", blockId);
}
case BLOCKS_ID_SESSIONS: {
final String blockId = Blocks.getBlockId(uri);
return builder.table(Tables.SESSIONS_JOIN_BLOCKS_ROOMS)
.map(Blocks.SESSIONS_COUNT, Subquery.BLOCK_SESSIONS_COUNT)
.map(Blocks.CONTAINS_STARRED, Subquery.BLOCK_CONTAINS_STARRED)
.mapToTable(Sessions._ID, Tables.SESSIONS)
.mapToTable(Sessions.SESSION_ID, Tables.SESSIONS)
.mapToTable(Sessions.BLOCK_ID, Tables.SESSIONS)
.mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
.where(Qualified.SESSIONS_BLOCK_ID + "=?", blockId);
}
case TRACKS: {
return builder.table(Tables.TRACKS)
.map(Tracks.SESSIONS_COUNT, Subquery.TRACK_SESSIONS_COUNT)
.map(Tracks.VENDORS_COUNT, Subquery.TRACK_VENDORS_COUNT);
}
case TRACKS_ID: {
final String trackId = Tracks.getTrackId(uri);
return builder.table(Tables.TRACKS)
.where(Tracks.TRACK_ID + "=?", trackId);
}
case TRACKS_ID_SESSIONS: {
final String trackId = Tracks.getTrackId(uri);
return builder.table(Tables.SESSIONS_TRACKS_JOIN_SESSIONS_BLOCKS_ROOMS)
.mapToTable(Sessions._ID, Tables.SESSIONS)
.mapToTable(Sessions.SESSION_ID, Tables.SESSIONS)
.mapToTable(Sessions.BLOCK_ID, Tables.SESSIONS)
.mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
.where(Qualified.SESSIONS_TRACKS_TRACK_ID + "=?", trackId);
}
case TRACKS_ID_VENDORS: {
final String trackId = Tracks.getTrackId(uri);
return builder.table(Tables.VENDORS_JOIN_TRACKS)
.mapToTable(Vendors._ID, Tables.VENDORS)
.mapToTable(Vendors.TRACK_ID, Tables.VENDORS)
.where(Qualified.VENDORS_TRACK_ID + "=?", trackId);
}
case ROOMS: {
return builder.table(Tables.ROOMS);
}
case ROOMS_ID: {
final String roomId = Rooms.getRoomId(uri);
return builder.table(Tables.ROOMS)
.where(Rooms.ROOM_ID + "=?", roomId);
}
case ROOMS_ID_SESSIONS: {
final String roomId = Rooms.getRoomId(uri);
return builder.table(Tables.SESSIONS_JOIN_BLOCKS_ROOMS)
.mapToTable(Sessions._ID, Tables.SESSIONS)
.mapToTable(Sessions.BLOCK_ID, Tables.SESSIONS)
.mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
.where(Qualified.SESSIONS_ROOM_ID + "=?", roomId);
}
case SESSIONS: {
return builder.table(Tables.SESSIONS_JOIN_BLOCKS_ROOMS)
.mapToTable(Sessions._ID, Tables.SESSIONS)
.mapToTable(Sessions.BLOCK_ID, Tables.SESSIONS)
.mapToTable(Sessions.ROOM_ID, Tables.SESSIONS);
}
case SESSIONS_STARRED: {
return builder.table(Tables.SESSIONS_JOIN_BLOCKS_ROOMS)
.mapToTable(Sessions._ID, Tables.SESSIONS)
.mapToTable(Sessions.BLOCK_ID, Tables.SESSIONS)
.mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
.where(Sessions.STARRED + "=1");
}
case SESSIONS_SEARCH: {
final String query = Sessions.getSearchQuery(uri);
return builder.table(Tables.SESSIONS_SEARCH_JOIN_SESSIONS_BLOCKS_ROOMS)
.map(Sessions.SEARCH_SNIPPET, Subquery.SESSIONS_SNIPPET)
.mapToTable(Sessions._ID, Tables.SESSIONS)
.mapToTable(Sessions.SESSION_ID, Tables.SESSIONS)
.mapToTable(Sessions.BLOCK_ID, Tables.SESSIONS)
.mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
.where(SessionsSearchColumns.BODY + " MATCH ?", query);
}
case SESSIONS_AT: {
final List<String> segments = uri.getPathSegments();
final String time = segments.get(2);
return builder.table(Tables.SESSIONS_JOIN_BLOCKS_ROOMS)
.mapToTable(Sessions._ID, Tables.SESSIONS)
.mapToTable(Sessions.BLOCK_ID, Tables.SESSIONS)
.mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
.where(Sessions.BLOCK_START + "<=?", time)
.where(Sessions.BLOCK_END + ">=?", time);
}
case SESSIONS_ID: {
final String sessionId = Sessions.getSessionId(uri);
return builder.table(Tables.SESSIONS_JOIN_BLOCKS_ROOMS)
.mapToTable(Sessions._ID, Tables.SESSIONS)
.mapToTable(Sessions.BLOCK_ID, Tables.SESSIONS)
.mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
.where(Qualified.SESSIONS_SESSION_ID + "=?", sessionId);
}
case SESSIONS_ID_SPEAKERS: {
final String sessionId = Sessions.getSessionId(uri);
return builder.table(Tables.SESSIONS_SPEAKERS_JOIN_SPEAKERS)
.mapToTable(Speakers._ID, Tables.SPEAKERS)
.mapToTable(Speakers.SPEAKER_ID, Tables.SPEAKERS)
.where(Qualified.SESSIONS_SPEAKERS_SESSION_ID + "=?", sessionId);
}
case SESSIONS_ID_TRACKS: {
final String sessionId = Sessions.getSessionId(uri);
return builder.table(Tables.SESSIONS_TRACKS_JOIN_TRACKS)
.mapToTable(Tracks._ID, Tables.TRACKS)
.mapToTable(Tracks.TRACK_ID, Tables.TRACKS)
.where(Qualified.SESSIONS_TRACKS_SESSION_ID + "=?", sessionId);
}
case SESSIONS_ID_NOTES: {
final String sessionId = Sessions.getSessionId(uri);
return builder.table(Tables.NOTES)
.where(Notes.SESSION_ID + "=?", sessionId);
}
case SPEAKERS: {
return builder.table(Tables.SPEAKERS);
}
case SPEAKERS_ID: {
final String speakerId = Speakers.getSpeakerId(uri);
return builder.table(Tables.SPEAKERS)
.where(Speakers.SPEAKER_ID + "=?", speakerId);
}
case SPEAKERS_ID_SESSIONS: {
final String speakerId = Speakers.getSpeakerId(uri);
return builder.table(Tables.SESSIONS_SPEAKERS_JOIN_SESSIONS_BLOCKS_ROOMS)
.mapToTable(Sessions._ID, Tables.SESSIONS)
.mapToTable(Sessions.SESSION_ID, Tables.SESSIONS)
.mapToTable(Sessions.BLOCK_ID, Tables.SESSIONS)
.mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
.where(Qualified.SESSIONS_SPEAKERS_SPEAKER_ID + "=?", speakerId);
}
case VENDORS: {
return builder.table(Tables.VENDORS_JOIN_TRACKS)
.mapToTable(Vendors._ID, Tables.VENDORS)
.mapToTable(Vendors.TRACK_ID, Tables.VENDORS);
}
case VENDORS_STARRED: {
return builder.table(Tables.VENDORS_JOIN_TRACKS)
.mapToTable(Vendors._ID, Tables.VENDORS)
.mapToTable(Vendors.TRACK_ID, Tables.VENDORS)
.where(Vendors.STARRED + "=1");
}<|fim▁hole|> .map(Vendors.SEARCH_SNIPPET, Subquery.VENDORS_SNIPPET)
.mapToTable(Vendors._ID, Tables.VENDORS)
.mapToTable(Vendors.VENDOR_ID, Tables.VENDORS)
.mapToTable(Vendors.TRACK_ID, Tables.VENDORS)
.where(VendorsSearchColumns.BODY + " MATCH ?", query);
}
case VENDORS_ID: {
final String vendorId = Vendors.getVendorId(uri);
return builder.table(Tables.VENDORS_JOIN_TRACKS)
.mapToTable(Vendors._ID, Tables.VENDORS)
.mapToTable(Vendors.TRACK_ID, Tables.VENDORS)
.where(Vendors.VENDOR_ID + "=?", vendorId);
}
case NOTES: {
return builder.table(Tables.NOTES);
}
case NOTES_ID: {
final long noteId = Notes.getNoteId(uri);
return builder.table(Tables.NOTES)
.where(Notes._ID + "=?", Long.toString(noteId));
}
default: {
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
}
}
    /**
     * Open a file descriptor for the requested {@link Uri}. Only
     * {@link #NOTES_EXPORT} is supported: the notes XML file is regenerated on
     * every call and served read-only.
     */
    @Override
    public ParcelFileDescriptor openFile(Uri uri, String mode) throws FileNotFoundException {
        final int match = sUriMatcher.match(uri);
        switch (match) {
            case NOTES_EXPORT: {
                try {
                    final File notesFile = NotesExporter.writeExportedNotes(getContext());
                    return ParcelFileDescriptor
                            .open(notesFile, ParcelFileDescriptor.MODE_READ_ONLY);
                } catch (IOException e) {
                    // openFile's contract only allows FileNotFoundException,
                    // so wrap the underlying I/O failure.
                    throw new FileNotFoundException("Unable to export notes: " + e.toString());
                }
            }
            default: {
                throw new UnsupportedOperationException("Unknown uri: " + uri);
            }
        }
    }
    /**
     * Raw SQL fragments used by {@link #buildExpandedSelection(Uri, int)} to
     * derive computed columns (counts, starred flags, FTS snippets).
     */
    private interface Subquery {
        // Number of sessions scheduled within a given block.
        String BLOCK_SESSIONS_COUNT = "(SELECT COUNT(" + Qualified.SESSIONS_SESSION_ID + ") FROM "
                + Tables.SESSIONS + " WHERE " + Qualified.SESSIONS_BLOCK_ID + "="
                + Qualified.BLOCKS_BLOCK_ID + ")";
        // MAX(starred) over a block's sessions: 1 when any session is starred.
        String BLOCK_CONTAINS_STARRED = "(SELECT MAX(" + Qualified.SESSIONS_STARRED + ") FROM "
                + Tables.SESSIONS + " WHERE " + Qualified.SESSIONS_BLOCK_ID + "="
                + Qualified.BLOCKS_BLOCK_ID + ")";
        String TRACK_SESSIONS_COUNT = "(SELECT COUNT(" + Qualified.SESSIONS_TRACKS_SESSION_ID
                + ") FROM " + Tables.SESSIONS_TRACKS + " WHERE "
                + Qualified.SESSIONS_TRACKS_TRACK_ID + "=" + Qualified.TRACKS_TRACK_ID + ")";
        String TRACK_VENDORS_COUNT = "(SELECT COUNT(" + Qualified.VENDORS_VENDOR_ID + ") FROM "
                + Tables.VENDORS + " WHERE " + Qualified.VENDORS_TRACK_ID + "="
                + Qualified.TRACKS_TRACK_ID + ")";
        // FTS snippet() marks matches with '{'..'}' and elides with "…" (U+2026).
        String SESSIONS_SNIPPET = "snippet(" + Tables.SESSIONS_SEARCH + ",'{','}','\u2026')";
        String VENDORS_SNIPPET = "snippet(" + Tables.VENDORS_SEARCH + ",'{','}','\u2026')";
    }
    /**
     * {@link ScheduleContract} fields that are fully qualified with a specific
     * parent {@link Tables}. Used when needed to work around SQL ambiguity.
     */
    private interface Qualified {
        String SESSIONS_SESSION_ID = Tables.SESSIONS + "." + Sessions.SESSION_ID;
        String SESSIONS_BLOCK_ID = Tables.SESSIONS + "." + Sessions.BLOCK_ID;
        String SESSIONS_ROOM_ID = Tables.SESSIONS + "." + Sessions.ROOM_ID;
        String SESSIONS_TRACKS_SESSION_ID = Tables.SESSIONS_TRACKS + "."
                + SessionsTracks.SESSION_ID;
        String SESSIONS_TRACKS_TRACK_ID = Tables.SESSIONS_TRACKS + "."
                + SessionsTracks.TRACK_ID;
        String SESSIONS_SPEAKERS_SESSION_ID = Tables.SESSIONS_SPEAKERS + "."
                + SessionsSpeakers.SESSION_ID;
        String SESSIONS_SPEAKERS_SPEAKER_ID = Tables.SESSIONS_SPEAKERS + "."
                + SessionsSpeakers.SPEAKER_ID;
        String VENDORS_VENDOR_ID = Tables.VENDORS + "." + Vendors.VENDOR_ID;
        String VENDORS_TRACK_ID = Tables.VENDORS + "." + Vendors.TRACK_ID;
        // "hiding" is suppressed because this constant shadows Sessions.STARRED.
        @SuppressWarnings("hiding")
        String SESSIONS_STARRED = Tables.SESSIONS + "." + Sessions.STARRED;
        String TRACKS_TRACK_ID = Tables.TRACKS + "." + Tracks.TRACK_ID;
        String BLOCKS_BLOCK_ID = Tables.BLOCKS + "." + Blocks.BLOCK_ID;
    }
}<|fim▁end|>
|
case VENDORS_SEARCH: {
final String query = Vendors.getSearchQuery(uri);
return builder.table(Tables.VENDORS_SEARCH_JOIN_VENDORS_TRACKS)
|
<|file_name|>CodersStrikeBack.java<|end_file_name|><|fim▁begin|>package com.basicalgorithms.coding_games;
import java.util.HashSet;
import java.util.Objects;
import java.util.Scanner;
import java.util.Set;
/**
 * Original question: https://www.codingame.com/multiplayer/bot-programming/coders-strike-back
 *
 * Pod-racing bot: cruises toward the next checkpoint, slows down when close,
 * fires its single BOOST on the longest known leg after the first lap, and
 * rams the opponent when within range.
 *
 * NOTE: this copy had been corrupted by interleaved fill-in-the-middle
 * markers; the tail of {@code main} has been reassembled from the detached
 * fragment so the class is syntactically complete again.
 */
public class CodersStrikeBack {

    /** Longest checkpoint-to-checkpoint distance seen so far (for BOOST timing). */
    static double longestDist = Integer.MIN_VALUE;
    /** Pod position on the very first turn; used to detect lap completion. */
    static Point initialPoint = null;
    static boolean hasFinishedOneLap;
    /** Checkpoint the pod is coming from (the previous target). */
    static Point from = null;
    /** Target checkpoint observed on the previous turn. */
    static Point lastCheckpoint = null;
    static final Set<Point> visitedCheckPoints = new HashSet<>();
    /** BOOST may only be used once per race. */
    static boolean hasBoosted = false;

    public static void main(String args[]) {
        Scanner in = new Scanner(System.in);

        // game loop
        while (true) {
            int x = in.nextInt();
            int y = in.nextInt();
            int nextCheckpointX = in.nextInt(); // x position of the next check point
            int nextCheckpointY = in.nextInt(); // y position of the next check point
            int nextCheckpointDist = in.nextInt(); // distance to the next checkpoint
            int nextCheckpointAngle = in.nextInt(); // angle between your pod orientation and the direction of the next checkpoint
            int opponentX = in.nextInt();
            int opponentY = in.nextInt();

            // You have to output the target position
            // followed by the power (0 <= thrust <= 100)
            // i.e.: "x y thrust"
            final Point nextCheckpoint = new Point(nextCheckpointX, nextCheckpointY);
            final Point currentPosition = new Point(x, y);
            final Point enemyPosition = new Point(opponentX, opponentY);

            // Only start ramming once at least two checkpoints are known.
            if (visitedCheckPoints.size() > 1 && enemyInRange(currentPosition, enemyPosition)) {
                ramEnemyShip(currentPosition, enemyPosition);
            } else {
                cruise(currentPosition, nextCheckpoint, nextCheckpointAngle);
            }
            // Remember the previous checkpoint so leg length can be measured.
            if (!nextCheckpoint.equals(lastCheckpoint)) {
                from = lastCheckpoint;
            }
            lastCheckpoint = nextCheckpoint;
        }
    }

    /** Drive straight at the opponent at full thrust. */
    private static void ramEnemyShip(final Point currentPosition, final Point enemyPosition) {
        sailToDestination((enemyPosition.x), enemyPosition.y, "100");
    }

    /** True when the opponent is within ramming range (1000 units). */
    private static boolean enemyInRange(final Point currentPosition, final Point enemyPosition) {
        return getDistant(currentPosition, enemyPosition) <= 1000;
    }

    /**
     * Normal driving: full thrust when roughly facing the checkpoint, zero
     * otherwise; brake near the target; BOOST once on the longest leg after
     * the first lap when well aligned.
     */
    private static void cruise(
            final Point currentPosition,
            final Point nextCheckpoint,
            final int nextCheckpointAngle) {
        if (initialPoint == null) {
            initialPoint = currentPosition;
        }
        int thrust = isWithinAngle(nextCheckpointAngle) ? 100 : 0;
        String power = String.valueOf(thrust);
        visitedCheckPoints.add(nextCheckpoint);
        System.err.println(
                "Checkpoint added:" + " nextCheckpointX=" + nextCheckpoint.x + ", nextCheckpointY=" + nextCheckpoint.y);
        for (final Point visitedCheckPoint : visitedCheckPoints) {
            System.err.println("Visited checkpoint: (" + visitedCheckPoint.x + ", " + visitedCheckPoint.y + ")");
        }
        if (shouldSlowDown(currentPosition, nextCheckpoint)) {
            power = String.valueOf(35);
        }
        if (hasFinishedOneLap(nextCheckpoint) &&
            isLongestDistant(from, nextCheckpoint) &&
            isWithinSharpAngle(nextCheckpointAngle) &&
            !hasBoosted) {
            power = "BOOST";
            hasBoosted = true;
            System.err.println("Boosted!!!");
        }
        sailToDestination(nextCheckpoint.x, nextCheckpoint.y, power);
    }

    /** True when within braking distance (1000 units) of the checkpoint. */
    private static boolean shouldSlowDown(
            final Point currentPosition,
            final Point nextCheckpoint) {
        return getDistant(currentPosition, nextCheckpoint) < 1000;
    }

    /** Emit the "x y thrust" command expected by the game engine. */
    private static void sailToDestination(final int nextCheckpointX, final int nextCheckpointY, final String power) {
        System.out.println(nextCheckpointX + " " + nextCheckpointY + " " + power);
        System.err.println("Thrust:" + power);
    }

    /** Facing within +/-90 degrees of the checkpoint. */
    private static boolean isWithinAngle(final int nextCheckpointAngle) {
        return -90 < nextCheckpointAngle && nextCheckpointAngle < 90;
    }

    /** Facing within +/-15 degrees of the checkpoint (aligned enough to BOOST). */
    private static boolean isWithinSharpAngle(final int nextCheckpointAngle) {
        return -15 < nextCheckpointAngle && nextCheckpointAngle < 15;
    }

    /** Latches true once the pod comes back near (<= 600 units) its start point. */
    private static boolean hasFinishedOneLap(final Point point) {
        if (hasFinishedOneLap) {
            return true;
        }
        if (initialPoint == null) { return false; }
        hasFinishedOneLap = getDistant(initialPoint, point) <= 600;
        return hasFinishedOneLap;
    }

    /**
     * True when the leg from {@code from} to {@code endPoint} is at least as
     * long as any seen so far; updates {@link #longestDist} as a side effect.
     */
    private static boolean isLongestDistant(final Point from, final Point endPoint) {
        if (from == null) {
            return false;
        }
        System.err.println("Start Point: (" + from.x + ", " + from.y + "); End Point: ("
                           + endPoint.x + ", " + endPoint.y + ") ");
        double dist = getDistant(from, endPoint);
        System.err.println("dist=" + dist + ", longestDist=" + longestDist);
        if (dist >= longestDist) {
            longestDist = dist;
            return true;
        }
        return false;
    }

    /** Euclidean distance between two points. */
    private static double getDistant(final Point from, final Point endPoint) {
        return Math.sqrt(Math.pow(from.x - endPoint.x, 2) + Math.pow(from.y - endPoint.y, 2));
    }

    /** Immutable 2-D integer coordinate with value equality (used in the Set). */
    private static class Point {
        final int x;
        final int y;

        private Point(final int t1, final int t2) {
            this.x = t1;
            this.y = t2;
        }

        @Override
        public boolean equals(final Object o) {
            if (this == o) { return true; }
            if (!(o instanceof Point)) { return false; }
            final Point point = (Point) o;
            return x == point.x &&
                    y == point.y;
        }

        @Override
        public int hashCode() {
            return Objects.hash(x, y);
        }
    }
}
import sys

# itemmeasures2lineitems: regroup a per-word "item measures" table into one
# line of text per sentence.
#
# Reads a whitespace-separated table on stdin whose first row is a header
# containing at least the columns 'word' and 'sentid'; writes the words to
# stdout, space-separated within a sentence and newline-separated between
# sentences. (This copy had been split apart by fill-in-the-middle markers;
# the loop header is restored here.)

sentid_prev = 0      # sentid of the previous word (0 before any data row)
first_line = True    # True until the header row has been consumed
first_word = True    # True until the first word has been emitted

for line in sys.stdin:
    row = line.strip().split()
    if first_line:
        # Header row: locate the columns we need.
        word_ix = row.index('word')
        sentid_ix = row.index('sentid')
        first_line = False
    else:
        word = row[word_ix]
        sentid = row[sentid_ix]
        if first_word:
            delim = ''    # nothing before the very first word
            first_word = False
        elif sentid == sentid_prev:
            delim = ' '   # same sentence: separate words with a space
        else:
            delim = '\n'  # new sentence: start a new line
        sentid_prev = sentid
        sys.stdout.write(delim + word)
sys.stdout.write('\n')
<|file_name|>sepcomp-extern.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-bitrig
// compile-flags: -C codegen-units=3
// aux-build:sepcomp-extern-lib.rs
// Test accessing external items from multiple compilation units.
extern crate sepcomp_extern_lib;
extern {
#[allow(ctypes)]
fn foo() -> usize;
}<|fim▁hole|>fn call1() -> usize {
unsafe { foo() }
}
mod a {
pub fn call2() -> usize {
unsafe { ::foo() }
}
}
mod b {
pub fn call3() -> usize {
unsafe { ::foo() }
}
}
fn main() {
assert_eq!(call1(), 1234);
assert_eq!(a::call2(), 1234);
assert_eq!(b::call3(), 1234);
}<|fim▁end|>
| |
<|file_name|>0013_add_law_merged_into.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Law.merged_into'
db.add_column('laws_law', 'merged_into', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='duplicates', null=True, to=orm['laws.Law']), keep_default=False)
    def backwards(self, orm):
        # Deleting field 'Law.merged_into' -- the DB column carries the FK's
        # implicit `_id` suffix.
        db.delete_column('laws_law', 'merged_into_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'committees.committee': {
'Meta': {'object_name': 'Committee'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'committees'", 'symmetrical': 'False', 'to': "orm['mks.Member']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'committees.committeemeeting': {
'Meta': {'object_name': 'CommitteeMeeting'},
'committee': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['committees.Committee']"}),
'date': ('django.db.models.fields.DateField', [], {}),
'date_string': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mks_attended': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'committee_meetings'", 'symmetrical': 'False', 'to': "orm['mks.Member']"}),
'protocol_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'topics': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'votes_mentioned': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'committee_meetings'", 'blank': 'True', 'to': "orm['laws.Vote']"})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'laws.bill': {
'Meta': {'object_name': 'Bill'},
'approval_vote': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'bill_approved'", 'unique': 'True', 'null': 'True', 'to': "orm['laws.Vote']"}),
'first_committee_meetings': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'bills_first'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['committees.CommitteeMeeting']"}),
'first_vote': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'bills_first'", 'null': 'True', 'to': "orm['laws.Vote']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'law': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'bills'", 'null': 'True', 'to': "orm['laws.Law']"}),
'pre_votes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'bills_pre_votes'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['laws.Vote']"}),
'proposers': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'bills'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['mks.Member']"}),
'second_committee_meetings': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'bills_second'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['committees.CommitteeMeeting']"}),
'stage': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'stage_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1000'})
},
'laws.knessetproposal': {
'Meta': {'object_name': 'KnessetProposal'},
'bill': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'knesset_proposal'", 'unique': 'True', 'null': 'True', 'to': "orm['laws.Bill']"}),
'booklet_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'committee': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'bills'", 'null': 'True', 'to': "orm['committees.Committee']"}),
'committee_meetings': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'laws_knessetproposal_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['committees.CommitteeMeeting']"}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'knesset_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'law': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'laws_knessetproposal_related'", 'null': 'True', 'to': "orm['laws.Law']"}),
'originals': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'knesset_proposals'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['laws.PrivateProposal']"}),
'source_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'votes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'laws_knessetproposal_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['laws.Vote']"})
},
'laws.law': {
'Meta': {'object_name': 'Law'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'merged_into': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'duplicates'", 'null': 'True', 'to': "orm['laws.Law']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1000'})
},
'laws.membervotingstatistics': {
'Meta': {'object_name': 'MemberVotingStatistics'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'voting_statistics'", 'unique': 'True', 'to': "orm['mks.Member']"})
},
'laws.partyvotingstatistics': {
'Meta': {'object_name': 'PartyVotingStatistics'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'party': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'voting_statistics'", 'unique': 'True', 'to': "orm['mks.Party']"})
},
'laws.privateproposal': {
'Meta': {'object_name': 'PrivateProposal'},
'bill': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'proposals'", 'null': 'True', 'to': "orm['laws.Bill']"}),
'committee_meetings': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'laws_privateproposal_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['committees.CommitteeMeeting']"}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'joiners': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'bills_joined'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['mks.Member']"}),
'knesset_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'law': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'laws_privateproposal_related'", 'null': 'True', 'to': "orm['laws.Law']"}),
'proposal_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'proposers': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'bills_proposed'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['mks.Member']"}),
'source_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'votes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'laws_privateproposal_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['laws.Vote']"})
},
'laws.vote': {
'Meta': {'object_name': 'Vote'},
'against_party': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'controversy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'full_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_text_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'importance': ('django.db.models.fields.FloatField', [], {}),
'meeting_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'src_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'src_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'time_string': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'vote_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'votes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'votes'", 'blank': 'True', 'through': "orm['laws.VoteAction']", 'to': "orm['mks.Member']"}),
'votes_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'laws.voteaction': {
'Meta': {'object_name': 'VoteAction'},
'against_coalition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'against_opposition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'against_party': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Member']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'vote': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['laws.Vote']"})
},
'mks.member': {
'Meta': {'object_name': 'Member'},
'area_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'blog': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['planet.Blog']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'current_party': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'members'", 'null': 'True', 'to': "orm['mks.Party']"}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_of_death': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'family_status': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'img_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_children': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'parties': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'all_members'", 'symmetrical': 'False', 'through': "orm['mks.Membership']", 'to': "orm['mks.Party']"}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'place_of_birth': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'place_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'year_of_aliyah': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'mks.membership': {
'Meta': {'object_name': 'Membership'},
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Member']"}),
'party': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Party']"}),<|fim▁hole|> 'Meta': {'object_name': 'Party'},
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_coalition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_members': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'number_of_seats': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'planet.blog': {
'Meta': {'object_name': 'Blog'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'})
},
'tagging.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'tagging.taggeditem': {
'Meta': {'unique_together': "(('tag', 'content_type', 'object_id'),)", 'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['tagging.Tag']"})
}
}
complete_apps = ['laws']<|fim▁end|>
|
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'mks.party': {
|
<|file_name|>automata.rs<|end_file_name|><|fim▁begin|>use automata::{M, State, StateSet, Alphabet, Delta, to_delta};
use automata_min::minify;
use automata_operators::afndl_to_afd;
/// Build the renamed state identifier `prefix + s`, used when merging
/// automata so that state names from different machines cannot collide.
fn prefix_state(prefix: &String, s: &State) -> String {
    format!("{}{}", prefix, s)
}
/// Return a copy of `m` with every state renamed to `prefix + state`.
///
/// Applied to both operands before a union/concatenation/star construction so
/// their state names stay disjoint.
fn prefix_automata(prefix: &String, m: &M) -> M {
    let q0: State = prefix_state(&prefix, &m.q0);

    // Final states, renamed.
    let f: StateSet = m.f
        .iter()
        .cloned()
        .map(|s| prefix_state(&prefix, &s))
        .collect();

    // Full state set, renamed.
    let k: StateSet = m.k
        .iter()
        .cloned()
        .map(|s| prefix_state(&prefix, &s))
        .collect();

    // Rebuild each transition with both endpoints renamed; the input symbol
    // is kept unchanged.
    let mut delta = delta!();
    for (s, c, ns) in to_delta(&m) {
        let s = prefix_state(&prefix, &s.clone());
        let c = c.clone();
        let ns = prefix_state(&prefix, &ns.clone());

        delta.insert((s, c, ns));
    }

    M::new(k, m.alphabet.clone(), q0, f, delta)
}
/// Thompson-style union: build an automaton accepting L(m1) ∪ L(m2).
///
/// Both inputs are renamed with `prefix + "1"` / `prefix + "2"` so their
/// states cannot collide, then joined through a fresh start state `q0` and a
/// fresh final state `f0` via λ-transitions.
pub fn automata_union(m1: &M, m2: &M, prefix: String) -> M {
    let m1_prefix: String = {
        let mut p = prefix.clone();
        p.push_str("1");
        p
    };
    let m2_prefix: String = {
        let mut p = prefix.clone();
        p.push_str("2");
        p
    };

    let prefixed_m1: M = prefix_automata(&m1_prefix, &m1);
    let prefixed_m2: M = prefix_automata(&m2_prefix, &m2);

    // NOTE(review): collecting `take(1)` of the final-state set into a single
    // State assumes each operand has exactly one final state -- confirm that
    // invariant holds for all callers.
    let f1: State = prefixed_m1.f.iter().cloned().take(1).collect();
    let f2: State = prefixed_m2.f.iter().cloned().take(1).collect();

    let q0 = prefix_state(&prefix, &"q0".to_string());
    let f = prefix_state(&prefix, &"f0".to_string());

    let alphabet: Alphabet = m1.alphabet.union(&m2.alphabet).cloned().collect();
    // All states: the two fresh ones plus both renamed state sets.
    let k: StateSet = {
        let mut k = stateset!(q0.clone(), f.clone());
        k = k.union(&prefixed_m1.k)
            .cloned()
            .collect::<StateSet>()
            .union(&prefixed_m2.k)
            .cloned()
            .collect::<StateSet>();
        k
    };

    // λ-moves: q0 into both sub-automata, and each sub-final state into f.
    let mut delta: Delta = delta!((q0.clone(), 'λ', prefixed_m1.q0.clone()),
                                  (q0.clone(), 'λ', prefixed_m2.q0.clone()),
                                  (f1.clone(), 'λ', f.clone()),
                                  (f2.clone(), 'λ', f.clone()));

    // Copy every transition whose source is not an old final state (those
    // are rerouted through f above).
    for (s, a, ns) in to_delta(&prefixed_m1) {
        if prefixed_m1.f.contains(&s) {
            continue;
        };
        delta.insert((s.clone(), a.clone(), ns.clone()));
    }
    for (s, a, ns) in to_delta(&prefixed_m2) {
        if prefixed_m2.f.contains(&s) {
            continue;
        };
        delta.insert((s.clone(), a.clone(), ns.clone()));
    }

    M::new(k, alphabet, q0, stateset!(f), delta)
}
/// Despite the name, this builds the *concatenation* L(m1)·L(m2): m1's start
/// state becomes the start state, m2's final state becomes the final state,
/// and m1's final state is wired into m2's start state with a λ-move (see the
/// `regexp("ab")` caller and the unit tests).
pub fn automata_intersection(m1: &M, m2: &M, prefix: String) -> M {
    let m1_prefix: String = {
        let mut p = prefix.clone();
        p.push_str("1");
        p
    };
    let m2_prefix: String = {
        let mut p = prefix.clone();
        p.push_str("2");
        p
    };

    let prefixed_m1: M = prefix_automata(&m1_prefix, &m1);
    let prefixed_m2: M = prefix_automata(&m2_prefix, &m2);

    // NOTE(review): `take(1).collect()` assumes a single final state per
    // operand -- confirm upstream.
    let f1: State = prefixed_m1.f.iter().cloned().take(1).collect();
    let f2: State = prefixed_m2.f.iter().cloned().take(1).collect();

    let q0 = prefixed_m1.q0.clone();
    let f = f2;

    let k: StateSet = prefixed_m1.k.union(&prefixed_m2.k).cloned().collect();
    let alphabet: Alphabet = prefixed_m1
        .alphabet
        .union(&prefixed_m2.alphabet)
        .cloned()
        .collect();

    // Glue: end of m1 jumps into the start of m2.
    let mut delta: Delta = delta!((f1.clone(), 'λ', prefixed_m2.q0.clone()));

    // Unlike the union, all transitions are copied verbatim.
    for (s, a, ns) in to_delta(&prefixed_m1) {
        //if prefixed_m1.f.contains(&s) { continue };
        delta.insert((s.clone(), a.clone(), ns.clone()));
    }
    for (s, a, ns) in to_delta(&prefixed_m2) {
        //if prefixed_m2.f.contains(&s) { continue };
        delta.insert((s.clone(), a.clone(), ns.clone()));
    }

    M::new(k, alphabet, q0, stateset!(f), delta)
}
/// Kleene star: build an automaton accepting L(m)* via the usual Thompson
/// construction -- fresh start `q0` and final `f0`, with λ-moves allowing the
/// empty string and repetition back into m's start state.
pub fn automata_star(m: &M, prefix: String) -> M {
    let m_prefix: String = {
        let mut p = prefix.clone();
        p.push_str("1");
        p
    };
    let prefixed_m: M = prefix_automata(&m_prefix, &m);

    // NOTE(review): assumes a single final state in `m` -- confirm upstream.
    let f1: State = prefixed_m.f.iter().cloned().take(1).collect();

    let q0 = prefix_state(&prefix, &"q0".to_string());
    let f = prefix_state(&prefix, &"f0".to_string());
    let k: StateSet = prefixed_m.k.union(&stateset!(q0, f)).cloned().collect();

    // λ-moves: enter m or accept λ directly; from m's old final state either
    // loop back into m or accept.
    let mut delta: Delta = delta!((q0.clone(), 'λ', prefixed_m.q0.clone()),
                                  (q0.clone(), 'λ', f.clone()),
                                  (f1.clone(), 'λ', prefixed_m.q0.clone()),
                                  (f1.clone(), 'λ', f.clone()));

    // Copy transitions, skipping those that leave the old final state.
    for (s, a, ns) in to_delta(&prefixed_m) {
        if prefixed_m.f.contains(&s) {
            continue;
        };
        delta.insert((s.clone(), a.clone(), ns.clone()));
    }

    M::new(k, m.alphabet.clone(), q0, stateset!(f), delta)
}
//TODO:
//This is a pseudo working impl of a regexp engine
//We still need to define some context free grammar tools
//in order to parse correctly the metalanguage that defines
//the posible regexp and their structure as to be able to understand
//paranthereses and complex nesting structures.
/// Pseudo regexp compiler (see the TODO above): handles λ, any single
/// character, and the hard-coded literal patterns "a|b", "ab" and "a*".
/// Everything else falls through to a default machine with no transitions
/// (accepts nothing).  The inner blocks print intermediate automata for
/// debugging.
pub fn regexp(s: String) -> M {
    let chain_v: Vec<char> = s.chars().collect();

    // Base case λ: one state, both initial and final.
    if chain_v.len() == 0 {
        let m: M = M::new(stateset!("q0"),
                          alphabet!(),
                          "q0".to_string(),
                          stateset!("q0"),
                          delta!());
        return m;
    } else if chain_v.len() == 1 {
        // Base case single symbol c: q0 --c--> q1.
        let m: M = M::new(stateset!("q0", "q1"),
                          alphabet!(chain_v[0]),
                          "q0".to_string(),
                          stateset!("q1"),
                          delta!(("q0", chain_v[0], "q1")));
        return m;
    }

    // Hard-coded union: a|b -> union, determinise, minify.
    if s == "a|b".to_string() {
        let m1 = regexp("a".to_string());
        let m2 = regexp("b".to_string());
        let m = automata_union(&m1, &m2, "0".to_string());
        {
            use automata::print_automata;
            print_automata(&m);
        }
        let m = afndl_to_afd(&m);
        {
            use automata::print_automata;
            print_automata(&m);
        }
        return minify(&m);
    }

    // Hard-coded concatenation: ab.
    if s == "ab".to_string() {
        let m1 = regexp("a".to_string());
        let m2 = regexp("b".to_string());
        let m = automata_intersection(&m1, &m2, "0".to_string());
        {
            use automata::print_automata;
            print_automata(&m);
        }
        let m = afndl_to_afd(&m);
        {
            use automata::print_automata;
            print_automata(&m);
        }
        return minify(&m);
    }

    // Hard-coded Kleene star: a*.
    if s == "a*".to_string() {
        let m1 = regexp("a".to_string());
        let m = automata_star(&m1, "0".to_string());
        {
            use automata::print_automata;
            print_automata(&m);
        }
        let m = afndl_to_afd(&m);
        {
            use automata::print_automata;
            print_automata(&m);
        }
        return minify(&m);
    }

    // Fallback: two states, no transitions -- rejects every string.
    M::new(stateset!("q0", "q1"),
           alphabet!(),
           "q0".to_string(),
           stateset!("q1"),
           delta!())
}
/// Build the trivial automaton for a regexp of length 0 or 1.
///
/// The empty string yields a one-state machine accepting only λ; a single
/// character `c` yields the two-state machine q0 --c--> q1 accepting exactly
/// that character.  Panics if `s` is longer than one byte.
pub fn re_trivial(s: String) -> M {
    assert!(s.len() <= 1);

    match s.chars().next() {
        // λ: start state is also the sole final state, no transitions.
        None => M::new(stateset!("q0"),
                       alphabet!(),
                       "q0".to_string(),
                       stateset!("q0"),
                       delta!()),
        // Single symbol.
        Some(symbol) => M::new(stateset!("q0", "q1"),
                               alphabet!(symbol),
                               "q0".to_string(),
                               stateset!("q1"),
                               delta!(("q0", symbol, "q1"))),
    }
}
#[cfg(test)]
mod tests {
use automata::M;
use super::regexp;
#[test]
fn prefix_automata_test() {
use super::prefix_automata;
let m = M::new(stateset!("q0", "q1"),
alphabet!('a'),
"q0".to_string(),
stateset!("q1"),
delta!(("q0", 'a', "q1")));
let m_expected = M::new(stateset!("Aq0", "Aq1"),
alphabet!('a'),
"Aq0".to_string(),
stateset!("Aq1"),
delta!(("Aq0", 'a', "Aq1")));<|fim▁hole|>
assert_eq!(m_actual, m_expected)
}
#[test]
fn union_test() {
use super::automata_union;
let m1 = M::new(stateset!("q0", "q1"),
alphabet!('a'),
"q0".to_string(),
stateset!("q1"),
delta!(("q0", 'a', "q1")));
let m2 = M::new(stateset!("q0", "q1"),
alphabet!('b'),
"q0".to_string(),
stateset!("q1"),
delta!(("q0", 'b', "q1")));
let m_expected = M::new(stateset!("0q0", "0f0", "01q0", "01q1", "02q0", "02q1"),
alphabet!('a', 'b'),
"0q0".to_string(),
stateset!("0f0"),
delta!(("0q0", 'λ', "01q0"),
("0q0", 'λ', "02q0"),
("01q0", 'a', "01q1"),
("02q0", 'b', "02q1"),
("01q1", 'λ', "0f0"),
("02q1", 'λ', "0f0")));
let m = automata_union(&m1, &m2, "0".to_string());
assert_eq!(m, m_expected)
}
#[test]
fn intersection_test() {
use super::automata_intersection;
let m1 = M::new(stateset!("q0", "q1"),
alphabet!('a'),
"q0".to_string(),
stateset!("q1"),
delta!(("q0", 'a', "q1")));
let m2 = M::new(stateset!("q0", "q1"),
alphabet!('b'),
"q0".to_string(),
stateset!("q1"),
delta!(("q0", 'b', "q1")));
let m_expected = M::new(stateset!("01q0", "01q1", "02q0", "02q1"),
alphabet!('a', 'b'),
"01q0".to_string(),
stateset!("02q1"),
delta!(("01q0", 'a', "01q1"),
("01q1", 'λ', "02q0"),
("02q0", 'b', "02q1")));
let m = automata_intersection(&m1, &m2, "0".to_string());
{
use automata::print_delta;
print_delta(&m.delta);
print_delta(&m_expected.delta);
}
assert_eq!(m, m_expected)
}
#[test]
fn intersection_test_2() {
use super::automata_intersection;
let m1 = M::new(stateset!("q0"),
alphabet!('a'),
"q0".to_string(),
stateset!("q0"),
delta!(("q0", 'a', "q0")));
let m2 = M::new(stateset!("q0", "q1"),
alphabet!('b'),
"q0".to_string(),
stateset!("q1"),
delta!(("q0", 'b', "q1")));
let m_expected = M::new(stateset!("Q0", "Q1", "Q2"),
alphabet!('a', 'b'),
"Q0".to_string(),
stateset!("Q2"),
delta!(("Q0", 'a', "Q0"), ("Q0", 'λ', "Q1"), ("Q1", 'b', "Q2")));
use automata::print_automata;
use automata_min::pretify_automata;
let m = automata_intersection(&m1, &m2, "0".to_string());
let m = pretify_automata(&m);
{
println!("FUCK YOU");
let m = pretify_automata(&m);
print_automata(&m);
}
assert_eq!(m, m_expected)
}
#[test]
fn star_test() {
use super::automata_star;
let m = M::new(stateset!("q0", "q1"),
alphabet!('a'),
"q0".to_string(),
stateset!("q1"),
delta!(("q0", 'a', "q1")));
let m_expected = M::new(stateset!("0q0", "0f0", "01q0", "01q1"),
alphabet!('a'),
"0q0".to_string(),
stateset!("0f0"),
delta!(("01q0", 'a', "01q1"),
("0q0", 'λ', "01q0"),
("0q0", 'λ', "0f0"),
("01q1", 'λ', "01q0"),
("01q1", 'λ', "0f0")));
let m = automata_star(&m, "0".to_string());
{
use automata::print_delta;
print_delta(&m.delta);
print_delta(&m_expected.delta);
}
assert_eq!(m, m_expected)
}
//Importante case for minification
//The minified star for a single character should be an automata
//with a single state which both initial and final
#[test]
fn star_test_case_1() {
use super::automata_star;
use automata_min::minify;
use automata_operators::afndl_to_afd;
let m = M::new(stateset!("q0", "q1"),
alphabet!('a'),
"q0".to_string(),
stateset!("q1"),
delta!(("q0", 'a', "q1")));
let m_expected =
M::new(stateset!("01q0-01q1-0f0-01q0-0f0-0q0"),
alphabet!('a'),
"01q0-01q1-0f0-01q0-0f0-0q0".to_string(),
stateset!("01q0-01q1-0f0-01q0-0f0-0q0"),
delta!(("01q0-01q1-0f0-01q0-0f0-0q0", 'a', "01q0-01q1-0f0-01q0-0f0-0q0")));
let m = automata_star(&m, "0".to_string());
{
use automata::print_automata;
println!("Automata star");
print_automata(&m);
}
let m = afndl_to_afd(&m);
{
println!("Automata D");
use automata::print_automata;
print_automata(&m);
}
let m = minify(&m);
{
println!("Automata min");
use automata::print_automata;
print_automata(&m);
}
assert_eq!(m, m_expected)
}
#[test]
fn regexp_base_case_lambda() {
let m = regexp("".to_string());
let m_expected = M::new(stateset!("q0"),
alphabet!(),
"q0".to_string(),
stateset!("q0"),
delta!());
assert_eq!(m, m_expected)
}
#[test]
fn base_case_single_char() {
let m = regexp("a".to_string());
let m_expected = M::new(stateset!("q0", "q1"),
alphabet!('a'),
"q0".to_string(),
stateset!("q1"),
delta!(("q0", 'a', "q1")));
assert_eq!(m, m_expected)
}
#[test]
fn regexp_union() {
let mut m = regexp("a|b".to_string());
{
use automata::print_delta;
print_delta(&m.delta);
}
assert!(m.check_string("a").is_ok());
assert!(m.check_string("b").is_ok());
assert!(m.check_string("ab").is_err());
}
#[test]
fn regexp_intersection() {
let mut m = regexp("ab".to_string());
{
use automata::print_delta;
print_delta(&m.delta);
}
assert!(m.check_string("ab").is_ok());
assert!(m.check_string("b").is_err());
}
#[test]
fn regexp_star() {
let mut m = regexp("a*".to_string());
{
use automata::print_delta;
print_delta(&m.delta);
}
assert!(m.check_string("a").is_ok());
assert!(m.check_string("aa").is_ok());
assert!(m.check_string("aaaaaaaaaaaaaaa").is_ok());
assert!(m.check_string("b").is_err());
assert!(m.check_string("").is_ok());
}
}<|fim▁end|>
|
let m_actual = prefix_automata(&"A".to_string(), &m);
|
<|file_name|>Command.java<|end_file_name|><|fim▁begin|>package patterns.behavioral.command;
/**
 * Command pattern: the common interface that every concrete command
 * implements.  Invokers hold a {@code Command} and trigger it without
 * knowing what action it encapsulates.
 */
public abstract class Command {

    /** Perform the action encapsulated by this command. */
    public abstract void execute();
}
|
<|file_name|>assets.py<|end_file_name|><|fim▁begin|>"""
Asset compilation and collection.
"""
from __future__ import print_function
import argparse
from paver.easy import sh, path, task, cmdopts, needs, consume_args, call_task
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
import glob
import traceback
from .utils.envs import Env
from .utils.cmd import cmd, django_cmd
COFFEE_DIRS = ['lms', 'cms', 'common']
SASS_LOAD_PATHS = ['./common/static/sass']
SASS_UPDATE_DIRS = ['*/static']
SASS_CACHE_PATH = '/tmp/sass-cache'
class CoffeeScriptWatcher(PatternMatchingEventHandler):
    """
    Watchdog handler that recompiles a CoffeeScript file whenever it changes.
    """
    ignore_directories = True
    patterns = ['*.coffee']

    def register(self, observer):
        """
        Schedule this handler with `observer` for every directory that
        contains at least one .coffee file (found via the shelled-out
        `find` command from coffeescript_files()).
        """
        dirnames = set()
        for filename in sh(coffeescript_files(), capture=True).splitlines():
            dirnames.add(path(filename).dirname())
        for dirname in dirnames:
            observer.schedule(self, dirname)

    def on_modified(self, event):
        # Recompile only the file that changed; swallow errors so a broken
        # file does not kill the watcher.
        print('\tCHANGED:', event.src_path)
        try:
            compile_coffeescript(event.src_path)
        except Exception:  # pylint: disable=W0703
            traceback.print_exc()
class SassWatcher(PatternMatchingEventHandler):
    """
    Watchdog handler that recompiles all Sass whenever any .scss changes.
    """
    ignore_directories = True
    patterns = ['*.scss']
    # XModule sass is handled separately by XModuleSassWatcher.
    ignore_patterns = ['common/static/xmodule/*']

    def register(self, observer):
        """
        Schedule this handler (recursively) for every sass load/update
        directory, expanding glob patterns like '*/static'.
        """
        for dirname in SASS_LOAD_PATHS + SASS_UPDATE_DIRS + theme_sass_paths():
            paths = []
            if '*' in dirname:
                paths.extend(glob.glob(dirname))
            else:
                paths.append(dirname)
            for dirname in paths:
                observer.schedule(self, dirname, recursive=True)

    def on_modified(self, event):
        # Sass files can import each other, so recompile everything rather
        # than just the changed file; keep the watcher alive on errors.
        print('\tCHANGED:', event.src_path)
        try:
            compile_sass()
        except Exception:  # pylint: disable=W0703
            traceback.print_exc()
class XModuleSassWatcher(SassWatcher):
    """
    Watchdog handler for sass changes inside the xmodule package.
    """
    ignore_directories = True
    ignore_patterns = []

    def register(self, observer):
        """
        Schedule this handler for the whole xmodule source tree.
        """
        observer.schedule(self, 'common/lib/xmodule/', recursive=True)

    def on_modified(self, event):
        # XModule assets go through their own pipeline, not compile_sass();
        # keep the watcher alive on errors.
        print('\tCHANGED:', event.src_path)
        try:
            process_xmodule_assets()
        except Exception:  # pylint: disable=W0703
            traceback.print_exc()
def theme_sass_paths():
    """
    Return a list of paths to the configured theme's sass assets, or an
    empty list when no custom theme is enabled.
    """
    edxapp_env = Env()
    if not edxapp_env.feature_flags.get('USE_CUSTOM_THEME', False):
        return []

    # Themes live in a sibling directory of the repo root:
    # <repo parent>/themes/<THEME_NAME>/static/sass
    theme_name = edxapp_env.env_tokens.get('THEME_NAME', '')
    parent_dir = path(edxapp_env.REPO_ROOT).abspath().parent
    theme_root = parent_dir / "themes" / theme_name
    return [theme_root / "static" / "sass"]
def coffeescript_files():
    """
    Return the `find` command (as a string) that lists every .coffee file
    under the known CoffeeScript directories.
    """
    dirs = " ".join([Env.REPO_ROOT / coffee_dir for coffee_dir in COFFEE_DIRS])
    return cmd('find', dirs, '-type f', '-name \"*.coffee\"')
def compile_coffeescript(*files):
    """
    Compile CoffeeScript sources into JavaScript.

    With no arguments, compiles every .coffee file reported by
    ``coffeescript_files()`` (backquoted so the shell expands the find
    command); otherwise compiles only the given paths.
    """
    targets = files if files else ("`{}`".format(coffeescript_files()),)
    sh(cmd("node_modules/.bin/coffee", "--compile", *targets))
def compile_sass(debug=False):
    """
    Compile Sass to CSS.

    When ``debug`` is True, output is left uncompressed.
    """
    theme_paths = theme_sass_paths()
    sh(cmd(
        'sass', '' if debug else '--style compressed',
        "--cache-location {cache}".format(cache=SASS_CACHE_PATH),
        "--load-path", " ".join(SASS_LOAD_PATHS + theme_paths),
        "--update", "-E", "utf-8", " ".join(SASS_UPDATE_DIRS + theme_paths)
    ))
def compile_templated_sass(systems, settings):
    """
    Render Mako templates for Sass files.

    `systems` is a list of systems (e.g. 'lms' or 'studio' or both)
    `settings` is the Django settings module to use.
    """
    # `system`, not `sys`: avoid shadowing the stdlib module name.
    for system in systems:
        sh(django_cmd(system, settings, 'preprocess_assets'))
def process_xmodule_assets():
    """
    Process XModule static assets.

    Delegates to the ``xmodule_assets`` console script, which writes its
    output under common/static/xmodule.
    """
    sh('xmodule_assets common/static/xmodule')
def collect_assets(systems, settings):
    """
    Collect static assets, including Django pipeline processing.

    `systems` is a list of systems (e.g. 'lms' or 'studio' or both)
    `settings` is the Django settings module to use.
    """
    # `system`, not `sys`: avoid shadowing the stdlib module name.
    for system in systems:
        sh(django_cmd(system, settings, "collectstatic --noinput > /dev/null"))
@task
@cmdopts([('background', 'b', 'Background mode')])
def watch_assets(options):
    """
    Watch for changes to asset files, and regenerate js/css
    """
    observer = Observer()

    CoffeeScriptWatcher().register(observer)
    SassWatcher().register(observer)
    XModuleSassWatcher().register(observer)

    print("Starting asset watcher...")
    observer.start()
    if not getattr(options, 'background', False):
        # when running as a separate process, the main thread needs to loop
        # in order to allow for shutdown by control-c
        try:
            while True:
                observer.join(2)
        except KeyboardInterrupt:
            observer.stop()
        print("\nStopped asset watcher.")
def _make_update_assets_parser():
    """Build the command-line parser for the ``update_assets`` task."""
    parser = argparse.ArgumentParser(prog='paver update_assets')
    parser.add_argument(
        'system', type=str, nargs='*', default=['lms', 'studio'],
        help="lms or studio",
    )
    parser.add_argument(
        '--settings', type=str, default="dev",
        help="Django settings module",
    )
    parser.add_argument(
        '--debug', action='store_true', default=False,
        help="Disable Sass compression",
    )
    parser.add_argument(
        '--skip-collect', dest='collect', action='store_false', default=True,
        help="Skip collection of static assets",
    )
    parser.add_argument(
        '--watch', action='store_true', default=False,
        help="Watch files for changes",
    )
    return parser


@task
@needs('pavelib.prereqs.install_prereqs')
@consume_args
def update_assets(args):
    """
    Compile CoffeeScript and Sass, then collect static assets.
    """
    parsed = _make_update_assets_parser().parse_args(args)

    compile_templated_sass(parsed.system, parsed.settings)
    process_xmodule_assets()
    compile_coffeescript()
    compile_sass(parsed.debug)

    if parsed.collect:
        collect_assets(parsed.system, parsed.settings)
    if parsed.watch:
        call_task('watch_assets', options={'background': not parsed.debug})
|
def coffeescript_files():
"""
|
<|file_name|>step_create_instance.go<|end_file_name|><|fim▁begin|>package ecs
import (
"context"
"encoding/base64"
"fmt"
"io/ioutil"
"strconv"
"github.com/hashicorp/packer/common/uuid"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
"github.com/aliyun/alibaba-cloud-sdk-go/services/ecs"
confighelper "github.com/hashicorp/packer/helper/config"
"github.com/hashicorp/packer/helper/multistep"
"github.com/hashicorp/packer/packer"
)
// stepCreateAlicloudInstance creates the temporary ECS instance that the
// build runs on. Run stores the created instance for later steps; Cleanup
// deletes it.
type stepCreateAlicloudInstance struct {
    IOOptimized             confighelper.Trilean // request an I/O optimized instance when true
    InstanceType            string
    UserData                string // inline user data (applied in VPC network mode)
    UserDataFile            string // path to a user-data file; overrides UserData when set
    instanceId              string
    RegionId                string
    InternetChargeType      string // defaults to "PayByTraffic" in classic network mode
    InternetMaxBandwidthOut int    // defaults to 5 in classic network mode
    InstanceName            string
    ZoneId                  string
    instance                *ecs.Instance // set by Run, consumed by Cleanup
}
// Error codes that warrant retrying CreateInstance.
var createInstanceRetryErrors = []string{
    "IdempotentProcessing",
}

// Error codes that warrant retrying DeleteInstance during cleanup.
var deleteInstanceRetryErrors = []string{
    "IncorrectInstanceStatus.Initializing",
}
func (s *stepCreateAlicloudInstance) Run(ctx context.Context, state multistep.StateBag) multistep.StepAction {
client := state.Get("client").(*ClientWrapper)
ui := state.Get("ui").(packer.Ui)
ui.Say("Creating instance...")
createInstanceRequest, err := s.buildCreateInstanceRequest(state)
if err != nil {
return halt(state, err, "")
}
createInstanceResponse, err := client.WaitForExpected(&WaitForExpectArgs{
RequestFunc: func() (responses.AcsResponse, error) {
return client.CreateInstance(createInstanceRequest)
},
EvalFunc: client.EvalCouldRetryResponse(createInstanceRetryErrors, EvalRetryErrorType),
})
if err != nil {
return halt(state, err, "Error creating instance")
}
instanceId := createInstanceResponse.(*ecs.CreateInstanceResponse).InstanceId
_, err = client.WaitForInstanceStatus(s.RegionId, instanceId, InstanceStatusStopped)
if err != nil {
return halt(state, err, "Error waiting create instance")
}
describeInstancesRequest := ecs.CreateDescribeInstancesRequest()
describeInstancesRequest.InstanceIds = fmt.Sprintf("[\"%s\"]", instanceId)<|fim▁hole|> }
ui.Message(fmt.Sprintf("Created instance: %s", instanceId))
s.instance = &instances.Instances.Instance[0]
state.Put("instance", s.instance)
return multistep.ActionContinue
}
// Cleanup force-deletes the instance created by Run, if any. Deletion is
// retried while the instance is still initializing; failures are reported
// to the UI but not fatal.
func (s *stepCreateAlicloudInstance) Cleanup(state multistep.StateBag) {
    if s.instance == nil {
        return
    }
    cleanUpMessage(state, "instance")

    client := state.Get("client").(*ClientWrapper)
    ui := state.Get("ui").(packer.Ui)

    _, err := client.WaitForExpected(&WaitForExpectArgs{
        RequestFunc: func() (responses.AcsResponse, error) {
            request := ecs.CreateDeleteInstanceRequest()
            request.InstanceId = s.instance.InstanceId
            request.Force = requests.NewBoolean(true)
            return client.DeleteInstance(request)
        },
        EvalFunc:   client.EvalCouldRetryResponse(deleteInstanceRetryErrors, EvalRetryErrorType),
        RetryTimes: shortRetryTimes,
    })

    if err != nil {
        ui.Say(fmt.Sprintf("Failed to clean up instance %s: %s", s.instance.InstanceId, err))
    }
}
// buildCreateInstanceRequest assembles the CreateInstance request from step
// configuration plus values previously put in the state bag (source image,
// security group, network type, vswitch, config).
func (s *stepCreateAlicloudInstance) buildCreateInstanceRequest(state multistep.StateBag) (*ecs.CreateInstanceRequest, error) {
    request := ecs.CreateCreateInstanceRequest()
    // Client token makes the create call idempotent across retries.
    request.ClientToken = uuid.TimeOrderedUUID()
    request.RegionId = s.RegionId
    request.InstanceType = s.InstanceType
    request.InstanceName = s.InstanceName
    request.ZoneId = s.ZoneId

    sourceImage := state.Get("source_image").(*ecs.Image)
    request.ImageId = sourceImage.ImageId

    securityGroupId := state.Get("securitygroupid").(string)
    request.SecurityGroupId = securityGroupId

    networkType := state.Get("networktype").(InstanceNetWork)
    if networkType == InstanceNetworkVpc {
        vswitchId := state.Get("vswitchid").(string)
        request.VSwitchId = vswitchId

        // User data is only applied for VPC instances.
        userData, err := s.getUserData(state)
        if err != nil {
            return nil, err
        }
        request.UserData = userData
    } else {
        // Classic network: fill in internet charge defaults.
        if s.InternetChargeType == "" {
            s.InternetChargeType = "PayByTraffic"
        }

        if s.InternetMaxBandwidthOut == 0 {
            s.InternetMaxBandwidthOut = 5
        }
    }
    request.InternetChargeType = s.InternetChargeType
    request.InternetMaxBandwidthOut = requests.Integer(convertNumber(s.InternetMaxBandwidthOut))

    // Trilean: only set IoOptimized when explicitly true or false.
    if s.IOOptimized.True() {
        request.IoOptimized = IOOptimizedOptimized
    } else if s.IOOptimized.False() {
        request.IoOptimized = IOOptimizedNone
    }

    config := state.Get("config").(*Config)
    password := config.Comm.SSHPassword
    if password == "" && config.Comm.WinRMPassword != "" {
        password = config.Comm.WinRMPassword
    }
    request.Password = password

    systemDisk := config.AlicloudImageConfig.ECSSystemDiskMapping
    request.SystemDiskDiskName = systemDisk.DiskName
    request.SystemDiskCategory = systemDisk.DiskCategory
    request.SystemDiskSize = requests.Integer(convertNumber(systemDisk.DiskSize))
    request.SystemDiskDescription = systemDisk.Description

    imageDisks := config.AlicloudImageConfig.ECSImagesDiskMappings
    var dataDisks []ecs.CreateInstanceDataDisk
    for _, imageDisk := range imageDisks {
        var dataDisk ecs.CreateInstanceDataDisk
        dataDisk.DiskName = imageDisk.DiskName
        dataDisk.Category = imageDisk.DiskCategory
        dataDisk.Size = string(convertNumber(imageDisk.DiskSize))
        dataDisk.SnapshotId = imageDisk.SnapshotId
        dataDisk.Description = imageDisk.Description
        dataDisk.DeleteWithInstance = strconv.FormatBool(imageDisk.DeleteWithInstance)
        dataDisk.Device = imageDisk.Device
        if imageDisk.Encrypted != confighelper.TriUnset {
            dataDisk.Encrypted = strconv.FormatBool(imageDisk.Encrypted.True())
        }

        dataDisks = append(dataDisks, dataDisk)
    }
    request.DataDisk = &dataDisks

    return request, nil
}
func (s *stepCreateAlicloudInstance) getUserData(state multistep.StateBag) (string, error) {
userData := s.UserData
if s.UserDataFile != "" {
data, err := ioutil.ReadFile(s.UserDataFile)
if err != nil {
return "", err
}
userData = string(data)
}
if userData != "" {
userData = base64.StdEncoding.EncodeToString([]byte(userData))
}
return userData, nil
}<|fim▁end|>
|
instances, err := client.DescribeInstances(describeInstancesRequest)
if err != nil {
return halt(state, err, "")
|
<|file_name|>glusterfs.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import errno
import os
import stat
import warnings
from os_brick.remotefs import remotefs as remotefs_brick
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import fileutils
from oslo_utils import units
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder.image import image_utils
from cinder import utils
from cinder.volume import driver
from cinder.volume.drivers import remotefs as remotefs_drv
LOG = logging.getLogger(__name__)
# Driver-specific configuration options, registered on the global CONF below.
volume_opts = [
    cfg.StrOpt('glusterfs_shares_config',
               default='/etc/cinder/glusterfs_shares',
               help='File with the list of available gluster shares'),
    cfg.StrOpt('glusterfs_mount_point_base',
               default='$state_path/mnt',
               help='Base dir containing mount points for gluster shares.'),
]

CONF = cfg.CONF
CONF.register_opts(volume_opts)
class GlusterfsDriver(remotefs_drv.RemoteFSSnapDriver, driver.CloneableVD,
                      driver.ExtendVD):
    """Gluster based cinder driver.

    Creates file on Gluster share for using it as block device on hypervisor.

    Operations such as create/delete/extend volume/snapshot use locking on a
    per-process basis to prevent multiple threads from modifying qcow2 chains
    or the snapshot .info file simultaneously.
    """

    driver_volume_type = 'glusterfs'  # reported in initialize_connection
    driver_prefix = 'glusterfs'
    volume_backend_name = 'GlusterFS'
    VERSION = '1.3.0'
    def __init__(self, execute=processutils.execute, *args, **kwargs):
        """Register config options and build the remotefs mount client."""
        self._remotefsclient = None
        super(GlusterfsDriver, self).__init__(*args, **kwargs)
        self.configuration.append_config_values(volume_opts)
        root_helper = utils.get_root_helper()
        # Per-backend override of the mount base, falling back to global CONF.
        self.base = getattr(self.configuration,
                            'glusterfs_mount_point_base',
                            CONF.glusterfs_mount_point_base)
        self._remotefsclient = remotefs_brick.RemoteFsClient(
            'glusterfs', root_helper, execute,
            glusterfs_mount_point_base=self.base)
    def do_setup(self, context):
        """Any initialization the volume driver does while starting.

        Validates that the shares config file exists and that
        mount.glusterfs is installed, then remounts all shares.

        :raises GlusterfsException: if the config file is missing or
            mount.glusterfs is not installed
        """
        super(GlusterfsDriver, self).do_setup(context)

        config = self.configuration.glusterfs_shares_config
        if not config:
            msg = (_("There's no Gluster config file configured (%s)") %
                   'glusterfs_shares_config')
            LOG.warning(msg)
            raise exception.GlusterfsException(msg)
        if not os.path.exists(config):
            msg = (_("Gluster config file at %(config)s doesn't exist") %
                   {'config': config})
            LOG.warning(msg)
            raise exception.GlusterfsException(msg)

        self.shares = {}

        try:
            # Probe for the mount helper; a missing binary raises ENOENT.
            self._execute('mount.glusterfs', check_exit_code=False)
        except OSError as exc:
            if exc.errno == errno.ENOENT:
                raise exception.GlusterfsException(
                    _('mount.glusterfs is not installed'))
            else:
                raise

        self._refresh_mounts()
    def _unmount_shares(self):
        """Unmount every configured share, logging (not raising) failures."""
        self._load_shares_config(self.configuration.glusterfs_shares_config)
        for share in self.shares.keys():
            try:
                self._do_umount(True, share)
            except Exception as exc:
                LOG.warning(_LW('Exception during unmounting %s'), exc)
    def _do_umount(self, ignore_not_mounted, share):
        """Unmount a single share.

        :param ignore_not_mounted: when True, a "not mounted" error is
            logged at INFO level instead of re-raised
        :param share: share address to unmount
        """
        mount_path = self._get_mount_point_for_share(share)
        command = ['umount', mount_path]
        try:
            self._execute(*command, run_as_root=True)
        except processutils.ProcessExecutionError as exc:
            if ignore_not_mounted and 'not mounted' in exc.stderr:
                LOG.info(_LI("%s is already umounted"), share)
            else:
                LOG.error(_LE("Failed to umount %(share)s, reason=%(stderr)s"),
                          {'share': share, 'stderr': exc.stderr})
                raise
    def _refresh_mounts(self):
        """Unmount and remount all shares, tolerating busy mount points."""
        try:
            self._unmount_shares()
        except processutils.ProcessExecutionError as exc:
            if 'target is busy' in exc.stderr:
                # A busy mount cannot be refreshed; log and continue.
                LOG.warning(_LW("Failed to refresh mounts, reason=%s"),
                            exc.stderr)
            else:
                raise

        self._ensure_shares_mounted()
    def _qemu_img_info(self, path, volume_name):
        """Return qemu-img info for *path*, validated against the mount base."""
        return super(GlusterfsDriver, self)._qemu_img_info_base(
            path, volume_name, self.configuration.glusterfs_mount_point_base)

    def check_for_setup_error(self):
        """Just to override parent behavior."""
        pass

    def _local_volume_dir(self, volume):
        """Return the local mount directory that holds *volume*'s files."""
        hashed = self._get_hash_str(volume['provider_location'])
        path = '%s/%s' % (self.configuration.glusterfs_mount_point_base,
                          hashed)
        return path
    def _update_volume_stats(self):
        """Retrieve stats info from volume group.

        Extends the base stats with provisioned capacity and
        thin/thick provisioning support flags.
        """
        super(GlusterfsDriver, self)._update_volume_stats()

        data = self._stats

        global_capacity = data['total_capacity_gb']
        global_free = data['free_capacity_gb']

        thin_enabled = self.configuration.nas_volume_prov_type == 'thin'
        if thin_enabled:
            provisioned_capacity = self._get_provisioned_capacity()
        else:
            # Thick provisioning: everything allocated counts as provisioned.
            provisioned_capacity = round(global_capacity - global_free, 2)

        data['provisioned_capacity_gb'] = provisioned_capacity
        data['max_over_subscription_ratio'] = (
            self.configuration.max_over_subscription_ratio)
        data['thin_provisioning_support'] = thin_enabled
        data['thick_provisioning_support'] = not thin_enabled

        self._stats = data
    @remotefs_drv.locked_volume_id_operation
    def create_volume(self, volume):
        """Creates a volume.

        Picks the share with the greatest free capacity and records it
        as the volume's provider_location.
        """

        self._ensure_shares_mounted()

        volume['provider_location'] = self._find_share(volume['size'])

        LOG.info(_LI('casted to %s'), volume['provider_location'])

        self._do_create_volume(volume)

        return {'provider_location': volume['provider_location']}
    def _copy_volume_from_snapshot(self, snapshot, volume, volume_size):
        """Copy data from snapshot to destination volume.

        This is done with a qemu-img convert to raw/qcow2 from the snapshot
        qcow2.
        """

        LOG.debug("snapshot: %(snap)s, volume: %(vol)s, "
                  "volume_size: %(size)s",
                  {'snap': snapshot['id'],
                   'vol': volume['id'],
                   'size': volume_size})

        info_path = self._local_path_volume_info(snapshot['volume'])
        snap_info = self._read_info_file(info_path)
        vol_path = self._local_volume_dir(snapshot['volume'])
        forward_file = snap_info[snapshot['id']]
        forward_path = os.path.join(vol_path, forward_file)

        # Find the file which backs this file, which represents the point
        # when this snapshot was created.
        img_info = self._qemu_img_info(forward_path,
                                       snapshot['volume']['name'])
        path_to_snap_img = os.path.join(vol_path, img_info.backing_file)

        path_to_new_vol = self._local_path_volume(volume)

        LOG.debug("will copy from snapshot at %s", path_to_snap_img)

        # Output format mirrors the provisioning type of new volumes.
        if self.configuration.nas_volume_prov_type == 'thin':
            out_format = 'qcow2'
        else:
            out_format = 'raw'

        image_utils.convert_image(path_to_snap_img,
                                  path_to_new_vol,
                                  out_format)

        self._set_rw_permissions_for_all(path_to_new_vol)
    @remotefs_drv.locked_volume_id_operation
    def delete_volume(self, volume):
        """Deletes a logical volume.

        Removes the active image file plus any lingering base file and
        snapshot .info file; silently skips volumes that never got a
        provider_location.
        """

        if not volume['provider_location']:
            LOG.warning(_LW('Volume %s does not have '
                            'provider_location specified, '
                            'skipping'), volume['name'])
            return

        self._ensure_share_mounted(volume['provider_location'])
        volume_dir = self._local_volume_dir(volume)
        mounted_path = os.path.join(volume_dir,
                                    self.get_active_image_from_info(volume))

        self._execute('rm', '-f', mounted_path, run_as_root=True)

        # If an exception (e.g. timeout) occurred during delete_snapshot, the
        # base volume may linger around, so just delete it if it exists
        base_volume_path = self._local_path_volume(volume)
        fileutils.delete_if_exists(base_volume_path)

        info_path = self._local_path_volume_info(volume)
        fileutils.delete_if_exists(info_path)
def _get_matching_backing_file(self, backing_chain, snapshot_file):
return next(f for f in backing_chain
if f.get('backing-filename', '') == snapshot_file)
    def ensure_export(self, ctx, volume):
        """Synchronously recreates an export for a logical volume."""
        self._ensure_share_mounted(volume['provider_location'])

    def create_export(self, ctx, volume, connector):
        """Exports the volume."""
        # Nothing to do: volumes are plain files on an already-mounted share.
        pass

    def remove_export(self, ctx, volume):
        """Removes an export for a logical volume."""
        pass

    def validate_connector(self, connector):
        """No connector requirements for GlusterFS attachments."""
        pass
    @remotefs_drv.locked_volume_id_operation
    def initialize_connection(self, volume, connector):
        """Allow connection to connector and return connection info.

        :raises InvalidVolume: if the backing file is neither raw nor qcow2
        """

        # Find active qcow2 file
        active_file = self.get_active_image_from_info(volume)
        path = '%s/%s/%s' % (self.configuration.glusterfs_mount_point_base,
                             self._get_hash_str(volume['provider_location']),
                             active_file)

        data = {'export': volume['provider_location'],
                'name': active_file}
        if volume['provider_location'] in self.shares:
            data['options'] = self.shares[volume['provider_location']]

        # Test file for raw vs. qcow2 format
        info = self._qemu_img_info(path, volume['name'])
        data['format'] = info.file_format
        if data['format'] not in ['raw', 'qcow2']:
            msg = _('%s must be a valid raw or qcow2 image.') % path
            raise exception.InvalidVolume(msg)

        return {
            'driver_volume_type': 'glusterfs',
            'data': data,
            'mount_point_base': self._get_mount_point_base()
        }
    def terminate_connection(self, volume, connector, **kwargs):
        """Disallow connection from connector."""
        pass

    @remotefs_drv.locked_volume_id_operation
    def extend_volume(self, volume, size_gb):
        """Grow the volume's backing file to *size_gb* gigabytes.

        :raises InvalidVolume: if the backing format is neither raw nor qcow2
        """
        volume_path = self.local_path(volume)

        info = self._qemu_img_info(volume_path, volume['name'])
        backing_fmt = info.file_format

        if backing_fmt not in ['raw', 'qcow2']:
            msg = _('Unrecognized backing format: %s')
            raise exception.InvalidVolume(msg % backing_fmt)

        # qemu-img can resize both raw and qcow2 files
        image_utils.resize_image(volume_path, size_gb)
    def _do_create_volume(self, volume):
        """Create a volume on given glusterfs_share.

        Thin-provisioned volumes become qcow2 files; thick ones are
        fallocated (or filled with dd if fallocate is unsupported).

        :param volume: volume reference
        :raises InvalidVolume: if the volume file already exists
        """

        volume_path = self.local_path(volume)
        volume_size = volume['size']

        LOG.debug("creating new volume at %s", volume_path)

        if os.path.exists(volume_path):
            msg = _('file already exists at %s') % volume_path
            LOG.error(msg)
            raise exception.InvalidVolume(reason=msg)

        if self.configuration.nas_volume_prov_type == 'thin':
            self._create_qcow2_file(volume_path, volume_size)
        else:
            try:
                self._fallocate(volume_path, volume_size)
            except processutils.ProcessExecutionError as exc:
                if 'Operation not supported' in exc.stderr:
                    warnings.warn('Fallocate not supported by current version '
                                  'of glusterfs. So falling back to dd.')
                    self._create_regular_file(volume_path, volume_size)
                else:
                    # Clean up the partial file before re-raising.
                    fileutils.delete_if_exists(volume_path)
                    raise

        self._set_rw_permissions_for_all(volume_path)
def _ensure_shares_mounted(self):
"""Mount all configured GlusterFS shares."""
self._mounted_shares = []
self._load_shares_config(self.configuration.glusterfs_shares_config)
for share in self.shares.keys():
try:
self._ensure_share_mounted(share)<|fim▁hole|> except Exception as exc:
LOG.error(_LE('Exception during mounting %s'), exc)
LOG.debug('Available shares: %s', self._mounted_shares)
    def _ensure_share_mounted(self, glusterfs_share):
        """Mount GlusterFS share and make sure it is writable by our group.

        :param glusterfs_share: string
        """
        mount_path = self._get_mount_point_for_share(glusterfs_share)
        self._mount_glusterfs(glusterfs_share)

        # Ensure we can write to this share
        group_id = os.getegid()
        current_group_id = utils.get_file_gid(mount_path)
        current_mode = utils.get_file_mode(mount_path)

        if group_id != current_group_id:
            cmd = ['chgrp', group_id, mount_path]
            self._execute(*cmd, run_as_root=True)

        if not (current_mode & stat.S_IWGRP):
            cmd = ['chmod', 'g+w', mount_path]
            self._execute(*cmd, run_as_root=True)

        self._ensure_share_writable(mount_path)
    def _find_share(self, volume_size_for):
        """Choose GlusterFS share among available ones for given volume size.

        Current implementation looks for greatest capacity.

        :param volume_size_for: int size in GB
        :raises GlusterfsNoSharesMounted: if no shares are mounted
        :raises GlusterfsNoSuitableShareFound: if no share is big enough
        """

        if not self._mounted_shares:
            raise exception.GlusterfsNoSharesMounted()

        greatest_size = 0
        greatest_share = None

        for glusterfs_share in self._mounted_shares:
            capacity = self._get_available_capacity(glusterfs_share)[0]
            if capacity > greatest_size:
                greatest_share = glusterfs_share
                greatest_size = capacity

        # greatest_size is in bytes; the requested size is in GB.
        if volume_size_for * units.Gi > greatest_size:
            raise exception.GlusterfsNoSuitableShareFound(
                volume_size=volume_size_for)
        return greatest_share
    def _mount_glusterfs(self, glusterfs_share):
        """Mount GlusterFS share to mount path."""
        mnt_flags = []
        if self.shares.get(glusterfs_share) is not None:
            # Per-share mount options may be configured in the shares file.
            mnt_flags = self.shares[glusterfs_share].split()

        try:
            self._remotefsclient.mount(glusterfs_share, mnt_flags)
        except processutils.ProcessExecutionError:
            LOG.error(_LE("Mount failure for %(share)s."),
                      {'share': glusterfs_share})
            raise
    def backup_volume(self, context, backup, backup_service):
        """Create a new backup from an existing volume.

        Allow a backup to occur only if no snapshots exist.
        Check both Cinder and the file on-disk.  The latter is only
        a safety mechanism to prevent further damage if the snapshot
        information is already inconsistent.

        :raises InvalidVolume: if snapshots exist or the volume is not
            raw-formatted
        """
        snapshots = self.db.snapshot_get_all_for_volume(context,
                                                        backup['volume_id'])
        snap_error_msg = _('Backup is not supported for GlusterFS '
                           'volumes with snapshots.')
        if len(snapshots) > 0:
            raise exception.InvalidVolume(snap_error_msg)

        volume = self.db.volume_get(context, backup['volume_id'])

        volume_dir = self._local_volume_dir(volume)
        active_file_path = os.path.join(
            volume_dir,
            self.get_active_image_from_info(volume))

        info = self._qemu_img_info(active_file_path, volume['name'])

        if info.backing_file is not None:
            # On-disk chain disagrees with the database: refuse to proceed.
            LOG.error(_LE('No snapshots found in database, but %(path)s has '
                          'backing file %(backing_file)s!'),
                      {'path': active_file_path,
                       'backing_file': info.backing_file})
            raise exception.InvalidVolume(snap_error_msg)

        if info.file_format != 'raw':
            msg = _('Backup is only supported for raw-formatted '
                    'GlusterFS volumes.')
            raise exception.InvalidVolume(msg)

        return super(GlusterfsDriver, self).backup_volume(
            context, backup, backup_service)
|
self._mounted_shares.append(share)
|
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|># file test_fedora/test_models.py
#
# Copyright 2011 Emory University Libraries
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime, timedelta
import logging
from lxml import etree
from mock import patch, Mock
import os
from rdflib import URIRef, Graph as RdfGraph, XSD, Literal
from rdflib.namespace import Namespace
import re
import tempfile
import six
from eulfedora import models
from eulfedora.api import ApiFacade
from eulfedora.rdfns import relsext, model as modelns
from eulfedora.util import RequestFailed, fedoratime_to_datetime, md5sum, \
force_bytes, force_text
from eulfedora.xml import ObjectDatastream, FEDORA_MANAGE_NS, FoxmlDigitalObject, \
AuditTrail, AuditTrailRecord
from eulxml.xmlmap.dc import DublinCore
from test.test_fedora.base import FedoraTestCase, FIXTURE_ROOT
from test.testsettings import FEDORA_PIDSPACE
logger = logging.getLogger(__name__)
ONE_SEC = timedelta(seconds=1)
TWO_SECS = timedelta(seconds=2)
class MyDigitalObject(models.DigitalObject):
    # Test object with two content models and one datastream of each kind
    # used in these tests: plain, XML-mapped, and file/binary.
    CONTENT_MODELS = ['info:fedora/%s:ExampleCModel' % FEDORA_PIDSPACE,
                      'info:fedora/%s:AnotherCModel' % FEDORA_PIDSPACE]

    # extend digital object with datastreams for testing
    text = models.Datastream("TEXT", "Text datastream", defaults={
        'mimetype': 'text/plain',
    })
    extradc = models.XmlDatastream("EXTRADC", "Managed DC XML datastream", DublinCore,
                                   defaults={
                                       'mimetype': 'application/xml',
                                       'versionable': True,
                                   })
    image = models.FileDatastream('IMAGE', 'managed binary image datastream', defaults={
        'mimetype': 'image/png',
    })
class SimpleDigitalObject(models.DigitalObject):
    # Simpler test object: single content model, plain and XML datastreams.
    CONTENT_MODELS = ['info:fedora/%s:SimpleObject' % FEDORA_PIDSPACE]

    # extend digital object with datastreams for testing
    text = models.Datastream("TEXT", "Text datastream", defaults={
        'mimetype': 'text/plain',
    })
    extradc = models.XmlDatastream("EXTRADC", "Managed DC XML datastream", DublinCore)
TEXT_CONTENT = "Here is some text content for a non-xml datastream."
def _add_text_datastream(obj):
    """Attach a managed TEXT datastream with fixture content to *obj*."""
    tmp = tempfile.NamedTemporaryFile(mode="w", suffix=".txt")
    tmp.write(TEXT_CONTENT)
    tmp.flush()

    # datastream metadata for the addDatastream call
    ds_id = 'TEXT'
    ds_label = 'text datastream'
    ds_mimetype = 'text/plain'
    ds_control_group = 'M'
    ds_log = "creating new datastream"

    with open(tmp.name) as content_file:
        obj.api.addDatastream(obj.pid, ds_id, ds_label,
                              ds_mimetype, ds_log, ds_control_group,
                              content=content_file,
                              checksumType='MD5', versionable=False)
    tmp.close()
class TestDatastreams(FedoraTestCase):
    # Exercises datastream descriptors against a live Fedora test instance.
    fixtures = ['object-with-pid.foxml']
    pidspace = FEDORA_PIDSPACE

    def setUp(self):
        """Ingest the fixture, add a TEXT datastream, and capture ingest time."""
        super(TestDatastreams, self).setUp()
        self.pid = self.fedora_fixtures_ingested[-1]  # get the pid for the last object
        self.obj = MyDigitalObject(self.api, self.pid)

        # add a text datastream to the current test object
        _add_text_datastream(self.obj)

        # get fixture ingest time from the server (the hard way) for testing
        r = self.obj.api.getDatastream(self.pid, "DC")
        dsprofile_node = etree.fromstring(r.content, base_url=r.url)
        created_s = dsprofile_node.xpath('string(m:dsCreateDate)',
                                         namespaces={'m': FEDORA_MANAGE_NS})
        self.ingest_time = fedoratime_to_datetime(created_s)
    def test_get_ds_content(self):
        """Datastream content is exposed with the expected descriptor types."""
        dc = self.obj.dc.content
        self.assert_(isinstance(self.obj.dc, models.XmlDatastreamObject))
        self.assert_(isinstance(dc, DublinCore))
        self.assertEqual(dc.title, "A partially-prepared test object")
        self.assertEqual(dc.identifier, self.pid)

        self.assert_(isinstance(self.obj.text, models.DatastreamObject))
        self.assertEqual(force_text(self.obj.text.content), TEXT_CONTENT)
def test_get_ds_info(self):
self.assertEqual(self.obj.dc.label, "Dublin Core")
self.assertEqual(self.obj.dc.mimetype, "text/xml")
self.assertEqual(self.obj.dc.state, "A")
self.assertEqual(self.obj.dc.versionable, True)
self.assertEqual(self.obj.dc.control_group, "X")
# there may be micro-second variation between these two
# ingest/creation times, but they should probably be less than
# a second or two apart
try:
self.assertAlmostEqual(self.ingest_time, self.obj.dc.created,
delta=TWO_SECS)
except TypeError:
# delta keyword unavailable before python 2.7
self.assert_(abs(self.ingest_time - self.obj.dc.created) < TWO_SECS)
# short-cut to datastream size
self.assertEqual(self.obj.dc.info.size, self.obj.dc.size)
self.assertEqual(self.obj.text.label, "text datastream")
self.assertEqual(self.obj.text.mimetype, "text/plain")
self.assertEqual(self.obj.text.state, "A")
self.assertEqual(self.obj.text.versionable, False)
self.assertEqual(self.obj.text.control_group, "M")
try:
self.assertAlmostEqual(self.ingest_time, self.obj.text.created,
delta=TWO_SECS)
except TypeError:
# delta keyword unavailable before python 2.7
self.assert_(abs(self.ingest_time - self.obj.text.created) < TWO_SECS)
# bootstrap info from defaults for a new object
newobj = MyDigitalObject(self.api)
self.assertEqual('Text datastream', newobj.text.label,
'default label should be set on new datastream')
self.assertEqual('text/plain', newobj.text.mimetype,
'default label should be set on new datastream')
self.assertEqual('MD5', newobj.text.checksum_type,
'default checksum type should be set on new datastream')
    def test_savedatastream(self):
        """Saving a modified datastream pushes content and profile to Fedora."""
        new_text = "Here is some totally new text content."
        self.obj.text.content = new_text
        self.obj.text.label = "new ds label"
        self.obj.text.mimetype = "text/other"
        self.obj.text.versionable = False
        self.obj.text.state = "I"
        self.obj.text.format = "some.format.uri"
        saved = self.obj.text.save("changed text")
        self.assertTrue(saved, "saving TEXT datastream should return true")
        self.assertEqual(self.obj.text.content, new_text)
        # compare with the datastream pulled directly from Fedora
        r = self.obj.api.getDatastreamDissemination(self.pid, self.obj.text.id)
        self.assertEqual(r.text, new_text)
        r = self.obj.api.getDatastream(self.pid, self.obj.text.id)
        dsinfo = r.text
        self.assert_("<dsLabel>new ds label</dsLabel>" in dsinfo)
        self.assert_("<dsMIME>text/other</dsMIME>" in dsinfo)
        self.assert_("<dsVersionable>false</dsVersionable>" in dsinfo)
        self.assert_("<dsState>I</dsState>" in dsinfo)
        self.assert_("<dsFormatURI>some.format.uri</dsFormatURI>" in dsinfo)
        # checksum not sent - fedora should calculate one for us
        self.assert_("<dsChecksum>%s</dsChecksum>" % md5sum(force_bytes(new_text))
                     in dsinfo)
        # look for log message ?

        self.obj.dc.content.title = "this is a new title"
        saved = self.obj.dc.save("changed DC title")
        self.assertTrue(saved, "saving DC datastream should return true")
        r = self.obj.api.getDatastreamDissemination(self.pid, self.obj.dc.id)
        self.assert_("<dc:title>this is a new title</dc:title>" in r.text)
    def test_save_by_location(self):
        """Saving via ds_location passes dsLocation to the API and resets it."""
        file_uri = 'file:///tmp/rsk-test.txt'

        # since we can't put or guarantee a test file on the fedora server,
        # patch the api with Mock to check api call
        with patch.object(ApiFacade, 'modifyDatastream') as mock_mod_ds:
            mock_mod_ds.return_value = Mock(status_code=200, content='saved')

            self.obj.text.ds_location = file_uri
            self.obj.text.content = 'this content should be ignored'
            logmsg = 'text content from file uri'
            saved = self.obj.text.save(logmsg)
            self.assertTrue(saved)
            mock_mod_ds.assert_called_with(self.obj.pid, self.obj.text.id,
                                           mimeType='text/plain', dsLocation=file_uri,
                                           logMessage=logmsg)
            self.assertEqual(None, self.obj.text.ds_location,
                             'ds_location should be None after successful save')

            # simulate save failure (without an exception)
            mock_mod_ds.return_value = Mock(status_code=304)
            self.obj.text.ds_location = file_uri
            saved = self.obj.text.save(logmsg)
            self.assertFalse(saved)
            self.assertNotEqual(None, self.obj.text.ds_location,
                                'ds_location should not be None after failed save')

        # purge ds and test addDatastream
        self.obj.api.purgeDatastream(self.obj.pid, self.obj.text.id)
        # load a new version that knows text ds doesn't exist
        obj = MyDigitalObject(self.api, self.pid)

        with patch.object(ApiFacade, 'addDatastream') as mock_add_ds:
            mock_add_ds.return_value = Mock(status_code=201, content='added')

            obj.text.ds_location = file_uri
            obj.text.content = 'this content should be ignored'
            logmsg = 'text content from file uri'
            saved = obj.text.save(logmsg)
            self.assertTrue(saved)
            mock_add_ds.assert_called_with(self.obj.pid, self.obj.text.id,
                                           mimeType='text/plain', dsLocation=file_uri,
                                           logMessage=logmsg, controlGroup='M')
            self.assertEqual(None, obj.text.ds_location,
                             'ds_location should be None after successful save (add)')
def test_ds_isModified(self):
self.assertFalse(self.obj.text.isModified(),
"isModified should return False for unchanged DC datastream")
self.assertFalse(self.obj.dc.isModified(),
"isModified should return False for unchanged DC datastream")
self.obj.text.label = "next text label"
self.assertTrue(self.obj.text.isModified(),
"isModified should return True when text datastream label has been updated")
self.obj.dc.content.description = "new datastream contents"
self.assertTrue(self.obj.dc.isModified(),
"isModified should return True when DC datastream content has changed")
self.obj.text.save()
self.obj.dc.save()
self.assertFalse(self.obj.text.isModified(),
"isModified should return False after text datastream has been saved")
self.assertFalse(self.obj.dc.isModified(),
"isModified should return False after DC datastream has been saved")
# empty xml should also show as not modified
self.assertFalse(self.obj.extradc.isModified())
def test_rdf_datastream(self):
# add a relationship to test RELS-EXT/rdf datastreams
foo123 = "info:fedora/foo:123"
self.obj.add_relationship(relsext.isMemberOf, foo123)
self.assert_(isinstance(self.obj.rels_ext, models.RdfDatastreamObject))
self.assert_(isinstance(self.obj.rels_ext.content, RdfGraph))
self.assert_((self.obj.uriref, relsext.isMemberOf, URIRef(foo123)) in
self.obj.rels_ext.content)
def test_file_datastream(self):
# confirm the image datastream does not exist, so we can test adding it
self.assertFalse(self.obj.image.exists)
# add file datastream to test object
filename = os.path.join(FIXTURE_ROOT, 'test.png')
with open(filename, mode='rb') as imgfile:
self.obj.image.content = imgfile
imgsaved = self.obj.save()
self.assertTrue(imgsaved)
# datastream should exist now
self.assertTrue(self.obj.image.exists)
# file content should be reset<|fim▁hole|> # access via file datastream descriptor
self.assert_(isinstance(self.obj.image, models.FileDatastreamObject))
self.assertEqual(self.obj.image.content.read(), open(filename, mode='rb').read())
# update via descriptor
new_file = os.path.join(FIXTURE_ROOT, 'test.jpeg')
self.obj.image.content = open(new_file, mode='rb')
self.obj.image.checksum = 'aaa'
self.assertTrue(self.obj.image.isModified())
#Saving with incorrect checksum should fail.
expected_error = None
try:
self.obj.save()
except models.DigitalObjectSaveFailure as e:
#Error should go here
expected_error = e
self.assert_(str(expected_error).endswith('successfully backed out '), 'Incorrect checksum should back out successfully.')
#Now try with correct checksum
self.obj.image.content = open(new_file, mode='rb')
self.obj.image.checksum = '57d5eb11a19cf6f67ebd9e8673c9812e'
return_status = self.obj.save()
self.fedora_fixtures_ingested.append(self.obj.pid)
self.assertEqual(True, return_status)
# grab a new copy from fedora, confirm contents match
obj = MyDigitalObject(self.api, self.pid)
self.assertEqual(obj.image.content.read(), open(new_file, mode='rb').read())
self.assertEqual(obj.image.checksum, '57d5eb11a19cf6f67ebd9e8673c9812e')
def test_undo_last_save(self):
# test undoing profile and content changes
# unversioned datastream
self.obj.text.label = "totally new label"
self.obj.text.content = "and totally new content, too"
self.obj.text.save()
self.append_pid(self.obj.pid)
self.assertTrue(self.obj.text.undo_last_save())
history = self.obj.text.history()
self.assertEqual("text datastream", history.versions[0].label)
r = self.obj.api.getDatastreamDissemination(self.pid, self.obj.text.id)
self.assertEqual(TEXT_CONTENT, r.text)
# versioned datastream
self.obj.dc.label = "DC 2.0"
self.obj.dc.title = "my new DC"
self.obj.dc.save()
self.assertTrue(self.obj.dc.undo_last_save())
history = self.obj.dc.history()
self.assertEqual(1, len(history.versions)) # new datastream added, then removed - back to 1 version
self.assertEqual("Dublin Core", history.versions[0].label)
r = self.obj.api.getDatastreamDissemination(self.pid, self.obj.dc.id)
self.assert_('<dc:title>A partially-prepared test object</dc:title>' in r.text)
# unversioned - profile change only
self.obj = MyDigitalObject(self.api, self.pid)
self.obj.text.label = "totally new label"
self.obj.text.save()
self.assertTrue(self.obj.text.undo_last_save())
history = self.obj.text.history()
self.assertEqual("text datastream", history.versions[0].label)
r = self.obj.api.getDatastreamDissemination(self.pid, self.obj.text.id)
self.assertEqual(TEXT_CONTENT, r.text)
def test_get_chunked_content(self):
# get chunks - chunksize larger than entire text content
chunks = list(self.obj.text.get_chunked_content(1024))
self.assertEqual(self.obj.text.content, chunks[0])
# smaller chunksize
chunks = list(self.obj.text.get_chunked_content(10))
self.assertEqual(self.obj.text.content[:10], chunks[0])
self.assertEqual(self.obj.text.content[10:20], chunks[1])
def test_datastream_version(self):
# modify dc & save to create a second version
self.obj.dc.content.description = "new datastream contents"
self.obj.dc.save()
# get the two versions ds obj
dc_v0 = self.obj.getDatastreamObject(self.obj.dc.id,
as_of_date=self.obj.dc.history().versions[0].created)
dc_v1 = self.obj.getDatastreamObject(self.obj.dc.id,
as_of_date=self.obj.dc.history().versions[1].created)
# ds info should be different
self.assertNotEqual(dc_v0.created, dc_v1.created)
self.assertNotEqual(dc_v0.size, dc_v1.size)
self.assertNotEqual(dc_v0.checksum, dc_v1.checksum)
# ds content should be different
self.assertNotEqual(dc_v0.content, dc_v1.content)
# saving a historical version is not allowed
self.assertRaises(RuntimeError, dc_v0.save)
class TestNewObject(FedoraTestCase):
    """Tests for ingesting brand-new objects and their datastreams.

    Fixes: removed a duplicated append_pid call in test_basic_ingest; the
    second rels-ext content-model assertion in test_default_datastreams now
    checks CONTENT_MODELS[1] (it previously duplicated the [0] check, matching
    the fetched-object assertions below); fixture file handles are closed via
    context managers.
    """
    pidspace = FEDORA_PIDSPACE

    def test_basic_ingest(self):
        self.repo.default_pidspace = self.pidspace
        obj = self.repo.get_object(type=MyDigitalObject)
        # before ingest, pid is not yet a string (it is generated on save)
        self.assertFalse(isinstance(obj.pid, six.string_types))
        obj.save()
        # register pid for cleanup once (was previously appended twice)
        self.append_pid(obj.pid)
        self.assertTrue(isinstance(obj.pid, six.string_types))

        fetched = self.repo.get_object(obj.pid, type=MyDigitalObject)
        self.assertEqual(fetched.dc.content.identifier, obj.pid)

        # confirm that fedora generates a checksum for us
        r = obj.api.getDatastream(obj.pid, obj.dc.id)
        dsinfo = r.text
        self.assert_(re.search("<dsChecksum>[0-9a-f]+</dsChecksum>", dsinfo),
             'Fedora should automatically generated a datastream checksum on ingest ' +
             '(requires auto-checksum enabled and Fedora 3.7+)')

    def test_ingest_content_uri(self):
        obj = self.repo.get_object(type=MyDigitalObject)
        obj.pid = 'test:1'
        obj.text.ds_location = 'file:///tmp/some/local/file.txt'
        # don't actually save, since we can't put a test file on the fedora test server
        foxml = obj._build_foxml_doc()
        # inspect TEXT datastream contentLocation in the generated foxml
        text_dsloc = foxml.xpath('.//f:datastream[@ID="TEXT"]/' +
                                 'f:datastreamVersion/f:contentLocation',
                                 namespaces={'f': obj.FOXML_NS})[0]

        self.assertEqual(obj.text.ds_location, text_dsloc.get('REF'))
        self.assertEqual('URL', text_dsloc.get('TYPE'))

    def test_modified_profile(self):
        obj = self.repo.get_object(type=MyDigitalObject)
        obj.label = 'test label'
        obj.owner = 'tester'
        obj.state = 'I'
        obj.save()
        self.append_pid(obj.pid)

        self.assertEqual(obj.label, 'test label')
        self.assertEqual(obj.owner, 'tester')
        self.assertEqual(obj.state, 'I')

        # profile values should round-trip through fedora
        fetched = self.repo.get_object(obj.pid, type=MyDigitalObject)
        self.assertEqual(fetched.label, 'test label')
        self.assertEqual(fetched.owner, 'tester')
        self.assertEqual(fetched.state, 'I')

    def test_multiple_owners(self):
        obj = self.repo.get_object(type=MyDigitalObject)
        obj.owner = 'thing1, thing2'
        self.assert_(isinstance(obj.owners, list))
        self.assertEqual(['thing1', 'thing2'], obj.owners)

        # whitespace around comma-delimited owners should be stripped
        obj.owner = ' thing1, thing2 '
        self.assertEqual(['thing1', 'thing2'], obj.owners)

    def test_default_datastreams(self):
        """If we just create and save an object, verify that DigitalObject
        initializes its datastreams appropriately."""

        obj = self.repo.get_object(type=MyDigitalObject)
        obj.save()
        self.append_pid(obj.pid)

        # verify some datastreams on the original object

        # fedora treats dc specially
        self.assertEqual(obj.dc.label, 'Dublin Core')
        self.assertEqual(obj.dc.mimetype, 'text/xml')
        self.assertEqual(obj.dc.versionable, False)
        self.assertEqual(obj.dc.state, 'A')
        self.assertEqual(obj.dc.format, 'http://www.openarchives.org/OAI/2.0/oai_dc/')
        self.assertEqual(obj.dc.control_group, 'X')
        self.assertEqual(obj.dc.content.identifier, obj.pid)  # fedora sets this automatically

        # test rels-ext as an rdf datastream
        self.assertEqual(obj.rels_ext.label, 'External Relations')
        self.assertEqual(obj.rels_ext.mimetype, 'application/rdf+xml')
        self.assertEqual(obj.rels_ext.versionable, False)
        self.assertEqual(obj.rels_ext.state, 'A')
        self.assertEqual(obj.rels_ext.format, 'info:fedora/fedora-system:FedoraRELSExt-1.0')
        self.assertEqual(obj.rels_ext.control_group, 'X')

        self.assertTrue(isinstance(obj.rels_ext.content, RdfGraph))
        # both content models should be present
        # (the second assertion previously duplicated CONTENT_MODELS[0])
        self.assert_((obj.uriref, modelns.hasModel, URIRef(MyDigitalObject.CONTENT_MODELS[0])) in
                     obj.rels_ext.content)
        self.assert_((obj.uriref, modelns.hasModel, URIRef(MyDigitalObject.CONTENT_MODELS[1])) in
                     obj.rels_ext.content)

        # test managed xml datastreams
        self.assertEqual(obj.extradc.label, 'Managed DC XML datastream')
        self.assertEqual(obj.extradc.mimetype, 'application/xml')
        self.assertEqual(obj.extradc.versionable, True)
        self.assertEqual(obj.extradc.state, 'A')
        self.assertEqual(obj.extradc.control_group, 'M')
        self.assertTrue(isinstance(obj.extradc.content, DublinCore))

        # verify those datastreams on a new version fetched fresh from the
        # repo

        fetched = self.repo.get_object(obj.pid, type=MyDigitalObject)

        self.assertEqual(fetched.dc.label, 'Dublin Core')
        self.assertEqual(fetched.dc.mimetype, 'text/xml')
        self.assertEqual(fetched.dc.versionable, False)
        self.assertEqual(fetched.dc.state, 'A')
        self.assertEqual(fetched.dc.format, 'http://www.openarchives.org/OAI/2.0/oai_dc/')
        self.assertEqual(fetched.dc.control_group, 'X')
        self.assertEqual(fetched.dc.content.identifier, fetched.pid)

        self.assertEqual(fetched.rels_ext.label, 'External Relations')
        self.assertEqual(fetched.rels_ext.mimetype, 'application/rdf+xml')
        self.assertEqual(fetched.rels_ext.versionable, False)
        self.assertEqual(fetched.rels_ext.state, 'A')
        self.assertEqual(fetched.rels_ext.format, 'info:fedora/fedora-system:FedoraRELSExt-1.0')
        self.assertEqual(fetched.rels_ext.control_group, 'X')

        self.assert_((obj.uriref, modelns.hasModel, URIRef(MyDigitalObject.CONTENT_MODELS[0])) in
                     fetched.rels_ext.content)
        self.assert_((obj.uriref, modelns.hasModel, URIRef(MyDigitalObject.CONTENT_MODELS[1])) in
                     fetched.rels_ext.content)

        self.assertEqual(fetched.extradc.label, 'Managed DC XML datastream')
        self.assertEqual(fetched.extradc.mimetype, 'application/xml')
        self.assertEqual(fetched.extradc.versionable, True)
        self.assertEqual(fetched.extradc.state, 'A')
        self.assertEqual(fetched.extradc.control_group, 'M')
        self.assertTrue(isinstance(fetched.extradc.content, DublinCore))

    def test_modified_datastreams(self):
        """Verify that we can modify a new object's datastreams before
        ingesting it."""
        obj = MyDigitalObject(self.api, pid=self.getNextPid(), create=True)

        # modify content for dc (metadata should be covered by other tests)
        obj.dc.content.description = 'A test object'
        obj.dc.content.rights = 'Rights? Sure, copy our test object.'

        # modify managed xml content (more metadata in text, below)
        obj.extradc.content.description = 'Still the same test object'

        # rewrite info and content for a managed binary datastream
        obj.text.label = 'The outer limits of testing'
        obj.text.mimetype = 'text/x-test'
        obj.text.versionable = True
        obj.text.state = 'I'
        obj.text.format = 'http://example.com/'
        obj.text.content = 'We are controlling transmission.'

        # save and verify in the same object
        obj.save()
        self.append_pid(obj.pid)

        self.assertEqual(obj.dc.content.description, 'A test object')
        self.assertEqual(obj.dc.content.rights, 'Rights? Sure, copy our test object.')
        self.assertEqual(obj.extradc.content.description, 'Still the same test object')
        self.assertEqual(obj.text.label, 'The outer limits of testing')
        self.assertEqual(obj.text.mimetype, 'text/x-test')
        self.assertEqual(obj.text.versionable, True)
        self.assertEqual(obj.text.state, 'I')
        self.assertEqual(obj.text.format, 'http://example.com/')
        self.assertEqual(obj.text.content, b'We are controlling transmission.')

        # re-fetch and verify
        fetched = MyDigitalObject(self.api, obj.pid)

        self.assertEqual(fetched.dc.content.description, 'A test object')
        self.assertEqual(fetched.dc.content.rights, 'Rights? Sure, copy our test object.')
        self.assertEqual(fetched.extradc.content.description, 'Still the same test object')
        self.assertEqual(fetched.text.label, 'The outer limits of testing')
        self.assertEqual(fetched.text.mimetype, 'text/x-test')
        self.assertEqual(fetched.text.versionable, True)
        self.assertEqual(fetched.text.state, 'I')
        self.assertEqual(fetched.text.format, 'http://example.com/')
        self.assertEqual(fetched.text.content, b'We are controlling transmission.')

    def test_modify_multiple(self):
        obj = self.repo.get_object(type=MyDigitalObject)
        obj.label = 'test label'
        obj.dc.content.title = 'test dc title'
        # context manager ensures the fixture file handle is closed
        with open(os.path.join(FIXTURE_ROOT, 'test.png'), mode='rb') as img:
            obj.image.content = img
            obj.save()
        self.append_pid(obj.pid)

        # update and save multiple pieces, including filedatastream metadata
        obj.label = 'new label'
        obj.dc.content.title = 'new dc title'
        obj.image.label = 'testimage.png'
        saved = obj.save()
        self.assertTrue(saved)
        updated_obj = self.repo.get_object(obj.pid, type=MyDigitalObject)
        self.assertEqual(obj.label, updated_obj.label)
        self.assertEqual(obj.dc.content.title, updated_obj.dc.content.title)
        self.assertEqual(obj.image.label, updated_obj.image.label)

    def test_new_file_datastream(self):
        obj = self.repo.get_object(type=MyDigitalObject)
        with open(os.path.join(FIXTURE_ROOT, 'test.png'), mode='rb') as img:
            obj.image.content = img
            obj.save()
        self.append_pid(obj.pid)

        fetched = self.repo.get_object(obj.pid, type=MyDigitalObject)
        with open(os.path.join(FIXTURE_ROOT, 'test.png'), mode='rb') as img:
            self.assertEqual(fetched.image.content.read(), img.read())

    def test_new_getdatastream(self):
        # use getDatastreamObject to add a datastream not defined
        # on the digital object
        self.repo.default_pidspace = self.pidspace
        obj = self.repo.get_object(type=MyDigitalObject)
        dsid = 'new_ds'
        content = 'here is some simple text content'
        label = 'my ad-hoc datastream'
        new_ds = obj.getDatastreamObject(dsid)
        new_ds.content = content
        new_ds.label = label
        new_ds.mimetype = 'text/plain'
        obj.save()
        self.append_pid(obj.pid)

        # fetch fresh copy from repo for inspection
        fetched = self.repo.get_object(obj.pid, type=MyDigitalObject)
        self.assert_(dsid in fetched.ds_list)
        dsobj = fetched.getDatastreamObject(dsid)
        self.assertEqual(label, dsobj.label)
        self.assertEqual('text/plain', dsobj.mimetype)
        self.assertEqual(content, force_text(dsobj.content))

        # add new datastream to existing object using the same method
        dsid2 = 'newer_ds'
        content = 'totally different content here'
        label = 'yet another ad-hoc datastream'
        newer_ds = fetched.getDatastreamObject(dsid2)
        newer_ds.content = content
        newer_ds.label = label
        newer_ds.mimetype = 'text/plain'
        fetched.save()

        # re-fetch for inspection
        fetched = self.repo.get_object(obj.pid, type=MyDigitalObject)
        self.assert_(dsid2 in fetched.ds_list)
        dsobj = fetched.getDatastreamObject(dsid2)
        self.assertEqual(label, dsobj.label)
        self.assertEqual('text/plain', dsobj.mimetype)
        self.assertEqual(content, force_text(dsobj.content))
class TestDigitalObject(FedoraTestCase):
fixtures = ['object-with-pid.foxml']
pidspace = FEDORA_PIDSPACE
def setUp(self):
super(TestDigitalObject, self).setUp()
self.pid = self.fedora_fixtures_ingested[-1] # get the pid for the last object
self.obj = MyDigitalObject(self.api, self.pid)
_add_text_datastream(self.obj)
# get fixture ingest time from the server (the hard way) for testing
r = self.obj.api.getDatastream(self.pid, "DC")
dsprofile_node = etree.fromstring(r.content, base_url=r.url)
created_s = dsprofile_node.xpath('string(m:dsCreateDate)',
namespaces={'m': FEDORA_MANAGE_NS})
self.ingest_time = fedoratime_to_datetime(created_s)
def test_properties(self):
self.assertEqual(self.pid, self.obj.pid)
self.assertTrue(self.obj.uri.startswith("info:fedora/"))
self.assertTrue(self.obj.uri.endswith(self.pid))
def test_get_object_info(self):
self.assertEqual(self.obj.label, "A partially-prepared test object")
self.assertEqual(self.obj.owner, "tester")
self.assertEqual(self.obj.state, "A")
try:
self.assertAlmostEqual(self.ingest_time, self.obj.created,
delta=ONE_SEC)
except TypeError:
# delta keyword unavailable before python 2.7
self.assert_(abs(self.ingest_time - self.obj.created) < ONE_SEC)
self.assert_(self.ingest_time < self.obj.modified)
def test_save_object_info(self):
self.obj.label = "An updated test object"
self.obj.owner = "notme"
self.obj.state = "I"
saved = self.obj._saveProfile("saving test object profile")
self.assertTrue(saved, "DigitalObject saveProfile should return True on successful update")
profile = self.obj.getProfile() # get fresh from fedora to confirm updated
self.assertEqual(profile.label, "An updated test object")
self.assertEqual(profile.owner, "notme")
self.assertEqual(profile.state, "I")
self.assertNotEqual(profile.created, profile.modified,
"object create date should not equal modified after updating object profile")
def test_object_label(self):
# object label set method has special functionality
self.obj.label = ' '.join('too long' for i in range(50))
self.assertEqual(self.obj.label_max_size, len(self.obj.label),
'object label should be truncated to 255 characters')
self.assertTrue(self.obj.info_modified, 'object info modified when object label has changed')
self.obj.info_modified = False
self.obj.label = str(self.obj.label)
self.assertFalse(self.obj.info_modified,
'object info should not be considered modified after setting label to its current value')
def test_object_owner(self):
self.obj.owner = ','.join('userid' for i in range(14))
self.assertTrue(len(self.obj.owner) <= self.obj.owner_max_size,
'object owner should be truncated to 64 characters or less')
self.assertTrue(self.obj.info_modified,
'object info modified when object owner has changed')
# last value should not be truncated
self.assertTrue(self.obj.owner.endswith('userid'))
# non-delimited value should just be truncated
self.obj.owner = ''.join('longestownernameever' for i in range(10))
self.assertEqual(self.obj.owner_max_size, len(self.obj.owner),
'object owner should be truncated to 64 characters or less')
def test_save(self):
# unmodified object - save should do nothing
self.obj.save()
self.append_pid(self.obj.pid)
# modify object profile, datastream content, datastream info
self.obj.label = "new label"
self.obj.dc.content.title = "new dublin core title"
self.obj.text.label = "text content"
self.obj.text.checksum_type = "MD5"
self.obj.text.checksum = "avcd"
# Saving with incorrect checksum should fail.
expected_error = None
try:
self.obj.save()
except models.DigitalObjectSaveFailure as err:
# Error should go here
expected_error = err
self.assert_('successfully backed out' in str(expected_error),
'Incorrect checksum should back out successfully.')
# re-initialize the object. do it with a unicode pid to test a regression.
self.obj = MyDigitalObject(self.api, force_text(self.pid))
# modify object profile, datastream content, datastream info
self.obj.label = u"new label\u2014with unicode"
self.obj.dc.content.title = u"new dublin core title\u2014also with unicode"
self.obj.text.label = "text content"
self.obj.text.checksum_type = "MD5"
self.obj.text.checksum = "1c83260ff729265470c0d349e939c755"
return_status = self.obj.save()
#Correct checksum should modify correctly.
self.assertEqual(True, return_status)
# confirm all changes were saved to fedora
profile = self.obj.getProfile()
self.assertEqual(profile.label, u"new label\u2014with unicode")
r = self.obj.api.getDatastreamDissemination(self.pid, self.obj.dc.id)
self.assert_(u'<dc:title>new dublin core title\u2014also with unicode</dc:title>' in force_text(r.content))
text_info = self.obj.getDatastreamProfile(self.obj.text.id)
self.assertEqual(text_info.label, "text content")
self.assertEqual(text_info.checksum_type, "MD5")
# force an error on saving DC to test backing out text datastream
self.obj.text.content = "some new text"
self.obj.dc.content = "this is not dublin core!" # NOTE: setting xml content like this could change...
# catch the exception so we can inspect it
try:
self.obj.save()
except models.DigitalObjectSaveFailure as f:
save_error = f
self.assert_(isinstance(save_error, models.DigitalObjectSaveFailure))
self.assertEqual(save_error.obj_pid, self.obj.pid,
"save failure exception should include object pid %s, got %s" % (self.obj.pid, save_error.obj_pid))
self.assertEqual(save_error.failure, "DC", )
self.assertEqual(set(['TEXT', 'DC']), set(save_error.to_be_saved))
self.assertEqual(['TEXT'], save_error.saved)
self.assertEqual(['TEXT'], save_error.cleaned)
self.assertEqual([], save_error.not_cleaned)
self.assertTrue(save_error.recovered)
r = self.obj.api.getDatastreamDissemination(self.pid, self.obj.text.id)
self.assertEqual(TEXT_CONTENT, r.text)
# force an error updating the profile, should back out both datastreams
self.obj = MyDigitalObject(self.api, self.pid)
self.obj.text.content = "some new text"
self.obj.dc.content.description = "happy happy joy joy"
# object label is limited in length - force an error with a label that exceeds it
# NOTE: bypassing the label property because label set method now truncates to 255 characters
self.obj.info.label = ' '.join('too long' for i in range(50))
self.obj.info_modified = True
try:
self.obj.save()
except models.DigitalObjectSaveFailure as f:
profile_save_error = f
self.assert_(isinstance(profile_save_error, models.DigitalObjectSaveFailure))
self.assertEqual(profile_save_error.obj_pid, self.obj.pid,
"save failure exception should include object pid %s, got %s" % (self.obj.pid, save_error.obj_pid))
self.assertEqual(profile_save_error.failure, "object profile", )
all_datastreams = set(['TEXT', 'DC'])
self.assertEqual(all_datastreams, set(profile_save_error.to_be_saved))
self.assertEqual(all_datastreams, set(profile_save_error.saved))
self.assertEqual(all_datastreams, set(profile_save_error.cleaned))
self.assertEqual([], profile_save_error.not_cleaned)
self.assertTrue(profile_save_error.recovered)
# confirm datastreams were reverted back to previous contents
r = self.obj.api.getDatastreamDissemination(self.pid, self.obj.text.id)
self.assertEqual(TEXT_CONTENT, r.text)
r = self.obj.api.getDatastreamDissemination(self.pid, self.obj.dc.id)
self.assert_("<dc:description>This object has more data in it than a basic-object.</dc:description>" in r.text)
# how to force an error that can't be backed out?
def test_datastreams_list(self):
self.assert_("DC" in self.obj.ds_list.keys())
self.assert_(isinstance(self.obj.ds_list["DC"], ObjectDatastream))
dc = self.obj.ds_list["DC"]
self.assertEqual("DC", dc.dsid)
self.assertEqual("Dublin Core", dc.label)
self.assertEqual("text/xml", dc.mimeType)
self.assert_("TEXT" in self.obj.ds_list.keys())
text = self.obj.ds_list["TEXT"]
self.assertEqual("text datastream", text.label)
self.assertEqual("text/plain", text.mimeType)
def test_get_datastream_object(self):
# NOTE: this is not an exhaustive test of getDatastreamObject, but
# is a test for a particular error introduced somewhere between eulfedora
# 0.20 and 0.23
# error when using defined datastreams - e.g. returns xmldatastream instead of
# XmlDatastreamObject
ds = self.obj.getDatastreamObject('extradc')
self.assert_(isinstance(ds, models.DatastreamObject))
self.assertFalse(ds.exists)
def test_history(self):
self.assert_(isinstance(self.obj.history, list))
self.assert_(isinstance(self.obj.history[0], datetime))
self.assertEqual(self.ingest_time, self.obj.history[0])
def test_object_xml(self):
self.assert_(isinstance(self.obj.object_xml, FoxmlDigitalObject))
# uningested object has none
newobj = MyDigitalObject(self.api)
self.assertEqual(None, newobj.object_xml)
def test_audit_trail(self):
self.assert_(isinstance(self.obj.audit_trail, AuditTrail))
self.assert_(isinstance(self.obj.audit_trail.records[0], AuditTrailRecord))
# inspect the audit trail by adding text datastream in setup
audit = self.obj.audit_trail.records[0]
self.assertEqual('AUDREC1', audit.id)
self.assertEqual('Fedora API-M', audit.process_type)
self.assertEqual('addDatastream', audit.action)
self.assertEqual('TEXT', audit.component)
self.assertEqual('fedoraAdmin', audit.user)
self.assert_(isinstance(audit.date, datetime))
self.assertEqual('creating new datastream', audit.message)
# uningested object has none
newobj = MyDigitalObject(self.api)
self.assertEqual(None, newobj.audit_trail)
# test audit-trail derived properties
# no ingest message set, therefore no ingest user in audit trail
self.assertEqual(None, self.obj.ingest_user)
self.assertEqual(set(['fedoraAdmin']), self.obj.audit_trail_users)
# tweak xml in the audit trail to test
self.obj.audit_trail.records[0].action = 'ingest'
self.obj.audit_trail.records.extend([AuditTrailRecord(user='editor'),
AuditTrailRecord(user='manager'),
AuditTrailRecord(user='editor')])
self.assertEqual('fedoraAdmin', self.obj.ingest_user)
self.assertEqual(set(['fedoraAdmin', 'editor', 'manager']),
self.obj.audit_trail_users)
# should not error when audit trail is not available
newobj = MyDigitalObject(self.api)
self.assertEqual(None, newobj.ingest_user)
self.assertEqual(set(), newobj.audit_trail_users)
def test_methods(self):
methods = self.obj.methods
self.assert_('fedora-system:3' in methods) # standard system sdef
self.assert_('viewMethodIndex' in methods['fedora-system:3'])
def test_has_model(self):
cmodel_uri = "info:fedora/control:ContentType"
# FIXME: checking when rels-ext datastream does not exist causes an error
self.assertFalse(self.obj.has_model(cmodel_uri))
self.obj.add_relationship(modelns.hasModel, cmodel_uri)
self.assertTrue(self.obj.has_model(cmodel_uri))
self.assertFalse(self.obj.has_model(self.obj.uri))
def test_get_models(self):
cmodel_uri = "info:fedora/control:ContentType"
# FIXME: checking when rels-ext datastream does not exist causes an error
self.assertEqual(self.obj.get_models(), [])
self.obj.add_relationship(modelns.hasModel, cmodel_uri)
self.assertEquals(self.obj.get_models(), [URIRef(cmodel_uri)])
def test_has_requisite_content_models(self):
# fixture has no content models
# init fixture as generic object
obj = models.DigitalObject(self.api, self.pid)
# should have all required content models because there are none
self.assertTrue(obj.has_requisite_content_models)
# init fixture as test digital object with cmodels
obj = MyDigitalObject(self.api, self.pid)
# initially false since fixture has no cmodels
self.assertFalse(obj.has_requisite_content_models)
# add first cmodel
obj.rels_ext.content.add((obj.uriref, modelns.hasModel,
URIRef(MyDigitalObject.CONTENT_MODELS[0])))
# should still be false since both are required
self.assertFalse(obj.has_requisite_content_models)
# add second cmodel
obj.rels_ext.content.add((obj.uriref, modelns.hasModel,
URIRef(MyDigitalObject.CONTENT_MODELS[1])))
# now all cmodels should be present
self.assertTrue(obj.has_requisite_content_models)
# add an additional, extraneous cmodel
obj.rels_ext.content.add((obj.uriref, modelns.hasModel,
URIRef(SimpleDigitalObject.CONTENT_MODELS[0])))
# should still be true
self.assertTrue(obj.has_requisite_content_models)
def test_add_relationships(self):
# add relation to a resource, by digital object
related = models.DigitalObject(self.api, "foo:123")
added = self.obj.add_relationship(relsext.isMemberOf, related)
self.assertTrue(added, "add relationship should return True on success, got %s" % added)
r = self.obj.api.getDatastreamDissemination(self.pid, "RELS-EXT")
self.assert_("isMemberOf" in r.text)
self.assert_(related.uri in r.text) # should be full uri, not just pid
# add relation to a resource, by string
collection_uri = "info:fedora/foo:456"
self.obj.add_relationship(relsext.isMemberOfCollection, collection_uri)
r = self.obj.api.getDatastreamDissemination(self.pid, "RELS-EXT")
self.assert_("isMemberOfCollection" in r.text)
self.assert_('rdf:resource="%s"' % collection_uri in r.text,
'string uri should be added to rels-ext as a resource')
# add relation to a resource, by string
collection_uri = u"info:fedora/foo:457"
self.obj.add_relationship(relsext.isMemberOfCollection, collection_uri)
r = self.obj.api.getDatastreamDissemination(self.pid, "RELS-EXT")
self.assert_("isMemberOfCollection" in r.text)
self.assert_('rdf:resource="%s"' % collection_uri in r.text,
'unicode uri should be added to rels-ext as a resource')
# add relation to a literal
self.obj.add_relationship('info:fedora/example:owner', "testuser")
r = self.obj.api.getDatastreamDissemination(self.pid, "RELS-EXT")
self.assert_("owner" in r.text)
self.assert_("testuser" in r.text)
rels = self.obj.rels_ext.content
# convert first added relationship to rdflib statement to check that it is in the rdf graph
st = (self.obj.uriref, relsext.isMemberOf, related.uriref)
self.assertTrue(st in rels)
def test_purge_relationships(self):
# purge relation from a resource, by digital object
related = models.DigitalObject(self.api, "foo:123")
self.obj.add_relationship(relsext.isMemberOf, related)
purged = self.obj.purge_relationship(relsext.isMemberOf, related)
self.assertTrue(purged, "add relationship should return True on success, got %s" % purged)
r = self.obj.api.getDatastreamDissemination(self.pid, "RELS-EXT")
self.assert_("isMemberOf" not in r.text)
self.assert_(related.uri not in r.text) # should be full uri, not just pid
# purge relation from a resource, by string
collection_uri = "info:fedora/foo:456"
self.obj.add_relationship(relsext.isMemberOfCollection, collection_uri)
self.obj.purge_relationship(relsext.isMemberOfCollection, collection_uri)
r = self.obj.api.getDatastreamDissemination(self.pid, "RELS-EXT")
self.assert_("isMemberOfCollection" not in r.text)
self.assert_(collection_uri not in r.text)
# purge relation to a literal
self.obj.add_relationship('info:fedora/example:owner', "testuser")
self.obj.purge_relationship('info:fedora/example:owner', "testuser")
r = self.obj.api.getDatastreamDissemination(self.pid, "RELS-EXT")
self.assert_("owner" not in r.text)
self.assert_("testuser" not in r.text)
rels = self.obj.rels_ext.content
# convert first added relationship to rdflib statement to check that it is NOT in the rdf graph
st = (self.obj.uriref, relsext.isMemberOf, related.uriref)
self.assertTrue(st not in rels)
def test_modify_relationships(self):
# modify a pre-existing relation to a resource, by digital object
old_related = models.DigitalObject(self.api, "foo:1234")
new_related = models.DigitalObject(self.api, "foo:5678")
self.obj.add_relationship(relsext.isMemberOf, old_related)
modified = self.obj.modify_relationship(relsext.isMemberOf, old_related, new_related)
self.assertTrue(modified, "modify relationship should return True on success, got %s" % modified)
r = self.obj.api.getDatastreamDissemination(self.pid, "RELS-EXT")
self.assert_("isMemberOf" in r.text)
self.assert_(new_related.uri in r.text) # should be full uri, not just pid
# modify a pre-existing relation, by string
old_collection_uri = "info:fedora/foo:8765"
new_collection_uri = "info:fedora/foo:4321"
self.obj.add_relationship(relsext.isMemberOfCollection, old_collection_uri)
self.obj.modify_relationship(relsext.isMemberOfCollection, old_collection_uri, new_collection_uri)
r = self.obj.api.getDatastreamDissemination(self.pid, "RELS-EXT")
self.assert_("isMemberOfCollection" in r.text)
self.assert_(new_collection_uri in r.text)
# modify a relation to a literal
self.obj.add_relationship('info:fedora/example:owner', "old_testuser")
self.obj.modify_relationship('info:fedora/example:owner', "old_testuser", "new_testuser")
r = self.obj.api.getDatastreamDissemination(self.pid, "RELS-EXT")
self.assert_("owner" in r.text)
self.assert_("new_testuser" in r.text)
rels = self.obj.rels_ext.content
# convert first modified relationship to rdflib statement to check that it is in the rdf graph
st = (self.obj.uriref, relsext.isMemberOf, new_related.uriref)
self.assertTrue(st in rels)
def test_registry(self):
self.assert_('test.test_fedora.test_models.MyDigitalObject' in
models.DigitalObject.defined_types)
def test_index_data(self):
indexdata = self.obj.index_data()
# check that top-level object properties are included in index data
# (implicitly checking types)
self.assertEqual(self.obj.pid, indexdata['pid'])
self.assertEqual(self.obj.owners, indexdata['owner'])
self.assertEqual(self.obj.label, indexdata['label'])
self.assertEqual(self.obj.modified.isoformat(), indexdata['last_modified'])
self.assertEqual(self.obj.created.isoformat(), indexdata['created'])
self.assertEqual(self.obj.state, indexdata['state'])
for cm in self.obj.get_models():
self.assert_(str(cm) in indexdata['content_model'])
# descriptive data included in index data
self.assert_(self.obj.dc.content.title in indexdata['title'])
self.assert_(self.obj.dc.content.description in indexdata['description'])
self.assertEqual(set(['TEXT', 'DC']), set(indexdata['dsids']))
    def test_index_data_relations(self):
        """RELS-EXT statements should surface in index_data_relations(),
        keyed by relation name, and be merged into index_data()."""
        # add a few rels-ext relations to test
        partof = 'something bigger'
        self.obj.rels_ext.content.add((self.obj.uriref, relsext.isPartOf, URIRef(partof)))
        member1 = 'foo'
        member2 = 'bar'
        self.obj.rels_ext.content.add((self.obj.uriref, relsext.hasMember, URIRef(member1)))
        self.obj.rels_ext.content.add((self.obj.uriref, relsext.hasMember, URIRef(member2)))
        indexdata = self.obj.index_data_relations()
        # single-valued relation: exact list; multi-valued: membership only
        self.assertEqual([partof], indexdata['isPartOf'])
        self.assert_(member1 in indexdata['hasMember'])
        self.assert_(member2 in indexdata['hasMember'])
        # rels-ext data included in main index data
        indexdata = self.obj.index_data()
        self.assert_('isPartOf' in indexdata)
        self.assert_('hasMember' in indexdata)
    def test_get_object(self):
        """get_object() should default to the caller's own type, reuse the
        existing api connection, and honor an explicit type= override."""
        obj = MyDigitalObject(self.api)
        otherobj = obj.get_object(self.pid)
        self.assert_(isinstance(otherobj, MyDigitalObject),
                     'if type is not specified, get_object should return current type')
        self.assertEqual(self.api, otherobj.api,
                         'get_object should pass existing api connection')
        otherobj = obj.get_object(self.pid, type=SimpleDigitalObject)
        self.assert_(isinstance(otherobj, SimpleDigitalObject),
                     'get_object should object with requested type')
class TestContentModel(FedoraTestCase):
    """Tests for ContentModel.for_class (content-model object creation/lookup)."""
    def tearDown(self):
        """Purge any content-model objects created during the test run."""
        super(TestContentModel, self).tearDown()
        cmodels = list(MyDigitalObject.CONTENT_MODELS)
        cmodels.extend(SimpleDigitalObject.CONTENT_MODELS)
        for pid in cmodels:
            try:
                self.repo.purge_object(pid)
            except RequestFailed as rf:
                # best-effort cleanup; the object may never have been created
                logger.warn('Error purging %s: %s' % (pid, rf))
    # patch ContentModel to avoid actually ingesting into fedora
    @patch.object(models.ContentModel, '_ingest', new=Mock())
    def test_for_class(self):
        """for_class should build (or find) the cmodel object for a class,
        and reject classes declaring more than one content model."""
        CMODEL_URI = models.ContentModel.CONTENT_MODELS[0]
        # first: create a cmodel for SimpleDigitalObject, the simple case
        cmodel = models.ContentModel.for_class(SimpleDigitalObject, self.repo)
        expect_uri = SimpleDigitalObject.CONTENT_MODELS[0]
        self.assertEqual(cmodel.uri, expect_uri)
        self.assertTrue(cmodel.has_model(CMODEL_URI))
        dscm = cmodel.ds_composite_model.content
        typemodel = dscm.get_type_model('TEXT')
        self.assertEqual(typemodel.mimetype, 'text/plain')
        typemodel = dscm.get_type_model('EXTRADC')
        self.assertEqual(typemodel.mimetype, 'text/xml')
        # try ContentModel itself. Content model objects have the "content
        # model" content model. That content model should already be in
        # every repo, so for_class shouldn't need to make anything.
        cmodel = models.ContentModel.for_class(models.ContentModel, self.repo)
        expect_uri = models.ContentModel.CONTENT_MODELS[0]
        self.assertEqual(cmodel.uri, expect_uri)
        self.assertTrue(cmodel.has_model(CMODEL_URI))
        dscm = cmodel.ds_composite_model.content
        typemodel = dscm.get_type_model('DS-COMPOSITE-MODEL')
        self.assertEqual(typemodel.mimetype, 'text/xml')
        self.assertEqual(typemodel.format_uri, 'info:fedora/fedora-system:FedoraDSCompositeModel-1.0')
        # try MyDigitalObject. this should fail, as MyDigitalObject has two
        # CONTENT_MODELS: we support only one
        self.assertRaises(ValueError, models.ContentModel.for_class,
                          MyDigitalObject, self.repo)
# using DC namespace to test RDF literal values
DCNS = Namespace(URIRef('http://purl.org/dc/elements/1.1/'))
class SiblingObject(models.DigitalObject):
    """Minimal DigitalObject subclass; used below to verify that a relation
    declared with related_name='+' creates no automatic reverse relation."""
    pass
class RelatorObject(MyDigitalObject):
    """Test object exercising every Relation variant: object-valued, literal,
    typed literal, recursive ('self'), and reverse-relation configuration."""
    # related object
    parent = models.Relation(relsext.isMemberOfCollection, type=SimpleDigitalObject)
    # literal
    dctitle = models.Relation(DCNS.title)
    # literal with explicit type and namespace prefix
    dcid = models.Relation(DCNS.identifier, ns_prefix={'dcns': DCNS}, rdf_type=XSD.int)
    # type of "self"
    recursive_rel = models.Relation(relsext.isMemberOf, type='self')
    # test variant options for automatic reverse relations
    other = models.Relation(relsext.isMemberOfCollection, type=SimpleDigitalObject,
                            related_name='related_items', related_order=DCNS.title)
    parent1 = models.Relation(relsext.isMemberOfCollection, type=models.DigitalObject,
                              related_name='my_custom_rel')
    sib = models.Relation(relsext.isMemberOf, type=SiblingObject,
                          related_name='+')
class ReverseRelator(MyDigitalObject):
    """Test object with explicit ReverseRelation descriptors: single-valued,
    multi-valued, and multi-valued with server-side ordering."""
    member = models.ReverseRelation(relsext.isMemberOfCollection, type=RelatorObject)
    members = models.ReverseRelation(relsext.isMemberOfCollection,
                                     type=RelatorObject, multiple=True)
    sorted_members = models.ReverseRelation(relsext.isMemberOfCollection,
                                            type=RelatorObject, multiple=True, order_by=DCNS.title)
class TestRelation(FedoraTestCase):
    """Tests for Relation / ReverseRelation descriptor behavior (get, set,
    update, delete, typing, ordering, and auto-generated reverse relations)."""
    fixtures = ['object-with-pid.foxml']
    def setUp(self):
        super(TestRelation, self).setUp()
        self.pid = self.fedora_fixtures_ingested[-1] # get the pid for the last object
        self.obj = RelatorObject(self.api)
    def test_object_relation(self):
        """Object-valued Relation: get, set, replace, and delete via descriptor."""
        # get - not yet set
        self.assertEqual(None, self.obj.parent)
        # set via descriptor
        newobj = models.DigitalObject(self.api)
        newobj.pid = 'foo:2' # test pid for convenience/distinguish temp pids
        self.obj.parent = newobj
        self.assert_((self.obj.uriref, relsext.isMemberOfCollection, newobj.uriref)
                     in self.obj.rels_ext.content,
                     'isMemberOfCollection should be set in RELS-EXT after updating via descriptor')
        # access via descriptor
        self.assertEqual(newobj.pid, self.obj.parent.pid)
        self.assert_(isinstance(self.obj.parent, SimpleDigitalObject),
                     'Relation descriptor returns configured type of DigitalObject')
        # set existing property
        otherobj = models.DigitalObject(self.api)
        otherobj.pid = 'bar:none'
        self.obj.parent = otherobj
        self.assert_((self.obj.uriref, relsext.isMemberOfCollection, otherobj.uriref)
                     in self.obj.rels_ext.content,
                     'isMemberOfCollection should be updated in RELS-EXT after update')
        self.assert_((self.obj.uriref, relsext.isMemberOfCollection, newobj.uriref)
                     not in self.obj.rels_ext.content,
                     'previous isMemberOfCollection value should not be in RELS-EXT after update')
        # delete
        del self.obj.parent
        self.assertEqual(None, self.obj.rels_ext.content.value(subject=self.obj.uriref,
                                                               predicate=relsext.isMemberOfCollection),
                         'isMemberOfCollection should not be set in rels-ext after delete')
    def test_recursive_relation(self):
        """Relation(type='self') should initialize related objects as the
        declaring class itself."""
        self.assertEqual(None, self.obj.recursive_rel)
        # set via descriptor
        newobj = models.DigitalObject(self.api)
        newobj.pid = 'foo:3' # test pid for convenience/distinguish temp pids
        self.obj.recursive_rel = newobj
        # access to check type
        self.assert_(isinstance(self.obj.recursive_rel, RelatorObject))
    def test_literal_relation(self):
        """Literal Relation: rdf typing, namespace prefixes, update, delete."""
        # get - not set
        self.assertEqual(None, self.obj.dcid)
        self.assertEqual(None, self.obj.dctitle)
        # set via descriptor
        # - integer, with type specified
        self.obj.dcid = 1234
        self.assert_((self.obj.uriref, DCNS.identifier, Literal(1234, datatype=XSD.int))
                     in self.obj.rels_ext.content,
                     'literal value should be set in RELS-EXT after updating via descriptor')
        # check namespace prefix
        self.assert_('dcns:identifier' in force_text(self.obj.rels_ext.content.serialize()),
                     'configured namespace prefix should be used for serialization')
        # check type
        self.assert_('XMLSchema#int' in force_text(self.obj.rels_ext.content.serialize()),
                     'configured RDF type should be used for serialization')
        # - simpler case
        self.obj.dctitle = 'foo'
        self.assert_((self.obj.uriref, DCNS.title, Literal('foo'))
                     in self.obj.rels_ext.content,
                     'literal value should be set in RELS-EXT after updating via descriptor')
        self.assertEqual('foo', self.obj.dctitle)
        # get
        self.assertEqual(1234, self.obj.dcid)
        # update
        self.obj.dcid = 987
        self.assertEqual(987, self.obj.dcid)
        # delete
        del self.obj.dcid
        self.assertEqual(None, self.obj.rels_ext.content.value(subject=self.obj.uriref,
                                                               predicate=DCNS.identifier),
                         'dc:identifier should not be set in rels-ext after delete')
    def test_reverse_relation(self):
        """ReverseRelation resolves via a live risearch query: single, multiple,
        and order_by variants."""
        rev = ReverseRelator(self.api, pid=self.getNextPid())
        # add a relation to the object and save so we can query risearch
        self.obj.parent = rev
        self.obj.dc.content.title = 'title b'
        self.obj.save()
        # run an risearch query with flush updates true
        # so that tests do not require syncUpdates to be enabled
        self.repo.risearch.count_statements('<%s> * *' % self.obj.pid,
                                            flush=True)
        self.fedora_fixtures_ingested.append(self.obj.pid) # save pid for cleanup in tearDown
        self.assertEqual(rev.member.pid, self.obj.pid,
                         'ReverseRelation returns correct object based on risearch query')
        self.assert_(isinstance(rev.member, RelatorObject),
                     'ReverseRelation returns correct object type')
        obj2 = RelatorObject(self.api)
        obj2.parent = rev
        obj2.dc.content.title = 'title a'
        obj2.save()
        # run an risearch query with flush updates true
        # so that tests do not require syncUpdates to be enabled
        self.repo.risearch.count_statements('<%s> * *' % self.obj.pid,
                                            flush=True)
        self.assert_(isinstance(rev.members, list),
                     'ReverseRelation returns list when multiple=True')
        pids = [m.pid for m in rev.members]
        self.assertTrue(self.obj.pid in pids,
                        'ReverseRelation list includes expected object')
        self.assertTrue(obj2.pid in pids,
                        'ReverseRelation list includes expected object')
        self.assert_(isinstance(rev.members[0], RelatorObject),
                     'ReverseRelation list items initialized as correct object type')
        # test order by
        self.assert_(isinstance(rev.sorted_members, list),
                     'ReverseRelation returns list for multiple=True with order_by')
        pids = [m.pid for m in rev.sorted_members]
        self.assertTrue(self.obj.pid in pids,
                        'ReverseRelation list includes expected object')
        self.assertTrue(obj2.pid in pids,
                        'ReverseRelation list includes expected object')
        self.assert_(isinstance(rev.sorted_members[0], RelatorObject),
                     'ReverseRelation list items initialized as correct object type')
        # 'title a' sorts before 'title b', so obj2 should come first
        self.assertEqual(obj2.pid, rev.sorted_members[0].pid,
                         'ReverseRelation items are sorted correctly by specified field')
    def test_auto_reverse_relation(self):
        """Relations with a typed target should auto-create ReverseRelation
        descriptors on the target class, honoring related_name options."""
        # default reverse name based on classname
        self.assert_(hasattr(SimpleDigitalObject, 'relatorobject_set'))
        self.assert_(isinstance(SimpleDigitalObject.relatorobject_set,
                                models.ReverseRelation))
        # check reverse-rel is configured correctly
        self.assertEqual(relsext.isMemberOfCollection,
                         SimpleDigitalObject.relatorobject_set.relation)
        self.assertEqual(RelatorObject,
                         SimpleDigitalObject.relatorobject_set.object_type)
        self.assertEqual(True,
                         SimpleDigitalObject.relatorobject_set.multiple)
        # reverse order not set
        self.assertEqual(None,
                         SimpleDigitalObject.relatorobject_set.order_by)
        # explicitly named reverse rel
        self.assert_(hasattr(SimpleDigitalObject, 'related_items'))
        # reverse rel order passed through
        self.assertEqual(DCNS.title,
                         SimpleDigitalObject.related_items.order_by)
        # generic digital object should *NOT* get reverse rels
        self.assertFalse(hasattr(models.DigitalObject, 'my_custom_rel'))
        # related_name of + also means no reverse rel
        self.assertFalse(hasattr(SiblingObject, 'relatorobject_set'))
|
self.assertEqual(None, self.obj.image._raw_content())
self.assertFalse(self.obj.image.isModified(),
"isModified should return False for image datastream after it has been saved")
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub(crate) mod inner;
mod port;<|fim▁hole|>mod io;
pub mod buffer;
pub use self::port::*;
pub use self::io::*;<|fim▁end|>
| |
<|file_name|>demo8.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright (c) 2015, BROCADE COMMUNICATIONS SYSTEMS, INC
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
"""
@authors: Sergei Garbuzov
@status: Development
@version: 1.1.0
"""
import time
import json
from pybvc.controller.controller import Controller
from pybvc.openflowdev.ofswitch import (OFSwitch,
FlowEntry,
Instruction,
OutputAction,
Match)
from pybvc.common.status import STATUS
from pybvc.common.utils import load_dict_from_file
from pybvc.common.constants import (ETH_TYPE_IPv4,
IP_PROTO_TLSP,
IP_DSCP_CS3)
def of_demo_8():
f = "cfg.yml"
d = {}
if(load_dict_from_file(f, d) is False):
print("Config file '%s' read error: " % f)
exit()
try:
ctrlIpAddr = d['ctrlIpAddr']
ctrlPortNum = d['ctrlPortNum']
ctrlUname = d['ctrlUname']
ctrlPswd = d['ctrlPswd']
nodeName = d['nodeName']
rundelay = d['rundelay']
except:
print ("Failed to get Controller device attributes")
exit(0)
print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
print ("<<< Demo 8 Start")
print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
ctrl = Controller(ctrlIpAddr, ctrlPortNum, ctrlUname, ctrlPswd)
ofswitch = OFSwitch(ctrl, nodeName)
# --- Flow Match: Ethernet Source Address
# Ethernet Destination Address
# IPv4 Source Address
# IPv4 Destination Address
# IP Protocol Number
# IP DSCP
# Input Port
# NOTE: Ethernet type MUST be 2048 (0x800) -> IPv4 protocol
eth_type = ETH_TYPE_IPv4
eth_src = "00:1c:01:00:23:aa"
eth_dst = "00:02:02:60:ff:fe"
ipv4_src = "10.0.245.1/24"
ipv4_dst = "192.168.1.123/16"
ip_proto = IP_PROTO_TLSP
ip_dscp = IP_DSCP_CS3 # 'Class Selector' = 'Flash'
input_port = 13
print ("<<< 'Controller': %s, 'OpenFlow' switch: '%s'"
% (ctrlIpAddr, nodeName))
print "\n"
print ("<<< Set OpenFlow flow on the Controller")
print (" Match: Ethernet Type (%s)\n"
" Ethernet Source Address (%s)\n"
" Ethernet Destination Address (%s)\n"
" IPv4 Source Address (%s)\n"
" IPv4 Destination Address (%s)\n"
" IP Protocol Number (%s)\n"
" IP DSCP (%s)\n"
" Input Port (%s)"
% (hex(eth_type), eth_src,
eth_dst, ipv4_src, ipv4_dst,
ip_proto, ip_dscp,
input_port))
print (" Action: Output (CONTROLLER)")
time.sleep(rundelay)
flow_entry = FlowEntry()
table_id = 0
flow_entry.set_flow_table_id(table_id)
flow_id = 15
flow_entry.set_flow_id(flow_id)
flow_entry.set_flow_priority(flow_priority=1006)
flow_entry.set_flow_cookie(cookie=100)
flow_entry.set_flow_cookie_mask(cookie_mask=255)
# --- Instruction: 'Apply-actions'
# Action: 'Output' to CONTROLLER
instruction = Instruction(instruction_order=0)
action = OutputAction(order=0, port="CONTROLLER", max_len=60)
instruction.add_apply_action(action)
flow_entry.add_instruction(instruction)
# --- Match Fields: Ethernet Type
# Ethernet Source Address
# Ethernet Destination Address
# IPv4 Source Address
# IPv4 Destination Address
# IP Protocol Number
# IP DSCP
# Input Port
match = Match()
match.set_eth_type(eth_type)
match.set_eth_src(eth_src)
match.set_eth_dst(eth_dst)
match.set_ipv4_src(ipv4_src)
match.set_ipv4_dst(ipv4_dst)
match.set_ip_proto(ip_proto)
match.set_ip_dscp(ip_dscp)
match.set_in_port(input_port)
flow_entry.add_match(match)
print ("\n")
print ("<<< Flow to send:")
print flow_entry.get_payload()
time.sleep(rundelay)
result = ofswitch.add_modify_flow(flow_entry)
status = result.get_status()
if(status.eq(STATUS.OK)):
print ("<<< Flow successfully added to the Controller")
else:
print ("\n")
print ("!!!Demo terminated, reason: %s" % status.brief().lower())
exit(0)
print ("\n")
print ("<<< Get configured flow from the Controller")
time.sleep(rundelay)
result = ofswitch.get_configured_flow(table_id, flow_id)
status = result.get_status()
if(status.eq(STATUS.OK)):
print ("<<< Flow successfully read from the Controller")
print ("Flow info:")
flow = result.get_data()
print json.dumps(flow, indent=4)
else:
print ("\n")
print ("!!!Demo terminated, reason: %s" % status.brief().lower())
exit(0)
print ("\n")
print ("<<< Delete flow with id of '%s' from the Controller's cache "
"and from the table '%s' on the '%s' node"
% (flow_id, table_id, nodeName))
time.sleep(rundelay)
result = ofswitch.delete_flow(flow_entry.get_flow_table_id(),
flow_entry.get_flow_id())
status = result.get_status()
if(status.eq(STATUS.OK)):
print ("<<< Flow successfully removed from the Controller")
else:
print ("\n")
print ("!!!Demo terminated, reason: %s" % status.brief().lower())
exit(0)
print ("\n")
print (">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
print (">>> Demo End")
print (">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")<|fim▁hole|> of_demo_8()<|fim▁end|>
|
if __name__ == "__main__":
|
<|file_name|>signal_linux_arm.go<|end_file_name|><|fim▁begin|>// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package runtime
import (
"runtime/internal/sys"
"unsafe"
)
// sigctxt bundles the siginfo and ucontext pointers delivered to a signal
// handler, providing typed access to the saved machine state.
type sigctxt struct {
	info *siginfo
	ctxt unsafe.Pointer
}

// regs returns the saved register set (mcontext) embedded in the ucontext.
func (c *sigctxt) regs() *sigcontext { return &(*ucontext)(c.ctxt).uc_mcontext }
func (c *sigctxt) r0() uint32 { return c.regs().r0 }
func (c *sigctxt) r1() uint32 { return c.regs().r1 }
func (c *sigctxt) r2() uint32 { return c.regs().r2 }
func (c *sigctxt) r3() uint32 { return c.regs().r3 }<|fim▁hole|>func (c *sigctxt) r8() uint32 { return c.regs().r8 }
func (c *sigctxt) r9() uint32 { return c.regs().r9 }
func (c *sigctxt) r10() uint32 { return c.regs().r10 }
func (c *sigctxt) fp() uint32 { return c.regs().fp }
func (c *sigctxt) ip() uint32 { return c.regs().ip }
func (c *sigctxt) sp() uint32 { return c.regs().sp }
func (c *sigctxt) lr() uint32 { return c.regs().lr }
func (c *sigctxt) pc() uint32 { return c.regs().pc }
func (c *sigctxt) cpsr() uint32 { return c.regs().cpsr }
func (c *sigctxt) fault() uint32 { return c.regs().fault_address }
func (c *sigctxt) trap() uint32 { return c.regs().trap_no }
func (c *sigctxt) error() uint32 { return c.regs().error_code }
func (c *sigctxt) oldmask() uint32 { return c.regs().oldmask }
func (c *sigctxt) sigcode() uint32 { return uint32(c.info.si_code) }
func (c *sigctxt) sigaddr() uint32 { return c.info.si_addr }
func (c *sigctxt) set_pc(x uint32) { c.regs().pc = x }
func (c *sigctxt) set_sp(x uint32) { c.regs().sp = x }
func (c *sigctxt) set_lr(x uint32) { c.regs().lr = x }
func (c *sigctxt) set_r10(x uint32) { c.regs().r10 = x }
func (c *sigctxt) set_sigcode(x uint32) { c.info.si_code = int32(x) }
func (c *sigctxt) set_sigaddr(x uint32) {
*(*uintptr)(add(unsafe.Pointer(c.info), 2*sys.PtrSize)) = uintptr(x)
}<|fim▁end|>
|
func (c *sigctxt) r4() uint32 { return c.regs().r4 }
func (c *sigctxt) r5() uint32 { return c.regs().r5 }
func (c *sigctxt) r6() uint32 { return c.regs().r6 }
func (c *sigctxt) r7() uint32 { return c.regs().r7 }
|
<|file_name|>latex_codec.py<|end_file_name|><|fim▁begin|>"""latex.py
Character translation utilities for LaTeX-formatted text.
Usage:
- unicode(string,'latex')
- ustring.decode('latex')
are both available just by letting "import latex" find this file.
- unicode(string,'latex+latin1')
- ustring.decode('latex+latin1')
where latin1 can be replaced by any other known encoding, also
become available by calling latex.register().
We also make public a dictionary latex_equivalents,
mapping ord(unicode char) to LaTeX code.
D. Eppstein, October 2003.
"""
from __future__ import generators
import codecs
import re
from backports import Set
def register():
    """Enable encodings of the form 'latex+x' where x describes another encoding.
    Unicode characters are translated to or from x when possible, otherwise
    expanded to latex.
    """
    # _registry is a codec search function: it answers lookups for 'latex'
    # and 'latex+<encoding>' and declines every other name.
    codecs.register(_registry)
def getregentry():
    """Encodings module API."""
    # The encodings package expects each codec module to expose getregentry()
    # returning its (encode, decode, StreamReader, StreamWriter) entry.
    return _registry('latex')
def _registry(encoding):
    """Codec search function for 'latex' and 'latex+<other-encoding>' names.

    Returns the (encode, decode, StreamReader, StreamWriter) tuple for a
    matching name, or None so the codec machinery can try other codecs.
    For 'latex+x', characters are first tried in encoding x and only
    expanded to LaTeX escapes when x cannot represent them.
    """
    if encoding == 'latex':
        encoding = None
    elif encoding.startswith('latex+'):
        encoding = encoding[6:]
    else:
        return None

    class Codec(codecs.Codec):
        def encode(self,input,errors='strict'):
            """Convert unicode string to latex."""
            output = []
            for c in input:
                if encoding:
                    try:
                        output.append(c.encode(encoding))
                        continue
                    except (UnicodeError, LookupError):
                        # narrowed from a bare 'except:'; the char is not
                        # representable in the base encoding (or the codec is
                        # unknown), so fall through to the LaTeX expansion
                        pass
                if ord(c) in latex_equivalents:
                    output.append(latex_equivalents[ord(c)])
                else:
                    # no known LaTeX equivalent: emit an explicit \char escape
                    output += ['{\\char', str(ord(c)), '}']
            return ''.join(output), len(input)

        def decode(self,input,errors='strict'):
            """Convert latex source string to unicode."""
            if encoding:
                input = unicode(input,encoding,errors)

            # Note: we may get buffer objects here.
            # It is not permissible to call join on buffer objects
            # but we can make them joinable by calling unicode.
            # This should always be safe since we are supposed
            # to be producing unicode output anyway.
            x = map(unicode,_unlatex(input))
            return u''.join(x), len(input)

    class StreamWriter(Codec,codecs.StreamWriter):
        pass

    class StreamReader(Codec,codecs.StreamReader):
        pass

    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
def _tokenize(tex):
    """Convert latex source into sequence of single-token substrings.

    Tokens are: runs of ordinary text, control sequences (\\foo, \\x),
    digit runs, dash runs, and single special characters.  Yields the
    substrings in order; the concatenation of all tokens need not equal
    the input (whitespace after a control sequence is dropped).
    """
    start = 0
    try:
        # skip quickly across boring stuff
        pos = _stoppers.finditer(tex).next().span()[0]
    except StopIteration:
        # no special characters at all: the whole input is one token
        yield tex
        return

    while 1:
        if pos > start:
            # emit the literal run accumulated since the last token
            yield tex[start:pos]
        if tex[start] == '\\' and not (tex[pos-1].isdigit() and tex[start+1].isalpha()):
            while pos < len(tex) and tex[pos].isspace(): # skip blanks after csname
                pos += 1
            while pos < len(tex) and tex[pos] in _ignore:
                pos += 1 # flush control characters
        if pos >= len(tex):
            return
        start = pos
        if tex[pos:pos+2] in {'$$':None, '/~':None}: # protect ~ in urls
            pos += 2
        elif tex[pos].isdigit():
            # a digit run forms a single token (e.g. the argument of \char)
            while pos < len(tex) and tex[pos].isdigit():
                pos += 1
        elif tex[pos] == '-':
            # dash runs kept together so -- and --- survive as units
            while pos < len(tex) and tex[pos] == '-':
                pos += 1
        elif tex[pos] != '\\' or pos == len(tex) - 1:
            # ordinary special character (or trailing backslash): one token
            pos += 1
        elif not tex[pos+1].isalpha():
            # single-character control sequence like \' or \~
            pos += 2
        else:
            # multi-letter control sequence: consume the whole csname
            pos += 1
            while pos < len(tex) and tex[pos].isalpha():
                pos += 1
            if tex[start:pos] == '\\char' or tex[start:pos] == '\\accent':
                # these keep their numeric argument inside the same token
                while pos < len(tex) and tex[pos].isdigit():
                    pos += 1
class _unlatex:
    """Convert tokenized tex into sequence of unicode strings.  Helper for decode().

    Iterates over the token stream, greedily matching multi-token LaTeX
    escapes against the inverse table _l2u and yielding unicode pieces.
    """

    def __iter__(self):
        """Turn self into an iterator.  It already is one, nothing to do."""
        return self

    def __init__(self,tex):
        """Create a new token converter from a string."""
        self.tex = tuple(_tokenize(tex))  # turn tokens into indexable list
        self.pos = 0                      # index of first unprocessed token
        self.lastoutput = 'x'             # lastoutput must always be nonempty string

    def __getitem__(self,n):
        """Return token at offset n from current pos (None past the end)."""
        p = self.pos + n
        t = self.tex
        return p < len(t) and t[p] or None

    def next(self):
        """Find and return another piece of converted output."""
        if self.pos >= len(self.tex):
            raise StopIteration
        nextoutput = self.chunk()
        if self.lastoutput[0] == '\\' and self.lastoutput[-1].isalpha() and nextoutput[0].isalpha():
            nextoutput = ' ' + nextoutput  # add extra space to terminate csname
        self.lastoutput = nextoutput
        return nextoutput

    def chunk(self):
        """Grab another set of input tokens and convert them to an output string."""
        for delta,c in self.candidates(0):
            if c in _l2u:
                self.pos += delta
                return unichr(_l2u[c])
            elif len(c) == 2 and c[1] == 'i' and (c[0],'\\i') in _l2u:
                self.pos += delta       # correct failure to undot i
                return unichr(_l2u[(c[0],'\\i')])
            elif len(c) == 1 and c[0].startswith('\\char') and c[0][5:].isdigit():
                self.pos += delta
                return unichr(int(c[0][5:]))

        # nothing matches, just pass through token as-is
        self.pos += 1
        return self[-1]

    def candidates(self,offset):
        """Generate pairs delta,c where c is a token or tuple of tokens from tex
        (after deleting extraneous brackets starting at pos) and delta
        is the length of the tokens prior to bracket deletion.

        Reconstructed: corruption had merged the '{' branch with the \\mbox
        branch's yield (delta+1 instead of delta+2) and dropped the \\mbox
        branch entirely.
        """
        t = self[offset]
        if t in _blacklist:
            return
        elif t == '{':
            # brace group: strip the braces, account for both in delta
            for delta,c in self.candidates(offset+1):
                if self[offset+delta+1] == '}':
                    yield delta+2,c
        elif t == '\\mbox':
            # \mbox wrapper: strip it, account for the single \mbox token
            for delta,c in self.candidates(offset+1):
                yield delta+1,c
        elif t == '$' and self[offset+2] == '$':
            # inline math wrapping a single token
            yield 3, (t,self[offset+1],t)
        else:
            q = self[offset+1]
            if q == '{' and self[offset+3] == '}':
                # token with a single brace-delimited argument, e.g. \c{c}
                yield 4, (t,self[offset+2])
            elif q:
                yield 2, (t,q)
            yield 1, t
# Map ord(unicode char) -> equivalent LaTeX markup, used by encode().
# (Control characters and printable ASCII are filled in by the loops that
# follow this table.)  Fixed two data bugs: U+017A (z-acute) and U+017C
# (z-dot-above) previously mapped to the *uppercase* LaTeX forms, which both
# mis-encoded lowercase ź/ż and corrupted the inverse table used for decoding.
latex_equivalents = {
    0x0009: ' ',
    0x000a: '\n',
    0x0023: '{\#}',
    0x0026: '{\&}',
    0x00a0: '{~}',
    0x00a1: '{!`}',
    0x00a2: '{\\not{c}}',
    0x00a3: '{\\pounds}',
    0x00a7: '{\\S}',
    0x00a8: '{\\"{}}',
    0x00a9: '{\\copyright}',
    0x00af: '{\\={}}',
    0x00ac: '{\\neg}',
    0x00ad: '{\\-}',
    0x00b0: '{\\mbox{$^\\circ$}}',
    0x00b1: '{\\mbox{$\\pm$}}',
    0x00b2: '{\\mbox{$^2$}}',
    0x00b3: '{\\mbox{$^3$}}',
    0x00b4: "{\\'{}}",
    0x00b5: '{\\mbox{$\\mu$}}',
    0x00b6: '{\\P}',
    0x00b7: '{\\mbox{$\\cdot$}}',
    0x00b8: '{\\c{}}',
    0x00b9: '{\\mbox{$^1$}}',
    0x00bf: '{?`}',
    0x00c0: '{\\`A}',
    0x00c1: "{\\'A}",
    0x00c2: '{\\^A}',
    0x00c3: '{\\~A}',
    0x00c4: '{\\"A}',
    0x00c5: '{\\AA}',
    0x00c6: '{\\AE}',
    0x00c7: '{\\c{C}}',
    0x00c8: '{\\`E}',
    0x00c9: "{\\'E}",
    0x00ca: '{\\^E}',
    0x00cb: '{\\"E}',
    0x00cc: '{\\`I}',
    0x00cd: "{\\'I}",
    0x00ce: '{\\^I}',
    0x00cf: '{\\"I}',
    0x00d1: '{\\~N}',
    0x00d2: '{\\`O}',
    0x00d3: "{\\'O}",
    0x00d4: '{\\^O}',
    0x00d5: '{\\~O}',
    0x00d6: '{\\"O}',
    0x00d7: '{\\mbox{$\\times$}}',
    0x00d8: '{\\O}',
    0x00d9: '{\\`U}',
    0x00da: "{\\'U}",
    0x00db: '{\\^U}',
    0x00dc: '{\\"U}',
    0x00dd: "{\\'Y}",
    0x00df: '{\\ss}',
    0x00e0: '{\\`a}',
    0x00e1: "{\\'a}",
    0x00e2: '{\\^a}',
    0x00e3: '{\\~a}',
    0x00e4: '{\\"a}',
    0x00e5: '{\\aa}',
    0x00e6: '{\\ae}',
    0x00e7: '{\\c{c}}',
    0x00e8: '{\\`e}',
    0x00e9: "{\\'e}",
    0x00ea: '{\\^e}',
    0x00eb: '{\\"e}',
    0x00ec: '{\\`\\i}',
    0x00ed: "{\\'\\i}",
    0x00ee: '{\\^\\i}',
    0x00ef: '{\\"\\i}',
    0x00f1: '{\\~n}',
    0x00f2: '{\\`o}',
    0x00f3: "{\\'o}",
    0x00f4: '{\\^o}',
    0x00f5: '{\\~o}',
    0x00f6: '{\\"o}',
    0x00f7: '{\\mbox{$\\div$}}',
    0x00f8: '{\\o}',
    0x00f9: '{\\`u}',
    0x00fa: "{\\'u}",
    0x00fb: '{\\^u}',
    0x00fc: '{\\"u}',
    0x00fd: "{\\'y}",
    0x00ff: '{\\"y}',
    0x0100: '{\\=A}',
    0x0101: '{\\=a}',
    0x0102: '{\\u{A}}',
    0x0103: '{\\u{a}}',
    0x0104: '{\\c{A}}',
    0x0105: '{\\c{a}}',
    0x0106: "{\\'C}",
    0x0107: "{\\'c}",
    0x0108: "{\\^C}",
    0x0109: "{\\^c}",
    0x010a: "{\\.C}",
    0x010b: "{\\.c}",
    0x010c: "{\\v{C}}",
    0x010d: "{\\v{c}}",
    0x010e: "{\\v{D}}",
    0x010f: "{\\v{d}}",
    0x0112: '{\\=E}',
    0x0113: '{\\=e}',
    0x0114: '{\\u{E}}',
    0x0115: '{\\u{e}}',
    0x0116: '{\\.E}',
    0x0117: '{\\.e}',
    0x0118: '{\\c{E}}',
    0x0119: '{\\c{e}}',
    0x011a: "{\\v{E}}",
    0x011b: "{\\v{e}}",
    0x011c: '{\\^G}',
    0x011d: '{\\^g}',
    0x011e: '{\\u{G}}',
    0x011f: '{\\u{g}}',
    0x0120: '{\\.G}',
    0x0121: '{\\.g}',
    0x0122: '{\\c{G}}',
    0x0123: '{\\c{g}}',
    0x0124: '{\\^H}',
    0x0125: '{\\^h}',
    0x0128: '{\\~I}',
    0x0129: '{\\~\\i}',
    0x012a: '{\\=I}',
    0x012b: '{\\=\\i}',
    0x012c: '{\\u{I}}',
    0x012d: '{\\u\\i}',
    0x012e: '{\\c{I}}',
    0x012f: '{\\c{i}}',
    0x0130: '{\\.I}',
    0x0131: '{\\i}',
    0x0132: '{IJ}',
    0x0133: '{ij}',
    0x0134: '{\\^J}',
    0x0135: '{\\^\\j}',
    0x0136: '{\\c{K}}',
    0x0137: '{\\c{k}}',
    0x0139: "{\\'L}",
    0x013a: "{\\'l}",
    0x013b: "{\\c{L}}",
    0x013c: "{\\c{l}}",
    0x013d: "{\\v{L}}",
    0x013e: "{\\v{l}}",
    0x0141: '{\\L}',
    0x0142: '{\\l}',
    0x0143: "{\\'N}",
    0x0144: "{\\'n}",
    0x0145: "{\\c{N}}",
    0x0146: "{\\c{n}}",
    0x0147: "{\\v{N}}",
    0x0148: "{\\v{n}}",
    0x014c: '{\\=O}',
    0x014d: '{\\=o}',
    0x014e: '{\\u{O}}',
    0x014f: '{\\u{o}}',
    0x0150: '{\\H{O}}',
    0x0151: '{\\H{o}}',
    0x0152: '{\\OE}',
    0x0153: '{\\oe}',
    0x0154: "{\\'R}",
    0x0155: "{\\'r}",
    0x0156: "{\\c{R}}",
    0x0157: "{\\c{r}}",
    0x0158: "{\\v{R}}",
    0x0159: "{\\v{r}}",
    0x015a: "{\\'S}",
    0x015b: "{\\'s}",
    0x015c: "{\\^S}",
    0x015d: "{\\^s}",
    0x015e: "{\\c{S}}",
    0x015f: "{\\c{s}}",
    0x0160: "{\\v{S}}",
    0x0161: "{\\v{s}}",
    0x0162: "{\\c{T}}",
    0x0163: "{\\c{t}}",
    0x0164: "{\\v{T}}",
    0x0165: "{\\v{t}}",
    0x0168: "{\\~U}",
    0x0169: "{\\~u}",
    0x016a: "{\\=U}",
    0x016b: "{\\=u}",
    0x016c: "{\\u{U}}",
    0x016d: "{\\u{u}}",
    0x016e: "{\\r{U}}",
    0x016f: "{\\r{u}}",
    0x0170: "{\\H{U}}",
    0x0171: "{\\H{u}}",
    0x0172: "{\\c{U}}",
    0x0173: "{\\c{u}}",
    0x0174: "{\\^W}",
    0x0175: "{\\^w}",
    0x0176: "{\\^Y}",
    0x0177: "{\\^y}",
    0x0178: '{\\"Y}',
    0x0179: "{\\'Z}",
    0x017a: "{\\'z}",   # fixed: lowercase z with acute (was uppercase Z)
    0x017b: "{\\.Z}",
    0x017c: "{\\.z}",   # fixed: lowercase z with dot above (was uppercase Z)
    0x017d: "{\\v{Z}}",
    0x017e: "{\\v{z}}",
    0x01c4: "{D\\v{Z}}",
    0x01c5: "{D\\v{z}}",
    0x01c6: "{d\\v{z}}",
    0x01c7: "{LJ}",
    0x01c8: "{Lj}",
    0x01c9: "{lj}",
    0x01ca: "{NJ}",
    0x01cb: "{Nj}",
    0x01cc: "{nj}",
    0x01cd: "{\\v{A}}",
    0x01ce: "{\\v{a}}",
    0x01cf: "{\\v{I}}",
    0x01d0: "{\\v\\i}",
    0x01d1: "{\\v{O}}",
    0x01d2: "{\\v{o}}",
    0x01d3: "{\\v{U}}",
    0x01d4: "{\\v{u}}",
    0x01e6: "{\\v{G}}",
    0x01e7: "{\\v{g}}",
    0x01e8: "{\\v{K}}",
    0x01e9: "{\\v{k}}",
    0x01ea: "{\\c{O}}",
    0x01eb: "{\\c{o}}",
    0x01f0: "{\\v\\j}",
    0x01f1: "{DZ}",
    0x01f2: "{Dz}",
    0x01f3: "{dz}",
    0x01f4: "{\\'G}",
    0x01f5: "{\\'g}",
    0x01fc: "{\\'\\AE}",
    0x01fd: "{\\'\\ae}",
    0x01fe: "{\\'\\O}",
    0x01ff: "{\\'\\o}",
    0x02c6: '{\\^{}}',
    0x02dc: '{\\~{}}',
    0x02d8: '{\\u{}}',
    0x02d9: '{\\.{}}',
    0x02da: "{\\r{}}",
    0x02dd: '{\\H{}}',
    0x02db: '{\\c{}}',
    0x02c7: '{\\v{}}',
    0x03c0: '{\\mbox{$\\pi$}}',
    # consider adding more Greek here
    0xfb01: '{fi}',
    0xfb02: '{fl}',
    0x2013: '{--}',
    0x2014: '{---}',
    0x2018: "{`}",
    0x2019: "{'}",
    0x201c: "{``}",
    0x201d: "{''}",
    0x2020: "{\\dag}",
    0x2021: "{\\ddag}",
    0x2122: "{\\mbox{$^\\mbox{TM}$}}",
    0x2022: "{\\mbox{$\\bullet$}}",
    0x2026: "{\\ldots}",
    0x2202: "{\\mbox{$\\partial$}}",
    0x220f: "{\\mbox{$\\prod$}}",
    0x2211: "{\\mbox{$\\sum$}}",
    0x221a: "{\\mbox{$\\surd$}}",
    0x221e: "{\\mbox{$\\infty$}}",
    0x222b: "{\\mbox{$\\int$}}",
    0x2248: "{\\mbox{$\\approx$}}",
    0x2260: "{\\mbox{$\\neq$}}",
    0x2264: "{\\mbox{$\\leq$}}",
    0x2265: "{\\mbox{$\\geq$}}",
    }
for _i in range(0x0020):
if _i not in latex_equivalents:
latex_equivalents[_i] = ''
for _i in range(0x0020,0x007f):
if _i not in latex_equivalents:
latex_equivalents[_i] = chr(_i)
# Characters that should be ignored and not output in tokenization
_ignore = Set([chr(i) for i in range(32)+[127]]) - Set('\t\n\r')
# Regexp of chars not in blacklist, for quick start of tokenize
_stoppers = re.compile('[\x00-\x1f!$\\-?\\{~\\\\`\']')
_blacklist = Set(' \n\r')
_blacklist.add(None) # shortcut candidate generation at end of data
# Construction of inverse translation table
_l2u = {
'\ ':ord(' ') # unexpanding space makes no sense in non-TeX contexts
}
for _tex in latex_equivalents:
if _tex <= 0x0020 or (_tex <= 0x007f and len(latex_equivalents[_tex]) <= 1):
continue # boring entry
_toks = tuple(_tokenize(latex_equivalents[_tex]))
if _toks[0] == '{' and _toks[-1] == '}':
_toks = _toks[1:-1]
if _toks[0].isalpha():
continue # don't turn ligatures into single chars
if len(_toks) == 1 and (_toks[0] == "'" or _toks[0] == "`"):
continue # don't turn ascii quotes into curly quotes
if _toks[0] == '\\mbox' and _toks[1] == '{' and _toks[-1] == '}':
_toks = _toks[2:-1]
if len(_toks) == 4 and _toks[1] == '{' and _toks[3] == '}':
_toks = (_toks[0],_toks[2])
if len(_toks) == 1:
_toks = _toks[0]
_l2u[_toks] = _tex
# Shortcut candidate generation for certain useless candidates:
# a character is in _blacklist if it can not be at the start
# of any translation in _l2u. We use this to quickly skip through
# such characters before getting to more difficult-translate parts.
# _blacklist is defined several lines up from here because it must
# be defined in order to call _tokenize, however it is safe to
# delay filling it out until now.
for i in range(0x0020,0x007f):
_blacklist.add(chr(i))
_blacklist.remove('{')
_blacklist.remove('$')
for candidate in _l2u:
if isinstance(candidate,tuple):
if not candidate or not candidate[0]:
continue
firstchar = candidate[0][0]
else:
firstchar = candidate[0]
_blacklist.discard(firstchar)<|fim▁end|>
|
yield delta+2,c
elif t == '\\mbox':
for delta,c in self.candidates(offset+1):
|
<|file_name|>issue-2526-a.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>
extern mod issue_2526;
use issue_2526::*;
pub fn main() {}<|fim▁end|>
|
// xfail-fast
// aux-build:issue-2526.rs
|
<|file_name|>Parameter.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2020 Global Biodiversity Information Facility (GBIF)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gbif.metrics.es;
import java.util.Objects;
/** Query parameter. */
public class Parameter {
private final String name;
private final String value;
/**
* @param name parameter name/key
* @param value parameter value
*/
public Parameter(String name, String value) {
this.name = name;
this.value = value;
}
/** @return parameter name/key */
public String getName() {
return name;
}
/** @return parameter value */
public String getValue() {
return value;
}
@Override
public boolean equals(Object o) {<|fim▁hole|> if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Parameter parameter = (Parameter) o;
return name.equals(parameter.name) && value.equals(parameter.value);
}
@Override
public int hashCode() {
return Objects.hash(name, value);
}
}<|fim▁end|>
| |
<|file_name|>decrementOperatorWithUnsupportedBooleanType.ts<|end_file_name|><|fim▁begin|>// -- operator on boolean type
var BOOLEAN: boolean;
function foo(): boolean { return true; }
class A {
public a: boolean;
static foo() { return true; }
}
module M {
export var n: boolean;
}
var objA = new A();
// boolean type var
var ResultIsNumber1 = --BOOLEAN;
var ResultIsNumber2 = BOOLEAN--;
// boolean type literal
var ResultIsNumber3 = --true;
var ResultIsNumber4 = --{ x: true, y: false };
var ResultIsNumber5 = --{ x: true, y: (n: boolean) => { return n; } };
var ResultIsNumber6 = true--;
var ResultIsNumber7 = { x: true, y: false }--;
var ResultIsNumber8 = { x: true, y: (n: boolean) => { return n; } }--;
// boolean type expressions
var ResultIsNumber9 = --objA.a;
var ResultIsNumber10 = --M.n;
var ResultIsNumber11 = --foo();
var ResultIsNumber12 = --A.foo();
var ResultIsNumber13 = foo()--;
var ResultIsNumber14 = A.foo()--;
var ResultIsNumber15 = objA.a--;
var ResultIsNumber16 = M.n--;
// miss assignment operators
--true;
--BOOLEAN;
--foo();
--objA.a;
--M.n;
--objA.a, M.n;
true--;
BOOLEAN--;
foo()--;<|fim▁hole|>objA.a--;
M.n--;
objA.a--, M.n--;<|fim▁end|>
| |
<|file_name|>schema.go<|end_file_name|><|fim▁begin|>package ignition
// This schema structure is based on github.com/coreos/ignition/config/v2_2/types/schema.go
// Due to issue with unmarshalling embedded anonymous nested structures,
// this file removes such structures.
// Changed types: Directory, File, Link.
type CaReference struct {
Source string `json:"source,omitempty" yaml:"source,omitempty"`
Verification Verification `json:"verification,omitempty" yaml:"verification,omitempty"`
}
type Config struct {
Ignition Ignition `json:"ignition" yaml:"ignition,omitempty"`
Networkd Networkd `json:"networkd,omitempty" yaml:"networkd,omitempty"`
Passwd Passwd `json:"passwd,omitempty" yaml:"passwd,omitempty"`
Storage Storage `json:"storage,omitempty" yaml:"storage,omitempty"`
Systemd Systemd `json:"systemd,omitempty" yaml:"systemd,omitempty"`
}
type ConfigReference struct {
Source string `json:"source,omitempty" yaml:"source,omitempty"`
Verification Verification `json:"verification,omitempty" yaml:"verification,omitempty"`
}
type Create struct {
Force bool `json:"force,omitempty" yaml:"force,omitempty"`
Options []CreateOption `json:"options,omitempty" yaml:"options,omitempty"`
}
type CreateOption string
type Device string
type Directory struct {
Filesystem string `json:"filesystem,omitempty" yaml:"filesystem,omitempty"`
Group *NodeGroup `json:"group,omitempty" yaml:"group,omitempty"`
Mode *int `json:"mode,omitempty" yaml:"mode,omitempty"`
Overwrite *bool `json:"overwrite,omitempty" yaml:"overwrite,omitempty"`
Path string `json:"path,omitempty" yaml:"path,omitempty"`
User *NodeUser `json:"user,omitempty" yaml:"user,omitempty"`
}
type Disk struct {
Device string `json:"device,omitempty" yaml:"device,omitempty"`
Partitions []Partition `json:"partitions,omitempty" yaml:"partitions,omitempty"`
WipeTable bool `json:"wipeTable,omitempty" yaml:"wipeTable,omitempty"`
}
type File struct {
Append bool `json:"append,omitempty" yaml:"append,omitempty"`
Contents FileContents `json:"contents,omitempty" yaml:"contents,omitempty"`
Filesystem string `json:"filesystem,omitempty" yaml:"filesystem,omitempty"`
Mode int `json:"mode,omitempty" yaml:"mode,omitempty"`
Group *NodeGroup `json:"group,omitempty" yaml:"group,omitempty"`
Overwrite *bool `json:"overwrite,omitempty" yaml:"overwrite,omitempty"`
Path string `json:"path,omitempty" yaml:"path,omitempty"`
User *NodeUser `json:"user,omitempty" yaml:"user,omitempty"`
}
type FileContents struct {
Compression string `json:"compression,omitempty" yaml:"compression,omitempty"`
Source string `json:"source,omitempty" yaml:"source,omitempty"`
Verification Verification `json:"verification,omitempty" yaml:"verification,omitempty"`
}
type Filesystem struct {
Mount *Mount `json:"mount,omitempty" yaml:"mount,omitempty"`
Name string `json:"name,omitempty" yaml:"name,omitempty"`
Path *string `json:"path,omitempty" yaml:"path,omitempty"`
}
type Group string
type Ignition struct {
Config IgnitionConfig `json:"config,omitempty" yaml:"config,omitempty"`
Security Security `json:"security,omitempty" yaml:"security,omitempty"`
Timeouts Timeouts `json:"timeouts,omitempty" yaml:"timeouts,omitempty"`
Version string `json:"version,omitempty" yaml:"version,omitempty"`
}
type IgnitionConfig struct {
Append []ConfigReference `json:"append,omitempty" yaml:"append,omitempty"`
Replace *ConfigReference `json:"replace,omitempty" yaml:"replace,omitempty"`
}<|fim▁hole|> Hard bool `json:"hard,omitempty" yaml:"hard,omitempty"`
Overwrite *bool `json:"overwrite,omitempty" yaml:"overwrite,omitempty"`
Path string `json:"path,omitempty" yaml:"path,omitempty"`
Target string `json:"target,omitempty" yaml:"target,omitempty"`
User *NodeUser `json:"user,omitempty" yaml:"user,omitempty"`
}
type Mount struct {
Create *Create `json:"create,omitempty" yaml:"create,omitempty"`
Device string `json:"device,omitempty" yaml:"device,omitempty"`
Format string `json:"format,omitempty" yaml:"format,omitempty"`
Label *string `json:"label,omitempty" yaml:"label,omitempty"`
Options []MountOption `json:"options,omitempty" yaml:"options,omitempty"`
UUID *string `json:"uuid,omitempty" yaml:"uuid,omitempty"`
WipeFilesystem bool `json:"wipeFilesystem,omitempty" yaml:"wipeFilesystem,omitempty"`
}
type MountOption string
type Networkd struct {
Units []Networkdunit `json:"units,omitempty" yaml:"units,omitempty"`
}
type NetworkdDropin struct {
Contents string `json:"contents,omitempty" yaml:"contents,omitempty"`
Name string `json:"name,omitempty" yaml:"name,omitempty"`
}
type Networkdunit struct {
Contents string `json:"contents,omitempty" yaml:"contents,omitempty"`
Dropins []NetworkdDropin `json:"dropins,omitempty" yaml:"dropins,omitempty"`
Name string `json:"name,omitempty" yaml:"name,omitempty"`
}
type Node struct {
Filesystem string `json:"filesystem,omitempty" yaml:"filesystem,omitempty"`
Group *NodeGroup `json:"group,omitempty" yaml:"group,omitempty"`
Overwrite *bool `json:"overwrite,omitempty" yaml:"overwrite,omitempty"`
Path string `json:"path,omitempty" yaml:"path,omitempty"`
User *NodeUser `json:"user,omitempty" yaml:"user,omitempty"`
}
type NodeGroup struct {
ID *int `json:"id,omitempty" yaml:"id,omitempty"`
Name string `json:"name,omitempty" yaml:"name,omitempty"`
}
type NodeUser struct {
ID *int `json:"id,omitempty" yaml:"id,omitempty"`
Name string `json:"name,omitempty" yaml:"name,omitempty"`
}
type Partition struct {
GUID string `json:"guid,omitempty" yaml:"guid,omitempty"`
Label string `json:"label,omitempty" yaml:"label,omitempty"`
Number int `json:"number,omitempty" yaml:"number,omitempty"`
Size int `json:"size,omitempty" yaml:"size,omitempty"`
Start int `json:"start,omitempty" yaml:"start,omitempty"`
TypeGUID string `json:"typeGuid,omitempty" yaml:"typeGUID,omitempty"`
}
type Passwd struct {
Groups []PasswdGroup `json:"groups,omitempty" yaml:"groups,omitempty"`
Users []PasswdUser `json:"users,omitempty" yaml:"users,omitempty"`
}
type PasswdGroup struct {
Gid *int `json:"gid,omitempty" yaml:"gid,omitempty"`
Name string `json:"name,omitempty" yaml:"name,omitempty"`
PasswordHash string `json:"passwordHash,omitempty" yaml:"passwordHash,omitempty"`
System bool `json:"system,omitempty" yaml:"system,omitempty"`
}
type PasswdUser struct {
Create *Usercreate `json:"create,omitempty" yaml:"create,omitempty"`
Gecos string `json:"gecos,omitempty" yaml:"gecos,omitempty"`
Groups []Group `json:"groups,omitempty" yaml:"groups,omitempty"`
HomeDir string `json:"homeDir,omitempty" yaml:"homeDir,omitempty"`
Name string `json:"name,omitempty" yaml:"name,omitempty"`
NoCreateHome bool `json:"noCreateHome,omitempty" yaml:"noCreateHome,omitempty"`
NoLogInit bool `json:"noLogInit,omitempty" yaml:"noLogInit,omitempty"`
NoUserGroup bool `json:"noUserGroup,omitempty" yaml:"noUserGroup,omitempty"`
PasswordHash *string `json:"passwordHash,omitempty" yaml:"passwordHash,omitempty"`
PrimaryGroup string `json:"primaryGroup,omitempty" yaml:"primaryGroup,omitempty"`
SSHAuthorizedKeys []SSHAuthorizedKey `json:"sshAuthorizedKeys,omitempty" yaml:"sshAuthorizedKeys,omitempty"`
Shell string `json:"shell,omitempty" yaml:"shell,omitempty"`
System bool `json:"system,omitempty" yaml:"system,omitempty"`
UID *int `json:"uid,omitempty" yaml:"uid,omitempty"`
}
type Raid struct {
Devices []Device `json:"devices,omitempty" yaml:"devices,omitempty"`
Level string `json:"level,omitempty" yaml:"level,omitempty"`
Name string `json:"name,omitempty" yaml:"name,omitempty"`
Options []RaidOption `json:"options,omitempty" yaml:"options,omitempty"`
Spares int `json:"spares,omitempty" yaml:"spares,omitempty"`
}
type RaidOption string
type SSHAuthorizedKey string
type Security struct {
TLS TLS `json:"tls,omitempty" yaml:"tls,omitempty"`
}
type Storage struct {
Directories []Directory `json:"directories,omitempty" yaml:"directories,omitempty"`
Disks []Disk `json:"disks,omitempty" yaml:"disks,omitempty"`
Files []File `json:"files,omitempty" yaml:"files,omitempty"`
Filesystems []Filesystem `json:"filesystems,omitempty" yaml:"filesystems,omitempty"`
Links []Link `json:"links,omitempty" yaml:"links,omitempty"`
Raid []Raid `json:"raid,omitempty" yaml:"raid,omitempty"`
}
type Systemd struct {
Units []Unit `json:"units,omitempty" yaml:"units,omitempty"`
}
type SystemdDropin struct {
Contents string `json:"contents,omitempty" yaml:"contents,omitempty"`
Name string `json:"name,omitempty" yaml:"name,omitempty"`
}
type TLS struct {
CertificateAuthorities []CaReference `json:"certificateAuthorities,omitempty" yaml:"certificateAuthorities,omitempty"`
}
type Timeouts struct {
HTTPResponseHeaders *int `json:"httpResponseHeaders,omitempty" yaml:"httpResponseHeaders,omitempty"`
HTTPTotal *int `json:"httpTotal,omitempty" yaml:"httpTotal,omitempty"`
}
type Unit struct {
Contents string `json:"contents,omitempty" yaml:"contents,omitempty"`
Dropins []SystemdDropin `json:"dropins,omitempty" yaml:"dropins,omitempty"`
Enable bool `json:"enable,omitempty" yaml:"enable,omitempty"`
Enabled *bool `json:"enabled,omitempty" yaml:"enabled,omitempty"`
Mask bool `json:"mask,omitempty" yaml:"mask,omitempty"`
Name string `json:"name,omitempty" yaml:"name,omitempty"`
}
type Usercreate struct {
Gecos string `json:"gecos,omitempty" yaml:"gecos,omitempty"`
Groups []UsercreateGroup `json:"groups,omitempty" yaml:"groups,omitempty"`
HomeDir string `json:"homeDir,omitempty" yaml:"homeDir,omitempty"`
NoCreateHome bool `json:"noCreateHome,omitempty" yaml:"noCreateHome,omitempty"`
NoLogInit bool `json:"noLogInit,omitempty" yaml:"noLogInit,omitempty"`
NoUserGroup bool `json:"noUserGroup,omitempty" yaml:"noUserGroup,omitempty"`
PrimaryGroup string `json:"primaryGroup,omitempty" yaml:"primaryGroup,omitempty"`
Shell string `json:"shell,omitempty" yaml:"shell,omitempty"`
System bool `json:"system,omitempty" yaml:"system,omitempty"`
UID *int `json:"uid,omitempty" yaml:"uid,omitempty"`
}
type UsercreateGroup string
type Verification struct {
Hash *string `json:"hash,omitempty" yaml:"hash,omitempty"`
}<|fim▁end|>
|
type Link struct {
Filesystem string `json:"filesystem,omitempty" yaml:"filesystem,omitempty"`
Group *NodeGroup `json:"group,omitempty" yaml:"group,omitempty"`
|
<|file_name|>base_noeud.py<|end_file_name|><|fim▁begin|># -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier définissant la classe BaseNoeud détaillée plus bas."""
class BaseNoeud:
"""Classe représentant la base d'un noeud.
Cette classe est héritée par tous les autres types de noeuds.
"""
importeur = None
def __init__(self):
"""Constructeur du noeud de base"""
self.nom = ""
self.suivant = None
def valider(self, personnage, dic_masques, commande, tester_fils=True):
"""Validation du noeud.
Cette méthode est à redéfinir dans chacune des classes-filles créée.
Chaque type de noeud a sa propre méthode de validation.
Dans tous les cas, une booléen doit être retourné :
- True si le noeud a pu être interprété ;
- False sinon.
Note : pour la plupart des noeuds, la validation est aussi fonction
des fils.
"""
raise NotImplementedError
def _get_fils(self):
"""Retourne les fils du noeud sous la forme d'une liste."""
return [self.suivant]
fils = property(_get_fils)
<|fim▁hole|> return ""<|fim▁end|>
|
def afficher(self, personnage=None):
"""Retourne un affichage du masque pour les joueurs."""
|
<|file_name|>reftypes.py<|end_file_name|><|fim▁begin|># ------------------------------------------------------------------------------
# Copyright (c) 2010-2013, EVEthing team
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
# OF SUCH DAMAGE.
# ------------------------------------------------------------------------------
from .apitask import APITask
from thing.models import RefType
class RefTypes(APITask):
name = 'thing.ref_types'
def run(self, url, taskstate_id, apikey_id, zero):
if self.init(taskstate_id, apikey_id) is False:
return
# Fetch the API data
if self.fetch_api(url, {}, use_auth=False) is False or self.root is None:
return
# Build a refTypeID:row dictionary
bulk_data = {}
for row in self.root.findall('result/rowset/row'):
bulk_data[int(row.attrib['refTypeID'])] = row
# Bulk retrieve all of those stations that exist
rt_map = RefType.objects.in_bulk(bulk_data.keys())
new = []
for refTypeID, row in bulk_data.items():
reftype = rt_map.get(refTypeID)
# RefType does not exist, make a new one
if reftype is None:
new.append(RefType(
id=refTypeID,
name=row.attrib['refTypeName'],
))
# RefType exists and name has changed, update it
elif reftype.name != row.attrib['refTypeName']:
reftype.name = row.attrib['refTypeName']
reftype.save()
# Create any new stations<|fim▁hole|>
# ---------------------------------------------------------------------------<|fim▁end|>
|
if new:
RefType.objects.bulk_create(new)
return True
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
import streamkinect2.version as meta
# Utility function to read the README file.
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = meta.__project__,
version = meta.__version__,
author = meta.__author__,
author_email = meta.__author_email__,
description = "A simple network streamer for kinect2 data.",
license = "BSD",
keywords = "kinect kinect2 zeroconf bonjour",
url = "https://github.com/rjw57/stramkinect2",
packages=find_packages(exclude='test'),
long_description=read('README.md'),
classifiers=[<|fim▁hole|> "Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
install_requires=[
'blinker',
'enum34',
'lz4',
'numpy',
'pillow',
'pyzmq',
'tornado',
'zeroconf',
],
setup_requires=[
'nose',
],
tests_require=[
'coverage'
],
extras_require={
'docs': [ 'sphinx', 'docutils', ],
},
)<|fim▁end|>
|
"Development Status :: 3 - Alpha",
|
<|file_name|>dependency-injection.js<|end_file_name|><|fim▁begin|>/*! MVW-Injection (0.2.5). (C) 2015 Xavier Boubert. MIT @license: en.wikipedia.org/wiki/MIT_License */
(function(root) {
'use strict';
var DependencyInjection = new (function DependencyInjection() {
var _this = this,
_interfaces = {};
function _formatFactoryFunction(factoryFunction) {
if (typeof factoryFunction == 'function') {
var funcString = factoryFunction
.toString()
// remove comments
.replace(/((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg, '');
var matches = funcString.match(/^function\s*[^\(]*\s*\(\s*([^\)]*)\)/m);
if (matches === null || matches.length < 2) {
factoryFunction = [factoryFunction];
}
else {
factoryFunction = matches[1]
.replace(/\s/g, '')
.split(',')
.filter(function(arg) {
return arg.trim().length > 0;
})
.concat(factoryFunction);
}
return factoryFunction;
}
else {
var factoryArrayCopy = [];
for (var i = 0; i < factoryFunction.length; i++) {
factoryArrayCopy.push(factoryFunction[i]);
}
factoryFunction = factoryArrayCopy;
}
return factoryFunction;
}
function Injector(instanceName) {
function _getInjections(dependencies, name, customDependencies, noError) {
var interfaces = _interfaces[name].interfacesSupported,
injections = [],
i,
j;
for (i = 0; i < dependencies.length; i++) {
var factory = null;
if (customDependencies && typeof customDependencies[dependencies[i]] != 'undefined') {
factory = customDependencies[dependencies[i]];
}
else {
for (j = 0; j < interfaces.length; j++) {
if (!_interfaces[interfaces[j]]) {
if (noError) {
return false;
}
throw new Error('DependencyInjection: "' + interfaces[j] + '" interface is not registered.');
}
factory = _interfaces[interfaces[j]].factories[dependencies[i]];
if (factory) {
factory.interfaceName = interfaces[j];
break;
}
}
}
if (factory) {
if (!factory.instantiated) {
var deps = _formatFactoryFunction(factory.result);
factory.result = deps.pop();
var factoryInjections = _getInjections(deps, factory.interfaceName);
factory.result = factory.result.apply(_this, factoryInjections);
factory.instantiated = true;
}
injections.push(factory.result);
}
else {
if (noError) {
return false;
}
throw new Error('DependencyInjection: "' + dependencies[i] + '" is not registered or accessible in ' + name + '.');
}
}
return injections;
}
this.get = function(factoryName, noError) {
var injections = _getInjections([factoryName], instanceName, null, noError);
if (injections.length) {
return injections[0];
}
return false;
};
this.invoke = function(thisArg, func, customDependencies) {
var dependencies = _formatFactoryFunction(func);
func = dependencies.pop();
if (customDependencies) {
var formatcustomDependencies = {},
interfaceName,
factory;
for (interfaceName in customDependencies) {
for (factory in customDependencies[interfaceName]) {
formatcustomDependencies[factory] = {
interfaceName: interfaceName,
instantiated: false,
result: customDependencies[interfaceName][factory]
};
}
}
customDependencies = formatcustomDependencies;
}
var injections = _getInjections(dependencies, instanceName, customDependencies);
return func.apply(thisArg, injections);
};
}
this.injector = {};
this.registerInterface = function(name, canInjectInterfaces) {
if (_this[name]) {
return _this;
}
_interfaces[name] = {
interfacesSupported: (canInjectInterfaces || []).concat(name),
factories: {}
};
_this.injector[name] = new Injector(name);<|fim▁hole|>
_this[name] = function DependencyInjectionFactory(factoryName, factoryFunction, replaceIfExists) {
if (!replaceIfExists && _interfaces[name].factories[factoryName]) {
return _this;
}
_interfaces[name].factories[factoryName] = {
instantiated: false,
result: factoryFunction
};
return _this;
};
return _this;
};
})();
if (typeof module != 'undefined' && typeof module.exports != 'undefined') {
module.exports = DependencyInjection;
}
else {
root.DependencyInjection = DependencyInjection;
}
})(this);<|fim▁end|>
| |
<|file_name|>gdcvault.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
HEADRequest,
sanitized_Request,
urlencode_postdata,
)
class GDCVaultIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?gdcvault\.com/play/(?P<id>\d+)/(?P<name>(\w|-)+)?'
_NETRC_MACHINE = 'gdcvault'
_TESTS = [
{
'url': 'http://www.gdcvault.com/play/1019721/Doki-Doki-Universe-Sweet-Simple',
'md5': '7ce8388f544c88b7ac11c7ab1b593704',
'info_dict': {
'id': '1019721',
'display_id': 'Doki-Doki-Universe-Sweet-Simple',
'ext': 'mp4',
'title': 'Doki-Doki Universe: Sweet, Simple and Genuine (GDC Next 10)'
}
},
{
'url': 'http://www.gdcvault.com/play/1015683/Embracing-the-Dark-Art-of',
'info_dict': {
'id': '1015683',
'display_id': 'Embracing-the-Dark-Art-of',
'ext': 'flv',
'title': 'Embracing the Dark Art of Mathematical Modeling in AI'
},
'params': {
'skip_download': True, # Requires rtmpdump
}
},
{
'url': 'http://www.gdcvault.com/play/1015301/Thexder-Meets-Windows-95-or',
'md5': 'a5eb77996ef82118afbbe8e48731b98e',
'info_dict': {
'id': '1015301',
'display_id': 'Thexder-Meets-Windows-95-or',
'ext': 'flv',
'title': 'Thexder Meets Windows 95, or Writing Great Games in the Windows 95 Environment',
},
'skip': 'Requires login',
},
{
'url': 'http://gdcvault.com/play/1020791/',
'only_matching': True,
},
{
# Hard-coded hostname
'url': 'http://gdcvault.com/play/1023460/Tenacious-Design-and-The-Interface',
'md5': 'a8efb6c31ed06ca8739294960b2dbabd',
'info_dict': {
'id': '1023460',
'ext': 'mp4',
'display_id': 'Tenacious-Design-and-The-Interface',
'title': 'Tenacious Design and The Interface of \'Destiny\'',
},
},
{
# Multiple audios
'url': 'http://www.gdcvault.com/play/1014631/Classic-Game-Postmortem-PAC',
'info_dict': {
'id': '1014631',
'ext': 'flv',<|fim▁hole|> 'params': {
'skip_download': True, # Requires rtmpdump
'format': 'jp', # The japanese audio
}
},
{
# gdc-player.html
'url': 'http://www.gdcvault.com/play/1435/An-American-engine-in-Tokyo',
'info_dict': {
'id': '1435',
'display_id': 'An-American-engine-in-Tokyo',
'ext': 'flv',
'title': 'An American Engine in Tokyo:/nThe collaboration of Epic Games and Square Enix/nFor THE LAST REMINANT',
},
'params': {
'skip_download': True, # Requires rtmpdump
},
},
]
def _login(self, webpage_url, display_id):
username, password = self._get_login_info()
if username is None or password is None:
self.report_warning('It looks like ' + webpage_url + ' requires a login. Try specifying a username and password and try again.')
return None
mobj = re.match(r'(?P<root_url>https?://.*?/).*', webpage_url)
login_url = mobj.group('root_url') + 'api/login.php'
logout_url = mobj.group('root_url') + 'logout'
login_form = {
'email': username,
'password': password,
}
request = sanitized_Request(login_url, urlencode_postdata(login_form))
request.add_header('Content-Type', 'application/x-www-form-urlencoded')
self._download_webpage(request, display_id, 'Logging in')
start_page = self._download_webpage(webpage_url, display_id, 'Getting authenticated video page')
self._download_webpage(logout_url, display_id, 'Logging out')
return start_page
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
display_id = mobj.group('name') or video_id
webpage_url = 'http://www.gdcvault.com/play/' + video_id
start_page = self._download_webpage(webpage_url, display_id)
direct_url = self._search_regex(
r's1\.addVariable\("file",\s*encodeURIComponent\("(/[^"]+)"\)\);',
start_page, 'url', default=None)
if direct_url:
title = self._html_search_regex(
r'<td><strong>Session Name</strong></td>\s*<td>(.*?)</td>',
start_page, 'title')
video_url = 'http://www.gdcvault.com' + direct_url
# resolve the url so that we can detect the correct extension
head = self._request_webpage(HEADRequest(video_url), video_id)
video_url = head.geturl()
return {
'id': video_id,
'display_id': display_id,
'url': video_url,
'title': title,
}
PLAYER_REGEX = r'<iframe src="(?P<xml_root>.+?)/(?:gdc-)?player.*?\.html.*?".*?</iframe>'
xml_root = self._html_search_regex(
PLAYER_REGEX, start_page, 'xml root', default=None)
if xml_root is None:
# Probably need to authenticate
login_res = self._login(webpage_url, display_id)
if login_res is None:
self.report_warning('Could not login.')
else:
start_page = login_res
# Grab the url from the authenticated page
xml_root = self._html_search_regex(
PLAYER_REGEX, start_page, 'xml root')
xml_name = self._html_search_regex(
r'<iframe src=".*?\?xml=(.+?\.xml).*?".*?</iframe>',
start_page, 'xml filename', default=None)
if xml_name is None:
# Fallback to the older format
xml_name = self._html_search_regex(
r'<iframe src=".*?\?xmlURL=xml/(?P<xml_file>.+?\.xml).*?".*?</iframe>',
start_page, 'xml filename')
return {
'_type': 'url_transparent',
'id': video_id,
'display_id': display_id,
'url': '%s/xml/%s' % (xml_root, xml_name),
'ie_key': 'DigitallySpeaking',
}<|fim▁end|>
|
'title': 'How to Create a Good Game - From My Experience of Designing Pac-Man',
},
|
<|file_name|>config.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# This program is free software; you can redistribute it and/or<|fim▁hole|>#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
Module for smallrnaseq configuration file. Used with command line app.
Created Jan 2017
Copyright (C) Damien Farrell
"""
from __future__ import absolute_import, print_function
import sys, os, string, time
import types, re, subprocess, glob, shutil
import pandas as pd
try:
import configparser
except:
import ConfigParser as configparser
path = os.path.dirname(os.path.abspath(__file__))
datadir = os.path.join(path, 'data')
from . import aligners
baseoptions = {'base': [('filenames',''),('path',''),('overwrite',0),
('adapter',''),
('index_path','indexes'),
('libraries',''),
('ref_fasta',''),('features',''),
('output','results'),('add_labels',0),
('aligner','bowtie'),
('mirna',0),('species','hsa'),('pad5',3),('pad3',5),
('verbose', 1),
('cpus',1)],
'aligner': [('default_params','-v 1 --best'),
('mirna_params',aligners.BOWTIE_MIRBASE_PARAMS)],
'novel': [('score_cutoff',.7), ('read_cutoff',100),
('strict',0)],
'de': [('count_file',''),('sample_labels',''),('sep',','),
('sample_col',''),('factors_col',''),
('conditions',''),('logfc_cutoff',1.5),
('de_plot','point')]
}
def write_default_config(conffile='default.conf', defaults={}):
"""Write a default config file"""
if not os.path.exists(conffile):
cp = create_config_parser_from_dict(defaults, ['base','novel','aligner','de'])
cp.write(open(conffile,'w'))
print ('wrote config file %s' %conffile)
return conffile
def create_config_parser_from_dict(data, sections, **kwargs):
"""Helper method to create a ConfigParser from a dict and/or keywords"""
cp = configparser.ConfigParser()
for s in sections:
cp.add_section(s)
if not data.has_key(s):
continue
for i in data[s]:
name,val = i
cp.set(s, name, str(val))
#use kwargs to create specific settings in the appropriate section
for s in cp.sections():
opts = cp.options(s)
for k in kwargs:
if k in opts:
cp.set(s, k, kwargs[k])
return cp
def parse_config(conffile=None):
"""Parse a configparser file"""
f = open(conffile,'r')
cp = configparser.ConfigParser()
try:
cp.read(conffile)
except Exception as e:
print ('failed to read config file! check format')
print ('Error returned:', e)
return
f.close()
return cp
def get_options(cp):
"""Makes sure boolean opts are parsed"""
from collections import OrderedDict
options = OrderedDict()
#options = cp._sections['base']
for section in cp.sections():
options.update( (cp._sections[section]) )
for o in options:
for section in cp.sections():
try:
options[o] = cp.getboolean(section, o)
except:
pass
try:
options[o] = cp.getint(section, o)
except:
pass
return options
def print_options(options):
"""Print option key/value pairs"""
for key in options:
print (key, ':', options[key])
print ()
def check_options(opts):
"""Check for missing default options in dict. Meant to handle
incomplete config files"""
sections = baseoptions.keys()
for s in sections:
defaults = dict(baseoptions[s])
for i in defaults:
if i not in opts:
opts[i] = defaults[i]
return opts<|fim▁end|>
|
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.