prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>fun1.rs<|end_file_name|><|fim▁begin|>fn sqr(x: f64) -> f64 {
x * x
}
//absolute value
fn abs(x: f64) -> f64 {
if x > 0.0 {
x<|fim▁hole|>}
//ensure a number always falls in the given range
fn clamp(x: f64, x1: f64, x2: f64) -> f64 {
if x < x1 {
x1
} else if x > x2 {
x2
} else {
x
}
}
fn main() {
let res = sqr(2.0);
println!("square is {}", res);
let num = -1.0;
let abs1 = abs(num);
println!("absolute value of {} is {}", num, abs1);
let min = 5.0;
let max = 18.0;
let clamp_min = 1.0;
let clamp_max = 99.0;
let dont_clamp = 7.0;
println!("clamping {}: {}", clamp_min, clamp(clamp_min, min, max));
println!("clamping {}: {}", clamp_max, clamp(clamp_max, min, max));
println!("clamping {}: {}", dont_clamp, clamp(dont_clamp, min, max));
}<|fim▁end|>
|
} else {
-x
}
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* Copyright (C) 2017 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
extern crate nom;
use nom::character::complete::{digit1, multispace0};
use std::str;
use std;
use std::str::FromStr;
// We transform an integer string into a i64, ignoring surrounding whitespaces
// We look for a digit suite, and try to convert it.
// If either str::from_utf8 or FromStr::from_str fail,
// we fallback to the parens parser defined above
named!(getu16<u16>,
map_res!(
map_res!(
delimited!(multispace0, digit1, multispace0),
str::from_utf8
),
FromStr::from_str
)
);
named!(parse_u16<u16>,
map_res!(map_res!(digit1, str::from_utf8), u16::from_str));
// PORT 192,168,0,13,234,10
named!(pub ftp_active_port<u16>,
do_parse!(
tag!("PORT") >>
delimited!(multispace0, digit1, multispace0) >> tag!(",") >> digit1 >> tag!(",") >>
digit1 >> tag!(",") >> digit1 >> tag!(",") >>
part1: verify!(parse_u16, |&v| v <= std::u8::MAX as u16) >>
tag!(",") >>
part2: verify!(parse_u16, |&v| v <= std::u8::MAX as u16) >>
(
part1 * 256 + part2
)
)
);
// 227 Entering Passive Mode (212,27,32,66,221,243).
named!(pub ftp_pasv_response<u16>,
do_parse!(
tag!("227") >>
take_until!("(") >>
tag!("(") >>
digit1 >> tag!(",") >> digit1 >> tag!(",") >>
digit1 >> tag!(",") >> digit1 >> tag!(",") >>
part1: verify!(getu16, |&v| v <= std::u8::MAX as u16) >>
tag!(",") >>
part2: verify!(getu16, |&v| v <= std::u8::MAX as u16) >><|fim▁hole|> part1 * 256 + part2
)
)
);
#[no_mangle]
pub extern "C" fn rs_ftp_active_port(input: *const u8, len: u32) -> u16 {
let buf = build_slice!(input, len as usize);
match ftp_active_port(buf) {
Ok((_, dport)) => {
return dport;
},
Err(nom::Err::Incomplete(_)) => {
SCLogDebug!("port incomplete: '{:?}'", buf);
},
Err(_) => {
SCLogDebug!("port error on '{:?}'", buf);
},
}
return 0;
}
#[no_mangle]
pub extern "C" fn rs_ftp_pasv_response(input: *const u8, len: u32) -> u16 {
let buf = unsafe{std::slice::from_raw_parts(input, len as usize)};
match ftp_pasv_response(buf) {
Ok((_, dport)) => {
return dport;
},
Err(nom::Err::Incomplete(_)) => {
let buf = unsafe{std::slice::from_raw_parts(input, len as usize)};
SCLogDebug!("pasv incomplete: '{:?}'", String::from_utf8_lossy(buf));
},
Err(_) => {
let buf = unsafe{std::slice::from_raw_parts(input, len as usize)};
SCLogDebug!("pasv error on '{:?}'", String::from_utf8_lossy(buf));
},
}
return 0;
}
// 229 Entering Extended Passive Mode (|||48758|).
named!(pub ftp_epsv_response<u16>,
do_parse!(
tag!("229") >>
take_until!("|||") >>
tag!("|||") >>
port: getu16 >>
tag!("|)") >> opt!(complete!(tag!("."))) >>
(
port
)
)
);
// EPRT |2|2a01:e34:ee97:b130:8c3e:45ea:5ac6:e301|41813|
named!(pub ftp_active_eprt<u16>,
do_parse!(
tag!("EPRT") >>
take_until_and_consume!("|") >>
take_until_and_consume!("|") >>
take_until_and_consume!("|") >>
port: getu16 >>
tag!("|") >>
(
port
)
)
);
#[no_mangle]
pub extern "C" fn rs_ftp_active_eprt(input: *const u8, len: u32) -> u16 {
let buf = build_slice!(input, len as usize);
match ftp_active_eprt(buf) {
Ok((_, dport)) => {
return dport;
},
Err(nom::Err::Incomplete(_)) => {
SCLogDebug!("eprt incomplete: '{:?}'", String::from_utf8_lossy(buf));
},
Err(_) => {
SCLogDebug!("epsv incomplete: '{:?}'", String::from_utf8_lossy(buf));
},
}
return 0;
}
#[no_mangle]
pub extern "C" fn rs_ftp_epsv_response(input: *const u8, len: u32) -> u16 {
let buf = unsafe{std::slice::from_raw_parts(input, len as usize)};
match ftp_epsv_response(buf) {
Ok((_, dport)) => {
return dport;
},
Err(nom::Err::Incomplete(_)) => {
let buf = unsafe{std::slice::from_raw_parts(input, len as usize)};
SCLogDebug!("epsv incomplete: '{:?}'", String::from_utf8_lossy(buf));
},
Err(_) => {
let buf = unsafe{std::slice::from_raw_parts(input, len as usize)};
SCLogDebug!("epsv incomplete: '{:?}'", String::from_utf8_lossy(buf));
},
}
return 0;
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_pasv_response_valid() {
let port = ftp_pasv_response("227 Entering Passive Mode (212,27,32,66,221,243).".as_bytes());
assert_eq!(port, Ok((&b""[..], 56819)));
let port_notdot = ftp_pasv_response("227 Entering Passive Mode (212,27,32,66,221,243)".as_bytes());
assert_eq!(port_notdot, Ok((&b""[..], 56819)));
let port_epsv_dot = ftp_epsv_response("229 Entering Extended Passive Mode (|||48758|).".as_bytes());
assert_eq!(port_epsv_dot, Ok((&b""[..], 48758)));
let port_epsv_nodot = ftp_epsv_response("229 Entering Extended Passive Mode (|||48758|)".as_bytes());
assert_eq!(port_epsv_nodot, Ok((&b""[..], 48758)));
}
#[test]
fn test_active_eprt_valid() {
let port = ftp_active_eprt("EPRT |2|2a01:e34:ee97:b130:8c3e:45ea:5ac6:e301|41813|".as_bytes());
assert_eq!(port, Ok((&b""[..], 41813)));
}
#[test]
fn test_active_port_valid() {
let port = ftp_active_port("PORT 192,168,0,13,234,10".as_bytes());
assert_eq!(port, Ok((&b""[..], 59914)));
}
// A port that is too large for a u16.
#[test]
fn test_pasv_response_too_large() {
let port = ftp_pasv_response("227 Entering Passive Mode (212,27,32,66,257,243).".as_bytes());
assert!(port.is_err());
let port = ftp_pasv_response("227 Entering Passive Mode (212,27,32,66,255,65535).".as_bytes());
assert!(port.is_err());
}
#[test]
fn test_active_eprt_too_large() {
let port = ftp_active_eprt("EPRT |2|2a01:e34:ee97:b130:8c3e:45ea:5ac6:e301|81813|".as_bytes());
assert!(port.is_err());
}
#[test]
fn test_active_port_too_large() {
let port = ftp_active_port("PORT 212,27,32,66,257,243".as_bytes());
assert!(port.is_err());
let port = ftp_active_port("PORT 212,27,32,66,255,65535".as_bytes());
assert!(port.is_err());
}
}<|fim▁end|>
|
// may also be completed by a final point
tag!(")") >> opt!(complete!(tag!("."))) >>
(
|
<|file_name|>itemList_spec.js<|end_file_name|><|fim▁begin|>'use strict';
describe('itemListService', function() {
beforeEach(module('superdesk.mocks'));
beforeEach(module('superdesk.templates-cache'));
beforeEach(module('superdesk.itemList'));
beforeEach(module(function($provide) {
$provide.service('api', function($q) {
return function ApiService(endpoint, endpointParam) {
return {
query: function(params) {
params._endpoint = endpoint;
params._endpointParam = endpointParam;
return $q.when(params);
}
};
};
});
}));
it('can query with default values', inject(function($rootScope, itemListService, api) {
var queryParams = null;
itemListService.fetch()
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams).toEqual({
_endpoint: 'search',
_endpointParam: undefined,
source: {
query: {
filtered: {}
},
size: 25,
from: 0,
sort: [{_updated: 'desc'}]
}
});
}));
it('can query with endpoint', inject(function($rootScope, itemListService, api) {
var queryParams = null;
itemListService.fetch({
endpoint: 'archive',
endpointParam: 'param'
})
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams._endpoint).toBe('archive');
expect(queryParams._endpointParam).toBe('param');
}));
it('can query with page', inject(function($rootScope, itemListService, api) {
var queryParams = null;
itemListService.fetch({
pageSize: 15,
page: 3
})
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams.source.size).toBe(15);
expect(queryParams.source.from).toBe(30);
}));
it('can query with sort', inject(function($rootScope, itemListService, api) {
var queryParams = null;
itemListService.fetch({
sortField: '_id',
sortDirection: 'asc'
})
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams.source.sort).toEqual([{_id: 'asc'}]);
}));
it('can query with repos', inject(function($rootScope, itemListService, api) {
var queryParams = null;
itemListService.fetch({
repos: ['archive', 'ingest']
})
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams.repo).toBe('archive,ingest');
}));
it('can query with types', inject(function($rootScope, itemListService, api) {
var queryParams = null;
itemListService.fetch({
types: ['text', 'picture', 'composite']
})
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams.source.query.filtered.filter.and[0].terms.type).toEqual(
['text', 'picture', 'composite']
);
}));
it('can query with states', inject(function($rootScope, itemListService, api) {
var queryParams = null;
itemListService.fetch({
states: ['spiked', 'published']
})
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams.source.query.filtered.filter.and[0].or).toEqual([
{term: {state: 'spiked'}},
{term: {state: 'published'}}
]);
}));
it('can query with notStates', inject(function($rootScope, itemListService, api) {
var queryParams = null;<|fim▁hole|> itemListService.fetch({
notStates: ['spiked', 'published']
})
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams.source.query.filtered.filter.and).toEqual([
{not: {term: {state: 'spiked'}}},
{not: {term: {state: 'published'}}}
]);
}));
it('can query with dates', inject(function($rootScope, itemListService, api) {
var queryParams = null;
itemListService.fetch({
creationDateBefore: 1,
creationDateAfter: 2,
modificationDateBefore: 3,
modificationDateAfter: 4
})
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams.source.query.filtered.filter.and).toEqual([
{range: {_created: {lte: 1, gte: 2}}},
{range: {_updated: {lte: 3, gte: 4}}}
]);
}));
it('can query with provider, source and urgency', inject(function($rootScope, itemListService, api) {
var queryParams = null;
itemListService.fetch({
provider: 'reuters',
source: 'reuters_1',
urgency: 5
})
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams.source.query.filtered.filter.and).toEqual([
{term: {provider: 'reuters'}},
{term: {source: 'reuters_1'}},
{term: {urgency: 5}}
]);
}));
it('can query with headline, subject, keyword, uniqueName and body search',
inject(function($rootScope, itemListService, api) {
var queryParams = null;
itemListService.fetch({
headline: 'h',
subject: 's',
keyword: 'k',
uniqueName: 'u',
body: 'b'
})
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams.source.query.filtered.query).toEqual({
query_string: {
query: 'headline:(*h*) subject.name:(*s*) slugline:(*k*) unique_name:(*u*) body_html:(*b*)',
lenient: false,
default_operator: 'AND'
}
});
}));
it('can query with general search', inject(function($rootScope, itemListService, api) {
var queryParams = null;
itemListService.fetch({
search: 's'
})
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams.source.query.filtered.query).toEqual({
query_string: {
query: 'headline:(*s*) subject.name:(*s*) slugline:(*s*) unique_name:(*s*) body_html:(*s*)',
lenient: false,
default_operator: 'OR'
}
});
}));
it('can query with saved search', inject(function($rootScope, itemListService, api, $q) {
var params;
api.get = angular.noop;
spyOn(api, 'get').and.returnValue($q.when({filter: {query: {type: '["text"]'}}}));
itemListService.fetch({
savedSearch: {_links: {self: {href: 'url'}}}
}).then(function(_params) {
params = _params;
});
$rootScope.$digest();
expect(params.source.post_filter.and).toContain({
terms: {type: ['text']}
});
}));
it('related items query without hypen', inject(function($rootScope, itemListService, api) {
var queryParams = null;
itemListService.fetch({
keyword: 'kilo',
related: true
})
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams.source.query.filtered.query).toEqual({
prefix: {
'slugline.phrase': 'kilo'
}
});
}));
it('related items query with hypen', inject(function($rootScope, itemListService, api) {
var queryParams = null;
itemListService.fetch({
keyword: 'kilo-gram',
related: true
})
.then(function(params) {
queryParams = params;
});
$rootScope.$digest();
expect(queryParams.source.query.filtered.query).toEqual({
prefix: {
'slugline.phrase': 'kilo gram'
}
});
}));
});<|fim▁end|>
| |
<|file_name|>scout_requests.py<|end_file_name|><|fim▁begin|>"""Code for performing requests"""
import json
import logging
import urllib.request
import zlib
from urllib.error import HTTPError
import requests
from defusedxml import ElementTree
from scout.constants import CHROMOSOMES, HPO_URL, HPOTERMS_URL
from scout.utils.ensembl_rest_clients import EnsemblBiomartClient
LOG = logging.getLogger(__name__)
TIMEOUT = 20
def post_request_json(url, data, headers=None):
"""Send json data via POST request and return response
Args:
url(str): url to send request to
data(dict): data to be sent
headers(dict): request headers
Returns:
json_response(dict)
"""
resp = None
json_response = {}
try:
LOG.debug(f"Sending POST request with json data to {url}")
if headers:
resp = requests.post(url, headers=headers, json=data)
else:
resp = requests.post(url, json=data)
json_response["content"] = resp.json()
except Exception as ex:
return {"message": f"An error occurred while sending a POST request to url {url} -> {ex}"}
json_response["status_code"] = resp.status_code
return json_response
def get_request_json(url, headers=None):
"""Send GET request and return response's json data
Args:
url(str): url to send request to
headers(dict): eventual request HEADERS to use in request
Returns:
json_response(dict), example {"status_code":200, "content":{original json content}}
"""
resp = None
json_response = {}
try:
LOG.debug(f"Sending GET request to {url}")
if headers:
resp = requests.get(url, timeout=TIMEOUT, headers=headers)
else:
resp = requests.get(url, timeout=TIMEOUT)
json_response["content"] = resp.json()
except Exception as ex:
return {"message": f"An error occurred while sending a GET request to url {url} -> {ex}"}
json_response["status_code"] = resp.status_code
return json_response
def delete_request_json(url, headers=None, data=None):
"""Send a DELETE request to a remote API and return its response
Args:
url(str): url to send request to
headers(dict): eventual request HEADERS to use in request
data(dict): eventual request data to ba passed as a json object
Returns:
json_response(dict)
"""
resp = None
json_response = {}
try:
LOG.debug(f"Sending DELETE request to {url}")
if headers and data:
resp = requests.delete(url, headers=headers, json=data)
elif headers:
resp = requests.delete(url, headers=headers)
else:
resp = requests.delete(url)
json_response["content"] = resp.json()
except Exception as ex:
return {"message": f"An error occurred while sending a DELETE request to url {url} -> {ex}"}
json_response["status_code"] = resp.status_code
return json_response
def get_request(url):
"""Return a requests response from url
Args:
url(str)
Returns:
decoded_data(str): Decoded response
"""
try:
LOG.info("Requesting %s", url)
response = requests.get(url, timeout=TIMEOUT)
if response.status_code != 200:
response.raise_for_status()
LOG.info("Encoded to %s", response.encoding)
except requests.exceptions.HTTPError as err:
LOG.warning("Something went wrong, perhaps the api key is not valid?")
raise err
except requests.exceptions.MissingSchema as err:
LOG.warning("Something went wrong, perhaps url is invalid?")
raise err
except requests.exceptions.Timeout as err:
LOG.error("socket timed out - URL %s", url)
raise err
return response
def fetch_resource(url, json=False):
"""Fetch a resource and return the resulting lines in a list or a json object
Send file_name to get more clean log messages
Args:
url(str)
json(bool): if result should be in json
Returns:
data
"""
data = None
if url.startswith("ftp"):
# requests do not handle ftp
response = urllib.request.urlopen(url, timeout=TIMEOUT)
if isinstance(response, Exception):
raise response
data = response.read().decode("utf-8")
return data.split("\n")
response = get_request(url)
if json:
LOG.info("Return in json")
data = response.json()
else:
content = response.text
if response.url.endswith(".gz"):
LOG.info("gzipped!")
encoded_content = b"".join(chunk for chunk in response.iter_content(chunk_size=128))
content = zlib.decompress(encoded_content, 16 + zlib.MAX_WBITS).decode("utf-8")
data = content.split("\n")
return data
def fetch_hpo_terms():
"""Fetch the latest version of the hpo terms in .obo format
Returns:
res(list(str)): A list with the lines
"""
url = HPOTERMS_URL
return fetch_resource(url)
def fetch_genes_to_hpo_to_disease():
"""Fetch the latest version of the map from genes to phenotypes
Returns:
res(list(str)): A list with the lines formatted this way:
#Format: entrez-gene-id<tab>entrez-gene-symbol<tab>HPO-Term-Name<tab>\
HPO-Term-ID<tab>Frequency-Raw<tab>Frequency-HPO<tab>
Additional Info from G-D source<tab>G-D source<tab>disease-ID for link
72 ACTG2 HP:0002027 Abdominal pain - mim2gene OMIM:155310
72 ACTG2 HP:0000368 Low-set, posteriorly rotated ears HP:0040283 orphadata
ORPHA:2604
"""
url = HPO_URL.format("genes_to_phenotype.txt")
return fetch_resource(url)
def fetch_hpo_to_genes_to_disease():
"""Fetch the latest version of the map from phenotypes to genes
Returns:
res(list(str)): A list with the lines formatted this way:
#Format: HPO-id<tab>HPO label<tab>entrez-gene-id<tab>entrez-gene-symbol\
<tab>Additional Info from G-D source<tab>G-D source
<tab>disease-ID for link
HP:0000002 Abnormality of body height 3954 LETM1 - mim2gene OMIM:194190
HP:0000002 Abnormality of body height 197131 UBR1 - mim2gene OMIM:243800
HP:0000002 Abnormality of body height 79633 FAT4 orphadata ORPHA:314679
"""
url = HPO_URL.format("phenotype_to_genes.txt")
return fetch_resource(url)
def fetch_hpo_files(genes_to_phenotype=False, phenotype_to_genes=False, hpo_terms=False):
"""
Fetch the necessary HPO files from http://compbio.charite.de
Args:
genes_to_phenotype(bool): if file genes_to_phenotype.txt is required
phenotype_to_genes(bool): if file phenotype_to_genes.txt is required
hpo_terms(bool):if file hp.obo is required
Returns:
hpo_files(dict): A dictionary with the necessary files
"""
LOG.info("Fetching HPO information from http://compbio.charite.de")
hpo_files = {}
if genes_to_phenotype is True:
hpo_files["genes_to_phenotype"] = fetch_genes_to_hpo_to_disease()
if phenotype_to_genes is True:
hpo_files["phenotype_to_genes"] = fetch_hpo_to_genes_to_disease()
if hpo_terms is True:
hpo_files["hpo_terms"] = fetch_hpo_terms()
return hpo_files
def fetch_mim_files(api_key, mim2genes=False, mimtitles=False, morbidmap=False, genemap2=False):
"""Fetch the necessary mim files using a api key
Args:
api_key(str): A api key necessary to fetch mim data
Returns:
mim_files(dict): A dictionary with the neccesary files
"""
LOG.info("Fetching OMIM files from https://omim.org/")
mim2genes_url = "https://omim.org/static/omim/data/mim2gene.txt"
mimtitles_url = "https://data.omim.org/downloads/{0}/mimTitles.txt".format(api_key)
morbidmap_url = "https://data.omim.org/downloads/{0}/morbidmap.txt".format(api_key)
genemap2_url = "https://data.omim.org/downloads/{0}/genemap2.txt".format(api_key)
mim_files = {}
mim_urls = {}
if mim2genes is True:
mim_urls["mim2genes"] = mim2genes_url
if mimtitles is True:
mim_urls["mimtitles"] = mimtitles_url
if morbidmap is True:
mim_urls["morbidmap"] = morbidmap_url
if genemap2 is True:
mim_urls["genemap2"] = genemap2_url
for file_name in mim_urls:
url = mim_urls[file_name]
mim_files[file_name] = fetch_resource(url)
return mim_files
def fetch_ensembl_biomart(attributes, filters, build=None):
"""Fetch data from ensembl biomart
Args:
attributes(list): List of selected attributes
filters(dict): Select what filters to use
build(str): '37' or '38'
Returns:
client(EnsemblBiomartClient)
"""
build = build or "37"
client = EnsemblBiomartClient(build=build, filters=filters, attributes=attributes)
LOG.info("Selecting attributes: %s", ", ".join(attributes))
LOG.info("Use filter: %s", filters)
return client
def fetch_ensembl_genes(build=None, chromosomes=None):
"""Fetch the ensembl genes
Args:
build(str): ['37', '38']
chromosomes(iterable(str))
Returns:
result(iterable): Ensembl formated gene lines
"""
chromosomes = chromosomes or CHROMOSOMES
LOG.info("Fetching ensembl genes")
attributes = [
"chromosome_name",
"start_position",
"end_position",
"ensembl_gene_id",
"hgnc_symbol",
"hgnc_id",
]
filters = {"chromosome_name": chromosomes}
return fetch_ensembl_biomart(attributes, filters, build)
def fetch_ensembl_transcripts(build=None, chromosomes=None):
"""Fetch the ensembl genes
Args:
build(str): ['37', '38']
chromosomes(iterable(str))
Returns:
result(iterable): Ensembl formated transcript lines
"""
chromosomes = chromosomes or CHROMOSOMES
LOG.info("Fetching ensembl transcripts")
attributes = [
"chromosome_name",
"ensembl_gene_id",
"ensembl_transcript_id",
"transcript_start",
"transcript_end",
"refseq_mrna",
"refseq_mrna_predicted",
"refseq_ncrna",
]
filters = {"chromosome_name": chromosomes}
return fetch_ensembl_biomart(attributes, filters, build)
def fetch_ensembl_exons(build=None, chromosomes=None):
"""Fetch the ensembl genes
Args:
build(str): ['37', '38']
chromosomes(iterable(str))
"""
chromosomes = chromosomes or CHROMOSOMES<|fim▁hole|> attributes = [
"chromosome_name",
"ensembl_gene_id",
"ensembl_transcript_id",
"ensembl_exon_id",
"exon_chrom_start",
"exon_chrom_end",
"5_utr_start",
"5_utr_end",
"3_utr_start",
"3_utr_end",
"strand",
"rank",
]
filters = {"chromosome_name": chromosomes}
return fetch_ensembl_biomart(attributes, filters, build)
def fetch_hgnc():
"""Fetch the hgnc genes file from
ftp://ftp.ebi.ac.uk/pub/databases/genenames/new/tsv/hgnc_complete_set.txt
Returns:
hgnc_gene_lines(list(str))
"""
file_name = "hgnc_complete_set.txt"
url = "ftp://ftp.ebi.ac.uk/pub/databases/genenames/new/tsv/{0}".format(file_name)
LOG.info("Fetching HGNC genes from %s", url)
hgnc_lines = fetch_resource(url)
return hgnc_lines
def fetch_exac_constraint():
"""Fetch the file with exac constraint scores
Returns:
exac_lines(iterable(str))
"""
file_name = "fordist_cleaned_exac_r03_march16_z_pli_rec_null_data.txt"
url = (
"ftp://ftp.broadinstitute.org/pub/ExAC_release/release0.3/functional_gene_constraint" "/{0}"
).format(file_name)
exac_lines = None
LOG.info("Fetching ExAC genes")
try:
exac_lines = fetch_resource(url)
except HTTPError:
LOG.info("Failed to fetch exac constraint scores file from ftp server")
LOG.info("Try to fetch from google bucket...")
url = (
"https://storage.googleapis.com/gnomad-public/legacy/exacv1_downloads/release0.3.1"
"/manuscript_data/forweb_cleaned_exac_r03_march16_z_data_pLI.txt.gz"
)
if not exac_lines:
exac_lines = fetch_resource(url)
return exac_lines
def fetch_refseq_version(refseq_acc):
"""Fetch refseq version from entrez and return refseq version
Args:
refseq_acc(str) example: NM_020533
Returns
version(str) example: NM_020533.3 or NM_020533 if no version associated is found
"""
version = refseq_acc
base_url = (
"https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=nuccore&"
"term={}&idtype=acc"
)
try:
resp = get_request(base_url.format(refseq_acc))
tree = ElementTree.fromstring(resp.content)
version = tree.find("IdList").find("Id").text or version
except (
requests.exceptions.HTTPError,
requests.exceptions.MissingSchema,
AttributeError,
):
LOG.warning("refseq accession not found")
return version<|fim▁end|>
|
LOG.info("Fetching ensembl exons")
|
<|file_name|>StaticData.ts<|end_file_name|><|fim▁begin|>/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { AppSettings } from './AppSettings';
import type { DataSource } from './DataSource';
import type { Phrase } from './Phrase';
import type { State } from './State';
import type { Unit } from './Unit';
import type { Variable } from './Variable';
import type { VariableCategory } from './VariableCategory';
export type StaticData = {
appSettings: AppSettings;
buildInfo?: any;
chcp?: any;
chromeExtensionManifest?: any;
commonVariables: Array<Variable>;
configXml?: any;
connectors: Array<DataSource>;
deepThoughts: Array<Phrase>;
dialogAgent: {
entities: any,
intents: any,
};<|fim▁hole|> docs: any;
privateConfig: any;
states: Array<State>;
units: Array<Unit>;
variableCategories: Array<VariableCategory>;
stateNames: any;
success: boolean;
status: string;
code: number;
description?: any;
summary?: any;
errors: Array<any>;
sessionTokenObject?: any;
avatar?: any;
warnings?: any;
data?: any;
}<|fim▁end|>
| |
<|file_name|>Vectors.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014 Radialpoint SafeCare Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.radialpoint.word2vec;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
/**
* This class stores the mapping of String->array of float that constitutes each vector.
*
* The class can serialize to/from a stream.
*
* The ConvertVectors allows to transform the C binary vectors into instances of this class.
*/
public class Vectors {
/**
* The vectors themselves.
*/
protected float[][] vectors;
/**
* The words associated with the vectors
*/
protected String[] vocabVects;
/**
* Size of each vector
*/
protected int size;
/**
* Inverse map, word-> index
*/
protected Map<String, Integer> vocab;<|fim▁hole|> * Package-level constructor, used by the ConvertVectors program.
*
* @param vectors
* , it cannot be empty
* @param vocabVects
* , the length should match vectors
*/
Vectors(float[][] vectors, String[] vocabVects) throws VectorsException {
this.vectors = vectors;
this.size = vectors[0].length;
if (vectors.length != vocabVects.length)
throw new VectorsException("Vectors and vocabulary size mismatch");
this.vocabVects = vocabVects;
this.vocab = new HashMap<String, Integer>();
for (int i = 0; i < vocabVects.length; i++)
vocab.put(vocabVects[i], i);
}
/**
* Initialize a Vectors instance from an open input stream. This method closes the stream.
*
* @param is
* the open stream
* @throws IOException
* if there are problems reading from the stream
*/
public Vectors(InputStream is) throws IOException {
DataInputStream dis = new DataInputStream(is);
int words = dis.readInt();
int size = dis.readInt();
this.size = size;
this.vectors = new float[words][];
this.vocabVects = new String[words];
for (int i = 0; i < words; i++) {
this.vocabVects[i] = dis.readUTF();
float[] vector = new float[size];
for (int j = 0; j < size; j++)
vector[j] = dis.readFloat();
this.vectors[i] = vector;
}
this.vocab = new HashMap<String, Integer>();
for (int i = 0; i < vocabVects.length; i++)
vocab.put(vocabVects[i], i);
dis.close();
}
/**
* Writes this vector to an open output stream. This method closes the stream.
*
* @param os
* the stream to write to
* @throws IOException
* if there are problems writing to the stream
*/
public void writeTo(OutputStream os) throws IOException {
DataOutputStream dos = new DataOutputStream(os);
dos.writeInt(this.vectors.length);
dos.writeInt(this.size);
for (int i = 0; i < vectors.length; i++) {
dos.writeUTF(this.vocabVects[i]);
for (int j = 0; j < size; j++)
dos.writeFloat(this.vectors[i][j]);
}
dos.close();
}
public float[][] getVectors() {
return vectors;
}
public float[] getVector(int i) {
return vectors[i];
}
public float[] getVector(String term) throws OutOfVocabularyException {
Integer idx = vocab.get(term);
if (idx == null)
throw new OutOfVocabularyException("Unknown term '" + term + "'");
return vectors[idx];
}
public int getIndex(String term) throws OutOfVocabularyException {
Integer idx = vocab.get(term);
if (idx == null)
throw new OutOfVocabularyException("Unknown term '" + term + "'");
return idx;
}
public Integer getIndexOrNull(String term) {
return vocab.get(term);
}
public String getTerm(int index) {
return vocabVects[index];
}
public Map<String, Integer> getVocabulary() {
return vocab;
}
public boolean hasTerm(String term) {
return vocab.containsKey(term);
}
public int vectorSize() {
return size;
}
public int wordCount() {
return vectors.length;
}
}<|fim▁end|>
|
/**
|
<|file_name|>config.spec.js<|end_file_name|><|fim▁begin|>import config from './config';
describe('config', function(){
it('should exist', function(){
expect(config).to.be.an('object');
});
it('should contain the required keys', function(){
expect(config.ngxDirective).to.be.an('object');
expect(config.ngxDirective.name).to.be.a('string');
});
<|fim▁hole|><|fim▁end|>
|
});
|
<|file_name|>dataset_bool_io.cpp<|end_file_name|><|fim▁begin|>//
// (c) Copyright 2017 DESY,ESS
// 2021 Eugen Wintersberger <[email protected]>
//
// This file is part of h5cpp.
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published<|fim▁hole|>// WITHOUT ANY WARRANTY; without even the implied warranty ofMERCHANTABILITY
// or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
// License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this library; if not, write to the
// Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor
// Boston, MA 02110-1301 USA
// ===========================================================================
//
// Author: Eugen Wintersberger <[email protected]>
// Created on: Oct 24, 2017
//
#include <catch2/catch.hpp>
#include <h5cpp/hdf5.hpp>
using namespace hdf5;
SCENARIO("testing bool IO") {
auto f = file::create("DatasetBoolIO.h5", file::AccessFlags::Truncate);
auto r = f.root();
auto type = hdf5::datatype::create<bool>();
hdf5::dataspace::Scalar space;
GIVEN("a dataset of type bool") {
auto d = node::Dataset(r, Path("data"), type, space);
THEN("we can write a boolean value to it") {
bool write = true;
REQUIRE_NOTHROW(d.write(write));
AND_THEN("we can read the value back") {
bool read = false;
REQUIRE_NOTHROW(d.read(read));
REQUIRE(read);
}
}
}
}<|fim▁end|>
|
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful, but
|
<|file_name|>get_users.py<|end_file_name|><|fim▁begin|>import emission.analysis.modelling.tour_model.data_preprocessing as preprocess
# to determine if the user is valid:<|fim▁hole|> if len(filter_trips) >= 10 and len(filter_trips) / len(trips) >= 0.5:
valid = True
return valid
# - user_ls: a list of strings representing short user names, such as [user1, user2, user3...]
# - valid_user_ls: a subset of `user_ls` for valid users, so also string representation of user names
# - all_users: a collection of all user ids, in terms of user id objects
def get_user_ls(all_users,radius):
user_ls = []
valid_user_ls = []
for i in range(len(all_users)):
curr_user = 'user' + str(i + 1)
user = all_users[i]
trips = preprocess.read_data(user)
filter_trips = preprocess.filter_data(trips,radius)
if valid_user(filter_trips,trips):
valid_user_ls.append(curr_user)
user_ls.append(curr_user)
else:
user_ls.append(curr_user)
continue
return user_ls,valid_user_ls<|fim▁end|>
|
# valid user should have >= 10 trips for further analysis and the proportion of filter_trips is >=50%
def valid_user(filter_trips,trips):
valid = False
|
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::env;
use common::Config;
/// Conversion table from triple OS name to Rust SYSNAME
const OS_TABLE: &'static [(&'static str, &'static str)] = &[
("mingw32", "windows"),
("win32", "windows"),
("windows", "windows"),
("darwin", "macos"),
("android", "android"),
("linux", "linux"),
("freebsd", "freebsd"),
("dragonfly", "dragonfly"),
("bitrig", "bitrig"),
("openbsd", "openbsd"),
];
pub fn get_os(triple: &str) -> &'static str {
for &(triple_os, os) in OS_TABLE {
if triple.contains(triple_os) {
return os
}
}
panic!("Cannot determine OS from triple");
}
pub fn make_new_path(path: &str) -> String {
assert!(cfg!(windows));
// Windows just uses PATH as the library search path, so we have to<|fim▁hole|> // maintain the current value while adding our own
match env::var(lib_path_env_var()) {
Ok(curr) => {
format!("{}{}{}", path, path_div(), curr)
}
Err(..) => path.to_string()
}
}
pub fn lib_path_env_var() -> &'static str { "PATH" }
fn path_div() -> &'static str { ";" }
pub fn logv(config: &Config, s: String) {
debug!("{}", s);
if config.verbose { println!("{}", s); }
}<|fim▁end|>
| |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* Copyright (C) 2017-2018 Open Information Security Foundation<|fim▁hole|> * You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
// written by Pierre Chifflier <[email protected]>
extern crate ipsec_parser;
pub mod ikev2;
pub mod state;
pub mod log;<|fim▁end|>
|
*
|
<|file_name|>fibonacci.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Calculate the 1000th element of the Fibonacci series. Fast.
# (Another popular tech interview question.)
import numpy;
# Definition of Fibonacci numbers:
# F(1) = 1
# F(2) = 1
# For n = 3, 4, 5, ...: F(n) = F(n-2) + F(n-1).
# Method one: recursion.
# Very inefficient: F(n) is called once, it calls F(n-1) once,
# F(n-2) is called twice (once by F(n) and once by F(n-1)),
# F(n-3) is called thrice (once by F(n-1) and twice by F(n-2)),
# F(n-k) is called F(k+1) times, that is an insane number of calls.
fibonaccirecursion = lambda n: 1 if n <=2 else fibonaccirecursion(n-2) + fibonaccirecursion(n-1);
# Method two: dual recursion. Returns the list [F(n-1),F(n)].
# Calling it with n triggers one call for each of 2, 3, ..., n-1: that is only O(n) calls.
def fibonaccidualrecursion(n):
if n >= 3:
a, b = fibonaccidualrecursion(n-1);
# F(n-2) = a, F(n-1) = b, F(n) = a+b.
return b, a+b;
elif n == 2:
return 1, 1;
elif n == 1:
# F(0) = 0.
return 0, 1;
else:
raise NotImplementedError;
# Method three: loop. Nothing fancy.
# Should be much like dual recursion without function call overhead.
def fibonacciloop(n):
a = 1;
b = 1;
for i in xrange(n-2):
c = a+b;
a = b;
b = c;
return b;
<|fim▁hole|> b = 1;
for i in xrange(int(0.5*(n-2))):
a = a+b;
b = a+b;
if n % 2 == 1:
if n == 1:
return 1;
else:
return a+b;
else:
return b;
# Method five: direct formula.
# This is not faster if we need to calculate all Fibonacci numbers up to F(n),
# but much-much faster if we only need F(n), especially if n is large.
# This is how we solve second order homogeneous linear recursions in general:
# The characteristic polynomial of the recursion is x^2 = 1 + x.
# It has two distinct roots, x_12 = (1 pm sqrt(5)) / 2.
# Therefore a general series is alpha*x_1^n + beta*x_2^n.
# Two initial values, two coefficients, two degrees of freedom.
# (We would use alpha*x^n + beta*n*x^n if x was a multiple root.)
# Turns out |x_2| < 1, so we can omit this term and round.
# Note that if n >= 1475, F(n) cannot be represented as long int any more,
# but the float approximation is still valid.
sqrt5 = numpy.sqrt(5);
fibonaccipower = lambda n: int(numpy.power(0.5*(1.0+sqrt5), n)/sqrt5 + 0.5);
print;
print "Testing.";
print [fibonaccirecursion(n) for n in xrange(1,15)];
print [fibonaccidualrecursion(n)[1] for n in xrange(1,15)];
print [fibonacciloop(n) for n in xrange(1,15)];
print [fibonaccievenloop(n) for n in xrange(1,15)];
print [fibonaccipower(n) for n in xrange(1,15)];
if __name__ == "__main__":
import timeit;
number = 20;
n = 25;
print;
print "Timing n={0:d}.".format(n);
for i in ["fibonaccirecursion", "fibonaccidualrecursion", "fibonacciloop", "fibonaccievenloop", "fibonaccipower"]:
print "{0:s}: {1:f} us".format(i, 1e6*timeit.timeit("{0:s}({1:d})".format(i,n), setup="from __main__ import {0:s}".format(i), number=number));
number = 20;
n = 500;
print;
print "Timing n={0:d} (simple recursion would fill up stack).".format(n);
for i in ["fibonaccidualrecursion", "fibonacciloop", "fibonaccievenloop", "fibonaccipower"]:
print "{0:s}: {1:f} us".format(i, 1e6*timeit.timeit("{0:s}({1:d})".format(i,n), setup="from __main__ import {0:s}".format(i), number=number));
number = 20;
n = 1000;
print;
print "Timing n={0:d} (even dual recursion would fill up stack).".format(n);
for i in ["fibonacciloop", "fibonaccievenloop", "fibonaccipower"]:
print "{0:s}: {1:f} us".format(i, 1e6*timeit.timeit("{0:s}({1:d})".format(i,n), setup="from __main__ import {0:s}".format(i), number=number));<|fim▁end|>
|
# Method four: even loop. Do two iterations at once to avoid moving around values.
# Slightly faster than simple loop.
def fibonaccievenloop(n):
a = 1;
|
<|file_name|>UdgerParserTest.java<|end_file_name|><|fim▁begin|>package org.udger.parser;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.net.URL;
import java.net.UnknownHostException;
import java.sql.SQLException;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CyclicBarrier;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class UdgerParserTest {
private UdgerParser parser;
private UdgerParser inMemoryParser;
private UdgerParser.ParserDbData parserDbData;
@Before
public void initialize() throws SQLException {
URL resource = this.getClass().getClassLoader().getResource("udgerdb_test_v3.dat");
parserDbData = new UdgerParser.ParserDbData(resource.getFile());
parser = new UdgerParser(parserDbData);
inMemoryParser = new UdgerParser(parserDbData, true, 0); // no cache
}
@After
public void close() throws IOException {
parser.close();
}
@Test
public void testUaString1() throws SQLException {
String uaQuery = "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0";
UdgerUaResult qr = parser.parseUa(uaQuery);
assertEquals(qr.getUa(), "Firefox 40.0");
assertEquals(qr.getOs(), "Windows 10");
assertEquals(qr.getUaFamily(), "Firefox");
}
@Test
public void testIp() throws SQLException, UnknownHostException {
String ipQuery = "108.61.199.93";
UdgerIpResult qr = parser.parseIp(ipQuery);
assertEquals(qr.getIpClassificationCode(), "crawler");
}
<|fim▁hole|> @Test
public void testUaStringInMemoryParser() throws SQLException {
String uaQuery = "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0";
UdgerUaResult qr = inMemoryParser.parseUa(uaQuery);
assertEquals(qr.getUa(), "Firefox 40.0");
assertEquals(qr.getOs(), "Windows 10");
assertEquals(qr.getUaFamily(), "Firefox");
}
@Test
public void testIpInMemoryParser() throws SQLException, UnknownHostException {
String ipQuery = "108.61.199.93";
UdgerIpResult qr = inMemoryParser.parseIp(ipQuery);
assertEquals(qr.getIpClassificationCode(), "crawler");
}
@Test
public void testParserDbDataThreadSafety() throws Throwable {
final int numThreads = 500;
final String uaQuery = "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0";
final CyclicBarrier gate = new CyclicBarrier(numThreads);
final ConcurrentLinkedQueue<Throwable> failures = new ConcurrentLinkedQueue<>();
Thread[] threads = new Thread[numThreads];
for (int i = 0; i < numThreads; i++) {
threads[i] = new Thread(new Runnable() {
@Override
public void run() {
UdgerParser threadParser = new UdgerParser(parserDbData);
try {
gate.await();
for (int j = 0; j < 100; j++) {
UdgerUaResult qr = threadParser.parseUa(uaQuery);
assertEquals(qr.getUa(), "Firefox 40.0");
assertEquals(qr.getOs(), "Windows 10");
assertEquals(qr.getUaFamily(), "Firefox");
}
} catch (Throwable t) {
failures.add(t);
}
}
});
threads[i].start();
}
for (int i = 0; i < numThreads; i++) {
threads[i].join();
}
if (!failures.isEmpty()) {
for (Throwable throwable : failures) {
throwable.printStackTrace();
}
fail("Parsing threads failed, see printed exceptions");
}
}
}<|fim▁end|>
| |
<|file_name|>TestCarbon.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from __future__ import absolute_import
# Standard imports
from future import standard_library
standard_library.install_aliases()
from builtins import *
from past.utils import old_div
import unittest
import json
import logging
import re
from datetime import datetime, timedelta
# Our imports
from emission.analysis.result import carbon
import emission.core.get_database as edb
from emission.core.get_database import get_mode_db, get_section_db
import emission.tests.common as etc
from emission.core import common
class TestCarbon(unittest.TestCase):
def setUp(self):
from copy import copy
self.testUsers = ["[email protected]", "[email protected]", "[email protected]",
"[email protected]", "[email protected]"]
self.serverName = 'localhost'
# Sometimes, we may have entries left behind in the database if one of the tests failed
# or threw an exception, so let us start by cleaning up all entries
etc.dropAllCollections(edb._get_current_db())
self.ModesColl = get_mode_db()
self.assertEquals(self.ModesColl.estimated_document_count(), 0)
etc.loadTable(self.serverName, "Stage_Modes", "emission/tests/data/modes.json")
etc.loadTable(self.serverName, "Stage_Sections", "emission/tests/data/testCarbonFile")
self.SectionsColl = get_section_db()
self.walkExpect = 1057.2524056424411
self.busExpect = 2162.668467546699
self.busCarbon = old_div(267.0,1609)
self.airCarbon = old_div(217.0,1609)
self.driveCarbon = old_div(278.0,1609)
self.busOptimalCarbon = old_div(92.0,1609)
self.now = datetime.now()
self.dayago = self.now - timedelta(days=1)
self.weekago = self.now - timedelta(weeks = 1)
for section in self.SectionsColl.find():
section['section_start_datetime'] = self.dayago
section['section_end_datetime'] = self.dayago + timedelta(hours = 1)
if section['confirmed_mode'] == 5:
airSection = copy(section)
airSection['confirmed_mode'] = 9
airSection['_id'] = section['_id'] + "_air"
self.SectionsColl.insert(airSection)
# print("Section start = %s, section end = %s" %
# (section['section_start_datetime'], section['section_end_datetime']))
self.SectionsColl.save(section)
def tearDown(self):
for testUser in self.testUsers:
etc.purgeSectionData(self.SectionsColl, testUser)
self.ModesColl.remove()
self.assertEquals(self.ModesColl.estimated_document_count(), 0)
def getMyQuerySpec(self, user, modeId):
return common.getQuerySpec(user, modeId, self.weekago, self.now)
def testGetModes(self):
modes = carbon.getAllModes()
for mode in modes:
print(mode['mode_id'], mode['mode_name'])
self.assertEquals(len(modes), 9)
def testGetDisplayModes(self):
modes = carbon.getDisplayModes()
for mode in modes:
print(mode['mode_id'], mode['mode_name'])
# skipping transport, underground and not a trip
self.assertEquals(len(modes), 8)
def testGetTripCountForMode(self):
modes = carbon.getDisplayModes()
# try different modes
self.assertEqual(carbon.getTripCountForMode("[email protected]", 1, self.weekago, self.now), 1) # walk
self.assertEqual(carbon.getTripCountForMode("[email protected]", 5, self.weekago, self.now), 1) # bus
self.assertEqual(carbon.getTripCountForMode("[email protected]", 9, self.weekago, self.now), 1) # bus
# try different users
self.assertEqual(carbon.getTripCountForMode("[email protected]", 1, self.weekago, self.now), 1) # walk
self.assertEqual(carbon.getTripCountForMode("[email protected]", 5, self.weekago, self.now), 1) # bus
# try to sum across users
# We have 5 users - best, fest, rest, nest and test
self.assertEqual(carbon.getTripCountForMode(None, 1, self.weekago, self.now), 5) # walk
self.assertEqual(carbon.getTripCountForMode(None, 5, self.weekago, self.now), 5) # bus
def testTotalModeShare(self):
modeshare = carbon.getModeShare(None, self.weekago, self.now)
self.assertEqual(modeshare['walking'], 5)
self.assertEqual(modeshare['bus'], 5)
self.assertEqual(modeshare['cycling'], 0)
self.assertEqual(modeshare['car'], 0)
self.assertEqual(modeshare['train'], 0)
# self.assertFalse(modeshare.keys() contains 'not a trip')
# self.assertFalse(modeshare.keys() contains 'transport')
def testMyModeShare(self):
modeshare = carbon.getModeShare('[email protected]', self.weekago, self.now)
print(modeshare)
self.assertEqual(modeshare['walking'], 1)
self.assertEqual(modeshare['bus'], 1)
self.assertEqual(modeshare['cycling'], 0)
self.assertEqual(modeshare['car'], 0)
self.assertEqual(modeshare['train'], 0)
# self.assertFalse(modeshare.keys() contains 'not a trip')
# self.assertFalse(modeshare.keys() contains 'transport')
def testDistanceForMode(self):
# try different modes
self.assertEqual(carbon.getDistanceForMode(self.getMyQuerySpec("[email protected]", 1)),
self.walkExpect) # walk
self.assertEqual(carbon.getDistanceForMode(self.getMyQuerySpec("[email protected]", 5)),
self.busExpect) # bus
# try different users
self.assertEqual(carbon.getDistanceForMode(self.getMyQuerySpec("[email protected]", 1)), self.walkExpect) # walk
self.assertEqual(carbon.getDistanceForMode(self.getMyQuerySpec("[email protected]", 5)), self.busExpect) # bus
# try to sum across users
# We have 5 users - best, fest, rest, nest and test
self.assertEqual(carbon.getDistanceForMode(self.getMyQuerySpec(None, 1)), len(self.testUsers) * self.walkExpect) # walk
self.assertEqual(carbon.getDistanceForMode(self.getMyQuerySpec(None, 5)), len(self.testUsers) * self.busExpect) # bus
def testMyModeDistance(self):
myModeDistance = carbon.getModeShareDistance('[email protected]', self.weekago, self.now)
self.assertEqual(myModeDistance['walking'], self.walkExpect)
self.assertEqual(myModeDistance['cycling'], 0)
self.assertEqual(myModeDistance['bus'], self.busExpect)
self.assertEqual(myModeDistance['train'], 0)
def testTotalModeDistance(self):
totalModeDistance = carbon.getModeShareDistance(None, self.weekago, self.now)
self.assertEqual(totalModeDistance['walking'], len(self.testUsers) * self.walkExpect)
self.assertEqual(totalModeDistance['cycling'], 0)
self.assertEqual(totalModeDistance['bus'], len(self.testUsers) * self.busExpect)
self.assertEqual(totalModeDistance['train'], 0)
def testMyCarbonFootprint(self):
myModeDistance = carbon.getModeCarbonFootprint('[email protected]', carbon.carbonFootprintForMode, self.weekago, self.now)
self.assertEqual(myModeDistance['walking'], 0)
self.assertEqual(myModeDistance['cycling'], 0)
self.assertEqual(myModeDistance['bus_short'], (self.busCarbon * self.busExpect/1000))
self.assertEqual(myModeDistance['train_short'], 0)
# We duplicate the bus trips to get air trips, so the distance should be the same
self.assertEqual(myModeDistance['air_short'], (self.airCarbon * self.busExpect/1000))
def testTotalCarbonFootprint(self):
totalModeDistance = carbon.getModeCarbonFootprint(None, carbon.carbonFootprintForMode, self.weekago, self.now)
self.assertEqual(totalModeDistance['walking'], 0)
self.assertEqual(totalModeDistance['cycling'], 0)
# We divide by 1000 to make it comprehensible in getModeCarbonFootprint
self.assertEqual(totalModeDistance['bus_short'], old_div((self.busCarbon * len(self.testUsers) * self.busExpect),1000))
self.assertEqual(totalModeDistance['air_short'], old_div((self.airCarbon * len(self.testUsers) * self.busExpect),1000))
self.assertEqual(totalModeDistance['train_short'], 0)
def testMySummary(self):
(myModeShareCount, avgModeShareCount,
myModeShareDistance, avgModeShareDistance,
myModeCarbonFootprint, avgModeCarbonFootprint,
myModeCarbonFootprintNoLongMotorized, avgModeCarbonFootprintNoLongMotorized,
myOptimalCarbonFootprint, avgOptimalCarbonFootprint,
myOptimalCarbonFootprintNoLongMotorized, avgOptimalCarbonFootprintNoLongMotorized) = carbon.getFootprintCompare('[email protected]')
# >>> m = {'air_long': 0, 'air_short': 0.2, 'bus_long': 0, 'bus_short': 0.3}
# >>> f = [(i, m[i]) for i in m if m[i] != 0]
# >>> f
# [('bus_short', 0.3), ('air_short', 0.2)]
# >>> dict(f)
# {'bus_short': 0.3, 'air_short': 0.2}
filterZero = lambda m: dict([(i, m[i]) for i in m if m[i] != 0])
self.assertEqual(len(myModeShareCount), len(carbon.getDisplayModes()))
self.assertEqual(len(myModeShareDistance), len(carbon.getDisplayModes()))
# We have duplicated the bus trip to get bus, air and unconfirmed trips.
# we ignore the unconfirmed trip, so only expect to get three values...
self.assertAlmostEqual(sum(myModeShareDistance.values()), 2 * self.busExpect + self.walkExpect, places = 4)
self.assertEqual(filterZero(myModeShareDistance),
{'bus': self.busExpect,
'walking': self.walkExpect,
'air': self.busExpect})
logging.debug(filterZero(myModeShareDistance))
self.assertEqual(filterZero(myModeCarbonFootprint),
{'bus_short': old_div((self.busExpect * self.busCarbon),1000),
'air_short': old_div((self.busExpect * self.airCarbon),1000)})
self.assertEqual(filterZero(myModeCarbonFootprintNoLongMotorized),
{'bus_short': old_div((self.busExpect * self.busCarbon),1000)})
self.assertEqual(filterZero(myOptimalCarbonFootprint),
{'air_short': old_div((self.busExpect * self.busOptimalCarbon),1000)})
self.assertEqual(filterZero(myOptimalCarbonFootprintNoLongMotorized),
{})
def testSummaryAllTrips(self):
summary = carbon.getSummaryAllTrips(self.weekago, self.now)
# *2 because the walking trips don't count, but we have doubled the bus
# trips to count as air trips
self.assertEqual(summary['current'], old_div((self.busCarbon * self.busExpect + self.airCarbon * self.busExpect),1000))
# No * 2 because the optimal value for short bus trips is to actually move to bikes :)
self.assertEqual(summary['optimal'], old_div((self.busOptimalCarbon * self.busExpect),1000))
# These are are without air, so will only count the bus trips
self.assertEqual(summary['current no air'], old_div((self.busCarbon * self.busExpect),1000))
self.assertEqual(summary['optimal no air'], 0)
self.assertAlmostEqual(summary['all drive'], old_div((self.driveCarbon * (self.busExpect * 2 + self.walkExpect)),1000), places = 4)
<|fim▁hole|> def testFilteredDistinctUserCount(self):
# Now, move all the sections before a week
# Now there should be no matches in the last week
for section in self.SectionsColl.find():
section['section_start_datetime'] = self.weekago + timedelta(days = -1)
section['section_end_datetime'] = self.weekago + timedelta(days = -1) + timedelta(hours = 1)
# print("Section start = %s, section end = %s" %
# (section['section_start_datetime'], section['section_end_datetime']))
self.SectionsColl.save(section)
print("About to check for distinct users from a week ago")
self.assertEqual(carbon.getDistinctUserCount(carbon.getQuerySpec(None, None,
self.weekago, self.now)), 0)
self.assertEqual(carbon.getDistinctUserCount(carbon.getQuerySpec(None, None,
self.weekago + timedelta(weeks = -1), self.now)), len(self.testUsers))
def testDelLongMotorizedModes(self):
testMap = {'bus': 1, 'air': 3}
carbon.delLongMotorizedModes(testMap)
self.assertEqual(len(testMap), 1)
self.assertEqual(testMap, {'bus': 1})
def testDelLongMotorizedModesShortLong(self):
testMap = {'bus_short': 1, 'bus_long': 2, 'air_short': 3, 'air_long': 4}
carbon.delLongMotorizedModes(testMap)
self.assertEqual(len(testMap), 2)
self.assertIn('bus_short', testMap)
self.assertIn('bus_long', testMap)
self.assertNotIn('air_short', testMap)
self.assertNotIn('air_long', testMap)
def testGetCarbonFootprintsForMap(self):
testDistanceMap = {'a': 1, 'b': 2, 'c': 3}
testModeFootprintMap = {'a': 1, 'b': 2, 'c': 3}
footprintMap = carbon.getCarbonFootprintsForMap(testDistanceMap, testModeFootprintMap)
self.assertEqual(footprintMap, {'a': 0.001, 'b': 0.004, 'c': 0.009})
def testAvgCalculation(self):
testMap = {'a': 5, 'b': 10, 'c': 15, 'd': 3, 'e': 7, 'f': 13}
avgTestMap = carbon.convertToAvg(testMap, 5)
self.assertEquals(avgTestMap['a'], 1)
self.assertEquals(avgTestMap['b'], 2)
self.assertEquals(avgTestMap['c'], 3)
self.assertEquals(avgTestMap['d'], 0.6)
self.assertEquals(avgTestMap['e'], 1.4)
self.assertEquals(avgTestMap['f'], 2.6)
if __name__ == '__main__':
etc.configLogging()
unittest.main()<|fim▁end|>
|
def testDistinctUserCount(self):
self.assertEqual(carbon.getDistinctUserCount({}), len(self.testUsers))
|
<|file_name|>htmlheadingelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::utils::{DOMString, null_string};
use dom::htmlelement::HTMLElement;
pub enum HeadingLevel {
Heading1,
Heading2,
Heading3,
Heading4,
Heading5,
Heading6,
}
pub struct HTMLHeadingElement {
parent: HTMLElement,
level: HeadingLevel,
}
impl HTMLHeadingElement {
pub fn Align(&self) -> DOMString {
null_string
}
pub fn SetAlign(&mut self, _align: &DOMString) {
}<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import {
Card,
} from '@card-game/core';
class Suits extends Enum {}
Suits.initEnum(['RED', 'GREEN', 'BLUE', 'YELLOW']);
class Ranks extends Enum {}
Ranks.initEnum(['ZERO', 'ONE', 'TWO', 'THREE', 'FOUR', 'FIVE', 'SIX', 'SEVEN', 'EIGHT', 'NINE', 'SKIP', 'DRAW_TWO', 'REVERSE']);
class Wilds extends Enum {}<|fim▁hole|>export default class UNOCard extends Card {
static get SUITS() {
return Suits;
}
static get RANKS() {
return Ranks;
}
static get WILDS() {
return Wilds;
}
constructor(suit, rank, owner) {
super({ suit, rank }, owner);
}
}<|fim▁end|>
|
Wilds.initEnum(['WILD', 'DRAW_FOUR']);
|
<|file_name|>factories.py<|end_file_name|><|fim▁begin|>import uuid
import factory.fuzzy
from django.conf import settings
from .. import models
from utils.factories import FuzzyMoney
class UserFactory(factory.DjangoModelFactory):
class Meta:
model = settings.AUTH_USER_MODEL
username = factory.Sequence('terminator{0}'.format)
email = factory.Sequence('terminator{0}@skynet.com'.format)
password = 'hunter2'
is_superuser = False
is_staff = False
@classmethod
def _create(cls, model_class, *args, **kwargs):
manager = cls._get_manager(model_class)
return manager.create_user(*args, **kwargs)
class AccountFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Account
user = factory.SubFactory(UserFactory)
class CardFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Card
account = factory.SubFactory(AccountFactory)
number = factory.fuzzy.FuzzyInteger(0, (1 << 32) - 1)
class PurchaseFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Purchase
account = factory.SubFactory(AccountFactory)
amount = factory.fuzzy.FuzzyInteger(0, 1337)
<|fim▁hole|>
purchase = factory.SubFactory(PurchaseFactory)
product_id = factory.fuzzy.FuzzyAttribute(uuid.uuid4)
qty = 1
amount = FuzzyMoney(0, 1000)
class PurchaseStatusFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.PurchaseStatus
purchase = factory.SubFactory(PurchaseFactory)<|fim▁end|>
|
class PurchaseItemFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.PurchaseItem
|
<|file_name|>RandomMutateImage.py<|end_file_name|><|fim▁begin|>im = open('006993_photoA.tif', 'rb')
ord(im.read(1))<|fim▁hole|>chr(ord(im.read(1)))<|fim▁end|>
| |
<|file_name|>StandaloneWindow.js<|end_file_name|><|fim▁begin|>/**
* Standalone Window.
*
* A window in standalone display mode.
*/
/**
* Standalone Window Constructor.
*
* @extends BaseWindow.
* @param {number} id Window ID to give standalone window.
* @param {string} url URL to navigate to.
* @param {Object} webApp WebApp with metadata to generate window from.
*/
var StandaloneWindow = function(id, url, webApp) {
this.currentUrl = url;
if (webApp && webApp.name) {
this.name = webApp.name;
} else if (webApp && webApp.shortName) {
this.name = webApp.shortName;
} else {
try {
this.name = new URL(url).hostname;
} catch(e) {
this.name = '';
}
}
if (webApp && webApp.themeColor) {
this.themeColor = webApp.themeColor;
}
BaseWindow.call(this, id);
return this;
};
StandaloneWindow.prototype = Object.create(BaseWindow.prototype);
/**
* Window View.
*/
StandaloneWindow.prototype.view = function() {
var titleBarStyle = '';
var titleBarClass = 'standalone-window-title-bar';
if (this.themeColor) {
titleBarStyle = 'background-color: ' + this.themeColor + ';';
var rgb = this.hexToRgb(this.themeColor);
backgroundBrightness = this.darkOrLight(rgb);
titleBarClass += ' ' + backgroundBrightness;
}
return '<div id="window' + this.id + '"class="standalone-window">' +
'<div class="' + titleBarClass + '" style="' + titleBarStyle + '">' +
'<span id="standalone-window-title' + this.id +
'" class="standalone-window-title">' + this.name + '</span>' +
'<button type="button" id="close-window-button' + this.id + '" ' +
'class="close-window-button">' +
'</div>' +
'<webview src="' + this.currentUrl + '" id="standalone-window-frame' +
this.id + '" class="standalone-window-frame">' +
'</div>';
};
/**
* Render the window.
*/
StandaloneWindow.prototype.render = function() {
this.container.insertAdjacentHTML('beforeend', this.view());
this.element = document.getElementById('window' + this.id);
this.title = document.getElementById('standalone-window-title' + this.id);
this.closeButton = document.getElementById('close-window-button' + this.id);
this.closeButton.addEventListener('click', this.close.bind(this));
this.frame = document.getElementById('standalone-window-frame' + this.id);
this.frame.addEventListener('did-navigate',
this.handleLocationChange.bind(this));
this.frame.addEventListener('did-navigate-in-page',
this.handleLocationChange.bind(this));
this.frame.addEventListener('new-window',
this.handleOpenWindow.bind(this));
};
/**
* Show the Window.
*/
StandaloneWindow.prototype.show = function() {
this.element.classList.remove('hidden');
};
/**
* Hide the window.
*/
StandaloneWindow.prototype.hide = function() {
this.element.classList.add('hidden');
};
/**
* Handle location change.
*
* @param {Event} e mozbrowserlocationchange event.
*/
StandaloneWindow.prototype.handleLocationChange = function(e) {
this.currentUrl = e.url;
};
/**
* Convert hex color value to rgb.
*
* @argument {String} hex color string e.g. #ff0000
* @returns {Object} RGB object with separate r, g and b properties
*/
StandaloneWindow.prototype.hexToRgb = function(hex) {
var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
return result ? {
r: parseInt(result[1], 16),
g: parseInt(result[2], 16),
b: parseInt(result[3], 16)
} : null;
};
/**
* Measure whether color is dark or light.
*
* @param {Object} RGB object with r, g, b properties.
* @return {String} 'dark' or 'light'.
*/
StandaloneWindow.prototype.darkOrLight = function(rgb) {
if ((rgb.r*0.299 + rgb.g*0.587 + rgb.b*0.114) > 186) {<|fim▁hole|> return 'light';
} else {
return 'dark';
}
};<|fim▁end|>
| |
<|file_name|>SerBanDesGen.java<|end_file_name|><|fim▁begin|>//Copyright (c) 2011 Municipalidad de Rosario and Coop. de Trabajo Tecso Ltda.
//This file is part of SIAT. SIAT is licensed under the terms
//of the GNU General Public License, version 3.
//See terms in COPYING file or <http://www.gnu.org/licenses/gpl.txt>
package ar.gov.rosario.siat.gde.buss.bean;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import ar.gov.rosario.siat.base.iface.util.BaseError;
import ar.gov.rosario.siat.def.buss.bean.ServicioBanco;
import ar.gov.rosario.siat.def.iface.util.DefError;
import ar.gov.rosario.siat.gde.buss.dao.GdeDAOFactory;
import ar.gov.rosario.siat.gde.iface.util.GdeError;
import coop.tecso.demoda.buss.bean.BaseBO;
import coop.tecso.demoda.iface.helper.DateUtil;
/**
* Bean correspondiente a Servicio Banco Descuentos Generales
*
* @author tecso
*/
@Entity
@Table(name = "gde_serBanDesGen")
public class SerBanDesGen extends BaseBO {
private static final long serialVersionUID = 1L;
@Column(name = "fechaDesde")
private Date fechaDesde;
@Column(name = "fechaHasta")
private Date fechaHasta;
@ManyToOne()
@JoinColumn(name="idServicioBanco")
private ServicioBanco servicioBanco;
@ManyToOne()
@JoinColumn(name="idDesGen")
private DesGen desGen;
//Constructores
public SerBanDesGen(){
super();
}
// Getters y Setters
public Date getFechaDesde(){
return fechaDesde;
}
public void setFechaDesde(Date fechaDesde){
this.fechaDesde = fechaDesde;
}
public Date getFechaHasta(){
return fechaHasta;
}
public void setFechaHasta(Date fechaHasta){
this.fechaHasta = fechaHasta;
}
public ServicioBanco getServicioBanco(){
return servicioBanco;
}
public void setServicioBanco(ServicioBanco servicioBanco){
this.servicioBanco = servicioBanco;<|fim▁hole|> return desGen;
}
public void setDesGen(DesGen desGen){
this.desGen = desGen;
}
// Metodos de Clase
public static SerBanDesGen getById(Long id) {
return (SerBanDesGen) GdeDAOFactory.getSerBanDesGenDAO().getById(id);
}
public static SerBanDesGen getByIdNull(Long id) {
return (SerBanDesGen) GdeDAOFactory.getSerBanDesGenDAO().getByIdNull(id);
}
public static List<SerBanDesGen> getList() {
return (ArrayList<SerBanDesGen>) GdeDAOFactory.getSerBanDesGenDAO().getList();
}
public static List<SerBanDesGen> getListActivos() {
return (ArrayList<SerBanDesGen>) GdeDAOFactory.getSerBanDesGenDAO().getListActiva();
}
// Metodos de Instancia
// Validaciones
/**
* Valida la creacion
* @author
*/
public boolean validateCreate() throws Exception{
//limpiamos la lista de errores
clearError();
this.validate();
if (hasError()) {
return false;
}
return !hasError();
}
/**
* Valida la actualizacion
* @author
*/
public boolean validateUpdate() throws Exception{
//limpiamos la lista de errores
clearError();
this.validate();
if (hasError()) {
return false;
}
return !hasError();
}
private boolean validate() throws Exception{
//limpiamos la lista de errores
clearError();
//UniqueMap uniqueMap = new UniqueMap();
//Validaciones de Requeridos
if (getServicioBanco()==null) {
addRecoverableError(BaseError.MSG_CAMPO_REQUERIDO, GdeError.SERBANDESGEN_SERVICIOBANCO);
}
if (getDesGen()==null) {
addRecoverableError(BaseError.MSG_CAMPO_REQUERIDO, GdeError.SERBANDESGEN_DESGEN);
}
if (getFechaDesde()==null) {
addRecoverableError(BaseError.MSG_CAMPO_REQUERIDO, GdeError.SERBANDESGEN_FECHADESDE);
}
if (hasError()) {
return false;
}
// Validaciones de Unicidad
// Otras Validaciones
// Valida que la Fecha Desde no sea mayor que la fecha Hasta
if(!DateUtil.isDateBefore(this.fechaDesde, this.fechaHasta)){
addRecoverableError(BaseError.MSG_VALORMAYORQUE, DefError.SERBANREC_FECHADESDE, DefError.SERBANREC_FECHAHASTA);
}
return !hasError();
}
/**
* Valida la eliminacion
* @author
*/
public boolean validateDelete() {
//limpiamos la lista de errores
clearError();
//Validaciones de VO
if (hasError()) {
return false;
}
// Validaciones de Negocio
return true;
}
// Metodos de negocio
}<|fim▁end|>
|
}
public DesGen getDesGen(){
|
<|file_name|>test_kvstore.py<|end_file_name|><|fim▁begin|>import os
import unittest
import zope.testrunner
from zope import component
from sparc.testing.fixture import test_suite_mixin
from sparc.testing.testlayer import SPARC_INTEGRATION_LAYER
from sparc.db.splunk.testing import SPARC_DB_SPLUNK_INTEGRATION_LAYER
from zope import schema
from zope.interface import Interface
class ITestSchema(Interface):
date = schema.Date(title=u"date")
datetime = schema.Datetime(title=u"datetime")
decimal = schema.Decimal(title=u"decimal")
float = schema.Float(title=u"float")
int = schema.Int(title=u"int")<|fim▁hole|> value_type=schema.Text(title=u"value"))
ip = schema.DottedName(title=u"ip",min_dots=3,max_dots=3)
ascii = schema.ASCII(title=u"ascii")
class SparcCacheSplunkAreaTestCase(unittest.TestCase):
layer = SPARC_INTEGRATION_LAYER
sm = component.getSiteManager()
def test_ISplunkKVCollectionSchema_adapter_for_schemas(self):
from sparc.db.splunk import ISplunkKVCollectionSchema
schema = ISplunkKVCollectionSchema(ITestSchema)
self.assertIn('field.date', schema)
self.assertEquals(schema['field.date'], 'time')
self.assertIn('field.datetime', schema)
self.assertEquals(schema['field.datetime'], 'time')
self.assertIn('field.decimal', schema)
self.assertEquals(schema['field.decimal'], 'number')
self.assertIn('field.float', schema)
self.assertEquals(schema['field.float'], 'number')
self.assertIn('field.int', schema)
self.assertEquals(schema['field.int'], 'number')
self.assertIn('field.bool', schema)
self.assertEquals(schema['field.bool'], 'bool')
self.assertIn('field.list', schema)
self.assertEquals(schema['field.list'], 'array')
self.assertIn('field.set', schema)
self.assertEquals(schema['field.set'], 'array')
self.assertIn('field.dict', schema)
self.assertEquals(schema['field.dict'], 'array')
self.assertIn('field.ip', schema)
self.assertEquals(schema['field.ip'], 'cidr')
self.assertIn('field.ascii', schema)
self.assertEquals(schema['field.ascii'], 'string')
def test_bad_collection(self):
from sparc.db.splunk import ISplunkKVCollectionSchema
class ITestSchemaDict(Interface):
list = schema.List(title=u'bad',
value_type=schema.Dict(title=u'bad'))
sschema = ISplunkKVCollectionSchema(ITestSchemaDict)
self.assertNotIn('field.list', sschema)
class ITestSchemaCollection(Interface):
list = schema.List(title=u'bad',
value_type=schema.List(title=u'bad'))
sschema = ISplunkKVCollectionSchema(ITestSchemaDict)
self.assertNotIn('field.list', sschema)
kv_names = {}
kv_names['test_collection'] = {}
kv_names['test_collection']['field.id'] = "string"
kv_names['test_collection']['field.name'] = "string"
SPARC_DB_SPLUNK_INTEGRATION_LAYER.kv_names.update(kv_names)
class SparcDBSplunkKVTestCase(unittest.TestCase):
layer = SPARC_DB_SPLUNK_INTEGRATION_LAYER
level = 2
sm = component.getSiteManager()
def test_current_kv_names(self):
from sparc.db.splunk.kvstore import current_kv_names
req = component.createObject(u'sparc.utils.requests.request')
req.req_kwargs['verify'] = False
req.gooble_warnings = True
self.assertIn('test_collection', \
current_kv_names(self.layer.sci,
self.layer.kv_username,
self.layer.kv_appname,
request=req))
def test_schema_adapter_for_named_collection(self):
# tests SplunkKVCollectionSchemaFromSplunkInstance
from sparc.db.splunk import ISplunkKVCollectionSchema
from sparc.utils.requests import IRequest
kv_id = self.layer.get_kv_id(u'test_collection')
schema = component.getMultiAdapter((self.layer.sci,
kv_id,
self.sm.getUtility(IRequest)),
ISplunkKVCollectionSchema)
for k in self.layer.kv_names['test_collection'].keys():
self.assertEquals(self.layer.kv_names['test_collection'][k], schema[k])
class test_suite(test_suite_mixin):
package = 'sparc.db.splunk'
module = 'kvstore'
def __new__(cls):
suite = super(test_suite, cls).__new__(cls)
suite.addTest(unittest.makeSuite(SparcCacheSplunkAreaTestCase))
suite.addTest(unittest.makeSuite(SparcDBSplunkKVTestCase))
return suite
if __name__ == '__main__':
zope.testrunner.run([
'--path', os.path.dirname(__file__),
'--tests-pattern', os.path.splitext(
os.path.basename(__file__))[0]
])<|fim▁end|>
|
bool = schema.Bool(title=u"bool")
list = schema.Set(title=u"list", value_type=schema.Field(title=u"field"))
set = schema.Set(title=u"set", value_type=schema.Field(title=u"field"))
dict = schema.Dict(title=u"dict", key_type=schema.TextLine(title=u"key"),
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>define(function(require, exports, module) {
require('jquery.cycle2');
exports.run = function() {
$('.homepage-feature').cycle({
fx:"scrollHorz",
slides: "> a, > img",
log: "false",
pauseOnHover: "true"
});
<|fim▁hole|> $( this ).find( ".rating" ).show();
}, function() {
$( this ).find( ".rating" ).hide();
});
};
});<|fim▁end|>
|
$('.live-rating-course').find('.media-body').hover(function() {
|
<|file_name|>TestMultiWordCommands.py<|end_file_name|><|fim▁begin|>"""
Test multiword commands ('platform' in this case).
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
class MultiwordCommandsTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@no_debug_info_test
def test_ambiguous_subcommand(self):
self.expect("platform s", error=True,
substrs=["ambiguous command 'platform s'. Possible completions:",<|fim▁hole|> "\tselect\n",
"\tshell\n",
"\tsettings\n"])
@no_debug_info_test
def test_empty_subcommand(self):
self.expect("platform \"\"", error=True, substrs=["Need to specify a non-empty subcommand."])
@no_debug_info_test
def test_help(self):
# <multiword> help brings up help.
self.expect("platform help",
substrs=["Commands to manage and create platforms.",
"Syntax: platform [",
"The following subcommands are supported:",
"connect",
"Select the current platform"])<|fim▁end|>
| |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>import os
import dj_database_url
import re
from django.conf import settings
from cabot.celeryconfig import *
from cabot.cabot_config import *
settings_dir = os.path.dirname(__file__)
PROJECT_ROOT = os.path.abspath(settings_dir)
TEMPLATE_DEBUG = DEBUG = os.environ.get("DEBUG", False)
ADMINS = (
('Admin', os.environ.get('ADMIN_EMAIL', '[email protected]')),
)
MANAGERS = ADMINS
DATABASES = {'default': dj_database_url.parse(os.environ["DATABASE_URL"])}
if not DEBUG:
DATABASES['default']['OPTIONS'] = {'autocommit': True}<|fim▁hole|>
USE_TZ = True
ALLOWED_HOSTS = os.environ.get('ALLOWED_HOSTS', '*').split(',')
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = os.environ.get('TIME_ZONE', 'Etc/UTC')
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media/')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, os.path.pardir, 'static/')
COMPRESS_ROOT = STATIC_ROOT
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = [os.path.join(PROJECT_ROOT, 'static')]
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = os.environ.get(
'DJANGO_SECRET_KEY', '2FL6ORhHwr5eX34pP9mMugnIOd3jzVuT45f7w430Mt5PnEwbcJgma0q8zUXNZ68A')
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'cabot.urls'
TEMPLATE_DIRS = (
os.path.join(PROJECT_ROOT, 'templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'south',
'compressor',
'polymorphic',
'djcelery',
'mptt',
'jsonify',
'cabot.cabotapp',
'rest_framework',
)
# Load additional apps from configuration file
CABOT_PLUGINS_ENABLED_PARSED = []
for plugin in CABOT_PLUGINS_ENABLED.split(","):
# Hack to clean up if versions of plugins specified
exploded = re.split(r'[<>=]+', plugin)
CABOT_PLUGINS_ENABLED_PARSED.append(exploded[0])
INSTALLED_APPS += tuple(CABOT_PLUGINS_ENABLED_PARSED)
COMPRESS_PRECOMPILERS = (
('text/coffeescript', 'coffee --compile --stdio'),
('text/eco',
'eco -i TEMPLATES {infile} && cat "$(echo "{infile}" | sed -e "s/\.eco$/.js/g")"'),
('text/less', 'lessc {infile} > {outfile}'),
)
EMAIL_HOST = os.environ.get('SES_HOST', 'localhost')
EMAIL_PORT = int(os.environ.get('SES_PORT', 25))
EMAIL_HOST_USER = os.environ.get('SES_USER', '')
EMAIL_HOST_PASSWORD = os.environ.get('SES_PASS', '')
EMAIL_BACKEND = os.environ.get('SES_BACKEND', 'django_smtp_ssl.SSLEmailBackend')
EMAIL_USE_TLS = os.environ.get('SES_USE_TLS', 0)
COMPRESS_OFFLINE = not DEBUG
COMPRESS_URL = '/static/'
RECOVERY_SNIPPETS_WHITELIST = (
r'https?://[^.]+\.hackpad\.com/[^./]+\.js',
r'https?://gist\.github\.com/[^.]+\.js',
r'https?://www\.refheap\.com/[^.]+\.js',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'django.utils.log.NullHandler',
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
},
'log_file': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'formatter': 'verbose',
'filename': os.environ['LOG_FILE'],
'maxBytes': 1024 * 1024 * 25, # 25 MB
'backupCount': 5,
},
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django': {
'handlers': ['console', 'log_file', 'mail_admins'],
'level': 'INFO',
'propagate': True,
},
'django.request': {
'handlers': ['console', 'log_file', 'mail_admins'],
'level': 'ERROR',
'propagate': False,
},
'django.db.backends': {
'handlers': ['console', 'log_file', 'mail_admins'],
'level': 'INFO',
'propagate': False,
},
# Catch All Logger -- Captures any other logging
'': {
'handlers': ['console', 'log_file', 'mail_admins'],
'level': 'INFO',
'propagate': True,
}
}
}
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.SessionAuthentication',
),
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissions',
],
'DEFAULT_FILTER_BACKENDS': [
'rest_framework.filters.DjangoFilterBackend',
'rest_framework.filters.OrderingFilter',
]
}
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
)
AUTH_LDAP = os.environ.get('AUTH_LDAP', 'false')
if AUTH_LDAP.lower() == "true":
from settings_ldap import *
AUTHENTICATION_BACKENDS += tuple(['django_auth_ldap.backend.LDAPBackend'])<|fim▁end|>
| |
<|file_name|>spell_hunter.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008-2013 TrinityCore <http://www.trinitycore.org/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Scripts for spells with SPELLFAMILY_HUNTER, SPELLFAMILY_PET and SPELLFAMILY_GENERIC spells used by hunter players.
* Ordered alphabetically using scriptname.
* Scriptnames of files in this file should be prefixed with "spell_hun_".
*/
#include "Pet.h"
#include "ScriptMgr.h"
#include "Cell.h"
#include "CellImpl.h"
#include "GridNotifiers.h"
#include "GridNotifiersImpl.h"
#include "SpellScript.h"
#include "SpellAuraEffects.h"
enum HunterSpells
{
SPELL_HUNTER_ASPECT_OF_THE_BEAST_PET = 61669,
SPELL_HUNTER_BESTIAL_WRATH = 19574,
SPELL_HUNTER_CHIMERA_SHOT_SERPENT = 53353,
SPELL_HUNTER_CHIMERA_SHOT_VIPER = 53358,
SPELL_HUNTER_CHIMERA_SHOT_SCORPID = 53359,
SPELL_HUNTER_INVIGORATION_TRIGGERED = 53398,
SPELL_HUNTER_MASTERS_CALL_TRIGGERED = 62305,
SPELL_HUNTER_PET_LAST_STAND_TRIGGERED = 53479,
SPELL_HUNTER_PET_HEART_OF_THE_PHOENIX = 55709,
SPELL_HUNTER_PET_HEART_OF_THE_PHOENIX_TRIGGERED = 54114,
SPELL_HUNTER_PET_HEART_OF_THE_PHOENIX_DEBUFF = 55711,
SPELL_HUNTER_PET_CARRION_FEEDER_TRIGGERED = 54045,
SPELL_HUNTER_READINESS = 23989,
SPELL_HUNTER_SNIPER_TRAINING_R1 = 53302,
SPELL_HUNTER_SNIPER_TRAINING_BUFF_R1 = 64418,
SPELL_DRAENEI_GIFT_OF_THE_NAARU = 59543,
};
// 13161 - Aspect of the Beast
class spell_hun_aspect_of_the_beast : public SpellScriptLoader
{
public:
spell_hun_aspect_of_the_beast() : SpellScriptLoader("spell_hun_aspect_of_the_beast") { }
class spell_hun_aspect_of_the_beast_AuraScript : public AuraScript
{
PrepareAuraScript(spell_hun_aspect_of_the_beast_AuraScript);
bool Load()
{
return GetCaster()->GetTypeId() == TYPEID_PLAYER;
}
bool Validate(SpellInfo const* /*spellInfo*/)
{
if (!sSpellMgr->GetSpellInfo(SPELL_HUNTER_ASPECT_OF_THE_BEAST_PET))
return false;
return true;
}
void OnRemove(AuraEffect const* /*aurEff*/, AuraEffectHandleModes /*mode*/)
{
if (Player* caster = GetCaster()->ToPlayer())
if (Pet* pet = caster->GetPet())
pet->RemoveAurasDueToSpell(SPELL_HUNTER_ASPECT_OF_THE_BEAST_PET);
}
void OnApply(AuraEffect const* /*aurEff*/, AuraEffectHandleModes /*mode*/)
{
if (Player* caster = GetCaster()->ToPlayer())
if (caster->GetPet())
caster->CastSpell(caster, SPELL_HUNTER_ASPECT_OF_THE_BEAST_PET, true);
}
void Register()
{
AfterEffectApply += AuraEffectApplyFn(spell_hun_aspect_of_the_beast_AuraScript::OnApply, EFFECT_0, SPELL_AURA_UNTRACKABLE, AURA_EFFECT_HANDLE_REAL);
AfterEffectRemove += AuraEffectRemoveFn(spell_hun_aspect_of_the_beast_AuraScript::OnRemove, EFFECT_0, SPELL_AURA_UNTRACKABLE, AURA_EFFECT_HANDLE_REAL);
}
};
AuraScript* GetAuraScript() const
{
return new spell_hun_aspect_of_the_beast_AuraScript();
}
};
// 53209 - Chimera Shot
class spell_hun_chimera_shot : public SpellScriptLoader
{
public:
spell_hun_chimera_shot() : SpellScriptLoader("spell_hun_chimera_shot") { }
class spell_hun_chimera_shot_SpellScript : public SpellScript
{
PrepareSpellScript(spell_hun_chimera_shot_SpellScript);
bool Validate(SpellInfo const* /*spellInfo*/)
{
if (!sSpellMgr->GetSpellInfo(SPELL_HUNTER_CHIMERA_SHOT_SERPENT) || !sSpellMgr->GetSpellInfo(SPELL_HUNTER_CHIMERA_SHOT_VIPER) || !sSpellMgr->GetSpellInfo(SPELL_HUNTER_CHIMERA_SHOT_SCORPID))
return false;
return true;
}
void HandleScriptEffect(SpellEffIndex /*effIndex*/)
{
Unit* caster = GetCaster();
if (Unit* unitTarget = GetHitUnit())
{
uint32 spellId = 0;
int32 basePoint = 0;
Unit::AuraApplicationMap& Auras = unitTarget->GetAppliedAuras();
for (Unit::AuraApplicationMap::iterator i = Auras.begin(); i != Auras.end(); ++i)
{
Aura* aura = i->second->GetBase();
if (aura->GetCasterGUID() != caster->GetGUID())
continue;
// Search only Serpent Sting, Viper Sting, Scorpid Sting auras
flag96 familyFlag = aura->GetSpellInfo()->SpellFamilyFlags;
if (!(familyFlag[1] & 0x00000080 || familyFlag[0] & 0x0000C000))
continue;
if (AuraEffect const* aurEff = aura->GetEffect(0))
{
// Serpent Sting - Instantly deals 40% of the damage done by your Serpent Sting.
if (familyFlag[0] & 0x4000)
{
int32 TickCount = aurEff->GetTotalTicks();
spellId = SPELL_HUNTER_CHIMERA_SHOT_SERPENT;
basePoint = caster->SpellDamageBonusDone(unitTarget, aura->GetSpellInfo(), aurEff->GetAmount(), DOT, aura->GetStackAmount());
ApplyPct(basePoint, TickCount * 40);
basePoint = unitTarget->SpellDamageBonusTaken(caster, aura->GetSpellInfo(), basePoint, DOT, aura->GetStackAmount());
}
// Viper Sting - Instantly restores mana to you equal to 60% of the total amount drained by your Viper Sting.
else if (familyFlag[1] & 0x00000080)
{
int32 TickCount = aura->GetEffect(0)->GetTotalTicks();
spellId = SPELL_HUNTER_CHIMERA_SHOT_VIPER;
// Amount of one aura tick
basePoint = int32(CalculatePct(unitTarget->GetMaxPower(POWER_MANA), aurEff->GetAmount()));
int32 casterBasePoint = aurEff->GetAmount() * unitTarget->GetMaxPower(POWER_MANA) / 50; // TODO: WTF? caster uses unitTarget?
if (basePoint > casterBasePoint)
basePoint = casterBasePoint;
ApplyPct(basePoint, TickCount * 60);
}
// Scorpid Sting - Attempts to Disarm the target for 10 sec. This effect cannot occur more than once per 1 minute.
else if (familyFlag[0] & 0x00008000)
spellId = SPELL_HUNTER_CHIMERA_SHOT_SCORPID;
// ?? nothing say in spell desc (possibly need addition check)
//if (familyFlag & 0x0000010000000000LL || // dot
// familyFlag & 0x0000100000000000LL) // stun
//{
// spellId = 53366; // 53366 Chimera Shot - Wyvern
//}
// Refresh aura duration
aura->RefreshDuration();
}
break;
}
if (spellId)
caster->CastCustomSpell(unitTarget, spellId, &basePoint, 0, 0, true);
if (spellId == SPELL_HUNTER_CHIMERA_SHOT_SCORPID && caster->ToPlayer()) // Scorpid Sting - Add 1 minute cooldown
caster->ToPlayer()->AddSpellCooldown(spellId, 0, uint32(time(NULL) + 60));
}
}
void Register()
{
OnEffectHitTarget += SpellEffectFn(spell_hun_chimera_shot_SpellScript::HandleScriptEffect, EFFECT_0, SPELL_EFFECT_SCRIPT_EFFECT);
}
};
SpellScript* GetSpellScript() const
{
return new spell_hun_chimera_shot_SpellScript();
}
};
// 781 - Disengage
class spell_hun_disengage : public SpellScriptLoader
{
public:
spell_hun_disengage() : SpellScriptLoader("spell_hun_disengage") { }
class spell_hun_disengage_SpellScript : public SpellScript
{
PrepareSpellScript(spell_hun_disengage_SpellScript);
SpellCastResult CheckCast()
{
Unit* caster = GetCaster();
if (caster->GetTypeId() == TYPEID_PLAYER && !caster->isInCombat())
return SPELL_FAILED_CANT_DO_THAT_RIGHT_NOW;
return SPELL_CAST_OK;
}
void Register()
{
OnCheckCast += SpellCheckCastFn(spell_hun_disengage_SpellScript::CheckCast);
}
};
SpellScript* GetSpellScript() const
{
return new spell_hun_disengage_SpellScript();
}
};
// 53412 - Invigoration
class spell_hun_invigoration : public SpellScriptLoader
{
public:
spell_hun_invigoration() : SpellScriptLoader("spell_hun_invigoration") { }
class spell_hun_invigoration_SpellScript : public SpellScript
{
PrepareSpellScript(spell_hun_invigoration_SpellScript);
bool Validate(SpellInfo const* /*spellInfo*/)
{
if (!sSpellMgr->GetSpellInfo(SPELL_HUNTER_INVIGORATION_TRIGGERED))
return false;
return true;
}
void HandleScriptEffect(SpellEffIndex /*effIndex*/)
{
if (Unit* unitTarget = GetHitUnit())
if (AuraEffect* aurEff = unitTarget->GetDummyAuraEffect(SPELLFAMILY_HUNTER, 3487, 0))
if (roll_chance_i(aurEff->GetAmount()))
unitTarget->CastSpell(unitTarget, SPELL_HUNTER_INVIGORATION_TRIGGERED, true);
}
void Register()
{
OnEffectHitTarget += SpellEffectFn(spell_hun_invigoration_SpellScript::HandleScriptEffect, EFFECT_0, SPELL_EFFECT_SCRIPT_EFFECT);
}
};
SpellScript* GetSpellScript() const
{
return new spell_hun_invigoration_SpellScript();
}
};
// 53478 - Last Stand Pet
class spell_hun_last_stand_pet : public SpellScriptLoader
{
public:
spell_hun_last_stand_pet() : SpellScriptLoader("spell_hun_last_stand_pet") { }
class spell_hun_last_stand_pet_SpellScript : public SpellScript
{
PrepareSpellScript(spell_hun_last_stand_pet_SpellScript);
bool Validate(SpellInfo const* /*spellInfo*/)
{
if (!sSpellMgr->GetSpellInfo(SPELL_HUNTER_PET_LAST_STAND_TRIGGERED))
return false;
return true;
}
void HandleDummy(SpellEffIndex /*effIndex*/)
{
Unit* caster = GetCaster();
int32 healthModSpellBasePoints0 = int32(caster->CountPctFromMaxHealth(30));
caster->CastCustomSpell(caster, SPELL_HUNTER_PET_LAST_STAND_TRIGGERED, &healthModSpellBasePoints0, NULL, NULL, true, NULL);
}
void Register()
{
// add dummy effect spell handler to pet's Last Stand
OnEffectHitTarget += SpellEffectFn(spell_hun_last_stand_pet_SpellScript::HandleDummy, EFFECT_0, SPELL_EFFECT_DUMMY);
}
};
SpellScript* GetSpellScript() const
{
return new spell_hun_last_stand_pet_SpellScript();
}
};
// 53271 - Masters Call
class spell_hun_masters_call : public SpellScriptLoader
{
public:
spell_hun_masters_call() : SpellScriptLoader("spell_hun_masters_call") { }
class spell_hun_masters_call_SpellScript : public SpellScript
{
PrepareSpellScript(spell_hun_masters_call_SpellScript);
bool Validate(SpellInfo const* spellInfo)
{
if (!sSpellMgr->GetSpellInfo(SPELL_HUNTER_MASTERS_CALL_TRIGGERED) || !sSpellMgr->GetSpellInfo(spellInfo->Effects[EFFECT_0].CalcValue()) || !sSpellMgr->GetSpellInfo(spellInfo->Effects[EFFECT_1].CalcValue()))
return false;
return true;
}
void HandleDummy(SpellEffIndex /*effIndex*/)
{
if (Unit* ally = GetHitUnit())
if (Player* caster = GetCaster()->ToPlayer())
if (Pet* target = caster->GetPet())
{
TriggerCastFlags castMask = TriggerCastFlags(TRIGGERED_FULL_MASK & ~TRIGGERED_IGNORE_CASTER_AURASTATE);
target->CastSpell(ally, GetEffectValue(), castMask);
target->CastSpell(ally, GetSpellInfo()->Effects[EFFECT_0].CalcValue(), castMask);
}
}
void HandleScriptEffect(SpellEffIndex /*effIndex*/)
{
if (Unit* target = GetHitUnit())
{
// Cannot be processed while pet is dead
TriggerCastFlags castMask = TriggerCastFlags(TRIGGERED_FULL_MASK & ~TRIGGERED_IGNORE_CASTER_AURASTATE);
target->CastSpell(target, SPELL_HUNTER_MASTERS_CALL_TRIGGERED, castMask);
}
}
void Register()
{
OnEffectHitTarget += SpellEffectFn(spell_hun_masters_call_SpellScript::HandleDummy, EFFECT_0, SPELL_EFFECT_DUMMY);
OnEffectHitTarget += SpellEffectFn(spell_hun_masters_call_SpellScript::HandleScriptEffect, EFFECT_1, SPELL_EFFECT_SCRIPT_EFFECT);
}
};
SpellScript* GetSpellScript() const
{
return new spell_hun_masters_call_SpellScript();
}
};
// 34477 - Misdirection
class spell_hun_misdirection : public SpellScriptLoader
{
public:
spell_hun_misdirection() : SpellScriptLoader("spell_hun_misdirection") { }
class spell_hun_misdirection_AuraScript : public AuraScript
{
PrepareAuraScript(spell_hun_misdirection_AuraScript);
void OnRemove(AuraEffect const* /*aurEff*/, AuraEffectHandleModes /*mode*/)
{
if (Unit* caster = GetCaster())
if (GetTargetApplication()->GetRemoveMode() != AURA_REMOVE_BY_DEFAULT)
caster->SetReducedThreatPercent(0, 0);
}
void Register()
{
AfterEffectRemove += AuraEffectRemoveFn(spell_hun_misdirection_AuraScript::OnRemove, EFFECT_1, SPELL_AURA_DUMMY, AURA_EFFECT_HANDLE_REAL);
}
};
AuraScript* GetAuraScript() const
{
return new spell_hun_misdirection_AuraScript();
}
};
// 35079 - Misdirection proc
class spell_hun_misdirection_proc : public SpellScriptLoader
{
public:
spell_hun_misdirection_proc() : SpellScriptLoader("spell_hun_misdirection_proc") { }
class spell_hun_misdirection_proc_AuraScript : public AuraScript
{
PrepareAuraScript(spell_hun_misdirection_proc_AuraScript);
void OnRemove(AuraEffect const* /*aurEff*/, AuraEffectHandleModes /*mode*/)
{
if (GetCaster())
GetCaster()->SetReducedThreatPercent(0, 0);
}
void Register()
{
AfterEffectRemove += AuraEffectRemoveFn(spell_hun_misdirection_proc_AuraScript::OnRemove, EFFECT_0, SPELL_AURA_DUMMY, AURA_EFFECT_HANDLE_REAL);
}
};
AuraScript* GetAuraScript() const
{
return new spell_hun_misdirection_proc_AuraScript();
}
};
// 54044 - Pet Carrion Feeder
class spell_hun_pet_carrion_feeder : public SpellScriptLoader
{
public:
spell_hun_pet_carrion_feeder() : SpellScriptLoader("spell_hun_pet_carrion_feeder") { }
class spell_hun_pet_carrion_feeder_SpellScript : public SpellScript
{
PrepareSpellScript(spell_hun_pet_carrion_feeder_SpellScript);
bool Load()
{
if (!GetCaster()->isPet())
return false;
return true;
}
bool Validate(SpellInfo const* /*spellInfo*/)
{
if (!sSpellMgr->GetSpellInfo(SPELL_HUNTER_PET_CARRION_FEEDER_TRIGGERED))
return false;
return true;
}
SpellCastResult CheckIfCorpseNear()
{
Unit* caster = GetCaster();
float max_range = GetSpellInfo()->GetMaxRange(false);
WorldObject* result = NULL;
// search for nearby enemy corpse in range
Trinity::AnyDeadUnitSpellTargetInRangeCheck check(caster, max_range, GetSpellInfo(), TARGET_CHECK_ENEMY);
Trinity::WorldObjectSearcher<Trinity::AnyDeadUnitSpellTargetInRangeCheck> searcher(caster, result, check);
caster->GetMap()->VisitFirstFound(caster->m_positionX, caster->m_positionY, max_range, searcher);
if (!result)
return SPELL_FAILED_NO_EDIBLE_CORPSES;
return SPELL_CAST_OK;
}
void HandleDummy(SpellEffIndex /*effIndex*/)
{
Unit* caster = GetCaster();
caster->CastSpell(caster, SPELL_HUNTER_PET_CARRION_FEEDER_TRIGGERED, false);
}
void Register()
{
// add dummy effect spell handler to pet's Last Stand
OnEffectHit += SpellEffectFn(spell_hun_pet_carrion_feeder_SpellScript::HandleDummy, EFFECT_0, SPELL_EFFECT_DUMMY);
OnCheckCast += SpellCheckCastFn(spell_hun_pet_carrion_feeder_SpellScript::CheckIfCorpseNear);
}
};
SpellScript* GetSpellScript() const
{
return new spell_hun_pet_carrion_feeder_SpellScript();
}
};
// 55709 - Pet Heart of the Phoenix
class spell_hun_pet_heart_of_the_phoenix : public SpellScriptLoader
{
public:<|fim▁hole|>
class spell_hun_pet_heart_of_the_phoenix_SpellScript : public SpellScript
{
PrepareSpellScript(spell_hun_pet_heart_of_the_phoenix_SpellScript);
bool Load()
{
if (!GetCaster()->isPet())
return false;
return true;
}
bool Validate(SpellInfo const* /*spellInfo*/)
{
if (!sSpellMgr->GetSpellInfo(SPELL_HUNTER_PET_HEART_OF_THE_PHOENIX_TRIGGERED) || !sSpellMgr->GetSpellInfo(SPELL_HUNTER_PET_HEART_OF_THE_PHOENIX_DEBUFF))
return false;
return true;
}
void HandleScript(SpellEffIndex /*effIndex*/)
{
Unit* caster = GetCaster();
if (Unit* owner = caster->GetOwner())
if (!caster->HasAura(SPELL_HUNTER_PET_HEART_OF_THE_PHOENIX_DEBUFF))
{
owner->CastCustomSpell(SPELL_HUNTER_PET_HEART_OF_THE_PHOENIX_TRIGGERED, SPELLVALUE_BASE_POINT0, 100, caster, true);
caster->CastSpell(caster, SPELL_HUNTER_PET_HEART_OF_THE_PHOENIX_DEBUFF, true);
}
}
void Register()
{
// add dummy effect spell handler to pet's Last Stand
OnEffectHitTarget += SpellEffectFn(spell_hun_pet_heart_of_the_phoenix_SpellScript::HandleScript, EFFECT_0, SPELL_EFFECT_SCRIPT_EFFECT);
}
};
SpellScript* GetSpellScript() const
{
return new spell_hun_pet_heart_of_the_phoenix_SpellScript();
}
};
// 23989 - Readiness
class spell_hun_readiness : public SpellScriptLoader
{
public:
spell_hun_readiness() : SpellScriptLoader("spell_hun_readiness") { }
class spell_hun_readiness_SpellScript : public SpellScript
{
PrepareSpellScript(spell_hun_readiness_SpellScript);
bool Load()
{
return GetCaster()->GetTypeId() == TYPEID_PLAYER;
}
void HandleDummy(SpellEffIndex /*effIndex*/)
{
Player* caster = GetCaster()->ToPlayer();
// immediately finishes the cooldown on your other Hunter abilities except Bestial Wrath
const SpellCooldowns& cm = caster->ToPlayer()->GetSpellCooldownMap();
for (SpellCooldowns::const_iterator itr = cm.begin(); itr != cm.end();)
{
SpellInfo const* spellInfo = sSpellMgr->GetSpellInfo(itr->first);
///! If spellId in cooldown map isn't valid, the above will return a null pointer.
if (spellInfo &&
spellInfo->SpellFamilyName == SPELLFAMILY_HUNTER &&
spellInfo->Id != SPELL_HUNTER_READINESS &&
spellInfo->Id != SPELL_HUNTER_BESTIAL_WRATH &&
spellInfo->Id != SPELL_DRAENEI_GIFT_OF_THE_NAARU &&
spellInfo->GetRecoveryTime() > 0)
caster->RemoveSpellCooldown((itr++)->first, true);
else
++itr;
}
}
void Register()
{
// add dummy effect spell handler to Readiness
OnEffectHitTarget += SpellEffectFn(spell_hun_readiness_SpellScript::HandleDummy, EFFECT_0, SPELL_EFFECT_DUMMY);
}
};
SpellScript* GetSpellScript() const
{
return new spell_hun_readiness_SpellScript();
}
};
// 37506 - Scatter Shot
class spell_hun_scatter_shot : public SpellScriptLoader
{
public:
spell_hun_scatter_shot() : SpellScriptLoader("spell_hun_scatter_shot") { }
class spell_hun_scatter_shot_SpellScript : public SpellScript
{
PrepareSpellScript(spell_hun_scatter_shot_SpellScript);
bool Load()
{
return GetCaster()->GetTypeId() == TYPEID_PLAYER;
}
void HandleDummy(SpellEffIndex /*effIndex*/)
{
Player* caster = GetCaster()->ToPlayer();
// break Auto Shot and autohit
caster->InterruptSpell(CURRENT_AUTOREPEAT_SPELL);
caster->AttackStop();
caster->SendAttackSwingCancelAttack();
}
void Register()
{
OnEffectHitTarget += SpellEffectFn(spell_hun_scatter_shot_SpellScript::HandleDummy, EFFECT_0, SPELL_EFFECT_DUMMY);
}
};
SpellScript* GetSpellScript() const
{
return new spell_hun_scatter_shot_SpellScript();
}
};
// -53302 - Sniper Training
class spell_hun_sniper_training : public SpellScriptLoader
{
public:
spell_hun_sniper_training() : SpellScriptLoader("spell_hun_sniper_training") { }
class spell_hun_sniper_training_AuraScript : public AuraScript
{
PrepareAuraScript(spell_hun_sniper_training_AuraScript);
bool Validate(SpellInfo const* /*spellInfo*/)
{
if (!sSpellMgr->GetSpellInfo(SPELL_HUNTER_SNIPER_TRAINING_R1) || !sSpellMgr->GetSpellInfo(SPELL_HUNTER_SNIPER_TRAINING_BUFF_R1))
return false;
return true;
}
void HandlePeriodic(AuraEffect const* aurEff)
{
PreventDefaultAction();
if (aurEff->GetAmount() <= 0)
{
Unit* caster = GetCaster();
uint32 spellId = SPELL_HUNTER_SNIPER_TRAINING_BUFF_R1 + GetId() - SPELL_HUNTER_SNIPER_TRAINING_R1;
if (Unit* target = GetTarget())
if (!target->HasAura(spellId))
{
SpellInfo const* triggeredSpellInfo = sSpellMgr->GetSpellInfo(spellId);
Unit* triggerCaster = triggeredSpellInfo->NeedsToBeTriggeredByCaster() ? caster : target;
triggerCaster->CastSpell(target, triggeredSpellInfo, true, 0, aurEff);
}
}
}
void HandleUpdatePeriodic(AuraEffect* aurEff)
{
if (Player* playerTarget = GetUnitOwner()->ToPlayer())
{
int32 baseAmount = aurEff->GetBaseAmount();
int32 amount = playerTarget->isMoving() ?
playerTarget->CalculateSpellDamage(playerTarget, GetSpellInfo(), aurEff->GetEffIndex(), &baseAmount) :
aurEff->GetAmount() - 1;
aurEff->SetAmount(amount);
}
}
void Register()
{
OnEffectPeriodic += AuraEffectPeriodicFn(spell_hun_sniper_training_AuraScript::HandlePeriodic, EFFECT_0, SPELL_AURA_PERIODIC_TRIGGER_SPELL);
OnEffectUpdatePeriodic += AuraEffectUpdatePeriodicFn(spell_hun_sniper_training_AuraScript::HandleUpdatePeriodic, EFFECT_0, SPELL_AURA_PERIODIC_TRIGGER_SPELL);
}
};
AuraScript* GetAuraScript() const
{
return new spell_hun_sniper_training_AuraScript();
}
};
// 1515 - Tame Beast
// Validates a tame attempt before the cast starts; each failed precondition
// maps to a distinct client-visible error code, so check order matters.
class spell_hun_tame_beast : public SpellScriptLoader
{
public:
    spell_hun_tame_beast() : SpellScriptLoader("spell_hun_tame_beast") { }

    class spell_hun_tame_beast_SpellScript : public SpellScript
    {
        PrepareSpellScript(spell_hun_tame_beast_SpellScript);

        SpellCastResult CheckCast()
        {
            Unit* caster = GetCaster();
            // Only players tame; fail silently (no error box) for others.
            if (caster->GetTypeId() != TYPEID_PLAYER)
                return SPELL_FAILED_DONT_REPORT;

            if (!GetExplTargetUnit())
                return SPELL_FAILED_BAD_IMPLICIT_TARGETS;

            if (Creature* target = GetExplTargetUnit()->ToCreature())
            {
                // Cannot tame creatures above the hunter's own level.
                if (target->getLevel() > caster->getLevel())
                    return SPELL_FAILED_HIGHLEVEL;

                // use SMSG_PET_TAME_FAILURE?
                if (!target->GetCreatureTemplate()->isTameable(caster->ToPlayer()->CanTameExoticPets()))
                    return SPELL_FAILED_BAD_TARGETS;

                // One active pet/charm at a time.
                if (caster->GetPetGUID())
                    return SPELL_FAILED_ALREADY_HAVE_SUMMON;

                if (caster->GetCharmGUID())
                    return SPELL_FAILED_ALREADY_HAVE_CHARM;
            }
            else
                // Explicit target exists but is not a creature.
                return SPELL_FAILED_BAD_IMPLICIT_TARGETS;

            return SPELL_CAST_OK;
        }

        void Register()
        {
            OnCheckCast += SpellCheckCastFn(spell_hun_tame_beast_SpellScript::CheckCast);
        }
    };

    SpellScript* GetSpellScript() const
    {
        return new spell_hun_tame_beast_SpellScript();
    }
};
// -24604 - Furious Howl
// 53434 - Call of the Wild
// Restricts party-area spells so they hit only the casting pet and its owner.
class spell_hun_target_only_pet_and_owner : public SpellScriptLoader
{
public:
    spell_hun_target_only_pet_and_owner() : SpellScriptLoader("spell_hun_target_only_pet_and_owner") { }

    class spell_hun_target_only_pet_and_owner_SpellScript : public SpellScript
    {
        PrepareSpellScript(spell_hun_target_only_pet_and_owner_SpellScript);

        // Replace the default party-wide target list with the caster (the pet)
        // plus its owner, if any, so other party members are not affected.
        void FilterTargets(std::list<WorldObject*>& targets)
        {
            targets.clear();
            targets.push_back(GetCaster());
            if (Unit* owner = GetCaster()->GetOwner())
                targets.push_back(owner);
        }

        void Register()
        {
            // Both spell effects use the caster's party area target, so the
            // same filter is attached to each effect index.
            OnObjectAreaTargetSelect += SpellObjectAreaTargetSelectFn(spell_hun_target_only_pet_and_owner_SpellScript::FilterTargets, EFFECT_0, TARGET_UNIT_CASTER_AREA_PARTY);
            OnObjectAreaTargetSelect += SpellObjectAreaTargetSelectFn(spell_hun_target_only_pet_and_owner_SpellScript::FilterTargets, EFFECT_1, TARGET_UNIT_CASTER_AREA_PARTY);
        }
    };

    SpellScript* GetSpellScript() const
    {
        return new spell_hun_target_only_pet_and_owner_SpellScript();
    }
};
// Registers every hunter spell script with the script manager. The loader
// objects register themselves in their SpellScriptLoader constructors, so the
// allocated instances are intentionally not kept here.
void AddSC_hunter_spell_scripts()
{
    new spell_hun_aspect_of_the_beast();
    new spell_hun_chimera_shot();
    new spell_hun_disengage();
    new spell_hun_invigoration();
    new spell_hun_last_stand_pet();
    new spell_hun_masters_call();
    new spell_hun_misdirection();
    new spell_hun_misdirection_proc();
    new spell_hun_pet_carrion_feeder();
    new spell_hun_pet_heart_of_the_phoenix();
    new spell_hun_readiness();
    new spell_hun_scatter_shot();
    new spell_hun_sniper_training();
    new spell_hun_tame_beast();
    new spell_hun_target_only_pet_and_owner();
}
|
spell_hun_pet_heart_of_the_phoenix() : SpellScriptLoader("spell_hun_pet_heart_of_the_phoenix") { }
|
<|file_name|>shar.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2012-2015 Bastian Kleineidam
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Archive commands for the shar program."""
from .. import util
<|fim▁hole|> """Create a SHAR archive."""
cmdlist = [util.shell_quote(cmd)]
cmdlist.extend([util.shell_quote(x) for x in filenames])
cmdlist.extend(['>', util.shell_quote(archive)])
return (cmdlist, {'shell': True})<|fim▁end|>
|
def create_shar (archive, compression, cmd, verbosity, interactive, filenames):
|
<|file_name|>LCM.py<|end_file_name|><|fim▁begin|>#Program to find the LCM of two numbers
#Function to find GCD
def gcd(num1, num2):
    """Return the greatest common divisor of two integers.

    Uses the iterative Euclidean algorithm (modulo) instead of the
    subtraction-based recursion, which never terminated for zero or
    negative inputs and recursed O(max/min) deep for skewed pairs.
    """
    # Work with magnitudes so negative inputs are handled too.
    num1, num2 = abs(num1), abs(num2)
    while num2:
        num1, num2 = num2, num1 % num2
    return num1
def lcm(num1, num2):
return (num1*num2) // gcd(num1, num2)
#Driver function for testing above
def test():
num1, num2 = 12, 4
print('LCM of {} and {} is {}'.format(num1, num2, lcm(num1, num2)))<|fim▁end|>
| |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var curl = require('./lib/curl');<|fim▁hole|> clean(content, {
blockSize: 3
});
});<|fim▁end|>
|
var clean = require('./lib/extract');
curl('http://blog.rainy.im/2015/09/02/web-content-and-main-image-extractor/', function (content) {
console.log('fetch done');
|
<|file_name|>engine.property.spec.ts<|end_file_name|><|fim▁begin|>import { expect } from "chai"
import { ContextItem, Engine } from "./engine"
describe("engine/property", () => {
let el: HTMLDivElement
beforeEach(() => {
if (el) {
el.parentNode?.removeChild(el)
}
el = document.body.appendChild(document.createElement("div"))
})
it("should set property", () => {
el.innerHTML = ""
Engine.update(el, (engine) => {
engine.voidElement("input", {
properties: [
["value", "1"],
["type", "number"],
],
})
})
const input = el.firstElementChild as HTMLInputElement & ContextItem<|fim▁hole|> expect(input.valueAsNumber).to.be.eq(1)
expect(input.__ceb_engine_updated_properties).to.include.members(["value", "type"])
})
it("should unset property", () => {
el.innerHTML = ""
Engine.update(el, (engine) => {
engine.openElement("p", {
properties: [["keyA", "valueA"]],
})
engine.closeElement()
})
const p = el.firstElementChild as HTMLParagraphElement & ContextItem
expect(p).to.have.property("keyA").eq("valueA")
expect(p.__ceb_engine_updated_properties).to.include.members(["keyA"])
Engine.update(el, (engine) => {
engine.openElement("p")
engine.closeElement()
})
expect(p).to.have.property("keyA").eq(undefined)
expect(p.__ceb_engine_updated_properties).to.have.lengthOf(0)
})
})<|fim▁end|>
|
expect(input.getAttribute("type")).to.be.eq("number")
expect(input.value).to.be.eq("1")
|
<|file_name|>layer.py<|end_file_name|><|fim▁begin|># --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
# --------------------------------------------------------
# R*CNN
# Written by Georgia Gkioxari, 2015.
# See LICENSE in the project root for license information.
# --------------------------------------------------------
"""The data layer used during training to train a R*CNN network.
AttributesDataLayer implements a Caffe Python layer.
"""
import caffe
from fast_rcnn.config import cfg
from attr_data_layer.minibatch import get_minibatch
import numpy as np
import yaml
from multiprocessing import Process, Queue
# import pdb
class AttributesDataLayer(caffe.Layer):
    """R*CNN data layer used during training for attributes."""

    def _shuffle_roidb_inds(self):
        """Randomly permute the training roidb."""
        self._perm = np.random.permutation(np.arange(len(self._roidb)))
        self._cur = 0

    def _get_next_minibatch_inds(self):
        """Return the roidb indices for the next minibatch."""
        # Reshuffle once the remainder cannot fill a whole batch.
        if self._cur + cfg.TRAIN.IMS_PER_BATCH >= len(self._roidb):
            self._shuffle_roidb_inds()
        db_inds = self._perm[self._cur:self._cur + cfg.TRAIN.IMS_PER_BATCH]
        self._cur += cfg.TRAIN.IMS_PER_BATCH
        return db_inds

    def _get_next_minibatch(self):
        """Return the blobs to be used for the next minibatch.

        If cfg.TRAIN.USE_PREFETCH is True, then blobs will be computed in a
        separate process and made available through self._blob_queue.
        """
        if cfg.TRAIN.USE_PREFETCH:
            return self._blob_queue.get()
        else:
            db_inds = self._get_next_minibatch_inds()
            minibatch_db = [self._roidb[i] for i in db_inds]
            return get_minibatch(minibatch_db, self._num_classes)

    def set_roidb(self, roidb):
        """Set the roidb to be used by this layer during training."""
        self._roidb = roidb
        self._shuffle_roidb_inds()
        if cfg.TRAIN.USE_PREFETCH:
            # Bounded queue keeps the prefetcher at most 10 minibatches ahead.
            self._blob_queue = Queue(10)
            self._prefetch_process = BlobFetcher(self._blob_queue,
                                                 self._roidb,
                                                 self._num_classes)
            self._prefetch_process.start()
            # Terminate the child process when the parent exists
            def cleanup():
                print 'Terminating BlobFetcher'
                self._prefetch_process.terminate()
                self._prefetch_process.join()
            import atexit
            atexit.register(cleanup)

    def setup(self, bottom, top):
        """Setup the RoIDataLayer."""
        # parse the layer parameter string, which must be valid YAML
        layer_params = yaml.load(self.param_str_)
        self._num_classes = layer_params['num_classes']
        # Maps the blob names produced by get_minibatch() to top indices.
        self._name_to_top_map = {
            'data': 0,
            'rois': 1,
            'labels': 2}
        # data blob: holds a batch of N images, each with 3 channels
        # The height and width (100 x 100) are dummy values
        top[0].reshape(1, 3, 100, 100)
        # rois blob: holds R regions of interest, each is a 5-tuple
        # (n, x1, y1, x2, y2) specifying an image batch index n and a
        # rectangle (x1, y1, x2, y2)
        top[1].reshape(1, 5)
        # labels blob: holds labels for each attribute
        top[2].reshape(1, self._num_classes)

    def forward(self, bottom, top):
        """Get blobs and copy them into this layer's top blob vector."""
        blobs = self._get_next_minibatch()
        for blob_name, blob in blobs.iteritems():
            top_ind = self._name_to_top_map[blob_name]
            # Reshape net's input blobs
            top[top_ind].reshape(*(blob.shape))
            # Copy data into net's input blobs
            top[top_ind].data[...] = blob.astype(np.float32, copy=False)

    def backward(self, top, propagate_down, bottom):
        """This layer does not propagate gradients."""
        pass

    def reshape(self, bottom, top):
        """Reshaping happens during the call to forward."""
        pass
class BlobFetcher(Process):
    """Experimental class for prefetching blobs in a separate process."""

    def __init__(self, queue, roidb, num_classes):
        super(BlobFetcher, self).__init__()
        # Queue shared with the parent data layer; run() puts blobs here.
        self._queue = queue
        self._roidb = roidb
        self._num_classes = num_classes
        self._perm = None
        self._cur = 0
        self._shuffle_roidb_inds()
        # fix the random seed for reproducibility
        np.random.seed(cfg.RNG_SEED)

    def _shuffle_roidb_inds(self):
        """Randomly permute the training roidb."""
        # TODO(rbg): remove duplicated code
        self._perm = np.random.permutation(np.arange(len(self._roidb)))
        self._cur = 0

    def _get_next_minibatch_inds(self):
        """Return the roidb indices for the next minibatch."""
        # TODO(rbg): remove duplicated code
        if self._cur + cfg.TRAIN.IMS_PER_BATCH >= len(self._roidb):
            self._shuffle_roidb_inds()
        db_inds = self._perm[self._cur:self._cur + cfg.TRAIN.IMS_PER_BATCH]
        self._cur += cfg.TRAIN.IMS_PER_BATCH
        return db_inds

    def run(self):
        # Producer loop: builds minibatches forever; the bounded queue's
        # put() blocks whenever the consumer falls behind.
        print 'BlobFetcher started'
        while True:
            db_inds = self._get_next_minibatch_inds()
            minibatch_db = [self._roidb[i] for i in db_inds]
            blobs = get_minibatch(minibatch_db, self._num_classes)
            self._queue.put(blobs)
| |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import csv
from datetime import datetime
import httplib
from itertools import count
import logging
import smtplib
from xml.dom.minidom import Document
from django.conf import settings
from django.core.exceptions import FieldError
from django.core.mail import EmailMultiAlternatives
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.core.urlresolvers import reverse, NoReverseMatch
from django.db.models import Q
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext as _rc
from django.utils.html import escape
from .forms import forms_for_survey
from .models import (
Answer,
BALLOT_STUFFING_FIELDS,
FORMAT_CHOICES,
OPTION_TYPE_CHOICES,
Question,
SURVEY_DISPLAY_TYPE_CHOICES,
Submission,
Survey,
SurveyReport,
SurveyReportDisplay,
extra_from_filters,
get_all_answers,
get_filters)
from .jsonutils import dump, dumps, datetime_to_string
from .util import ChoiceEnum, get_function
from . import settings as crowdsourcing_settings
def _user_entered_survey(request, survey):
if not request.user.is_authenticated():
return False
return bool(survey.submissions_for(
request.user,
request.session.session_key.lower()).count())
def _entered_no_more_allowed(request, survey):
""" The user entered the survey and the survey allows only one entry. """
return all((
not survey.allow_multiple_submissions,
_user_entered_survey(request, survey),))
def _get_remote_ip(request):
forwarded=request.META.get('HTTP_X_FORWARDED_FOR')
if forwarded:
return forwarded.split(',')[-1].strip()
return request.META['REMOTE_ADDR']
def _login_url(request):
if crowdsourcing_settings.LOGIN_VIEW:
start_with = reverse(crowdsourcing_settings.LOGIN_VIEW) + '?next=%s'
return start_with % request.path
return "/?login_required=true"
def _get_survey_or_404(slug, request=None):
manager = Survey.live
if request and request.user.is_staff:
manager = Survey.objects
return get_object_or_404(manager, slug=slug)
def _survey_submit(request, survey):
if survey.require_login and request.user.is_anonymous():
# again, the form should only be shown after the user is logged in, but
# to be safe...
return HttpResponseRedirect(_login_url(request))
if not hasattr(request, 'session'):
return HttpResponse("Cookies must be enabled to use this application.",
status=httplib.FORBIDDEN)
if (_entered_no_more_allowed(request, survey)):
slug_template = 'crowdsourcing/%s_already_submitted.html' % survey.slug
return render_to_response([slug_template,
'crowdsourcing/already_submitted.html'],
dict(survey=survey),
_rc(request))
forms = forms_for_survey(survey, request)
if _submit_valid_forms(forms, request, survey):
if survey.can_have_public_submissions():
return _survey_results_redirect(request, survey, thanks=True)
return _survey_show_form(request, survey, ())
else:
return _survey_show_form(request, survey, forms)
def _submit_valid_forms(forms, request, survey):
if not all(form.is_valid() for form in forms):
return False
submission_form = forms[0]
submission = submission_form.save(commit=False)
submission.survey = survey
submission.ip_address = _get_remote_ip(request)
submission.is_public = not survey.moderate_submissions
if request.user.is_authenticated():
submission.user = request.user
submission.save()
for form in forms[1:]:
answer = form.save(commit=False)
if isinstance(answer, (list, tuple)):
for a in answer:
a.submission = submission
a.save()
elif answer:
answer.submission = submission
answer.save()
if survey.email:
_send_survey_email(request, survey, submission)
return True
def _url_for_edit(request, obj):
view_args = (obj._meta.app_label, obj._meta.module_name,)
try:
edit_url = reverse("admin:%s_%s_change" % view_args, args=(obj.id,))
except NoReverseMatch:
# Probably 'admin' is not a registered namespace on a site without an
# admin. Just fake it.
edit_url = "/admin/%s/%s/%d/" % (view_args + (obj.id,))
admin_url = crowdsourcing_settings.SURVEY_ADMIN_SITE
if not admin_url:
admin_url = "http://" + request.META["HTTP_HOST"]
elif len(admin_url) < 4 or admin_url[:4].lower() != "http":
admin_url = "http://" + admin_url
return admin_url + edit_url
def _send_survey_email(request, survey, submission):
subject = survey.title
sender = crowdsourcing_settings.SURVEY_EMAIL_FROM
links = [(_url_for_edit(request, submission), "Edit Submission"),
(_url_for_edit(request, survey), "Edit Survey"),]
if survey.can_have_public_submissions():
u = "http://" + request.META["HTTP_HOST"] + _survey_report_url(survey)
links.append((u, "View Survey",))
parts = ["<a href=\"%s\">%s</a>" % link for link in links]
set = submission.answer_set.all()
lines = ["%s: %s" % (a.question.label, escape(a.value),) for a in set]
parts.extend(lines)
html_email = "<br/>\n".join(parts)
recipients = [a.strip() for a in survey.email.split(",")]
email_msg = EmailMultiAlternatives(subject,
html_email,
sender,
recipients)
email_msg.attach_alternative(html_email, 'text/html')
try:
email_msg.send()
except smtplib.SMTPException as ex:
logging.exception("SMTP error sending email: %s" % str(ex))
except Exception as ex:
logging.exception("Unexpected error sending email: %s" % str(ex))
def _survey_show_form(request, survey, forms):
specific_template = 'crowdsourcing/%s_survey_detail.html' % survey.slug
entered = _user_entered_survey(request, survey)
return render_to_response([specific_template,
'crowdsourcing/survey_detail.html'],
dict(survey=survey,
forms=forms,
entered=entered,
login_url=_login_url(request),
request=request),
_rc(request))
def _can_show_form(request, survey):
authenticated = request.user.is_authenticated()
return all((
survey.is_open,
authenticated or not survey.require_login,
not _entered_no_more_allowed(request, survey)))
def survey_detail(request, slug):
""" When you load the survey, this view decides what to do. It displays
the form, redirects to the results page, displays messages, or whatever
makes sense based on the survey, the user, and the user's entries. """
survey = _get_survey_or_404(slug, request)
if not survey.is_open and survey.can_have_public_submissions():
return _survey_results_redirect(request, survey)
need_login = (survey.is_open
and survey.require_login
and not request.user.is_authenticated())
if _can_show_form(request, survey):
if request.method == 'POST':
return _survey_submit(request, survey)
forms = forms_for_survey(survey, request)
elif need_login:
forms = ()
elif survey.can_have_public_submissions():
return _survey_results_redirect(request, survey)
else: # Survey is closed with private results.
forms = ()
return _survey_show_form(request, survey, forms)
def embeded_survey_questions(request, slug):
survey = _get_survey_or_404(slug, request)
templates = ['crowdsourcing/embeded_survey_questions_%s.html' % slug,
'crowdsourcing/embeded_survey_questions.html']
forms = ()
if _can_show_form(request, survey):
forms = forms_for_survey(survey, request)
if request.method == 'POST':
if _submit_valid_forms(forms, request, survey):
forms = ()
return render_to_response(templates, dict(
entered=_user_entered_survey(request, survey),
request=request,
forms=forms,
survey=survey,
login_url=_login_url(request)), _rc(request))
def _survey_results_redirect(request, survey, thanks=False):
response = HttpResponseRedirect(_survey_report_url(survey))
if thanks:
request.session['survey_thanks_%s' % survey.slug] = '1'
return response
def _survey_report_url(survey):
return reverse('survey_default_report_page_1',
kwargs={'slug': survey.slug})
def allowed_actions(request, slug):
survey = _get_survey_or_404(slug, request)
authenticated = request.user.is_authenticated()
response = HttpResponse(mimetype='application/json')
dump({"enter": _can_show_form(request, survey),
"view": survey.can_have_public_submissions(),
"open": survey.is_open,
"need_login": survey.require_login and not authenticated}, response)
return response
def questions(request, slug):
response = HttpResponse(mimetype='application/json')
dump(_get_survey_or_404(slug, request).to_jsondata(), response)
return response
def submissions(request, format):
""" Use this view to make arbitrary queries on submissions. If the user is
a logged in staff member, ignore submission.is_public,
question.answer_is_public, and survey.can_have_public_submissions. Use the
query string to pass keys and values. For example,
/crowdsourcing/submissions/?survey=my-survey will return all submissions
for the survey with slug my-survey.
survey - the slug for the survey
user - the username of the submittor. Leave blank for submissions without
a logged in user.
submitted_from and submitted_to - strings in the format YYYY-mm-ddThh:mm:ss
For example, 2010-04-05T13:02:03
featured - A blank value, 'f', 'false', 0, 'n', and 'no' all mean ignore
the featured flag. Everything else means display only featured.
You can also use filters in the survey report sense. Rather than document
exactly what parameters you would pass, follow these steps to figure it
out:
1. Enable filters on your survey and the questions you want to filter on.
2. Go to the report page and fill out the filters you want.
3. Click Submit.
4. Examine the query string of the page you end up on and note which
parameters are filled out. Use those same parameters here. """
format = format.lower()
if format not in FORMAT_CHOICES:
msg = ("%s is an unrecognized format. Crowdsourcing recognizes "
"these: %s") % (format, ", ".join(FORMAT_CHOICES))
return HttpResponse(msg)
is_staff = request.user.is_authenticated() and request.user.is_staff
if is_staff:
results = Submission.objects.all()
else:
# survey.can_have_public_submissions is complicated enough that
# we'll check it in Python, not the database.
results = Submission.objects.filter(is_public=True)
results = results.select_related("survey", "user")
get = request.GET.copy()
limit = int(get.pop("limit", [0])[0])
keys = get.keys()
basic_filters = (
'survey',
'user',
'submitted_from',
'submitted_to',
'featured',
'is_public')
if is_staff:
basic_filters += BALLOT_STUFFING_FIELDS
survey_slug = ""
for field in [f for f in keys if f in basic_filters]:
value = get[field]
search_field = field
if 'survey' == field:
search_field = 'survey__slug'
survey_slug = value
elif 'user' == field:
if '' == value:
value = None
else:
search_field = 'user__username'
elif field in ('submitted_from', 'submitted_to'):
date_format = "%Y-%m-%dT%H:%M:%S"
try:
value = datetime.strptime(value, date_format)
except ValueError:
return HttpResponse(
("Invalid %s format. Try, for example, "
"%s") % (field, datetime.now().strftime(date_format),))
if 'submitted_from' == field:
search_field = 'submitted_at__gte'
else:
search_field = 'submitted_at__lte'
elif field in('featured', 'is_public',):
falses = ('f', 'false', 'no', 'n', '0',)
value = len(value) and not value.lower() in falses
# search_field is unicode but needs to be ascii.
results = results.filter(**{str(search_field): value})
get.pop(field)
def get_survey():
survey = Survey.objects.get(slug=survey_slug)
get_survey = lambda: survey
return survey
if get:
if survey_slug:
results = extra_from_filters(
results,
"crowdsourcing_submission.id",
get_survey(),
get)
else:
message = (
"You've got a couple of extra filters here, and we "
"aren't sure what to do with them. You may have just "
"misspelled one of the basic filters (%s). You may have a "
"filter from a particular survey in mind. In that case, just "
"include survey=my-survey-slug in the query string. You may "
"also be trying to pull some hotshot move like, \"Get me all "
"submissions that belong to a survey with a filter named '%s' "
"that match '%s'.\" Crowdsourcing could support this, but it "
"would be pretty inefficient and, we're guessing, pretty "
"rare. If that's what you're trying to do I'm afraid you'll "
"have to do something more complicated like iterating through "
"all your surveys.")
item = get.items()[0]
message = message % (", ".join(basic_filters), item[0], item[1])
return HttpResponse(message)
if not is_staff:
if survey_slug:
if not get_survey().can_have_public_submissions():
results = []
else:
rs = [r for r in results if r.survey.can_have_public_submissions()]
results = rs
if limit:
results = results[:limit]
answer_lookup = get_all_answers(results,
include_private_questions=is_staff)
result_data = []
for r in results:
data = r.to_jsondata(answer_lookup, include_private_questions=is_staff)
result_data.append(data)
for data in result_data:
data.update(data["data"])
data.pop("data")
def get_keys():
key_lookup = {}
for data in result_data:
for key in data.keys():
key_lookup[key] = True
return sorted(key_lookup.keys())
if format == 'json':
response = HttpResponse(mimetype='application/json')
dump(result_data, response)
elif format == 'csv':
response = HttpResponse(mimetype='text/csv')
writer = csv.writer(response)
keys = get_keys()
writer.writerow(keys)
for data in result_data:
row = []
for k in keys:
row.append((u"%s" % _encode(data.get(k, ""))).encode("utf-8"))
writer.writerow(row)
elif format == 'xml':
doc = Document()
submissions = doc.createElement("submissions")
doc.appendChild(submissions)
for data in result_data:
submission = doc.createElement("submission")
submissions.appendChild(submission)
for key, value in data.items():
if value:
cell = doc.createElement(key)
submission.appendChild(cell)
cell.appendChild(doc.createTextNode(u"%s" % value))
response = HttpResponse(doc.toxml(), mimetype='text/xml')
elif format == 'html': # mostly for debugging.
keys = get_keys()
results = [
"<html><body><table>",
"<tr>%s</tr>" % "".join(["<th>%s</th>" % k for k in keys])]
for data in result_data:
cell = "<td>%s</td>"
cells = [cell % _encode(data.get(key, "")) for key in keys]
results.append("<tr>%s</tr>" % "".join(cells))
results.append("</table></body></html>")
response = HttpResponse("\n".join(results))
else:
return HttpResponse("Unsure how to handle %s format" % format)
return response
def _encode(possible):
    """Prepare a single value for tabular (CSV/XML/HTML) output."""
    # Booleans become 1/0 so exports are spreadsheet-friendly.
    if isinstance(possible, bool):
        return 1 if possible else 0
    # datetime_to_string returns a falsy value for non-datetimes, in which
    # case the original value is passed through unchanged.
    return datetime_to_string(possible) or possible
def submission(request, id):
template = 'crowdsourcing/submission.html'<|fim▁hole|> return render_to_response(template, dict(submission=sub), _rc(request))
def _default_report(survey):
field_count = count(1)
OTC = OPTION_TYPE_CHOICES
pie_choices = (
OTC.BOOL,
OTC.SELECT,
OTC.CHOICE,
OTC.NUMERIC_SELECT,
OTC.NUMERIC_CHOICE,
OTC.BOOL_LIST,)
all_choices = pie_choices + (OTC.LOCATION, OTC.PHOTO)
public_fields = survey.get_public_fields()
fields = [f for f in public_fields if f.option_type in all_choices]
report = SurveyReport(
survey=survey,
title=survey.title,
summary=survey.description or survey.tease)
displays = []
for field in fields:
if field.option_type in pie_choices:
type = SURVEY_DISPLAY_TYPE_CHOICES.PIE
elif field.option_type == OTC.LOCATION:
type = SURVEY_DISPLAY_TYPE_CHOICES.MAP
elif field.option_type == OTC.PHOTO:
type = SURVEY_DISPLAY_TYPE_CHOICES.SLIDESHOW
displays.append(SurveyReportDisplay(
report=report,
display_type=type,
fieldnames=field.fieldname,
annotation=field.label,
order=field_count.next()))
report.survey_report_displays = displays
return report
def survey_report(request, slug, report='', page=None):
templates = ['crowdsourcing/survey_report_%s.html' % slug,
'crowdsourcing/survey_report.html']
return _survey_report(request, slug, report, page, templates)
def embeded_survey_report(request, slug, report=''):
templates = ['crowdsourcing/embeded_survey_report_%s.html' % slug,
'crowdsourcing/embeded_survey_report.html']
return _survey_report(request, slug, report, None, templates)
def _survey_report(request, slug, report, page, templates):
""" Show a report for the survey. As rating is done in a separate
application we don't directly check request.GET["sort"] here.
crowdsourcing_settings.PRE_REPORT is the place for that. """
if page is None:
page = 1
else:
try:
page = int(page)
except ValueError:
raise Http404
survey = _get_survey_or_404(slug, request)
# is the survey anything we can actually have a report on?
is_public = survey.is_live and survey.can_have_public_submissions()
if not is_public and not request.user.is_staff:
raise Http404
reports = survey.surveyreport_set.all()
if report:
report_obj = get_object_or_404(reports, slug=report)
elif survey.default_report:
args = {"slug": survey.slug, "report": survey.default_report.slug}
return HttpResponseRedirect(reverse("survey_report_page_1",
kwargs=args))
else:
report_obj = _default_report(survey)
archive_fields = list(survey.get_public_archive_fields())
is_staff = request.user.is_staff
if is_staff:
submissions = survey.submission_set.all()
fields = list(survey.get_fields())
else:
submissions = survey.public_submissions()
fields = list(survey.get_public_fields())
filters = get_filters(survey, request.GET)
id_field = "crowdsourcing_submission.id"
if not report_obj.display_individual_results:
submissions = submissions.none()
else:
submissions = extra_from_filters(submissions,
id_field,
survey,
request.GET)
# If you want to sort based on rating, wire it up here.
if crowdsourcing_settings.PRE_REPORT:
pre_report = get_function(crowdsourcing_settings.PRE_REPORT)
submissions = pre_report(
submissions=submissions,
report=report_obj,
request=request)
if report_obj.featured:
submissions = submissions.filter(featured=True)
if report_obj.limit_results_to:
submissions = submissions[:report_obj.limit_results_to]
paginator, page_obj = paginate_or_404(submissions, page)
page_answers = get_all_answers(
page_obj.object_list,
include_private_questions=is_staff)
pages_to_link = pages_to_link_from_paginator(page, paginator)
display_individual_results = all([
report_obj.display_individual_results,
archive_fields or (is_staff and fields)])
context = dict(
survey=survey,
submissions=submissions,
paginator=paginator,
page_obj=page_obj,
pages_to_link=pages_to_link,
fields=fields,
archive_fields=archive_fields,
filters=filters,
report=report_obj,
page_answers=page_answers,
is_public=is_public,
display_individual_results=display_individual_results,
request=request)
return render_to_response(templates, context, _rc(request))
def pages_to_link_from_paginator(page, paginator):
    """ Return an array with numbers where you should link to a page, and False
    where you should show elipses. For example, if you have 9 pages and you are
    on page 9, return [1, False, 5, 6, 7, 8, 9]. """
    # Window of up to 4 pages on each side of the current page.
    pages = []
    for i in range(page - 4, page + 5):
        if 1 <= i <= paginator.num_pages:
            pages.append(i)
    # Guard against a page far outside the valid range (empty window).
    if not pages:
        return []
    if pages[0] > 1:
        pages = [1, False] + pages
    if pages[-1] < paginator.num_pages:
        pages = pages + [False, paginator.num_pages]
    DISCARD = -999
    for i in range(1, len(pages) - 1):
        if pages[i - 1] + 2 == pages[i + 1]:
            # Turn [1, False, 3... into [1, 2, 3. Floor division keeps the
            # page number an int ("/" would produce a float on Python 3).
            pages[i] = (pages[i - 1] + pages[i + 1]) // 2
        elif pages[i - 1] + 1 == pages[i + 1]:
            # Turn [1, False, 2... into [1, DISCARD, 2...
            pages[i] = DISCARD
    return [p for p in pages if p != DISCARD]
def paginate_or_404(queryset, page, num_per_page=20):
    """
    paginate a queryset (or other iterator) for the given page, returning the
    paginator and page object. Raises a 404 for an invalid page.
    """
    if page is None:
        page = 1
    paginator = Paginator(queryset, num_per_page)
    try:
        page_obj = paginator.page(page)
    # The original "except EmptyPage, InvalidPage:" is legacy Python 2 syntax
    # that caught only EmptyPage (binding the instance to the name
    # InvalidPage); a tuple catches both exception types as intended.
    except (EmptyPage, InvalidPage):
        raise Http404
    return paginator, page_obj
def location_question_results(
        request,
        question_id,
        limit_map_answers,
        survey_report_slug=""):
    # Return a JSON payload of {lat, lng, url[, icon]} entries for every
    # visible answer to a location question, for plotting on a map.
    question = get_object_or_404(Question.objects.select_related("survey"),
                                 pk=question_id,
                                 answer_is_public=True)
    is_staff = request.user.is_staff
    # Non-staff users may only see surveys that can have public submissions.
    if not question.survey.can_have_public_submissions() and not is_staff:
        raise Http404
    featured = limit_results_to = False
    if survey_report_slug:
        # A report can restrict the map to featured submissions and cap the
        # number of results.
        survey_report = get_object_or_404(SurveyReport.objects,
                                          survey=question.survey,
                                          slug=survey_report_slug)
        featured = survey_report.featured
        limit_results_to = survey_report.limit_results_to
    # Build a submission_id -> icon mapping from every icon question on the
    # survey, so map pins can be styled according to their answers.
    icon_lookup = {}
    icon_questions = question.survey.icon_questions()
    for icon_question in icon_questions:
        icon_by_answer = {}
        for (option, icon) in icon_question.parsed_option_icon_pairs():
            if icon:
                icon_by_answer[option] = icon
        answer_set = icon_question.answer_set.all()
        for answer in answer_set.select_related("question"):
            if answer.value in icon_by_answer:
                icon = icon_by_answer[answer.value]
                icon_lookup[answer.submission_id] = icon
    # Only answers that actually carry coordinates are mappable.
    answers = question.answer_set.filter(
        ~Q(latitude=None),
        ~Q(longitude=None)).order_by("-submission__submitted_at")
    if not is_staff:
        answers = answers.filter(submission__is_public=True)
    if featured:
        answers = answers.filter(submission__featured=True)
    answers = extra_from_filters(
        answers,
        "submission_id",
        question.survey,
        request.GET)
    # limit_map_answers arrives as a string from the URL; empty/0 means
    # "no explicit limit".
    limit_map_answers = int(limit_map_answers) if limit_map_answers else 0
    if limit_map_answers or limit_results_to:
        # Apply the tightest of the nonzero limits.
        answers = answers[:min(filter(None, [limit_map_answers,
                                             limit_results_to,]))]
    entries = []
    view = "crowdsourcing.views.submission_for_map"
    for answer in answers:
        kwargs = {"id": answer.submission_id}
        d = {
            "lat": answer.latitude,
            "lng": answer.longitude,
            "url": reverse(view, kwargs=kwargs)}
        if answer.submission_id in icon_lookup:
            d["icon"] = icon_lookup[answer.submission_id]
        entries.append(d)
    response = HttpResponse(mimetype='application/json')
    dump({"entries": entries}, response)
    return response
def location_question_map(
        request,
        question_id,
        display_id,
        survey_report_slug=""):
    # Render the standalone map page for a single location question.
    question = Question.objects.get(pk=question_id)
    if not question.answer_is_public and not request.user.is_staff:
        raise Http404
    report = None
    limit = 0
    if survey_report_slug:
        report = SurveyReport.objects.get(slug=survey_report_slug,
                                          survey=question.survey)
        limit = report.limit_results_to
    else:
        report = _default_report(question.survey)
    # This cast is not for validation since the urls file already guaranteed
    # it would be a nonempty string of digits. It's simply because display_id
    # is a string.
    if int(display_id):
        display = SurveyReportDisplay.objects.get(pk=display_id)
    else:
        # display_id of 0 means "find the report display containing this
        # question".
        # NOTE(review): if no display matches, `display` is never bound and
        # the attribute write below raises NameError (a 500) — confirm
        # whether a 404 is intended. The loop also keeps scanning after a
        # match, so the *last* matching display wins; verify that is
        # deliberate (a `break` would make the first one win).
        for d in report.survey_report_displays:
            if question.pk in [q.pk for q in d.questions()]:
                display = d
    display.limit_map_answers = limit
    return render_to_response('crowdsourcing/location_question_map.html', dict(
        display=display,
        question=question,
        report=report))
def submission_for_map(request, id):
    # Render the small popup template for one submission on the map.
    # Staff may view any submission; everyone else only public ones.
    lookup = {"pk": id}
    if not request.user.is_staff:
        lookup["is_public"] = True
    submission = get_object_or_404(Submission.objects, **lookup)
    return render_to_response('crowdsourcing/submission_for_map.html',
                              dict(submission=submission), _rc(request))
|
sub = get_object_or_404(Submission.objects, is_public=True, pk=id)
|
<|file_name|>_keyset_reader_test.py<|end_file_name|><|fim▁begin|># Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tink.python.tink._keyset_reader."""
from typing import cast
from absl.testing import absltest
from tink.proto import tink_pb2
import tink
from tink import core
class JsonKeysetReaderTest(absltest.TestCase):
  # Exercises tink.JsonKeysetReader: parsing cleartext keysets, parsing
  # encrypted keysets, and rejecting malformed JSON with TinkError.

  def test_read(self):
    # Minimal valid cleartext keyset containing one enabled AES-GCM key.
    json_keyset = """
        {
          "primaryKeyId": 42,
          "key": [
            {
              "keyData": {
                "typeUrl": "type.googleapis.com/google.crypto.tink.AesGcmKey",
                "keyMaterialType": "SYMMETRIC",
                "value": "GhCS/1+ejWpx68NfGt6ziYHd"
              },
              "outputPrefixType": "TINK",
              "keyId": 42,
              "status": "ENABLED"
            }
          ]
        }"""
    reader = tink.JsonKeysetReader(json_keyset)
    keyset = reader.read()
    self.assertEqual(keyset.primary_key_id, 42)
    self.assertLen(keyset.key, 1)

  def test_read_invalid(self):
    # Malformed input must surface as TinkError, not a raw JSON exception.
    reader = tink.JsonKeysetReader('not json')
    with self.assertRaises(core.TinkError):
      reader.read()

  def test_read_encrypted(self):
    # encryptedKeyset is a base64-encoding of 'some ciphertext with keyset'
    json_encrypted_keyset = """
        {
          "encryptedKeyset": "c29tZSBjaXBoZXJ0ZXh0IHdpdGgga2V5c2V0",
          "keysetInfo": {
            "primaryKeyId": 42,
            "keyInfo": [
              {
                "typeUrl": "type.googleapis.com/google.crypto.tink.AesGcmKey",
                "outputPrefixType": "TINK",
                "keyId": 42,
                "status": "ENABLED"
              }
            ]
          }
        }"""
    reader = tink.JsonKeysetReader(json_encrypted_keyset)
    enc_keyset = reader.read_encrypted()
    # The base64 payload must be decoded back to the raw ciphertext bytes.
    self.assertEqual(enc_keyset.encrypted_keyset,
                     b'some ciphertext with keyset')
    self.assertLen(enc_keyset.keyset_info.key_info, 1)
    self.assertEqual(enc_keyset.keyset_info.key_info[0].type_url,
                     'type.googleapis.com/google.crypto.tink.AesGcmKey')

  def test_read_encrypted_invalid(self):
    reader = tink.JsonKeysetReader('not json')
    with self.assertRaises(core.TinkError):
      reader.read_encrypted()
class BinaryKeysetReaderTest(absltest.TestCase):
  # Exercises tink.BinaryKeysetReader on serialized keyset protos.

  def test_read(self):
    # Build a one-key keyset proto, serialize it, and check it round-trips
    # through the reader unchanged.
    keyset = tink_pb2.Keyset()
    keyset.primary_key_id = 42
    key = keyset.key.add()
    key.key_data.type_url = 'type.googleapis.com/google.crypto.tink.AesGcmKey'
    key.key_data.key_material_type = tink_pb2.KeyData.SYMMETRIC
    key.key_data.value = b'GhCS/1+ejWpx68NfGt6ziYHd'
    key.output_prefix_type = tink_pb2.TINK
    key.key_id = 42
    key.status = tink_pb2.ENABLED
    reader = tink.BinaryKeysetReader(keyset.SerializeToString())
    self.assertEqual(keyset, reader.read())

  def test_read_none(self):
    # None input is rejected with TinkError rather than a TypeError.
    with self.assertRaises(core.TinkError):
      reader = tink.BinaryKeysetReader(cast(bytes, None))
      reader.read()

  def test_read_empty(self):
    with self.assertRaises(core.TinkError):
      reader = tink.BinaryKeysetReader(b'')
      reader.read()

  def test_read_invalid(self):
    # Bytes that do not parse as a Keyset proto must raise TinkError.
    with self.assertRaises(core.TinkError):
      reader = tink.BinaryKeysetReader(b'some weird data')
      reader.read()

  def test_read_encrypted(self):
    # Build an EncryptedKeyset proto and check it round-trips unchanged.
    encrypted_keyset = tink_pb2.EncryptedKeyset()
    encrypted_keyset.encrypted_keyset = b'c29tZSBjaXBoZXJ0ZXh0IHdpdGgga2V5c2V0'
    encrypted_keyset.keyset_info.primary_key_id = 42
    key_info = encrypted_keyset.keyset_info.key_info.add()
    key_info.type_url = 'type.googleapis.com/google.crypto.tink.AesGcmKey'
    key_info.output_prefix_type = tink_pb2.TINK
    key_info.key_id = 42
    key_info.status = tink_pb2.ENABLED
    reader = tink.BinaryKeysetReader(
        encrypted_keyset.SerializeToString())
    self.assertEqual(encrypted_keyset, reader.read_encrypted())
def test_read_encrypted_none(self):
with self.assertRaises(core.TinkError):
reader = tink.BinaryKeysetReader(cast(bytes, None))
reader.read_encrypted()
def test_read_encrypted_empty(self):
with self.assertRaises(core.TinkError):
reader = tink.BinaryKeysetReader(b'')
reader.read_encrypted()<|fim▁hole|> reader.read_encrypted()
if __name__ == '__main__':
absltest.main()<|fim▁end|>
|
def test_read_encrypted_invalid(self):
with self.assertRaises(core.TinkError):
reader = tink.BinaryKeysetReader(b'some weird data')
|
<|file_name|>plane.go<|end_file_name|><|fim▁begin|>package main
import (
"time"
"bytes"
"fmt"
)
const FreshPeriod = time.Minute * 10
type Location struct {
id int
Time time.Time
Latitude float32
Longitude float32
}
type ValuePair struct {
loaded bool
value string
}
// TODO: Don't change a value (or add squawk etc) if it already exists with that value.
// Don't add that to the history. However add new changes. Always update LastSeen if after
type Plane struct {
Icao uint
CallSign string
CallSigns []ValuePair
Squawks []ValuePair
Locations []Location
Altitude int
Track float32
Speed float32
Vertical int
LastSeen time.Time
History []*message // won't contain duplicate messages such as "on ground" unless they change
// Various flags
SquawkCh bool
Emergency bool
Ident bool
OnGround bool
}
// ToJson serialises the plane's current state as a JSON object string.
// The JSON is assembled by hand into a bytes.Buffer, which fixes the field
// order of the output (encoding/json is deliberately not used here).
func (p *Plane) ToJson() string {
	buf := bytes.Buffer{}
	buf.WriteString("{")
	// ICAO address rendered as the conventional six upper-case hex digits.
	buf.WriteString(fmt.Sprintf("\"icao\": \"%06X\", ", p.Icao))
	buf.WriteString(fmt.Sprintf("\"callsign\": %q, ", p.CallSign))
	buf.WriteString("\"callsigns\": [")
	for i, cs := range p.CallSigns {
		// %q quotes and escapes the value for JSON string safety.
		buf.WriteString(fmt.Sprintf("%q", cs.value))
		// No separator after the final element.
		if i != len(p.CallSigns) - 1 {
			buf.WriteString(", ")
		}
	}
	// "location" (most recent fix, "lat,lon") is only emitted when at least
	// one position has been recorded.
	if len(p.Locations) > 0 {
		lastLoc := p.Locations[len(p.Locations) - 1]
		buf.WriteString(fmt.Sprintf("], \"location\": \"%f,%f\", ", lastLoc.Latitude, lastLoc.Longitude))
	} else {
		buf.WriteString("], ")
	}
	buf.WriteString("\"squawks\": [")
	for i, sq := range p.Squawks {
		buf.WriteString(fmt.Sprintf("%q", sq.value))
		if i != len(p.Squawks) - 1 {
			buf.WriteString(", ")
		}
	}
	buf.WriteString(fmt.Sprintf("], \"altitude\": %d, ", p.Altitude))
	buf.WriteString(fmt.Sprintf("\"track\": %.2f, ", p.Track))
	buf.WriteString(fmt.Sprintf("\"speed\": %.2f, ", p.Speed))
	buf.WriteString(fmt.Sprintf("\"vertical\": %d, ", p.Vertical))
	buf.WriteString(fmt.Sprintf("\"lastSeen\": %q", p.LastSeen.String()))
	buf.WriteString("}")
	return buf.String()
}
// SetCallSign records cs as the plane's current call sign and appends it to
// the call-sign history if it has not been seen before.
// Returns false when cs is empty or already the current call sign, true
// otherwise (even when the history already contained cs).
func (p *Plane) SetCallSign(cs string) bool {
	if cs == "" || cs == p.CallSign {
		return false
	}
	p.CallSign = cs
	seen := false
	for _, pair := range p.CallSigns {
		if pair.value == cs {
			seen = true
			break
		}
	}
	if !seen {
		p.CallSigns = append(p.CallSigns, ValuePair{value: cs})
	}
	return true
}
// SetSquawk appends the squawk code s to the plane's squawk history.
// Returns true if it was added, false when s is empty or already recorded.
func (p *Plane) SetSquawk(s string) bool {
	if s == "" {
		return false
	}
	for _, pair := range p.Squawks {
		if pair.value == s {
			// Already on record; nothing to do.
			return false
		}
	}
	p.Squawks = append(p.Squawks, ValuePair{value: s})
	return true
}
// SetLocation creates a location from the specified Lat/lon and time and appends it
// to the locations slice. Returns true if successful, and false if there are no values to add
func (p *Plane) SetLocation(lat, lon float32, t time.Time) bool {
if lat == 0.0 || lon == 0.0 {
return false
}
l := Location{Time: t, Latitude: lat, Longitude: lon}
p.Locations = append(p.Locations, l)
return true
}<|fim▁hole|> if a != 0 && p.Altitude != a {
p.Altitude = a
return true
}
return false
}
// SetTrack stores the plane's current track when it differs from the
// recorded value. Returns true on change, false otherwise.
func (p *Plane) SetTrack(t float32) bool {
	if t == p.Track {
		return false
	}
	p.Track = t
	return true
}
// SetSpeed stores the plane's ground speed when it is nonzero and differs
// from the recorded value. Returns true on change, false otherwise.
func (p *Plane) SetSpeed(s float32) bool {
	if s == 0.0 || s == p.Speed {
		return false
	}
	p.Speed = s
	return true
}
// SetVertical stores the plane's vertical rate when it differs from the
// recorded value. Returns true on change, false otherwise.
func (p *Plane) SetVertical(v int) bool {
	if v == p.Vertical {
		return false
	}
	p.Vertical = v
	return true
}
// SetHistory appends the raw message m to the plane's history log.
// Deciding whether a message is worth logging (deduplication, freshness)
// is the caller's responsibility — see updatePlane.
func (p *Plane) SetHistory(m *message) {
	p.History = append(p.History, m)
}
// SetSquawkCh stores the squawk-change flag when it differs from the
// recorded value. Returns true on change, false otherwise.
func (p *Plane) SetSquawkCh(s bool) bool {
	if s == p.SquawkCh {
		return false
	}
	p.SquawkCh = s
	return true
}
// SetEmergency stores the emergency flag when it differs from the recorded
// value. Returns true on change, false otherwise.
func (p *Plane) SetEmergency(e bool) bool {
	if e == p.Emergency {
		return false
	}
	p.Emergency = e
	return true
}
// SetIdent stores the ident flag when it differs from the recorded value.
// Returns true on change, false otherwise.
func (p *Plane) SetIdent(i bool) bool {
	if i == p.Ident {
		return false
	}
	p.Ident = i
	return true
}
// SetOnGround stores the on-ground flag when it differs from the recorded
// value. Returns true on change, false otherwise.
func (p *Plane) SetOnGround(g bool) bool {
	if g == p.OnGround {
		return false
	}
	p.OnGround = g
	return true
}
func updatePlane(m *message, pl *Plane) {
buf := bytes.Buffer{}
if m == nil {
return
}
if m.dGen.After(pl.LastSeen) {
pl.LastSeen = m.dGen
}
if verbose {
buf.WriteString(fmt.Sprintf("%s - %06X -", m.dGen.String(), m.icao))
}
var dataStr string
var written bool
switch m.tType {
case 1:
written = pl.SetCallSign(m.callSign)
if verbose {
dataStr = fmt.Sprintf(" Callsign: %q", m.callSign)
}
case 2:
written = pl.SetAltitude(m.altitude) || written
written = pl.SetSpeed(m.groundSpeed) || written
written = pl.SetTrack(m.track) || written
written = pl.SetLocation(m.latitude, m.longitude, m.dGen) || written
written = pl.SetOnGround(m.onGround) || written
if verbose {
dataStr = fmt.Sprintf(" Altitude: %d, Speed: %.2f, Track: %.2f, Lat: %s, Lon: %s", m.altitude, m.groundSpeed, m.track, m.latitude, m.longitude)
}
case 3:
written = pl.SetAltitude(m.altitude) || written
written = pl.SetLocation(m.latitude, m.longitude, m.dGen) || written
written = pl.SetSquawkCh(m.squawkCh) || written
written = pl.SetEmergency(m.emergency) || written
written = pl.SetIdent(m.ident) || written
written = pl.SetOnGround(m.onGround) || written
if verbose {
dataStr = fmt.Sprintf(" Altitude: %d, Lat: %f, Lon: %f", m.altitude, m.latitude, m.longitude)
}
case 4:
written = pl.SetSpeed(m.groundSpeed) || written
written = pl.SetTrack(m.track) || written
written = pl.SetVertical(m.vertical) || written
if verbose {
dataStr = fmt.Sprintf(" Speed: %.2f, Track: %.2f, Vertical Rate: %d", m.groundSpeed, m.track, m.vertical)
}
case 5:
written = pl.SetAltitude(m.altitude) || written
written = pl.SetSquawkCh(m.squawkCh) || written
written = pl.SetIdent(m.ident) || written
written = pl.SetOnGround(m.onGround) || written
if verbose {
dataStr = fmt.Sprintf(" Altitude: %d", m.altitude)
}
case 6:
written = pl.SetAltitude(m.altitude) || written
written = pl.SetSquawk(m.squawk) || written
written = pl.SetSquawkCh(m.squawkCh) || written
written = pl.SetEmergency(m.emergency) || written
written = pl.SetIdent(m.ident) || written
written = pl.SetOnGround(m.onGround) || written
if verbose {
dataStr = fmt.Sprintf(" Altitude: %d, SquawkCode: %q", m.altitude, m.squawk)
}
case 7:
written = pl.SetAltitude(m.altitude) || written
written = pl.SetOnGround(m.onGround) || written
if verbose {
dataStr = fmt.Sprintf(" Altitude: %d", m.altitude)
}
case 8:
written = pl.SetOnGround(m.onGround) || written
if verbose {
dataStr = fmt.Sprintf(" OnGround: %v", m.onGround)
}
}
// Log message if it updated a value, or the last message was more than 10 minutes ago
if written || m.dGen.Sub(pl.LastSeen) > FreshPeriod {
pl.SetHistory(m)
}
if verbose {
buf.WriteString(dataStr)
fmt.Println(buf.String())
buf.Reset()
}
}<|fim▁end|>
|
// SetAltitude will update the altitude if different from existing altitude.
// Returns true if successful, false if there is no change.
func (p *Plane) SetAltitude(a int) bool {
|
<|file_name|>properties.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// This file is a Mako template: http://www.makotemplates.org/
// Please note that valid Rust syntax may be mangled by the Mako parser.
// For example, Vec<&Foo> will be mangled as Vec&Foo>. To work around these issues, the code
// can be escaped. In the above example, Vec<<&Foo> or Vec< &Foo> achieves the desired result of Vec<&Foo>.
<%namespace name="helpers" file="/helpers.mako.rs" />
use std::ascii::AsciiExt;
use std::boxed::Box as StdBox;
use std::collections::HashSet;
use std::fmt::{self, Write};
use std::sync::Arc;
use Atom;
use app_units::Au;
#[cfg(feature = "servo")] use cssparser::{Color as CSSParserColor, RGBA};
use cssparser::{Parser, TokenSerializationType};
use error_reporting::ParseErrorReporter;
use url::Url;
#[cfg(feature = "servo")] use euclid::side_offsets::SideOffsets2D;
use euclid::size::Size2D;
use computed_values;
use font_metrics::FontMetricsProvider;
#[cfg(feature = "servo")] use logical_geometry::{LogicalMargin, PhysicalSide};
use logical_geometry::WritingMode;
use parser::{Parse, ParserContext, ParserContextExtraData};
use style_traits::ToCss;
use stylesheets::Origin;
#[cfg(feature = "servo")] use values::Either;
use values::{HasViewportPercentage, computed};
use cascade_info::CascadeInfo;
use rule_tree::StrongRuleNode;
#[cfg(feature = "servo")] use values::specified::BorderStyle;
use self::property_bit_field::PropertyBitField;
pub use self::declaration_block::*;
<%!
from data import Method, Keyword, to_rust_ident
import os.path
%>
#[path="${repr(os.path.join(os.path.dirname(__file__), 'declaration_block.rs'))[1:-1]}"]
pub mod declaration_block;
pub mod longhands {
use cssparser::Parser;
use parser::{Parse, ParserContext};
use values::specified;
<%include file="/longhand/background.mako.rs" />
<%include file="/longhand/border.mako.rs" />
<%include file="/longhand/box.mako.rs" />
<%include file="/longhand/color.mako.rs" />
<%include file="/longhand/column.mako.rs" />
<%include file="/longhand/counters.mako.rs" />
<%include file="/longhand/effects.mako.rs" />
<%include file="/longhand/font.mako.rs" />
<%include file="/longhand/inherited_box.mako.rs" />
<%include file="/longhand/inherited_table.mako.rs" />
<%include file="/longhand/inherited_text.mako.rs" />
<%include file="/longhand/list.mako.rs" />
<%include file="/longhand/margin.mako.rs" />
<%include file="/longhand/outline.mako.rs" />
<%include file="/longhand/padding.mako.rs" />
<%include file="/longhand/pointing.mako.rs" />
<%include file="/longhand/position.mako.rs" />
<%include file="/longhand/table.mako.rs" />
<%include file="/longhand/text.mako.rs" />
<%include file="/longhand/ui.mako.rs" />
<%include file="/longhand/inherited_svg.mako.rs" />
<%include file="/longhand/svg.mako.rs" />
<%include file="/longhand/xul.mako.rs" />
}
pub mod shorthands {
use cssparser::Parser;
use parser::{Parse, ParserContext};
use values::specified;
pub fn parse_four_sides<F, T>(input: &mut Parser, parse_one: F) -> Result<(T, T, T, T), ()>
where F: Fn(&mut Parser) -> Result<T, ()>, F: Copy, T: Clone {
// zero or more than four values is invalid.
// one value sets them all
// two values set (top, bottom) and (left, right)
// three values set top, (left, right) and bottom
// four values set them in order
let top = try!(parse_one(input));
let right;
let bottom;
let left;
match input.try(parse_one) {
Err(()) => {
right = top.clone();
bottom = top.clone();
left = top.clone();
}
Ok(value) => {
right = value;
match input.try(parse_one) {
Err(()) => {
bottom = top.clone();
left = right.clone();
}
Ok(value) => {
bottom = value;
match input.try(parse_one) {
Err(()) => {
left = right.clone();
}
Ok(value) => {
left = value;
}
}
}
}
}
}
Ok((top, right, bottom, left))
}
<%include file="/shorthand/serialize.mako.rs" />
<%include file="/shorthand/background.mako.rs" />
<%include file="/shorthand/border.mako.rs" />
<%include file="/shorthand/box.mako.rs" />
<%include file="/shorthand/column.mako.rs" />
<%include file="/shorthand/font.mako.rs" />
<%include file="/shorthand/inherited_text.mako.rs" />
<%include file="/shorthand/list.mako.rs" />
<%include file="/shorthand/margin.mako.rs" />
<%include file="/shorthand/mask.mako.rs" />
<%include file="/shorthand/outline.mako.rs" />
<%include file="/shorthand/padding.mako.rs" />
<%include file="/shorthand/position.mako.rs" />
<%include file="/shorthand/text.mako.rs" />
}
pub mod animated_properties {
<%include file="/helpers/animated_properties.mako.rs" />
}
// TODO(SimonSapin): Convert this to a syntax extension rather than a Mako template.
// Maybe submit for inclusion in libstd?
mod property_bit_field {
use logical_geometry::WritingMode;
pub struct PropertyBitField {
storage: [u32; (${len(data.longhands)} - 1 + 32) / 32]
}
impl PropertyBitField {
#[inline]
pub fn new() -> PropertyBitField {
PropertyBitField { storage: [0; (${len(data.longhands)} - 1 + 32) / 32] }
}
#[inline]
fn get(&self, bit: usize) -> bool {
(self.storage[bit / 32] & (1 << (bit % 32))) != 0
}
#[inline]
fn set(&mut self, bit: usize) {
self.storage[bit / 32] |= 1 << (bit % 32)
}
% for i, property in enumerate(data.longhands):
% if not property.derived_from:
#[allow(non_snake_case)]
#[inline]
pub fn get_${property.ident}(&self) -> bool {
self.get(${i})
}
#[allow(non_snake_case)]
#[inline]
pub fn set_${property.ident}(&mut self) {
self.set(${i})
}
% endif
% if property.logical:
#[allow(non_snake_case)]
pub fn get_physical_${property.ident}(&self, wm: WritingMode) -> bool {
<%helpers:logical_setter_helper name="${property.name}">
<%def name="inner(physical_ident)">
self.get_${physical_ident}()
</%def>
</%helpers:logical_setter_helper>
}
#[allow(non_snake_case)]
pub fn set_physical_${property.ident}(&mut self, wm: WritingMode) {
<%helpers:logical_setter_helper name="${property.name}">
<%def name="inner(physical_ident)">
self.set_${physical_ident}()
</%def>
</%helpers:logical_setter_helper>
}
% endif
% endfor
}
}
% for property in data.longhands:
% if not property.derived_from:
#[allow(non_snake_case)]
fn substitute_variables_${property.ident}<F>(
value: &DeclaredValue<longhands::${property.ident}::SpecifiedValue>,
custom_properties: &Option<Arc<::custom_properties::ComputedValuesMap>>,
f: F,
error_reporter: &mut StdBox<ParseErrorReporter + Send>)
where F: FnOnce(&DeclaredValue<longhands::${property.ident}::SpecifiedValue>)
{
if let DeclaredValue::WithVariables {
ref css, first_token_type, ref base_url, from_shorthand
} = *value {
// FIXME(heycam): A ParserContextExtraData should be built from data
// stored in the WithVariables, in case variable expansion results in
// a url() value.
let extra_data = ParserContextExtraData::default();
substitute_variables_${property.ident}_slow(css,
first_token_type,
base_url,
from_shorthand,
custom_properties,
f,
error_reporter,
extra_data);
} else {
f(value);
}
}
#[allow(non_snake_case)]
#[inline(never)]
fn substitute_variables_${property.ident}_slow<F>(
css: &String,
first_token_type: TokenSerializationType,
base_url: &Url,
from_shorthand: Option<Shorthand>,
custom_properties: &Option<Arc<::custom_properties::ComputedValuesMap>>,
f: F,
error_reporter: &mut StdBox<ParseErrorReporter + Send>,
extra_data: ParserContextExtraData)
where F: FnOnce(&DeclaredValue<longhands::${property.ident}::SpecifiedValue>) {
f(&
::custom_properties::substitute(css, first_token_type, custom_properties)
.and_then(|css| {
// As of this writing, only the base URL is used for property values:
//
// FIXME(pcwalton): Cloning the error reporter is slow! But so are custom
// properties, so whatever...
let context = ParserContext::new_with_extra_data(
::stylesheets::Origin::Author, base_url, (*error_reporter).clone(),
extra_data);
Parser::new(&css).parse_entirely(|input| {
match from_shorthand {
None => {
longhands::${property.ident}::parse_specified(&context, input)
}
% for shorthand in data.shorthands:
% if property in shorthand.sub_properties:
Some(Shorthand::${shorthand.camel_case}) => {
shorthands::${shorthand.ident}::parse_value(&context, input)
.map(|result| match result.${property.ident} {
Some(value) => DeclaredValue::Value(value),
None => DeclaredValue::Initial,
})
}
% endif
% endfor
_ => unreachable!()
}
})
})
.unwrap_or(
// Invalid at computed-value time.
DeclaredValue::${"Inherit" if property.style_struct.inherited else "Initial"}
)
);
}
% endif
% endfor
/// Only keep the "winning" declaration for any given property, by importance then source order.
/// The input and output are in source order
fn deduplicate_property_declarations(block: &mut PropertyDeclarationBlock) {
let mut deduplicated = Vec::new();
let mut seen_normal = PropertyBitField::new();
let mut seen_important = PropertyBitField::new();
let mut seen_custom_normal = Vec::new();
let mut seen_custom_important = Vec::new();
for (declaration, importance) in block.declarations.drain(..).rev() {
match declaration {
% for property in data.longhands:
PropertyDeclaration::${property.camel_case}(..) => {
% if not property.derived_from:
if importance.important() {
if seen_important.get_${property.ident}() {
block.important_count -= 1;
continue
}
if seen_normal.get_${property.ident}() {
remove_one(&mut deduplicated, |d| {
matches!(d, &(PropertyDeclaration::${property.camel_case}(..), _))
});
}
seen_important.set_${property.ident}()
} else {
if seen_normal.get_${property.ident}() ||
seen_important.get_${property.ident}() {
continue
}
seen_normal.set_${property.ident}()
}
% else:
unreachable!();
% endif
},<|fim▁hole|> if importance.important() {
if seen_custom_important.contains(name) {
block.important_count -= 1;
continue
}
if seen_custom_normal.contains(name) {
remove_one(&mut deduplicated, |d| {
matches!(d, &(PropertyDeclaration::Custom(ref n, _), _) if n == name)
});
}
seen_custom_important.push(name.clone())
} else {
if seen_custom_normal.contains(name) ||
seen_custom_important.contains(name) {
continue
}
seen_custom_normal.push(name.clone())
}
}
}
deduplicated.push((declaration, importance))
}
deduplicated.reverse();
block.declarations = deduplicated;
}
#[inline]
fn remove_one<T, F: FnMut(&T) -> bool>(v: &mut Vec<T>, mut remove_this: F) {
    // Drop every element matching the predicate in place; in debug builds,
    // verify that exactly one element was removed.
    let len_before = v.len();
    v.retain(|elem| !remove_this(elem));
    debug_assert_eq!(len_before - 1, v.len());
}
/// The CSS-wide keywords that any property may take as its value:
/// `initial`, `inherit` and `unset`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum CSSWideKeyword {
    InitialKeyword,
    InheritKeyword,
    UnsetKeyword,
}

impl Parse for CSSWideKeyword {
    fn parse(input: &mut Parser) -> Result<Self, ()> {
        // Keywords are matched ASCII-case-insensitively; anything else is a
        // parse error.
        match_ignore_ascii_case! { try!(input.expect_ident()),
            "initial" => Ok(CSSWideKeyword::InitialKeyword),
            "inherit" => Ok(CSSWideKeyword::InheritKeyword),
            "unset" => Ok(CSSWideKeyword::UnsetKeyword),
            _ => Err(())
        }
    }
}
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum Shorthand {
% for property in data.shorthands:
${property.camel_case},
% endfor
}
impl Shorthand {
pub fn from_name(name: &str) -> Option<Shorthand> {
match_ignore_ascii_case! { name,
% for property in data.shorthands:
"${property.name}" => Some(Shorthand::${property.camel_case}),
% endfor
_ => None
}
}
pub fn name(&self) -> &'static str {
match *self {
% for property in data.shorthands:
Shorthand::${property.camel_case} => "${property.name}",
% endfor
}
}
pub fn longhands(&self) -> &'static [&'static str] {
% for property in data.shorthands:
static ${property.ident.upper()}: &'static [&'static str] = &[
% for sub in property.sub_properties:
"${sub.name}",
% endfor
];
% endfor
match *self {
% for property in data.shorthands:
Shorthand::${property.camel_case} => ${property.ident.upper()},
% endfor
}
}
pub fn longhands_to_css<'a, W, I>(&self, declarations: I, dest: &mut W) -> fmt::Result
where W: fmt::Write, I: Iterator<Item=&'a PropertyDeclaration> {
match *self {
% for property in data.shorthands:
Shorthand::${property.camel_case} => {
match shorthands::${property.ident}::LonghandsToSerialize::from_iter(declarations) {
Ok(longhands) => longhands.to_css(dest),
Err(_) => Err(fmt::Error)
}
},
% endfor
}
}
/// Serializes possible shorthand name with value to input buffer given a list of longhand declarations.
/// On success, returns true if shorthand value is written and false if no shorthand value is present.
pub fn serialize_shorthand_to_buffer<'a, W, I>(self,
dest: &mut W,
declarations: I,
is_first_serialization: &mut bool)
-> Result<bool, fmt::Error>
where W: Write, I: IntoIterator<Item=&'a PropertyDeclaration>, I::IntoIter: Clone {
match self.get_shorthand_appendable_value(declarations) {
None => Ok(false),
Some(appendable_value) => {
let property_name = self.name();
append_serialization(
dest,
property_name,
appendable_value,
Importance::Normal,
is_first_serialization
).and_then(|_| Ok(true))
}
}
}
fn get_shorthand_appendable_value<'a, I>(self, declarations: I)
-> Option<AppendableValue<'a, I::IntoIter>>
where I: IntoIterator<Item=&'a PropertyDeclaration>, I::IntoIter: Clone {
let declarations = declarations.into_iter();
// Only cloning iterators (a few pointers each) not declarations.
let mut declarations2 = declarations.clone();
let mut declarations3 = declarations.clone();
let first_declaration = match declarations2.next() {
Some(declaration) => declaration,
None => return None
};
// https://drafts.csswg.org/css-variables/#variables-in-shorthands
if let Some(css) = first_declaration.with_variables_from_shorthand(self) {
if declarations2.all(|d| d.with_variables_from_shorthand(self) == Some(css)) {
return Some(AppendableValue::Css(css));
}
else {
return None;
}
}
if !declarations3.any(|d| d.with_variables()) {
return Some(AppendableValue::DeclarationsForShorthand(self, declarations));
}
None
}
}
/// A declared property value as written in the stylesheet: either a fully
/// parsed value, raw CSS text still containing var() references, or one of
/// the CSS-wide keywords.
#[derive(Clone, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum DeclaredValue<T> {
    Value(T),
    /// Unresolved CSS text that references custom properties; it is parsed
    /// only after variable substitution (see substitute_variables_*).
    WithVariables {
        css: String,
        first_token_type: TokenSerializationType,
        base_url: Url,
        /// Set when this longhand value came from expanding a shorthand.
        from_shorthand: Option<Shorthand>,
    },
    Initial,
    Inherit,
    // There is no Unset variant here.
    // The 'unset' keyword is represented as either Initial or Inherit,
    // depending on whether the property is inherited.
}

impl<T: HasViewportPercentage> HasViewportPercentage for DeclaredValue<T> {
    fn has_viewport_percentage(&self) -> bool {
        match *self {
            DeclaredValue::Value(ref v)
                => v.has_viewport_percentage(),
            // Variables must be substituted before this question can be
            // answered; reaching here unsubstituted is a caller bug.
            DeclaredValue::WithVariables { .. }
                => panic!("DeclaredValue::has_viewport_percentage without resolving variables!"),
            DeclaredValue::Initial |
            DeclaredValue::Inherit => false,
        }
    }
}

impl<T: ToCss> ToCss for DeclaredValue<T> {
    fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
        match *self {
            DeclaredValue::Value(ref inner) => inner.to_css(dest),
            // Unsubstituted longhand values serialize as their raw CSS text.
            DeclaredValue::WithVariables { ref css, from_shorthand: None, .. } => {
                dest.write_str(css)
            }
            // https://drafts.csswg.org/css-variables/#variables-in-shorthands
            DeclaredValue::WithVariables { .. } => Ok(()),
            DeclaredValue::Initial => dest.write_str("initial"),
            DeclaredValue::Inherit => dest.write_str("inherit"),
        }
    }
}
/// One parsed CSS declaration: a value for a specific longhand property, or
/// for a custom (`--*`) property. One variant per longhand is generated by
/// the Mako loop below.
#[derive(PartialEq, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum PropertyDeclaration {
    % for property in data.longhands:
        ${property.camel_case}(DeclaredValue<longhands::${property.ident}::SpecifiedValue>),
    % endfor
    Custom(::custom_properties::Name, DeclaredValue<::custom_properties::SpecifiedValue>),
}

impl HasViewportPercentage for PropertyDeclaration {
    fn has_viewport_percentage(&self) -> bool {
        // Delegate to the contained DeclaredValue; the generated arms cover
        // every longhand variant.
        match *self {
            % for property in data.longhands:
                PropertyDeclaration::${property.camel_case}(ref val) => {
                    val.has_viewport_percentage()
                },
            % endfor
            PropertyDeclaration::Custom(_, ref val) => {
                val.has_viewport_percentage()
            }
        }
    }
}
/// Outcome of attempting to parse a single property declaration.
#[derive(Eq, PartialEq, Copy, Clone)]
pub enum PropertyDeclarationParseResult {
    /// The property name was not recognized.
    UnknownProperty,
    /// The property exists but is experimental and not enabled.
    ExperimentalProperty,
    /// The name was recognized but the value failed to parse.
    InvalidValue,
    /// Animation properties are rejected inside @keyframes blocks.
    AnimationPropertyInKeyframeBlock,
    /// Parsed successfully, or deliberately ignored.
    ValidOrIgnoredDeclaration,
}
#[derive(Eq, PartialEq, Clone)]
pub enum PropertyDeclarationName {
Longhand(&'static str),
Custom(::custom_properties::Name),
Internal
}
impl PropertyDeclarationName {
pub fn eq_str_ignore_ascii_case(&self, other: &str) -> bool {
match *self {
PropertyDeclarationName::Longhand(s) => s.eq_ignore_ascii_case(other),
PropertyDeclarationName::Custom(ref n) => n.eq_str_ignore_ascii_case(other),
PropertyDeclarationName::Internal => false
}
}
}
impl PartialEq<str> for PropertyDeclarationName {
    /// Exact equality against a property name string. Longhands compare
    /// case-sensitively; custom properties compare the name with its `--`
    /// prefix stripped by `parse_name`.
    fn eq(&self, other: &str) -> bool {
        match *self {
            PropertyDeclarationName::Internal => false,
            PropertyDeclarationName::Longhand(name) => name == other,
            PropertyDeclarationName::Custom(ref name) => {
                name.with_str(|unprefixed| ::custom_properties::parse_name(other) == Ok(unprefixed))
            }
        }
    }
}
impl fmt::Display for PropertyDeclarationName {
    /// Writes the user-visible property name; internal properties write
    /// nothing, custom properties get their `--` prefix restored.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            PropertyDeclarationName::Internal => Ok(()),
            PropertyDeclarationName::Longhand(name) => f.write_str(name),
            PropertyDeclarationName::Custom(ref name) => {
                try!(f.write_str("--"));
                name.with_str(|unprefixed| f.write_str(unprefixed))
            }
        }
    }
}
impl fmt::Debug for PropertyDeclaration {
    /// Formats as a CSS-like `name: value` pair.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        try!(write!(f, "{}: ", self.name()));
        match *self {
            % for property in data.longhands:
                % if not property.derived_from:
                    PropertyDeclaration::${property.camel_case}(ref value) => value.to_css(f),
                % endif
            % endfor
            PropertyDeclaration::Custom(_, ref value) => value.to_css(f),
            % if any(property.derived_from for property in data.longhands):
                // Derived (internal) longhands have no serialization.
                _ => Err(fmt::Error),
            % endif
        }
    }
}
impl ToCss for PropertyDeclaration {
    /// Serializes only the value part of the declaration (the name is
    /// available separately via `name()`).
    fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
        match *self {
            % for property in data.longhands:
                % if not property.derived_from:
                    PropertyDeclaration::${property.camel_case}(ref value) =>
                        value.to_css(dest),
                % endif
            % endfor
            PropertyDeclaration::Custom(_, ref value) => value.to_css(dest),
            % if any(property.derived_from for property in data.longhands):
                // Derived (internal) longhands have no serialization.
                _ => Err(fmt::Error),
            % endif
        }
    }
}
impl PropertyDeclaration {
    /// Returns the (longhand, custom, or internal) name of this declaration.
    pub fn name(&self) -> PropertyDeclarationName {
        match *self {
            % for property in data.longhands:
                PropertyDeclaration::${property.camel_case}(..) =>
                % if not property.derived_from:
                    PropertyDeclarationName::Longhand("${property.name}"),
                % else:
                    PropertyDeclarationName::Internal,
                % endif
            % endfor
            PropertyDeclaration::Custom(ref name, _) => {
                PropertyDeclarationName::Custom(name.clone())
            }
        }
    }

    /// Dense index of the variant; used to index into `CASCADE_PROPERTY`.
    #[inline]
    pub fn discriminant_value(&self) -> usize {
        match *self {
            % for i, property in enumerate(data.longhands):
                PropertyDeclaration::${property.camel_case}(..) => ${i},
            % endfor
            PropertyDeclaration::Custom(..) => ${len(data.longhands)}
        }
    }

    /// Serializes the declared value to a fresh String.
    /// Panics on derived (internal) declarations, which have no serialization.
    pub fn value(&self) -> String {
        let mut value = String::new();
        if let Err(_) = self.to_css(&mut value) {
            panic!("unsupported property declaration: {}", self.name());
        }
        value
    }

    /// If this is a pending-substitution value from the given shorthand, return that value
    // Extra space here because < seems to be removed by Mako when immediately followed by &.
    //                                      ↓
    pub fn with_variables_from_shorthand(&self, shorthand: Shorthand) -> Option< &str> {
        match *self {
            % for property in data.longhands:
                PropertyDeclaration::${property.camel_case}(ref value) => match *value {
                    DeclaredValue::WithVariables { ref css, from_shorthand: Some(s), .. }
                    if s == shorthand => {
                        Some(&**css)
                    }
                    _ => None
                },
            % endfor
            PropertyDeclaration::Custom(..) => None,
        }
    }

    /// Return whether this is a pending-substitution value.
    /// https://drafts.csswg.org/css-variables/#variables-in-shorthands
    pub fn with_variables(&self) -> bool {
        match *self {
            % for property in data.longhands:
                PropertyDeclaration::${property.camel_case}(ref value) => match *value {
                    DeclaredValue::WithVariables { .. } => true,
                    _ => false,
                },
            % endfor
            PropertyDeclaration::Custom(_, ref value) => match *value {
                DeclaredValue::WithVariables { .. } => true,
                _ => false,
            }
        }
    }

    /// Return whether the value is stored as it was in the CSS source, preserving whitespace
    /// (as opposed to being parsed into a more abstract data structure).
    /// This is the case of custom properties and values that contain unsubstituted variables.
    pub fn value_is_unparsed(&self) -> bool {
        match *self {
            % for property in data.longhands:
                PropertyDeclaration::${property.camel_case}(ref value) => {
                    matches!(*value, DeclaredValue::WithVariables { .. })
                },
            % endfor
            PropertyDeclaration::Custom(..) => true
        }
    }

    /// Whether this declaration's property matches the given name
    /// (ASCII-case-insensitively for longhands; derived properties never match).
    pub fn matches(&self, name: &str) -> bool {
        match *self {
            % for property in data.longhands:
                PropertyDeclaration::${property.camel_case}(..) =>
                % if not property.derived_from:
                    name.eq_ignore_ascii_case("${property.name}"),
                % else:
                    false,
                % endif
            % endfor
            PropertyDeclaration::Custom(ref declaration_name, _) => {
                declaration_name.with_str(|s| ::custom_properties::parse_name(name) == Ok(s))
            }
        }
    }

    /// Parses one declaration for the property `name`, pushing the result(s)
    /// onto `result_list` (a shorthand pushes one declaration per sub-property).
    ///
    /// The `in_keyframe_block` parameter controls this:
    ///
    /// https://drafts.csswg.org/css-animations/#keyframes
    /// > The <declaration-list> inside of <keyframe-block> accepts any CSS property
    /// > except those defined in this specification,
    /// > but does accept the `animation-play-state` property and interprets it specially.
    pub fn parse(name: &str, context: &ParserContext, input: &mut Parser,
                 result_list: &mut Vec<PropertyDeclaration>,
                 in_keyframe_block: bool)
                 -> PropertyDeclarationParseResult {
        // Custom properties are handled up front: they accept any token
        // stream, plus the CSS-wide keywords.
        if let Ok(name) = ::custom_properties::parse_name(name) {
            let value = match input.try(CSSWideKeyword::parse) {
                Ok(CSSWideKeyword::UnsetKeyword) |  // Custom properties are always inherited
                Ok(CSSWideKeyword::InheritKeyword) => DeclaredValue::Inherit,
                Ok(CSSWideKeyword::InitialKeyword) => DeclaredValue::Initial,
                Err(()) => match ::custom_properties::SpecifiedValue::parse(input) {
                    Ok(value) => DeclaredValue::Value(value),
                    Err(()) => return PropertyDeclarationParseResult::InvalidValue,
                }
            };
            result_list.push(PropertyDeclaration::Custom(Atom::from(name), value));
            return PropertyDeclarationParseResult::ValidOrIgnoredDeclaration;
        }
        match_ignore_ascii_case! { name,
            % for property in data.longhands:
                % if not property.derived_from:
                    "${property.name}" => {
                        % if not property.allowed_in_keyframe_block:
                            if in_keyframe_block {
                                return PropertyDeclarationParseResult::AnimationPropertyInKeyframeBlock
                            }
                        % endif
                        % if property.internal:
                            // Internal properties may only appear in UA sheets.
                            if context.stylesheet_origin != Origin::UserAgent {
                                return PropertyDeclarationParseResult::UnknownProperty
                            }
                        % endif
                        % if property.experimental and product == "servo":
                            if !::util::prefs::PREFS.get("${property.experimental}")
                                .as_boolean().unwrap_or(false) {
                                return PropertyDeclarationParseResult::ExperimentalProperty
                            }
                        % endif
                        match longhands::${property.ident}::parse_declared(context, input) {
                            Ok(value) => {
                                result_list.push(PropertyDeclaration::${property.camel_case}(value));
                                PropertyDeclarationParseResult::ValidOrIgnoredDeclaration
                            },
                            Err(()) => PropertyDeclarationParseResult::InvalidValue,
                        }
                    },
                % else:
                    // Derived longhands can't be set from CSS source.
                    "${property.name}" => PropertyDeclarationParseResult::UnknownProperty,
                % endif
            % endfor
            % for shorthand in data.shorthands:
                "${shorthand.name}" => {
                    % if not shorthand.allowed_in_keyframe_block:
                        if in_keyframe_block {
                            return PropertyDeclarationParseResult::AnimationPropertyInKeyframeBlock
                        }
                    % endif
                    % if shorthand.internal:
                        if context.stylesheet_origin != Origin::UserAgent {
                            return PropertyDeclarationParseResult::UnknownProperty
                        }
                    % endif
                    % if shorthand.experimental and product == "servo":
                        if !::util::prefs::PREFS.get("${shorthand.experimental}")
                            .as_boolean().unwrap_or(false) {
                            return PropertyDeclarationParseResult::ExperimentalProperty
                        }
                    % endif
                    // CSS-wide keywords on a shorthand expand to every
                    // sub-property; `unset` picks inherit/initial per
                    // sub-property depending on whether it is inherited.
                    match input.try(CSSWideKeyword::parse) {
                        Ok(CSSWideKeyword::InheritKeyword) => {
                            % for sub_property in shorthand.sub_properties:
                                result_list.push(
                                    PropertyDeclaration::${sub_property.camel_case}(
                                        DeclaredValue::Inherit));
                            % endfor
                            PropertyDeclarationParseResult::ValidOrIgnoredDeclaration
                        },
                        Ok(CSSWideKeyword::InitialKeyword) => {
                            % for sub_property in shorthand.sub_properties:
                                result_list.push(
                                    PropertyDeclaration::${sub_property.camel_case}(
                                        DeclaredValue::Initial));
                            % endfor
                            PropertyDeclarationParseResult::ValidOrIgnoredDeclaration
                        },
                        Ok(CSSWideKeyword::UnsetKeyword) => {
                            % for sub_property in shorthand.sub_properties:
                                result_list.push(PropertyDeclaration::${sub_property.camel_case}(
                                    DeclaredValue::${"Inherit" if sub_property.style_struct.inherited else "Initial"}
                                ));
                            % endfor
                            PropertyDeclarationParseResult::ValidOrIgnoredDeclaration
                        },
                        Err(()) => match shorthands::${shorthand.ident}::parse(context, input, result_list) {
                            Ok(()) => PropertyDeclarationParseResult::ValidOrIgnoredDeclaration,
                            Err(()) => PropertyDeclarationParseResult::InvalidValue,
                        }
                    }
                },
            % endfor
            _ => {
                if cfg!(all(debug_assertions, feature = "gecko")) && !name.starts_with('-') {
                    println!("stylo: Unimplemented property setter: {}", name);
                }
                PropertyDeclarationParseResult::UnknownProperty
            }
        }
    }

    /// Returns the shorthands that include this longhand, as a static slice
    /// computed at template-expansion time.
    pub fn shorthands(&self) -> &'static [Shorthand] {
        // first generate longhand to shorthands lookup map
        <%
            longhand_to_shorthand_map = {}
            for shorthand in data.shorthands:
                for sub_property in shorthand.sub_properties:
                    if sub_property.ident not in longhand_to_shorthand_map:
                        longhand_to_shorthand_map[sub_property.ident] = []
                    longhand_to_shorthand_map[sub_property.ident].append(shorthand.camel_case)
            for shorthand_list in longhand_to_shorthand_map.itervalues():
                shorthand_list.sort()
        %>
        // based on lookup results for each longhand, create result arrays
        % for property in data.longhands:
            static ${property.ident.upper()}: &'static [Shorthand] = &[
                % for shorthand in longhand_to_shorthand_map.get(property.ident, []):
                    Shorthand::${shorthand},
                % endfor
            ];
        % endfor
        match *self {
            % for property in data.longhands:
                PropertyDeclaration::${property.camel_case}(_) => ${property.ident.upper()},
            % endfor
            PropertyDeclaration::Custom(_, _) => &[]
        }
    }

    /// Returns true if this property is one of the animable properties, false
    /// otherwise.
    pub fn is_animatable(&self) -> bool {
        match *self {
            % for property in data.longhands:
                PropertyDeclaration::${property.camel_case}(_) => {
                    % if property.animatable:
                        true
                    % else:
                        false
                    % endif
                }
            % endfor
            PropertyDeclaration::Custom(..) => false,
        }
    }
}
#[cfg(feature = "gecko")]
pub use gecko_properties::style_structs;
#[cfg(feature = "servo")]
pub mod style_structs {
use fnv::FnvHasher;
use super::longhands;
use std::hash::{Hash, Hasher};
use logical_geometry::WritingMode;
% for style_struct in data.active_style_structs():
% if style_struct.name == "Font":
#[derive(Clone, Debug)]
% else:
#[derive(PartialEq, Clone, Debug)]
% endif
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct ${style_struct.name} {
% for longhand in style_struct.longhands:
pub ${longhand.ident}: longhands::${longhand.ident}::computed_value::T,
% endfor
% if style_struct.name == "Font":
pub hash: u64,
% endif
}
% if style_struct.name == "Font":
impl PartialEq for ${style_struct.name} {
fn eq(&self, other: &${style_struct.name}) -> bool {
self.hash == other.hash
% for longhand in style_struct.longhands:
&& self.${longhand.ident} == other.${longhand.ident}
% endfor
}
}
% endif
impl ${style_struct.name} {
% for longhand in style_struct.longhands:
% if longhand.logical:
${helpers.logical_setter(name=longhand.name)}
% else:
#[allow(non_snake_case)]
#[inline]
pub fn set_${longhand.ident}(&mut self, v: longhands::${longhand.ident}::computed_value::T) {
self.${longhand.ident} = v;
}
#[allow(non_snake_case)]
#[inline]
pub fn copy_${longhand.ident}_from(&mut self, other: &Self) {
self.${longhand.ident} = other.${longhand.ident}.clone();
}
% if longhand.need_clone:
#[allow(non_snake_case)]
#[inline]
pub fn clone_${longhand.ident}(&self) -> longhands::${longhand.ident}::computed_value::T {
self.${longhand.ident}.clone()
}
% endif
% endif
% if longhand.need_index:
#[allow(non_snake_case)]
pub fn ${longhand.ident}_count(&self) -> usize {
self.${longhand.ident}.0.len()
}
#[allow(non_snake_case)]
pub fn ${longhand.ident}_at(&self, index: usize)
-> longhands::${longhand.ident}::computed_value::SingleComputedValue {
self.${longhand.ident}.0[index].clone()
}
% endif
% endfor
% if style_struct.name == "Border":
% for side in ["top", "right", "bottom", "left"]:
#[allow(non_snake_case)]
pub fn border_${side}_has_nonzero_width(&self) -> bool {
self.border_${side}_width != ::app_units::Au(0)
}
% endfor
% elif style_struct.name == "Font":
pub fn compute_font_hash(&mut self) {
// Corresponds to the fields in `gfx::font_template::FontTemplateDescriptor`.
let mut hasher: FnvHasher = Default::default();
hasher.write_u16(self.font_weight as u16);
self.font_stretch.hash(&mut hasher);
self.font_family.hash(&mut hasher);
self.hash = hasher.finish()
}
% elif style_struct.name == "Outline":
#[inline]
pub fn outline_has_nonzero_width(&self) -> bool {
self.outline_width != ::app_units::Au(0)
}
% elif style_struct.name == "Text":
<% text_decoration_field = 'text_decoration' if product == 'servo' else 'text_decoration_line' %>
#[inline]
pub fn has_underline(&self) -> bool {
self.${text_decoration_field}.underline
}
#[inline]
pub fn has_overline(&self) -> bool {
self.${text_decoration_field}.overline
}
#[inline]
pub fn has_line_through(&self) -> bool {
self.${text_decoration_field}.line_through
}
% endif
}
% endfor
}
// Iteration helpers for list-valued ("indexed") longhands: an `_iter`
// constructor, a modular `_mod` accessor (used for cyclic list values),
// and the generated iterator struct itself.
% for style_struct in data.active_style_structs():
    impl style_structs::${style_struct.name} {
        % for longhand in style_struct.longhands:
            % if longhand.need_index:
                #[allow(non_snake_case)]
                #[inline]
                pub fn ${longhand.ident}_iter(&self) -> ${longhand.camel_case}Iter {
                    ${longhand.camel_case}Iter {
                        style_struct: self,
                        current: 0,
                        max: self.${longhand.ident}_count(),
                    }
                }
                // Index modulo the list length, so lists repeat cyclically.
                #[allow(non_snake_case)]
                #[inline]
                pub fn ${longhand.ident}_mod(&self, index: usize)
                    -> longhands::${longhand.ident}::computed_value::SingleComputedValue {
                    self.${longhand.ident}_at(index % self.${longhand.ident}_count())
                }
            % endif
        % endfor
    }
    % for longhand in style_struct.longhands:
        % if longhand.need_index:
            pub struct ${longhand.camel_case}Iter<'a> {
                style_struct: &'a style_structs::${style_struct.name},
                current: usize,
                max: usize,
            }
            impl<'a> Iterator for ${longhand.camel_case}Iter<'a> {
                type Item = longhands::${longhand.ident}::computed_value::SingleComputedValue;
                fn next(&mut self) -> Option<Self::Item> {
                    self.current += 1;
                    if self.current <= self.max {
                        Some(self.style_struct.${longhand.ident}_at(self.current - 1))
                    } else {
                        None
                    }
                }
            }
        % endif
    % endfor
% endfor
#[cfg(feature = "gecko")]
pub use gecko_properties::ComputedValues;
#[cfg(feature = "servo")]
pub type ServoComputedValues = ComputedValues;

/// The fully computed style of an element: one shared (`Arc`) struct per
/// active style-struct category, plus cascade bookkeeping.
#[cfg(feature = "servo")]
#[cfg_attr(feature = "servo", derive(Clone, Debug))]
pub struct ComputedValues {
    % for style_struct in data.active_style_structs():
        ${style_struct.ident}: Arc<style_structs::${style_struct.name}>,
    % endfor
    // Computed custom (`--*`) properties, if any were declared.
    custom_properties: Option<Arc<::custom_properties::ComputedValuesMap>>,
    // Whether this style may be shared between elements.
    shareable: bool,
    pub writing_mode: WritingMode,
    // Computed font-size of the root element, for `rem` units.
    pub root_font_size: Au,
}
#[cfg(feature = "servo")]
impl ComputedValues {
    /// Assembles a ComputedValues from its parts (one `Arc` per active
    /// style struct, generated by Mako).
    pub fn new(custom_properties: Option<Arc<::custom_properties::ComputedValuesMap>>,
               shareable: bool,
               writing_mode: WritingMode,
               root_font_size: Au,
            % for style_struct in data.active_style_structs():
               ${style_struct.ident}: Arc<style_structs::${style_struct.name}>,
            % endfor
    ) -> Self {
        ComputedValues {
            custom_properties: custom_properties,
            shareable: shareable,
            writing_mode: writing_mode,
            root_font_size: root_font_size,
        % for style_struct in data.active_style_structs():
            ${style_struct.ident}: ${style_struct.ident},
        % endfor
        }
    }

    /// The spec-defined initial values for every property.
    pub fn initial_values() -> &'static Self { &*INITIAL_SERVO_VALUES }

    #[inline]
    pub fn do_cascade_property<F: FnOnce(&[CascadePropertyFn])>(f: F) {
        f(&CASCADE_PROPERTY)
    }

    // Generated accessors per style struct: clone the Arc, borrow, or
    // copy-on-write mutate.
    % for style_struct in data.active_style_structs():
        #[inline]
        pub fn clone_${style_struct.name_lower}(&self) -> Arc<style_structs::${style_struct.name}> {
            self.${style_struct.ident}.clone()
        }
        #[inline]
        pub fn get_${style_struct.name_lower}(&self) -> &style_structs::${style_struct.name} {
            &self.${style_struct.ident}
        }
        #[inline]
        pub fn mutate_${style_struct.name_lower}(&mut self) -> &mut style_structs::${style_struct.name} {
            Arc::make_mut(&mut self.${style_struct.ident})
        }
    % endfor

    // Cloning the Arc here is fine because it only happens in the case where we have custom
    // properties, and those are both rare and expensive.
    pub fn custom_properties(&self) -> Option<Arc<::custom_properties::ComputedValuesMap>> {
        self.custom_properties.as_ref().map(|x| x.clone())
    }

    pub fn root_font_size(&self) -> Au { self.root_font_size }
    pub fn set_root_font_size(&mut self, size: Au) { self.root_font_size = size }
    pub fn set_writing_mode(&mut self, mode: WritingMode) { self.writing_mode = mode; }

    /// Whether this style establishes a multi-column layout.
    #[inline]
    pub fn is_multicol(&self) -> bool {
        let style = self.get_column();
        style.column_count.0.is_some() || style.column_width.0.is_some()
    }

    /// Resolves the currentColor keyword.
    /// Any color value form computed values (except for the 'color' property itself)
    /// should go through this method.
    ///
    /// Usage example:
    /// let top_color = style.resolve_color(style.Border.border_top_color);
    #[inline]
    pub fn resolve_color(&self, color: CSSParserColor) -> RGBA {
        match color {
            CSSParserColor::RGBA(rgba) => rgba,
            CSSParserColor::CurrentColor => self.get_color().color,
        }
    }

    // Logical accessors: map inline/block to width/height (and friends)
    // according to the writing mode.
    #[inline]
    pub fn content_inline_size(&self) -> computed::LengthOrPercentageOrAuto {
        let position_style = self.get_position();
        if self.writing_mode.is_vertical() {
            position_style.height
        } else {
            position_style.width
        }
    }

    #[inline]
    pub fn content_block_size(&self) -> computed::LengthOrPercentageOrAuto {
        let position_style = self.get_position();
        if self.writing_mode.is_vertical() { position_style.width } else { position_style.height }
    }

    #[inline]
    pub fn min_inline_size(&self) -> computed::LengthOrPercentage {
        let position_style = self.get_position();
        if self.writing_mode.is_vertical() { position_style.min_height } else { position_style.min_width }
    }

    #[inline]
    pub fn min_block_size(&self) -> computed::LengthOrPercentage {
        let position_style = self.get_position();
        if self.writing_mode.is_vertical() { position_style.min_width } else { position_style.min_height }
    }

    #[inline]
    pub fn max_inline_size(&self) -> computed::LengthOrPercentageOrNone {
        let position_style = self.get_position();
        if self.writing_mode.is_vertical() { position_style.max_height } else { position_style.max_width }
    }

    #[inline]
    pub fn max_block_size(&self) -> computed::LengthOrPercentageOrNone {
        let position_style = self.get_position();
        if self.writing_mode.is_vertical() { position_style.max_width } else { position_style.max_height }
    }

    #[inline]
    pub fn logical_padding(&self) -> LogicalMargin<computed::LengthOrPercentage> {
        let padding_style = self.get_padding();
        LogicalMargin::from_physical(self.writing_mode, SideOffsets2D::new(
            padding_style.padding_top,
            padding_style.padding_right,
            padding_style.padding_bottom,
            padding_style.padding_left,
        ))
    }

    #[inline]
    pub fn border_width_for_writing_mode(&self, writing_mode: WritingMode) -> LogicalMargin<Au> {
        let border_style = self.get_border();
        LogicalMargin::from_physical(writing_mode, SideOffsets2D::new(
            border_style.border_top_width,
            border_style.border_right_width,
            border_style.border_bottom_width,
            border_style.border_left_width,
        ))
    }

    #[inline]
    pub fn logical_border_width(&self) -> LogicalMargin<Au> {
        self.border_width_for_writing_mode(self.writing_mode)
    }

    #[inline]
    pub fn logical_margin(&self) -> LogicalMargin<computed::LengthOrPercentageOrAuto> {
        let margin_style = self.get_margin();
        LogicalMargin::from_physical(self.writing_mode, SideOffsets2D::new(
            margin_style.margin_top,
            margin_style.margin_right,
            margin_style.margin_bottom,
            margin_style.margin_left,
        ))
    }

    #[inline]
    pub fn logical_position(&self) -> LogicalMargin<computed::LengthOrPercentageOrAuto> {
        // FIXME(SimonSapin): should be the writing mode of the containing block, maybe?
        let position_style = self.get_position();
        LogicalMargin::from_physical(self.writing_mode, SideOffsets2D::new(
            position_style.top,
            position_style.right,
            position_style.bottom,
            position_style.left,
        ))
    }

    #[inline]
    pub fn get_font_arc(&self) -> Arc<style_structs::Font> {
        self.font.clone()
    }

    // http://dev.w3.org/csswg/css-transforms/#grouping-property-values
    pub fn get_used_transform_style(&self) -> computed_values::transform_style::T {
        use computed_values::mix_blend_mode;
        use computed_values::transform_style;
        let effects = self.get_effects();
        let box_ = self.get_box();
        // TODO(gw): Add clip-path, isolation, mask-image, mask-border-source when supported.
        // Any of these grouping properties forces a used value of `flat`.
        // BUG FIX: the mix-blend-mode test used to sit *inside* the block as
        // `cond || return …`, which both inverted the test and made it
        // conditional on the other properties; it belongs in the disjunction.
        if effects.opacity < 1.0 ||
           !effects.filter.is_empty() ||
           effects.clip.0.is_some() ||
           effects.mix_blend_mode != mix_blend_mode::T::normal {
            return transform_style::T::flat;
        }
        if effects.transform_style == transform_style::T::auto {
            if box_.transform.0.is_some() {
                return transform_style::T::flat;
            }
            if let Either::First(ref _length) = effects.perspective {
                return transform_style::T::flat;
            }
        }
        // Return the computed value if not overridden by the above exceptions
        effects.transform_style
    }

    /// Whether the transform needs its own layer: true for any 3D transform
    /// (perspective, non-2D matrix, or translation with a Z component).
    pub fn transform_requires_layer(&self) -> bool {
        // Check if the transform matrix is 2D or 3D
        if let Some(ref transform_list) = self.get_box().transform.0 {
            for transform in transform_list {
                match *transform {
                    computed_values::transform::ComputedOperation::Perspective(..) => {
                        return true;
                    }
                    computed_values::transform::ComputedOperation::Matrix(m) => {
                        // See http://dev.w3.org/csswg/css-transforms/#2d-matrix
                        if m.m31 != 0.0 || m.m32 != 0.0 ||
                           m.m13 != 0.0 || m.m23 != 0.0 ||
                           m.m43 != 0.0 || m.m14 != 0.0 ||
                           m.m24 != 0.0 || m.m34 != 0.0 ||
                           m.m33 != 1.0 || m.m44 != 1.0 {
                            return true;
                        }
                    }
                    computed_values::transform::ComputedOperation::Translate(_, _, z) => {
                        if z != Au(0) {
                            return true;
                        }
                    }
                    _ => {}
                }
            }
        }
        // Neither perspective nor transform present
        false
    }

    /// Serializes the computed value of the named property (longhand or
    /// custom); Err(()) for unknown names or undeclared custom properties.
    pub fn computed_value_to_string(&self, name: &str) -> Result<String, ()> {
        match name {
            % for style_struct in data.active_style_structs():
                % for longhand in style_struct.longhands:
                "${longhand.name}" => Ok(self.${style_struct.ident}.${longhand.ident}.to_css_string()),
                % endfor
            % endfor
            _ => {
                let name = try!(::custom_properties::parse_name(name));
                let map = try!(self.custom_properties.as_ref().ok_or(()));
                let value = try!(map.get(&Atom::from(name)).ok_or(()));
                Ok(value.to_css_string())
            }
        }
    }
}
/// Return a WritingMode bitflags from the relevant CSS properties.
/// Combines the computed `direction`, `writing-mode` and `text-orientation`
/// of the given InheritedBox struct into logical-geometry flags.
pub fn get_writing_mode(inheritedbox_style: &style_structs::InheritedBox) -> WritingMode {
    use logical_geometry;
    let mut flags = WritingMode::empty();
    match inheritedbox_style.clone_direction() {
        computed_values::direction::T::ltr => {},
        computed_values::direction::T::rtl => {
            flags.insert(logical_geometry::FLAG_RTL);
        },
    }
    match inheritedbox_style.clone_writing_mode() {
        computed_values::writing_mode::T::horizontal_tb => {},
        computed_values::writing_mode::T::vertical_rl => {
            flags.insert(logical_geometry::FLAG_VERTICAL);
        },
        computed_values::writing_mode::T::vertical_lr => {
            flags.insert(logical_geometry::FLAG_VERTICAL);
            flags.insert(logical_geometry::FLAG_VERTICAL_LR);
        },
    }
    // text-orientation has different variant sets per product.
    match inheritedbox_style.clone_text_orientation() {
    % if product == "servo":
        computed_values::text_orientation::T::sideways_right => {},
        computed_values::text_orientation::T::sideways_left => {
            flags.insert(logical_geometry::FLAG_VERTICAL_LR);
        },
    % elif product == "gecko":
        // FIXME(bholley): Need to make sure these are correct when we add
        // full writing-mode support.
        computed_values::text_orientation::T::mixed => {},
        computed_values::text_orientation::T::upright => {},
    % endif
        computed_values::text_orientation::T::sideways => {
            if flags.intersects(logical_geometry::FLAG_VERTICAL_LR) {
                flags.insert(logical_geometry::FLAG_SIDEWAYS_LEFT);
            }
        },
    }
    flags
}
#[cfg(feature = "servo")]
pub use self::lazy_static_module::INITIAL_SERVO_VALUES;
// Use a module to work around #[cfg] on lazy_static! not being applied to every generated item.
#[cfg(feature = "servo")]
mod lazy_static_module {
    use logical_geometry::WritingMode;
    use std::sync::Arc;
    use super::{ComputedValues, longhands, style_structs};
    /// The initial values for all style structs as defined by the specification.
    lazy_static! {
        pub static ref INITIAL_SERVO_VALUES: ComputedValues = ComputedValues {
            % for style_struct in data.active_style_structs():
                ${style_struct.ident}: Arc::new(style_structs::${style_struct.name} {
                    % for longhand in style_struct.longhands:
                        ${longhand.ident}: longhands::${longhand.ident}::get_initial_value(),
                    % endfor
                    % if style_struct.name == "Font":
                        // Recomputed via compute_font_hash when font fields change.
                        hash: 0,
                    % endif
                }),
            % endfor
            custom_properties: None,
            shareable: true,
            writing_mode: WritingMode::empty(),
            root_font_size: longhands::font_size::get_initial_value(),
        };
    }
}
// Signature of the per-longhand cascade functions stored in CASCADE_PROPERTY.
// NOTE: the doubled `<<` below works around Mako eating a `<` that is
// immediately followed by `&` (see the comment on
// `with_variables_from_shorthand` above); it expands to a single `<`.
pub type CascadePropertyFn =
    extern "Rust" fn(declaration: &PropertyDeclaration,
                     inherited_style: &ComputedValues,
                     context: &mut computed::Context,
                     seen: &mut PropertyBitField,
                     cacheable: &mut bool,
                     cascade_info: &mut Option<<&mut CascadeInfo>,
                     error_reporter: &mut StdBox<ParseErrorReporter + Send>);
// Dispatch table of cascade functions, indexed by
// `PropertyDeclaration::discriminant_value()`.
#[cfg(feature = "servo")]
static CASCADE_PROPERTY: [CascadePropertyFn; ${len(data.longhands)}] = [
    % for property in data.longhands:
        longhands::${property.ident}::cascade_property,
    % endfor
];
// Flags controlling how `cascade`/`apply_declarations` build the style.
bitflags! {
    pub flags CascadeFlags: u8 {
        /// Whether the `ComputedValues` structure to be constructed should be considered
        /// shareable.
        const SHAREABLE = 0x01,
        /// Whether to inherit all styles from the parent. If this flag is not present,
        /// non-inherited styles are reset to their initial values.
        const INHERIT_ALL = 0x02,
    }
}
/// Performs the CSS cascade, computing new styles for an element from its parent style.
///
/// The arguments are:
///
///   * `viewport_size`: The size of the initial viewport.
///
///   * `rule_node`: The rule node in the tree that represents the CSS rules that
///     matched.
///
///   * `parent_style`: The parent style, if applicable; if `None`, this is the root node.
///
///   * `cascade_info`: Bookkeeping passed through to `apply_declarations`.
///
///   * `flags`: Various flags.
///
/// Returns the computed values.
pub fn cascade(viewport_size: Size2D<Au>,
               rule_node: &StrongRuleNode,
               parent_style: Option<<&ComputedValues>,
               cascade_info: Option<<&mut CascadeInfo>,
               error_reporter: StdBox<ParseErrorReporter + Send>,
               flags: CascadeFlags)
               -> ComputedValues {
    // Without a parent, this is the root element: inherit from initial values.
    let (is_root_element, inherited_style) = match parent_style {
        Some(parent_style) => (false, parent_style),
        None => (true, ComputedValues::initial_values()),
    };
    // Hold locks until after the apply_declarations() call returns.
    // Use filter_map because the root node has no style source.
    let lock_guards = rule_node.self_and_ancestors().filter_map(|node| {
        node.style_source().map(|source| (source.read(), node.importance()))
    }).collect::<Vec<_>>();
    let iter_declarations = || {
        lock_guards.iter().flat_map(|&(ref source, source_importance)| {
            source.declarations.iter()
            // Yield declarations later in source order (with more precedence) first.
            .rev()
            // Keep only declarations whose importance matches this node's.
            .filter_map(move |&(ref declaration, declaration_importance)| {
                if declaration_importance == source_importance {
                    Some(declaration)
                } else {
                    None
                }
            })
        })
    };
    apply_declarations(viewport_size,
                       is_root_element,
                       iter_declarations,
                       inherited_style,
                       cascade_info,
                       error_reporter,
                       None,
                       flags)
}
/// Applies an (already cascaded-ordered) sequence of declarations to compute
/// the final style, starting from `inherited_style`.
///
/// NOTE: This function expects the declaration with more priority to appear
/// first.
pub fn apply_declarations<'a, F, I>(viewport_size: Size2D<Au>,
                                    is_root_element: bool,
                                    iter_declarations: F,
                                    inherited_style: &ComputedValues,
                                    mut cascade_info: Option<<&mut CascadeInfo>,
                                    mut error_reporter: StdBox<ParseErrorReporter + Send>,
                                    font_metrics_provider: Option<<&FontMetricsProvider>,
                                    flags: CascadeFlags)
                                    -> ComputedValues
    where F: Fn() -> I, I: Iterator<Item = &'a PropertyDeclaration>
{
    // Pass 1: cascade custom properties first, since other values may
    // reference them via var().
    let inherited_custom_properties = inherited_style.custom_properties();
    let mut custom_properties = None;
    let mut seen_custom = HashSet::new();
    for declaration in iter_declarations() {
        match *declaration {
            PropertyDeclaration::Custom(ref name, ref value) => {
                ::custom_properties::cascade(
                    &mut custom_properties, &inherited_custom_properties,
                    &mut seen_custom, name, value)
            }
            _ => {}
        }
    }
    let custom_properties =
        ::custom_properties::finish_cascade(
            custom_properties, &inherited_custom_properties);
    // Seed the output style: normally inherited structs come from the parent
    // and reset structs from the initial values; INHERIT_ALL inherits all.
    let initial_values = ComputedValues::initial_values();
    let starting_style = if !flags.contains(INHERIT_ALL) {
        ComputedValues::new(custom_properties,
                            flags.contains(SHAREABLE),
                            WritingMode::empty(),
                            inherited_style.root_font_size(),
                            % for style_struct in data.active_style_structs():
                            % if style_struct.inherited:
                            inherited_style.clone_${style_struct.name_lower}(),
                            % else:
                            initial_values.clone_${style_struct.name_lower}(),
                            % endif
                            % endfor
                            )
    } else {
        ComputedValues::new(custom_properties,
                            flags.contains(SHAREABLE),
                            WritingMode::empty(),
                            inherited_style.root_font_size(),
                            % for style_struct in data.active_style_structs():
                            inherited_style.clone_${style_struct.name_lower}(),
                            % endfor
                            )
    };
    let mut context = computed::Context {
        is_root_element: is_root_element,
        viewport_size: viewport_size,
        inherited_style: inherited_style,
        style: starting_style,
        font_metrics_provider: font_metrics_provider,
    };
    // Set computed values, overwriting earlier declarations for the same
    // property.
    let mut cacheable = true;
    let mut seen = PropertyBitField::new();
    // Declaration blocks are stored in increasing precedence order, we want
    // them in decreasing order here.
    //
    // We could (and used to) use a pattern match here, but that bloats this
    // function to over 100K of compiled code!
    //
    // To improve i-cache behavior, we outline the individual functions and use
    // virtual dispatch instead.
    ComputedValues::do_cascade_property(|cascade_property| {
        % for category_to_cascade_now in ["early", "other"]:
            for declaration in iter_declarations() {
                // Custom properties were already cascaded in pass 1.
                if let PropertyDeclaration::Custom(..) = *declaration {
                    continue
                }
                // The computed value of some properties depends on the
                // (sometimes computed) value of *other* properties.
                //
                // So we classify properties into "early" and "other", such that
                // the only dependencies can be from "other" to "early".
                //
                // We iterate applicable_declarations twice, first cascading
                // "early" properties then "other".
                //
                // Unfortunately, it’s not easy to check that this
                // classification is correct.
                let is_early_property = matches!(*declaration,
                    PropertyDeclaration::FontSize(_) |
                    PropertyDeclaration::FontFamily(_) |
                    PropertyDeclaration::Color(_) |
                    PropertyDeclaration::Position(_) |
                    PropertyDeclaration::Float(_) |
                    PropertyDeclaration::TextDecoration${'' if product == 'servo' else 'Line'}(_) |
                    PropertyDeclaration::WritingMode(_) |
                    PropertyDeclaration::Direction(_) |
                    PropertyDeclaration::TextOrientation(_)
                );
                // Skip properties that don't belong to the current phase.
                if
                    % if category_to_cascade_now == "early":
                        !
                    % endif
                    is_early_property
                {
                    continue
                }
                let discriminant = declaration.discriminant_value();
                (cascade_property[discriminant])(declaration,
                                                 inherited_style,
                                                 &mut context,
                                                 &mut seen,
                                                 &mut cacheable,
                                                 &mut cascade_info,
                                                 &mut error_reporter);
            }
            % if category_to_cascade_now == "early":
                // Writing mode is resolved between phases since "other"
                // (logical) properties depend on it.
                let mode = get_writing_mode(context.style.get_inheritedbox());
                context.style.set_writing_mode(mode);
            % endif
        % endfor
    });
    let mut style = context.style;
    // Fix up `display` per CSS 2.1 § 9.7 for positioned/floated elements,
    // the root element, and flex items.
    let positioned = matches!(style.get_box().clone_position(),
        longhands::position::SpecifiedValue::absolute |
        longhands::position::SpecifiedValue::fixed);
    let floated = style.get_box().clone_float() != longhands::float::SpecifiedValue::none;
    let is_flex_item =
        context.inherited_style.get_box().clone_display() == computed_values::display::T::flex;
    if positioned || floated || is_root_element || is_flex_item {
        use computed_values::display::T;
        let specified_display = style.get_box().clone_display();
        let computed_display = match specified_display {
            T::inline_table => {
                Some(T::table)
            }
            T::inline | T::inline_block |
            T::table_row_group | T::table_column |
            T::table_column_group | T::table_header_group |
            T::table_footer_group | T::table_row | T::table_cell |
            T::table_caption => {
                Some(T::block)
            }
            _ => None
        };
        if let Some(computed_display) = computed_display {
            let box_ = style.mutate_box();
            box_.set_display(computed_display);
            % if product == "servo":
                box_.set__servo_display_for_hypothetical_box(if is_root_element || is_flex_item {
                    computed_display
                } else {
                    specified_display
                });
            % endif
        }
    }
    {
        // overflow-x/overflow-y: `visible` on one axis computes to `auto`
        // when the other axis is non-visible.
        use computed_values::overflow_x::T as overflow;
        use computed_values::overflow_y;
        match (style.get_box().clone_overflow_x() == longhands::overflow_x::computed_value::T::visible,
               style.get_box().clone_overflow_y().0 == longhands::overflow_x::computed_value::T::visible) {
            (true, true) => {}
            (true, _) => {
                style.mutate_box().set_overflow_x(overflow::auto);
            }
            (_, true) => {
                style.mutate_box().set_overflow_y(overflow_y::T(overflow::auto));
            }
            _ => {}
        }
    }
    % if "align-items" in data.longhands_by_name:
    {
        // `align-self: auto` on a non-positioned box resolves against the
        // parent's `align-items`.
        use computed_values::align_self::T as align_self;
        use computed_values::align_items::T as align_items;
        if style.get_position().clone_align_self() == computed_values::align_self::T::auto && !positioned {
            let self_align =
                match context.inherited_style.get_position().clone_align_items() {
                    align_items::stretch => align_self::stretch,
                    align_items::baseline => align_self::baseline,
                    align_items::flex_start => align_self::flex_start,
                    align_items::flex_end => align_self::flex_end,
                    align_items::center => align_self::center,
                };
            style.mutate_position().set_align_self(self_align);
        }
    }
    % endif
    // The initial value of border-*-width may be changed at computed value time.
    % for side in ["top", "right", "bottom", "left"]:
        // Like calling to_computed_value, which wouldn't type check.
        if style.get_border().clone_border_${side}_style().none_or_hidden() &&
           style.get_border().border_${side}_has_nonzero_width() {
            style.mutate_border().set_border_${side}_width(Au(0));
        }
    % endfor
    % if product == "gecko":
        style.mutate_background().fill_arrays();
        style.mutate_svg().fill_arrays();
    % endif
    // The initial value of outline width may be changed at computed value time.
    if style.get_outline().clone_outline_style().none_or_hidden() &&
       style.get_outline().outline_has_nonzero_width() {
        style.mutate_outline().set_outline_width(Au(0));
    }
    if is_root_element {
        // Record the root font size so `rem` units can resolve against it.
        let s = style.get_font().clone_font_size();
        style.set_root_font_size(s);
    }
    // Keep the cached font hash in sync with any font-identity changes.
    if seen.get_font_style() || seen.get_font_weight() || seen.get_font_stretch() ||
       seen.get_font_family() {
        style.mutate_font().compute_font_hash();
    }
    style
}
#[cfg(feature = "servo")]
pub fn modify_style_for_anonymous_flow(style: &mut Arc<ComputedValues>,
new_display_value: longhands::display::computed_value::T) {
// The 'align-self' property needs some special treatment since
// its value depends on the 'align-items' value of its parent.
% if "align-items" in data.longhands_by_name:
use computed_values::align_self::T as align_self;
use computed_values::align_items::T as align_items;
let self_align =
match style.position.align_items {
align_items::stretch => align_self::stretch,
align_items::baseline => align_self::baseline,
align_items::flex_start => align_self::flex_start,
align_items::flex_end => align_self::flex_end,
align_items::center => align_self::center,
};
% endif
let inital_values = &*INITIAL_SERVO_VALUES;
let mut style = Arc::make_mut(style);
% for style_struct in data.active_style_structs():
% if not style_struct.inherited:
style.${style_struct.ident} = inital_values.clone_${style_struct.name_lower}();
% endif
% endfor
% if "align-items" in data.longhands_by_name:
let position = Arc::make_mut(&mut style.position);
position.align_self = self_align;
% endif
if new_display_value != longhands::display::computed_value::T::inline {
let new_box = Arc::make_mut(&mut style.box_);
new_box.display = new_display_value;
}
let border = Arc::make_mut(&mut style.border);
% for side in ["top", "right", "bottom", "left"]:
// Like calling to_computed_value, which wouldn't type check.
border.border_${side}_width = Au(0);
% endfor
// Initial value of outline-style is always none for anonymous box.
let outline = Arc::make_mut(&mut style.outline);
outline.outline_width = Au(0);
}
/// Alters the given style to accommodate replaced content. This is called in flow construction. It
/// handles cases like `<div style="position: absolute">foo bar baz</div>` (in which `foo`, `bar`,
/// and `baz` must not be absolutely-positioned) and cases like `<sup>Foo</sup>` (in which the
/// `vertical-align: top` style of `sup` must not propagate down into `Foo`).
///
/// FIXME(#5625, pcwalton): It would probably be cleaner and faster to do this in the cascade.
#[cfg(feature = "servo")]
#[inline]
pub fn modify_style_for_replaced_content(style: &mut Arc<ComputedValues>) {
// Reset `position` to handle cases like `<div style="position: absolute">foo bar baz</div>`.
if style.box_.display != longhands::display::computed_value::T::inline {
let mut style = Arc::make_mut(style);
Arc::make_mut(&mut style.box_).display = longhands::display::computed_value::T::inline;
Arc::make_mut(&mut style.box_).position =
longhands::position::computed_value::T::static_;
}
// Reset `vertical-align` to handle cases like `<sup>foo</sup>`.
if style.box_.vertical_align != longhands::vertical_align::computed_value::T::baseline {
let mut style = Arc::make_mut(style);
Arc::make_mut(&mut style.box_).vertical_align =
longhands::vertical_align::computed_value::T::baseline
}
// Reset margins.
if style.margin.margin_top != computed::LengthOrPercentageOrAuto::Length(Au(0)) ||
style.margin.margin_left != computed::LengthOrPercentageOrAuto::Length(Au(0)) ||
style.margin.margin_bottom != computed::LengthOrPercentageOrAuto::Length(Au(0)) ||
style.margin.margin_right != computed::LengthOrPercentageOrAuto::Length(Au(0)) {
let mut style = Arc::make_mut(style);
let margin = Arc::make_mut(&mut style.margin);
margin.margin_top = computed::LengthOrPercentageOrAuto::Length(Au(0));
margin.margin_left = computed::LengthOrPercentageOrAuto::Length(Au(0));
margin.margin_bottom = computed::LengthOrPercentageOrAuto::Length(Au(0));
margin.margin_right = computed::LengthOrPercentageOrAuto::Length(Au(0));
}
}
/// Adjusts borders as appropriate to account for a fragment's status as the first or last fragment
/// within the range of an element.
///
/// Specifically, this function sets border widths to zero on the sides for which the fragment is
/// not outermost.
#[cfg(feature = "servo")]
#[inline]
pub fn modify_border_style_for_inline_sides(style: &mut Arc<ComputedValues>,
is_first_fragment_of_element: bool,
is_last_fragment_of_element: bool) {
fn modify_side(style: &mut Arc<ComputedValues>, side: PhysicalSide) {
{
let border = &style.border;
let current_style = match side {
PhysicalSide::Left => (border.border_left_width, border.border_left_style),
PhysicalSide::Right => (border.border_right_width, border.border_right_style),
PhysicalSide::Top => (border.border_top_width, border.border_top_style),
PhysicalSide::Bottom => (border.border_bottom_width, border.border_bottom_style),
};
if current_style == (Au(0), BorderStyle::none) {
return;
}
}
let mut style = Arc::make_mut(style);
let border = Arc::make_mut(&mut style.border);
match side {
PhysicalSide::Left => {
border.border_left_width = Au(0);
border.border_left_style = BorderStyle::none;
}
PhysicalSide::Right => {
border.border_right_width = Au(0);
border.border_right_style = BorderStyle::none;
}
PhysicalSide::Bottom => {
border.border_bottom_width = Au(0);
border.border_bottom_style = BorderStyle::none;
}
PhysicalSide::Top => {
border.border_top_width = Au(0);
border.border_top_style = BorderStyle::none;
}
}
}
if !is_first_fragment_of_element {
let side = style.writing_mode.inline_start_physical_side();
modify_side(style, side)
}
if !is_last_fragment_of_element {
let side = style.writing_mode.inline_end_physical_side();
modify_side(style, side)
}
}
/// Adjusts the `position` property as necessary for the outer fragment wrapper of an inline-block.
#[cfg(feature = "servo")]
#[inline]
pub fn modify_style_for_outer_inline_block_fragment(style: &mut Arc<ComputedValues>) {
let mut style = Arc::make_mut(style);
let box_style = Arc::make_mut(&mut style.box_);
box_style.position = longhands::position::computed_value::T::static_
}
/// Adjusts the `position` and `padding` properties as necessary to account for text.
///
/// Text is never directly relatively positioned; it's always contained within an element that is
/// itself relatively positioned.
#[cfg(feature = "servo")]
#[inline]
pub fn modify_style_for_text(style: &mut Arc<ComputedValues>) {
if style.box_.position == longhands::position::computed_value::T::relative {
// We leave the `position` property set to `relative` so that we'll still establish a
// containing block if needed. But we reset all position offsets to `auto`.
let mut style = Arc::make_mut(style);
let mut position = Arc::make_mut(&mut style.position);
position.top = computed::LengthOrPercentageOrAuto::Auto;
position.right = computed::LengthOrPercentageOrAuto::Auto;
position.bottom = computed::LengthOrPercentageOrAuto::Auto;
position.left = computed::LengthOrPercentageOrAuto::Auto;
}
if style.padding.padding_top != computed::LengthOrPercentage::Length(Au(0)) ||
style.padding.padding_right != computed::LengthOrPercentage::Length(Au(0)) ||
style.padding.padding_bottom != computed::LengthOrPercentage::Length(Au(0)) ||
style.padding.padding_left != computed::LengthOrPercentage::Length(Au(0)) {
let mut style = Arc::make_mut(style);
let mut padding = Arc::make_mut(&mut style.padding);
padding.padding_top = computed::LengthOrPercentage::Length(Au(0));
padding.padding_right = computed::LengthOrPercentage::Length(Au(0));
padding.padding_bottom = computed::LengthOrPercentage::Length(Au(0));
padding.padding_left = computed::LengthOrPercentage::Length(Au(0));
}
if style.effects.opacity != 1.0 {
let mut style = Arc::make_mut(style);
let mut effects = Arc::make_mut(&mut style.effects);
effects.opacity = 1.0;
}
}
/// Adjusts the `clip` property so that an inline absolute hypothetical fragment doesn't clip its
/// children.
#[cfg(feature = "servo")]
pub fn modify_style_for_inline_absolute_hypothetical_fragment(style: &mut Arc<ComputedValues>) {
if style.get_effects().clip.0.is_some() {
let mut style = Arc::make_mut(style);
let effects_style = Arc::make_mut(&mut style.effects);
effects_style.clip.0 = None
}
}
// FIXME: https://github.com/w3c/csswg-drafts/issues/580
pub fn is_supported_property(property: &str) -> bool {
match_ignore_ascii_case! { property,
% for property in data.shorthands + data.longhands:
"${property.name}" => true,
% endfor
_ => property.starts_with("--")
}
}
#[macro_export]
macro_rules! css_properties_accessors {
($macro_name: ident) => {
$macro_name! {
% for property in data.shorthands + data.longhands:
% if not property.derived_from and not property.internal:
% if '-' in property.name:
[${property.ident.capitalize()}, Set${property.ident.capitalize()}, "${property.name}"],
% endif
% if property != data.longhands[-1]:
[${property.camel_case}, Set${property.camel_case}, "${property.name}"],
% else:
[${property.camel_case}, Set${property.camel_case}, "${property.name}"]
% endif
% endif
% endfor
}
}
}
macro_rules! longhand_properties_idents {
($macro_name: ident) => {
$macro_name! {
% for property in data.longhands:
${property.ident}
% endfor
}
}
}<|fim▁end|>
|
% endfor
PropertyDeclaration::Custom(ref name, _) => {
|
<|file_name|>BoniMichele.java<|end_file_name|><|fim▁begin|>/*
* Code used in the "Software Engineering" course.
*
* Copyright 2017 by Claudio Cusano ([email protected])
* Dept of Electrical, Computer and Biomedical Engineering,
* University of Pavia.
*/
package goldrush;
/**
* @author Reina Michele cl418656
* @author Bonissone Davidecl427113
*/
public class BoniMichele extends GoldDigger{ //
int t=0;
int j=99;
@Override
public int chooseDiggingSite(int[] distances) {
for (int i=0; i<distances.length; i++){
if (t==0){
if (distances[i]==140) {
j=i;
t++;
}
}
else if (t<3) {
if (distances[i]== 30) {
j=i;
t=0;
<|fim▁hole|> j=i;
t=0;
}
}
}
return j;
}
}<|fim▁end|>
|
}
}
else {
if (distances[i]== 200) {
|
<|file_name|>stylesheet_loader.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::document_loader::LoadType;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::DomRoot;
use crate::dom::document::Document;
use crate::dom::element::Element;
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::htmllinkelement::{HTMLLinkElement, RequestGenerationId};
use crate::dom::node::{containing_shadow_root, document_from_node, window_from_node};
use crate::dom::performanceresourcetiming::InitiatorType;
use crate::dom::shadowroot::ShadowRoot;
use crate::fetch::create_a_potential_cors_request;
use crate::network_listener::{self, NetworkListener, PreInvoke, ResourceTimingListener};
use cssparser::SourceLocation;
use encoding_rs::UTF_8;
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use mime::{self, Mime};
use msg::constellation_msg::PipelineId;
use net_traits::request::{CorsSettings, Destination, Referrer, RequestBuilder};
use net_traits::{
FetchMetadata, FetchResponseListener, FilteredMetadata, Metadata, NetworkError, ReferrerPolicy,
};
use net_traits::{ResourceFetchTiming, ResourceTimingType};
use parking_lot::RwLock;
use servo_arc::Arc;
use servo_url::ImmutableOrigin;
use servo_url::ServoUrl;
use std::mem;
use std::sync::atomic::AtomicBool;
use std::sync::Mutex;
use style::media_queries::MediaList;
use style::parser::ParserContext;
use style::shared_lock::{Locked, SharedRwLock};
use style::stylesheets::import_rule::ImportSheet;
use style::stylesheets::StylesheetLoader as StyleStylesheetLoader;
use style::stylesheets::{
CssRules, ImportRule, Namespaces, Origin, Stylesheet, StylesheetContents,
};
use style::values::CssUrl;
pub trait StylesheetOwner {
/// Returns whether this element was inserted by the parser (i.e., it should
/// trigger a document-load-blocking load).
fn parser_inserted(&self) -> bool;
/// Which referrer policy should loads triggered by this owner follow, or
/// `None` for the default.
fn referrer_policy(&self) -> Option<ReferrerPolicy>;
/// Notes that a new load is pending to finish.
fn increment_pending_loads_count(&self);
/// Returns None if there are still pending loads, or whether any load has
/// failed since the loads started.
fn load_finished(&self, successful: bool) -> Option<bool>;
/// Sets origin_clean flag.
fn set_origin_clean(&self, origin_clean: bool);
}
pub enum StylesheetContextSource {
// NB: `media` is just an option so we avoid cloning it.
LinkElement { media: Option<MediaList> },
Import(Arc<Stylesheet>),
}
/// The context required for asynchronously loading an external stylesheet.
pub struct StylesheetContext {
/// The element that initiated the request.
elem: Trusted<HTMLElement>,
source: StylesheetContextSource,
url: ServoUrl,
metadata: Option<Metadata>,
/// The response body received to date.
data: Vec<u8>,
/// The node document for elem when the load was initiated.
document: Trusted<Document>,
shadow_root: Option<Trusted<ShadowRoot>>,
origin_clean: bool,
/// A token which must match the generation id of the `HTMLLinkElement` for it to load the stylesheet.
/// This is ignored for `HTMLStyleElement` and imports.
request_generation_id: Option<RequestGenerationId>,
resource_timing: ResourceFetchTiming,
}
impl PreInvoke for StylesheetContext {}
impl FetchResponseListener for StylesheetContext {
fn process_request_body(&mut self) {}
fn process_request_eof(&mut self) {}
fn process_response(&mut self, metadata: Result<FetchMetadata, NetworkError>) {
if let Ok(FetchMetadata::Filtered { ref filtered, .. }) = metadata {
match *filtered {
FilteredMetadata::Opaque | FilteredMetadata::OpaqueRedirect(_) => {
self.origin_clean = false;
},
_ => {},
}
}
self.metadata = metadata.ok().map(|m| match m {
FetchMetadata::Unfiltered(m) => m,
FetchMetadata::Filtered { unsafe_, .. } => unsafe_,
});
}
fn process_response_chunk(&mut self, mut payload: Vec<u8>) {
self.data.append(&mut payload);
}
fn process_response_eof(&mut self, status: Result<ResourceFetchTiming, NetworkError>) {
let elem = self.elem.root();
let document = self.document.root();
let mut successful = false;
if status.is_ok() {
let metadata = match self.metadata.take() {
Some(meta) => meta,
None => return,
};
let is_css = metadata.content_type.map_or(false, |ct| {
let mime: Mime = ct.into_inner().into();
mime.type_() == mime::TEXT && mime.subtype() == mime::CSS
});
let data = if is_css {
mem::replace(&mut self.data, vec![])
} else {
vec![]
};
// TODO: Get the actual value. http://dev.w3.org/csswg/css-syntax/#environment-encoding
let environment_encoding = UTF_8;
let protocol_encoding_label = metadata.charset.as_ref().map(|s| &**s);
let final_url = metadata.final_url;
let win = window_from_node(&*elem);
let loader = StylesheetLoader::for_element(&elem);
match self.source {
StylesheetContextSource::LinkElement { ref mut media } => {
let link = elem.downcast::<HTMLLinkElement>().unwrap();
// We must first check whether the generations of the context and the element match up,
// else we risk applying the wrong stylesheet when responses come out-of-order.
let is_stylesheet_load_applicable = self
.request_generation_id
.map_or(true, |gen| gen == link.get_request_generation_id());
if is_stylesheet_load_applicable {
let shared_lock = document.style_shared_lock().clone();
let sheet = Arc::new(Stylesheet::from_bytes(
&data,
final_url,
protocol_encoding_label,
Some(environment_encoding),
Origin::Author,
media.take().unwrap(),
shared_lock,
Some(&loader),
win.css_error_reporter(),
document.quirks_mode(),
));
if link.is_alternate() {
sheet.set_disabled(true);
}
link.set_stylesheet(sheet);
}
},
StylesheetContextSource::Import(ref stylesheet) => {
Stylesheet::update_from_bytes(
&stylesheet,
&data,
protocol_encoding_label,
Some(environment_encoding),
final_url,
Some(&loader),
win.css_error_reporter(),
);
},
}
if let Some(ref shadow_root) = self.shadow_root {
shadow_root.root().invalidate_stylesheets();
} else {
document.invalidate_stylesheets();
}
// FIXME: Revisit once consensus is reached at:
// https://github.com/whatwg/html/issues/1142
successful = metadata.status.map_or(false, |(code, _)| code == 200);
}
let owner = elem
.upcast::<Element>()
.as_stylesheet_owner()
.expect("Stylesheet not loaded by <style> or <link> element!");
owner.set_origin_clean(self.origin_clean);
if owner.parser_inserted() {
document.decrement_script_blocking_stylesheet_count();
}
document.finish_load(LoadType::Stylesheet(self.url.clone()));
if let Some(any_failed) = owner.load_finished(successful) {
let event = if any_failed {
atom!("error")
} else {
atom!("load")
};
elem.upcast::<EventTarget>().fire_event(event);
}
}
fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming {
&mut self.resource_timing
}
fn resource_timing(&self) -> &ResourceFetchTiming {
&self.resource_timing
}
fn submit_resource_timing(&mut self) {
network_listener::submit_timing(self)
}
}
impl ResourceTimingListener for StylesheetContext {
fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
let initiator_type = InitiatorType::LocalName(
self.elem
.root()
.upcast::<Element>()
.local_name()
.to_string(),
);
(initiator_type, self.url.clone())
}
fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
document_from_node(&*self.elem.root()).global()
}
}
pub struct StylesheetLoader<'a> {
elem: &'a HTMLElement,
}
impl<'a> StylesheetLoader<'a> {
pub fn for_element(element: &'a HTMLElement) -> Self {
StylesheetLoader { elem: element }
}
}
impl<'a> StylesheetLoader<'a> {<|fim▁hole|> url: ServoUrl,
cors_setting: Option<CorsSettings>,
integrity_metadata: String,
) {
let document = document_from_node(self.elem);
let shadow_root = containing_shadow_root(self.elem).map(|sr| Trusted::new(&*sr));
let gen = self
.elem
.downcast::<HTMLLinkElement>()
.map(HTMLLinkElement::get_request_generation_id);
let context = ::std::sync::Arc::new(Mutex::new(StylesheetContext {
elem: Trusted::new(&*self.elem),
source: source,
url: url.clone(),
metadata: None,
data: vec![],
document: Trusted::new(&*document),
shadow_root,
origin_clean: true,
request_generation_id: gen,
resource_timing: ResourceFetchTiming::new(ResourceTimingType::Resource),
}));
let (action_sender, action_receiver) = ipc::channel().unwrap();
let (task_source, canceller) = document
.window()
.task_manager()
.networking_task_source_with_canceller();
let listener = NetworkListener {
context,
task_source,
canceller: Some(canceller),
};
ROUTER.add_route(
action_receiver.to_opaque(),
Box::new(move |message| {
listener.notify_fetch(message.to().unwrap());
}),
);
let owner = self
.elem
.upcast::<Element>()
.as_stylesheet_owner()
.expect("Stylesheet not loaded by <style> or <link> element!");
let referrer_policy = owner
.referrer_policy()
.or_else(|| document.get_referrer_policy());
owner.increment_pending_loads_count();
if owner.parser_inserted() {
document.increment_script_blocking_stylesheet_count();
}
let request = stylesheet_fetch_request(
url.clone(),
cors_setting,
document.origin().immutable().clone(),
self.elem.global().pipeline_id(),
Referrer::ReferrerUrl(document.url()),
referrer_policy,
integrity_metadata,
);
document.fetch_async(LoadType::Stylesheet(url), request, action_sender);
}
}
// This function is also used to prefetch a stylesheet in `script::dom::servoparser::prefetch`.
// https://html.spec.whatwg.org/multipage/#default-fetch-and-process-the-linked-resource
pub(crate) fn stylesheet_fetch_request(
url: ServoUrl,
cors_setting: Option<CorsSettings>,
origin: ImmutableOrigin,
pipeline_id: PipelineId,
referrer: Referrer,
referrer_policy: Option<ReferrerPolicy>,
integrity_metadata: String,
) -> RequestBuilder {
create_a_potential_cors_request(url, Destination::Style, cors_setting, None)
.origin(origin)
.pipeline_id(Some(pipeline_id))
.referrer(Some(referrer))
.referrer_policy(referrer_policy)
.integrity_metadata(integrity_metadata)
}
impl<'a> StyleStylesheetLoader for StylesheetLoader<'a> {
/// Request a stylesheet after parsing a given `@import` rule, and return
/// the constructed `@import` rule.
fn request_stylesheet(
&self,
url: CssUrl,
source_location: SourceLocation,
context: &ParserContext,
lock: &SharedRwLock,
media: Arc<Locked<MediaList>>,
) -> Arc<Locked<ImportRule>> {
let sheet = Arc::new(Stylesheet {
contents: StylesheetContents {
rules: CssRules::new(Vec::new(), lock),
origin: context.stylesheet_origin,
url_data: RwLock::new(context.url_data.clone()),
quirks_mode: context.quirks_mode,
namespaces: RwLock::new(Namespaces::default()),
source_map_url: RwLock::new(None),
source_url: RwLock::new(None),
},
media: media,
shared_lock: lock.clone(),
disabled: AtomicBool::new(false),
});
let stylesheet = ImportSheet(sheet.clone());
let import = ImportRule {
url,
source_location,
stylesheet,
};
let url = match import.url.url().cloned() {
Some(url) => url,
None => return Arc::new(lock.wrap(import)),
};
// TODO (mrnayak) : Whether we should use the original loader's CORS
// setting? Fix this when spec has more details.
let source = StylesheetContextSource::Import(sheet.clone());
self.load(source, url, None, "".to_owned());
Arc::new(lock.wrap(import))
}
}<|fim▁end|>
|
pub fn load(
&self,
source: StylesheetContextSource,
|
<|file_name|>ENMonthNameMiddleEndianParser.js<|end_file_name|><|fim▁begin|>/*
The parser for parsing US's date format that begin with month's name.
<|fim▁hole|> - January 13
- January 13, 2012
- January 13 - 15, 2012
- Tuesday, January 13, 2012
*/
var moment = require('moment');
require('moment-timezone');
var Parser = require('../parser').Parser;
var ParsedResult = require('../../result').ParsedResult;
var DAYS_OFFSET = { 'sunday': 0, 'sun': 0, 'monday': 1, 'mon': 1,'tuesday': 2, 'tue':2, 'wednesday': 3, 'wed': 3,
'thursday': 4, 'thur': 4, 'thu': 4,'friday': 5, 'fri': 5,'saturday': 6, 'sat': 6,}
var regFullPattern = /(\W|^)((Sunday|Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sun|Mon|Tue|Wed|Thu|Fri|Sat)\s*,?\s*)?(Jan|January|Feb|February|Mar|March|Apr|April|May|Jun|June|Jul|July|Aug|August|Sep|September|Oct|October|Nov|November|Dec|December)\s*(([0-9]{1,2})(st|nd|rd|th)?\s*(to|\-)\s*)?([0-9]{1,2})(st|nd|rd|th)?(,)?(\s*[0-9]{4})(\s*BE)?(\W|$)/i;
var regShortPattern = /(\W|^)((Sunday|Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sun|Mon|Tue|Wed|Thu|Fri|Sat)\s*,?\s*)?(Jan|January|Feb|February|Mar|March|Apr|April|May|Jun|June|Jul|July|Aug|August|Sep|September|Oct|October|Nov|November|Dec|December)\s*(([0-9]{1,2})(st|nd|rd|th)?\s*(to|\-)\s*)?([0-9]{1,2})(st|nd|rd|th)?([^0-9]|$)/i;
exports.Parser = function ENMonthNameMiddleEndianParser(){
Parser.call(this);
this.pattern = function() { return regShortPattern; }
this.extract = function(text, ref, match, opt){
var result = new ParsedResult();
var impliedComponents = [];
var date = null;
var originalText = '';
var index = match.index;
text = text.substr(index);
var match = text.match(regFullPattern);
if(match && text.indexOf(match[0]) == 0){
var text = match[0];
text = text.substring(match[1].length, match[0].length - match[14].length);
index = index + match[1].length;
originalText = text;
text = text.replace(match[2], '');
text = text.replace(match[4], match[4]+' ');
if(match[5]) text = text.replace(match[5],'');
if(match[10]) text = text.replace(match[10],'');
if(match[11]) text = text.replace(',',' ');
if(match[13]){
var years = match[12];
years = ' ' + (parseInt(years) - 543);
text = text.replace(match[13], '');
text = text.replace(match[12], years);
}
text = text.replace(match[9],parseInt(match[9])+'');
date = moment(text,'MMMM DD YYYY');
if(!date) return null;
result.start.assign('day', date.date());
result.start.assign('month', date.month() + 1);
result.start.assign('year', date.year());
} else {
match = text.match(regShortPattern);
if(!match) return null;
//Short Pattern (without years)
var text = match[0];
text = text.substring(match[1].length, match[0].length - match[11].length);
index = index + match[1].length;
originalText = text;
text = text.replace(match[2], '');
text = text.replace(match[4], match[4]+' ');
if(match[4]) text = text.replace(match[5],'');
date = moment(text,'MMMM DD');
if(!date) return null;
//Find the most appropriated year
impliedComponents.push('year')
date.year(moment(ref).year());
var nextYear = date.clone().add(1, 'year');
var lastYear = date.clone().add(-1, 'year');
if( Math.abs(nextYear.diff(moment(ref))) < Math.abs(date.diff(moment(ref))) ){
date = nextYear;
}
else if( Math.abs(lastYear.diff(moment(ref))) < Math.abs(date.diff(moment(ref))) ){
date = lastYear;
}
result.start.assign('day', date.date());
result.start.assign('month', date.month() + 1);
result.start.imply('year', date.year());
}
//Day of week
if(match[3]) {
result.start.assign('weekday', DAYS_OFFSET[match[3].toLowerCase()]);
}
if (match[5]) {
var endDay = parseInt(match[9]);
var startDay = parseInt(match[6]);
result.end = result.start.clone();
result.start.assign('day', startDay);
result.end.assign('day', endDay);
var endDate = date.clone();
date.date(startDay);
endDate.date(endDay);
}
result.index = index;
result.text = originalText;
result.ref = ref;
result.tags['ENMonthNameMiddleEndianParser'] = true;
return result;
}
}<|fim▁end|>
|
EX.
|
<|file_name|>HtmlReportAction.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.gui;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.util.HashSet;
import java.util.Set;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.MenuElement;
import org.apache.jmeter.exceptions.IllegalUserActionException;
import org.apache.jmeter.gui.action.AbstractAction;
import org.apache.jmeter.gui.action.ActionNames;
import org.apache.jmeter.gui.action.ActionRouter;
import org.apache.jmeter.gui.plugin.MenuCreator;
import org.apache.jmeter.util.JMeterUtils;
public class HtmlReportAction extends AbstractAction implements MenuCreator {<|fim▁hole|> commands.add(ActionNames.HTML_REPORT);
}
public HtmlReportAction() {
super();
}
@Override
public void doAction(ActionEvent e) throws IllegalUserActionException {
htmlReportPanel = new HtmlReportUI();
htmlReportPanel.showInputDialog(getParentFrame(e));
}
@Override
public Set<String> getActionNames() {
return commands;
}
@Override
public JMenuItem[] getMenuItemsAtLocation(MENU_LOCATION location) {
if (location != MENU_LOCATION.TOOLS) {
return new JMenuItem[0];
}
// Use the action name as resource key because the action name is used by JMeterMenuBar too when changing languages.
JMenuItem menuItem = new JMenuItem(JMeterUtils.getResString(ActionNames.HTML_REPORT), KeyEvent.VK_UNDEFINED);
menuItem.setName(ActionNames.HTML_REPORT);
menuItem.setActionCommand(ActionNames.HTML_REPORT);
menuItem.setAccelerator(null);
menuItem.addActionListener(ActionRouter.getInstance());
return new JMenuItem[] { menuItem };
}
@Override
public JMenu[] getTopLevelMenus() {
return new JMenu[0];
}
@Override
public boolean localeChanged(MenuElement menu) {
return false;
}
@Override
public void localeChanged() {
// NOOP
}
public HtmlReportUI getHtmlReportPanel() {
return htmlReportPanel;
}
}<|fim▁end|>
|
private static Set<String> commands = new HashSet<>();
private HtmlReportUI htmlReportPanel;
static {
|
<|file_name|>stopwords.rs<|end_file_name|><|fim▁begin|>use fnv::FnvHashSet;
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Error;
pub fn load(path: &str) -> Result<FnvHashSet<String>, Error> {
let mut stopwords = FnvHashSet::default();
let f = File::open(path)?;
let file = BufReader::new(&f);
for line in file.lines() {
let sw = line.unwrap();
stopwords.insert(sw);
}<|fim▁hole|> panic!("Stopwords are empty!");
}
Ok(stopwords)
}<|fim▁end|>
|
if stopwords.len() == 0 {
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext
import os.path
import subprocess
import sys
libpg_query = os.path.join('.', 'libpg_query')
class PSqlParseBuildExt(build_ext):
def run(self):
return_code = subprocess.call(['make', '-C', libpg_query, 'build'])
if return_code:
sys.stderr.write('''
An error occurred during extension building.
Make sure you have bison and flex installed on your system.
''')
sys.exit(return_code)
build_ext.run(self)
<|fim▁hole|>
libraries = ['pg_query']
extensions = [
Extension('psqlparse.parser',
['psqlparse/parser' + ext],
libraries=libraries,
include_dirs=[libpg_query],
library_dirs=[libpg_query])
]
if USE_CYTHON:
from Cython.Build import cythonize
extensions = cythonize(extensions)
setup(name='psqlparse',
version='1.0-rc7',
url='https://github.com/alculquicondor/psqlparse',
author='Aldo Culquicondor',
author_email='[email protected]',
description='Parse SQL queries using the PostgreSQL query parser',
install_requires=['six'],
license='BSD',
cmdclass={'build_ext': PSqlParseBuildExt},
packages=['psqlparse', 'psqlparse.nodes'],
ext_modules=extensions)<|fim▁end|>
|
USE_CYTHON = bool(os.environ.get('USE_CYTHON'))
ext = '.pyx' if USE_CYTHON else '.c'
|
<|file_name|>config.js<|end_file_name|><|fim▁begin|>exports.dbname = "lrdata";
exports.dbuser = "lrdata";<|fim▁hole|>
exports.lfmApiKey = 'c0db7c8bfb98655ab25aa2e959fdcc68';
exports.lfmApiSecret = 'aff4890d7cb9492bc72250abbeffc3e1';
exports.tagAgeBeforeRefresh = 14; // In days
exports.tagFetchFrequency = 1000; // In milliseconds<|fim▁end|>
|
exports.dbpassword = "test";
|
<|file_name|>registry.js<|end_file_name|><|fim▁begin|>'use strict';
describe('Registry', function() {
describe('create()', function() {
it('name', function() {
let blot = Registry.create('bold');
expect(blot instanceof BoldBlot).toBe(true);
expect(blot.statics.blotName).toBe('bold');
});
it('node', function() {
let node = document.createElement('strong');
let blot = Registry.create(node);
expect(blot instanceof BoldBlot).toBe(true);
expect(blot.statics.blotName).toBe('bold');
});
it('block', function() {
let blot = Registry.create(Registry.Scope.BLOCK_BLOT);
expect(blot instanceof BlockBlot).toBe(true);
expect(blot.statics.blotName).toBe('block');
});
it('inline', function() {
let blot = Registry.create(Registry.Scope.INLINE_BLOT);
expect(blot instanceof InlineBlot).toBe(true);
expect(blot.statics.blotName).toBe('inline');
});
it('string index', function() {
let blot = Registry.create('header', '2');
expect(blot instanceof HeaderBlot).toBe(true);
expect(blot.formats()).toEqual({ header: 'h2' });
});
it('invalid', function() {
expect(function() {
Registry.create(BoldBlot);
}).toThrowError(/\[Parchment\]/);
});
});
describe('register()', function() {
it('invalid', function() {
expect(function() {
Registry.register({});
}).toThrowError(/\[Parchment\]/);
});
it('abstract', function() {
expect(function() {
Registry.register(ShadowBlot);
}).toThrowError(/\[Parchment\]/);
});
});
describe('find()', function() {
it('exact', function() {
let blockNode = document.createElement('p');
blockNode.innerHTML = '<span>01</span><em>23<strong>45</strong></em>';
let blockBlot = Registry.create(blockNode);
expect(Registry.find(document.body)).toBeFalsy();
expect(Registry.find(blockNode)).toBe(blockBlot);
expect(Registry.find(blockNode.querySelector('span'))).toBe(blockBlot.children.head);
expect(Registry.find(blockNode.querySelector('em'))).toBe(blockBlot.children.tail);
expect(Registry.find(blockNode.querySelector('strong'))).toBe(
blockBlot.children.tail.children.tail,
);
let text01 = blockBlot.children.head.children.head;
let text23 = blockBlot.children.tail.children.head;
let text45 = blockBlot.children.tail.children.tail.children.head;
expect(Registry.find(text01.domNode)).toBe(text01);
expect(Registry.find(text23.domNode)).toBe(text23);
expect(Registry.find(text45.domNode)).toBe(text45);
});
it('bubble', function() {
let blockBlot = Registry.create('block');
let textNode = document.createTextNode('Test');
blockBlot.domNode.appendChild(textNode);
expect(Registry.find(textNode)).toBeFalsy();
expect(Registry.find(textNode, true)).toEqual(blockBlot);
});
it('detached parent', function() {<|fim▁hole|> let blockNode = document.createElement('p');
blockNode.appendChild(document.createTextNode('Test'));
expect(Registry.find(blockNode.firstChild)).toBeFalsy();
expect(Registry.find(blockNode.firstChild, true)).toBeFalsy();
});
});
describe('query()', function() {
it('class', function() {
let node = document.createElement('em');
node.setAttribute('class', 'author-blot');
expect(Registry.query(node)).toBe(AuthorBlot);
});
it('type mismatch', function() {
let match = Registry.query('italic', Registry.Scope.ATTRIBUTE);
expect(match).toBeFalsy();
});
it('level mismatch for blot', function() {
let match = Registry.query('italic', Registry.Scope.BLOCK);
expect(match).toBeFalsy();
});
it('level mismatch for attribute', function() {
let match = Registry.query('color', Registry.Scope.BLOCK);
expect(match).toBeFalsy();
});
it('either level', function() {
let match = Registry.query('italic', Registry.Scope.BLOCK | Registry.Scope.INLINE);
expect(match).toBe(ItalicBlot);
});
it('level and type match', function() {
let match = Registry.query('italic', Registry.Scope.INLINE & Registry.Scope.BLOT);
expect(match).toBe(ItalicBlot);
});
it('level match and type mismatch', function() {
let match = Registry.query('italic', Registry.Scope.INLINE & Registry.Scope.ATTRIBUTE);
expect(match).toBeFalsy();
});
it('type match and level mismatch', function() {
let match = Registry.query('italic', Registry.Scope.BLOCK & Registry.Scope.BLOT);
expect(match).toBeFalsy();
});
});
});<|fim▁end|>
| |
<|file_name|>milestones.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env vpython3
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Script for updating the active milestones for the chromium project.
To activate a new chromium branch, run the following from the root of
the repo (where MM is the milestone number and BBBB is the branch
number):
```
scripts/chromium/milestones.py activate --milestone MM --branch BBBB
./main.star
```
To deactivate a chromium branch, run the following from the root of the
repo (where MM is the milestone number):
```
scripts/chromium/milestones.py deactivate --milestone MM
./main.star
```
Usage:
milestones.py activate --milestone XX --branch YYYY
milestones.py deactivate --milestone XX
"""
import argparse
import itertools
import json
import os
import re
import sys
INFRA_CONFIG_DIR = os.path.abspath(os.path.join(__file__, '..', '..'))
def parse_args(args=None, *, parser_type=None):
parser_type = parser_type or argparse.ArgumentParser
parser = parser_type(
description='Update the active milestones for the chromium project')
parser.set_defaults(func=None)
parser.add_argument('--milestones-json',
help='Path to the milestones.json file',
default=os.path.join(INFRA_CONFIG_DIR, 'milestones.json'))
subparsers = parser.add_subparsers()
activate_parser = subparsers.add_parser(
'activate', help='Add an additional active milestone')
activate_parser.set_defaults(func=activate_cmd)
activate_parser.add_argument(
'--milestone',
required=True,
help=('The milestone identifier '
'(e.g. the milestone number for standard release channel)'))
activate_parser.add_argument(
'--branch',
required=True,
help='The branch name, must correspond to a ref in refs/branch-heads')
deactivate_parser = subparsers.add_parser(
'deactivate', help='Remove an active milestone')
deactivate_parser.set_defaults(func=deactivate_cmd)
deactivate_parser.add_argument(
'--milestone',
required=True,
help=('The milestone identifier '
'(e.g. the milestone number for standard release channel)'))
args = parser.parse_args(args)
if args.func is None:
parser.error('no sub-command specified')
return args<|fim▁hole|>class MilestonesException(Exception):
pass
_NUMBER_RE = re.compile('([0-9]+)')
def numeric_sort_key(s):
# The capture group in the regex means that the numeric portions are returned,
# odd indices will be the numeric portions of the string (the 0th or last
# element will be empty if the string starts or ends with a number,
# respectively)
pieces = _NUMBER_RE.split(s)
return [
(int(x), x) if is_numeric else x
for x, is_numeric
in zip(pieces, itertools.cycle([False, True]))
]
def add_milestone(milestones, milestone, branch):
if milestone in milestones:
raise MilestonesException(
f'there is already an active milestone with id {milestone!r}: '
f'{milestones[milestone]}')
milestones[milestone] = {
'name': f'm{milestone}',
'project': f'chromium-m{milestone}',
'ref': f'refs/branch-heads/{branch}',
}
milestones = {
k: milestones[k] for k in sorted(milestones, key=numeric_sort_key)
}
return json.dumps(milestones, indent=4) + '\n'
def activate_cmd(args):
with open(args.milestones_json) as f:
milestones = json.load(f)
milestones = add_milestone(milestones, args.milestone, args.branch)
with open(args.milestones_json, 'w') as f:
f.write(milestones)
def remove_milestone(milestones, milestone):
if milestone not in milestones:
raise MilestonesException(
f'{milestone!r} does not refer to an active milestone: '
f'{list(milestones.keys())}')
del milestones[milestone]
milestones = {
k: milestones[k] for k in sorted(milestones, key=numeric_sort_key)
}
return json.dumps(milestones, indent=4) + '\n'
def deactivate_cmd(args):
with open(args.milestones_json) as f:
milestones = json.load(f)
milestones = remove_milestone(milestones, args.milestone)
with open(args.milestones_json, 'w') as f:
f.write(milestones)
def main():
args = parse_args()
try:
args.func(args)
except MilestonesException as e:
print(str(e), file=sys.stderr)
sys.exit(1)
if __name__ == '__main__':
main()<|fim▁end|>
| |
<|file_name|>footer.js<|end_file_name|><|fim▁begin|>import React from 'react';
import styled from 'styled-components';
const FooterSection = styled.section`
padding: 0.4em 0em;
text-align: right;
`;
class Footer extends React.Component {
render() {
return <FooterSection>{ this.props.children }</FooterSection>;<|fim▁hole|>
}
export default Footer<|fim▁end|>
|
}
|
<|file_name|>test_file.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
#
# Copyright © 2013 IBM Corp
#
# Author: Tong Li <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging.handlers
import os
import tempfile
from ceilometer.dispatcher import file
from ceilometer.openstack.common.fixture import config
from ceilometer.openstack.common import test
from ceilometer.publisher import utils
<|fim▁hole|> def setUp(self):
super(TestDispatcherFile, self).setUp()
self.CONF = self.useFixture(config.Config()).conf
def test_file_dispatcher_with_all_config(self):
# Create a temporaryFile to get a file name
tf = tempfile.NamedTemporaryFile('r')
filename = tf.name
tf.close()
self.CONF.dispatcher_file.file_path = filename
self.CONF.dispatcher_file.max_bytes = 50
self.CONF.dispatcher_file.backup_count = 5
dispatcher = file.FileDispatcher(self.CONF)
# The number of the handlers should be 1
self.assertEqual(1, len(dispatcher.log.handlers))
# The handler should be RotatingFileHandler
handler = dispatcher.log.handlers[0]
self.assertIsInstance(handler,
logging.handlers.RotatingFileHandler)
msg = {'counter_name': 'test',
'resource_id': self.id(),
'counter_volume': 1,
}
msg['message_signature'] = utils.compute_signature(
msg,
self.CONF.publisher.metering_secret,
)
# The record_metering_data method should exist and not produce errors.
dispatcher.record_metering_data(msg)
# After the method call above, the file should have been created.
self.assertTrue(os.path.exists(handler.baseFilename))
def test_file_dispatcher_with_path_only(self):
# Create a temporaryFile to get a file name
tf = tempfile.NamedTemporaryFile('r')
filename = tf.name
tf.close()
self.CONF.dispatcher_file.file_path = filename
self.CONF.dispatcher_file.max_bytes = None
self.CONF.dispatcher_file.backup_count = None
dispatcher = file.FileDispatcher(self.CONF)
# The number of the handlers should be 1
self.assertEqual(1, len(dispatcher.log.handlers))
# The handler should be RotatingFileHandler
handler = dispatcher.log.handlers[0]
self.assertIsInstance(handler,
logging.FileHandler)
msg = {'counter_name': 'test',
'resource_id': self.id(),
'counter_volume': 1,
}
msg['message_signature'] = utils.compute_signature(
msg,
self.CONF.publisher.metering_secret,
)
# The record_metering_data method should exist and not produce errors.
dispatcher.record_metering_data(msg)
# After the method call above, the file should have been created.
self.assertTrue(os.path.exists(handler.baseFilename))
def test_file_dispatcher_with_no_path(self):
self.CONF.dispatcher_file.file_path = None
dispatcher = file.FileDispatcher(self.CONF)
# The log should be None
self.assertIsNone(dispatcher.log)<|fim▁end|>
|
class TestDispatcherFile(test.BaseTestCase):
|
<|file_name|>chart_of_accounts.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import frappe, os, json
from frappe.utils import cstr
from unidecode import unidecode
from six import iteritems
def create_charts(company, chart_template=None, existing_company=None):
chart = get_chart(chart_template, existing_company)
if chart:
accounts = []
def _import_accounts(children, parent, root_type, root_account=False):
for account_name, child in iteritems(children):
if root_account:
root_type = child.get("root_type")
if account_name not in ["account_number", "account_type",
"root_type", "is_group", "tax_rate"]:
account_number = cstr(child.get("account_number")).strip()
account_name, account_name_in_db = add_suffix_if_duplicate(account_name,
account_number, accounts)
is_group = identify_is_group(child)
report_type = "Balance Sheet" if root_type in ["Asset", "Liability", "Equity"] \
else "Profit and Loss"
account = frappe.get_doc({
"doctype": "Account",
"account_name": account_name,
"company": company,
"parent_account": parent,
"is_group": is_group,
"root_type": root_type,
"report_type": report_type,
"account_number": account_number,
"account_type": child.get("account_type"),
"account_currency": frappe.db.get_value("Company", company, "default_currency"),
"tax_rate": child.get("tax_rate")
})
if root_account or frappe.local.flags.allow_unverified_charts:
account.flags.ignore_mandatory = True
account.flags.ignore_permissions = True
account.insert()
accounts.append(account_name_in_db)
_import_accounts(child, account.name, root_type)
_import_accounts(chart, None, None, root_account=True)
def add_suffix_if_duplicate(account_name, account_number, accounts):
if account_number:
account_name_in_db = unidecode(" - ".join([account_number,
account_name.strip().lower()]))
else:
account_name_in_db = unidecode(account_name.strip().lower())
if account_name_in_db in accounts:
count = accounts.count(account_name_in_db)
account_name = account_name + " " + cstr(count)
return account_name, account_name_in_db
def identify_is_group(child):
if child.get("is_group"):
is_group = child.get("is_group")
elif len(set(child.keys()) - set(["account_type", "root_type", "is_group", "tax_rate", "account_number"])):
is_group = 1
else:
is_group = 0
return is_group
def get_chart(chart_template, existing_company=None):
chart = {}
if existing_company:
return get_account_tree_from_existing_company(existing_company)
elif chart_template == "Standard":
from erpnext.accounts.doctype.account.chart_of_accounts.verified import standard_chart_of_accounts
return standard_chart_of_accounts.get()
elif chart_template == "Standard with Numbers":
from erpnext.accounts.doctype.account.chart_of_accounts.verified \
import standard_chart_of_accounts_with_account_number
return standard_chart_of_accounts_with_account_number.get()
else:
folders = ("verified",)
if frappe.local.flags.allow_unverified_charts:
folders = ("verified", "unverified")
for folder in folders:
path = os.path.join(os.path.dirname(__file__), folder)
for fname in os.listdir(path):
fname = frappe.as_unicode(fname)
if fname.endswith(".json"):
with open(os.path.join(path, fname), "r") as f:
chart = f.read()
if chart and json.loads(chart).get("name") == chart_template:
return json.loads(chart).get("tree")
@frappe.whitelist()
def get_charts_for_country(country, with_standard=False):
charts = []
def _get_chart_name(content):
if content:
content = json.loads(content)
if (content and content.get("disabled", "No") == "No") \
or frappe.local.flags.allow_unverified_charts:
charts.append(content["name"])
country_code = frappe.db.get_value("Country", country, "code")
if country_code:
folders = ("verified",)
if frappe.local.flags.allow_unverified_charts:
folders = ("verified", "unverified")
for folder in folders:
path = os.path.join(os.path.dirname(__file__), folder)
if not os.path.exists(path):
continue
for fname in os.listdir(path):
fname = frappe.as_unicode(fname)
if (fname.startswith(country_code) or fname.startswith(country)) and fname.endswith(".json"):
with open(os.path.join(path, fname), "r") as f:
_get_chart_name(f.read())
# if more than one charts, returned then add the standard
if len(charts) != 1 or with_standard:
charts += ["Standard", "Standard with Numbers"]
return charts
def get_account_tree_from_existing_company(existing_company):
all_accounts = frappe.get_all('Account',
filters={'company': existing_company},
fields = ["name", "account_name", "parent_account", "account_type",
"is_group", "root_type", "tax_rate", "account_number"],
order_by="lft, rgt")
account_tree = {}
# fill in tree starting with root accounts (those with no parent)
if all_accounts:
build_account_tree(account_tree, None, all_accounts)
return account_tree
def build_account_tree(tree, parent, all_accounts):
# find children
parent_account = parent.name if parent else ""
children = [acc for acc in all_accounts if cstr(acc.parent_account) == parent_account]
# if no children, but a group account
if not children and parent.is_group:
tree["is_group"] = 1
tree["account_number"] = parent.account_number
# build a subtree for each child
for child in children:
# start new subtree
tree[child.account_name] = {}
# assign account_type and root_type
if child.account_number:
tree[child.account_name]["account_number"] = child.account_number
if child.account_type:
tree[child.account_name]["account_type"] = child.account_type
if child.tax_rate:
tree[child.account_name]["tax_rate"] = child.tax_rate
if not parent:
tree[child.account_name]["root_type"] = child.root_type
# call recursively to build a subtree for current account
build_account_tree(tree[child.account_name], child, all_accounts)
@frappe.whitelist()
def validate_bank_account(coa, bank_account):
accounts = []
chart = get_chart(coa)
if chart:
def _get_account_names(account_master):
for account_name, child in iteritems(account_master):
if account_name not in ["account_number", "account_type",
"root_type", "is_group", "tax_rate"]:
accounts.append(account_name)
_get_account_names(child)<|fim▁hole|>
_get_account_names(chart)
return (bank_account in accounts)<|fim▁end|>
| |
<|file_name|>Menubar.Add.js<|end_file_name|><|fim▁begin|>Menubar.Add = function ( editor ) {
var meshCount = 0;
var lightCount = 0;
// event handlers
function onObject3DOptionClick () {
var mesh = new THREE.Object3D();
mesh.name = 'Object3D ' + ( ++ meshCount );
editor.addObject( mesh );
editor.select( mesh );
}
//Plane
function onPlaneOptionClick () {
var width = 200;
var height = 200;
var widthSegments = 1;
var heightSegments = 1;
var geometry = new THREE.PlaneGeometry( width, height, widthSegments, heightSegments );
var material = new THREE.MeshPhongMaterial();
var mesh = new THREE.Mesh( geometry, material );
mesh.name = 'Plane ' + ( ++ meshCount );
editor.addObject( mesh );
editor.select( mesh );
};
//Triangle
function onTriangleOptionClick (){
var geometry = new THREE.Geometry();
var v1 = new THREE.Vector3(-100,0,0);
var v2 = new THREE.Vector3(100,0,0);
var v3 = new THREE.Vector3(0,100,0);
geometry.vertices.push(v1);
geometry.vertices.push(v2);
geometry.vertices.push(v3);
geometry.faces.push(new THREE.Face3(0,2,1));
var material = new THREE.MeshBasicMaterial({color:0xff0000});
var mesh = new THREE.Mesh(geometry, material);
mesh.name = 'Triangle ' + (++ meshCount);
editor.addObject( mesh );
editor.select(mesh);
};
//Box
function onBoxOptionClick () {
var width = 100;
var height = 100;
var depth = 100;
var widthSegments = 1;
var heightSegments = 1;
var depthSegments = 1;
var geometry = new THREE.BoxGeometry( width, height, depth, widthSegments, heightSegments, depthSegments );
var mesh = new THREE.Mesh( geometry, new THREE.MeshPhongMaterial() );
mesh.name = 'Box ' + ( ++ meshCount );
editor.addObject( mesh );
editor.select( mesh );
}
//Circle
function onCircleOptionClick () {
var radius = 20;
var segments = 32;
var geometry = new THREE.CircleGeometry( radius, segments );
var mesh = new THREE.Mesh( geometry, new THREE.MeshPhongMaterial() );
mesh.name = 'Circle ' + ( ++ meshCount );
editor.addObject( mesh );
editor.select( mesh );
}
//Cylinder
function onCylinderOptionClick () {
var radiusTop = 20;
var radiusBottom = 20;
var height = 100;
var radiusSegments = 32;
var heightSegments = 1;
var openEnded = false;
<|fim▁hole|> editor.addObject( mesh );
editor.select( mesh );
}
//Sphere
function onSphereOptionClick () {
var radius = 75;
var widthSegments = 32;
var heightSegments = 16;
var geometry = new THREE.SphereGeometry( radius, widthSegments, heightSegments );
var mesh = new THREE.Mesh( geometry, new THREE.MeshPhongMaterial() );
mesh.name = 'Sphere ' + ( ++ meshCount );
editor.addObject( mesh );
editor.select( mesh );
}
//Icosahedron
function onIcosahedronOptionClick () {
var radius = 75;
var detail = 2;
var geometry = new THREE.IcosahedronGeometry ( radius, detail );
var mesh = new THREE.Mesh( geometry, new THREE.MeshPhongMaterial() );
mesh.name = 'Icosahedron ' + ( ++ meshCount );
editor.addObject( mesh );
editor.select( mesh );
}
//Torus
function onTorusOptionClick () {
var radius = 100;
var tube = 40;
var radialSegments = 8;
var tubularSegments = 6;
var arc = Math.PI * 2;
var geometry = new THREE.TorusGeometry( radius, tube, radialSegments, tubularSegments, arc );
var mesh = new THREE.Mesh( geometry, new THREE.MeshPhongMaterial() );
mesh.name = 'Torus ' + ( ++ meshCount );
editor.addObject( mesh );
editor.select( mesh );
}
//Torus Knot
function onTorusKnotOptionClick () {
var radius = 100;
var tube = 40;
var radialSegments = 64;
var tubularSegments = 8;
var p = 2;
var q = 3;
var heightScale = 1;
var geometry = new THREE.TorusKnotGeometry( radius, tube, radialSegments, tubularSegments, p, q, heightScale );
var mesh = new THREE.Mesh( geometry, new THREE.MeshPhongMaterial() );
mesh.name = 'TorusKnot ' + ( ++ meshCount );
editor.addObject( mesh );
editor.select( mesh );
}
//Sprite
function onSpriteOptionClick () {
var sprite = new THREE.Sprite( new THREE.SpriteMaterial() );
sprite.name = 'Sprite ' + ( ++ meshCount );
editor.addObject( sprite );
editor.select( sprite );
}
function onPointLightOptionClick () {
var color = 0xffffff;
var intensity = 1;
var distance = 0;
var light = new THREE.PointLight( color, intensity, distance );
light.name = 'PointLight ' + ( ++ lightCount );
editor.addObject( light );
editor.select( light );
}
function onSpotLightOptionClick () {
var color = 0xffffff;
var intensity = 1;
var distance = 0;
var angle = Math.PI * 0.1;
var exponent = 10;
var light = new THREE.SpotLight( color, intensity, distance, angle, exponent );
//.distance
// light will attenuate linearly from maximum intensity at light position down to zero at distance.
// .angle
// Maximum extent of the spotlight, in radians, from its direction. Should be no more than Math.PI/2.
// Default — Math.PI/3.
// .exponent
// Rapidity of the falloff of light from its target direction.
// Default — 10.0.
light.name = 'SpotLight ' + ( ++ lightCount );
light.target.name = 'SpotLight ' + ( lightCount ) + ' Target';
light.position.set( 0, 1, 0 ).multiplyScalar( 200 );
editor.addObject( light );
editor.select( light );
}
function onDirectionalLightOptionClick () {
var color = 0xffffff;
var intensity = 1;
var light = new THREE.DirectionalLight( color, intensity );
light.name = 'DirectionalLight ' + ( ++ lightCount );
light.target.name = 'DirectionalLight ' + ( lightCount ) + ' Target';
light.position.set( 1, 1, 1 ).multiplyScalar( 200 );
editor.addObject( light );
editor.select( light );
}
function onHemisphereLightOptionClick () {
var skyColor = 0x00aaff;
var groundColor = 0xffaa00;
var intensity = 1;
var light = new THREE.HemisphereLight( skyColor, groundColor, intensity );
light.name = 'HemisphereLight ' + ( ++ lightCount );
light.position.set( 1, 1, 1 ).multiplyScalar( 200 );
editor.addObject( light );
editor.select( light );
}
function onAmbientLightOptionClick() {
var color = 0x222222;
var light = new THREE.AmbientLight( color );
light.name = 'AmbientLight ' + ( ++ lightCount );
editor.addObject( light );
editor.select( light );
}
// configure menu contents
var createOption = UI.MenubarHelper.createOption;
var createDivider = UI.MenubarHelper.createDivider;
var menuConfig = [
//createOption( 'Object3D', onObject3DOptionClick ),
//createDivider(),
createOption( 'Plane', onPlaneOptionClick ),
createOption('Triangle',onTriangleOptionClick),
createOption( 'Box', onBoxOptionClick ),
createOption( 'Circle', onCircleOptionClick ),
createOption( 'Cylinder', onCylinderOptionClick ),
createOption( 'Sphere', onSphereOptionClick ),
//createOption( 'Icosahedron', onIcosahedronOptionClick ),
//createOption( 'Torus', onTorusOptionClick ),
//createOption( 'Torus Knot', onTorusKnotOptionClick ),
createDivider(),
//createOption( 'Sprite', onSpriteOptionClick ),
createDivider(),
createOption( 'Point light', onPointLightOptionClick ),
createOption( 'Spot light', onSpotLightOptionClick ),
createOption( 'Directional light', onDirectionalLightOptionClick ),
createOption( 'Hemisphere light', onHemisphereLightOptionClick ),
createOption( 'Ambient light', onAmbientLightOptionClick )
];
var optionsPanel = UI.MenubarHelper.createOptionsPanel( menuConfig );
return UI.MenubarHelper.createMenuContainer( 'Add', optionsPanel );
}<|fim▁end|>
|
var geometry = new THREE.CylinderGeometry( radiusTop, radiusBottom, height, radiusSegments, heightSegments, openEnded );
var mesh = new THREE.Mesh( geometry, new THREE.MeshPhongMaterial() );
mesh.name = 'Cylinder ' + ( ++ meshCount );
|
<|file_name|>detect_faces_tinyface.py<|end_file_name|><|fim▁begin|>import matplotlib.pyplot as plt
from bob.io.base import load
from bob.io.base.test_utils import datafile
from bob.io.image import imshow
from bob.ip.facedetect.tinyface import TinyFacesDetector
from matplotlib.patches import Rectangle
# load colored test image
color_image = load(datafile("test_image_multi_face.png", "bob.ip.facedetect"))
is_mxnet_available = True
try:
import mxnet
except Exception:
is_mxnet_available = False
if not is_mxnet_available:
imshow(color_image)
else:
# detect all faces
detector = TinyFacesDetector()
detections = detector.detect(color_image)
imshow(color_image)
plt.axis("off")
for annotations in detections:
topleft = annotations["topleft"]
bottomright = annotations["bottomright"]
size = bottomright[0] - topleft[0], bottomright[1] - topleft[1]
# draw bounding boxes
plt.gca().add_patch(
Rectangle(
topleft[::-1],
size[1],
size[0],
edgecolor="b",
facecolor="none",
linewidth=2,
)<|fim▁hole|><|fim▁end|>
|
)
|
<|file_name|>ControlPanelButton.java<|end_file_name|><|fim▁begin|>package com.algebraweb.editor.client;
import com.google.gwt.user.client.ui.Button;
/**
* A button for the control panel. Will be styled accordingly.
*
* @author Patrick Brosi
*
*/
public class ControlPanelButton extends Button {
public ControlPanelButton(String desc) {
super();
this.addStyleName("controllpanel-button");
super.getElement().setAttribute("title", desc);
this.setWidth("39px");
this.setHeight("39px");
}
public ControlPanelButton(String desc, String styleClass) {
this(desc);<|fim▁hole|> this.addStyleName("controllbutton-" + styleClass);
}
}<|fim▁end|>
| |
<|file_name|>variables_0.js<|end_file_name|><|fim▁begin|>var searchData=
[<|fim▁hole|>];<|fim▁end|>
|
['node',['node',['../structconnectivity.html#af0fc7c1443c916dce333bee34787cd20',1,'connectivity']]]
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from .base import *
from .controller import *<|fim▁end|>
| |
<|file_name|>Types.ts<|end_file_name|><|fim▁begin|>import Artists from "./Steps/Artists"
import { BudgetComponent as Budget } from "./Steps/Budget"
import { CollectorIntentComponent as CollectorIntent } from "./Steps/CollectorIntent"
import Genes from "./Steps/Genes"
/**
* The props interface that the step needs to implement for the wizard.
*/
export interface StepProps {
onNextButtonPressed: (increaseBy?) => void
}
export interface StepComponent extends React.ComponentClass<StepProps> {
slug?: string
}
export interface FollowProps {<|fim▁hole|> | typeof Budget.slug
| typeof Artists.slug
| typeof Genes.slug
| typeof CollectorIntent.slug
| null<|fim▁end|>
|
updateFollowCount: (count: number) => void
}
export type StepSlugs =
|
<|file_name|>serialize.js<|end_file_name|><|fim▁begin|>/**
* A decorator for making sure specific function being invoked serializely.
*
* Usage:
* class A {
* @serialize
* async foo() {}
* }
*
*/
export default function serialize(target, key, descriptor) {
let prev = null;
function serializeFunc(...args) {
const next = () =>
Promise.resolve(descriptor.value.apply(this, args)).then(() => {
prev = null;
});
prev = prev ? prev.then(next) : next();
return prev;
}
return {
...descriptor,<|fim▁hole|><|fim▁end|>
|
value: serializeFunc,
};
}
|
<|file_name|>caddi2018_e.py<|end_file_name|><|fim▁begin|>def main() -> None:
N = int(input())
A = [int(x) for x in input().split()]
rev_A = A[:]
left = [-1] * N
left_cnt = [0] * N
A_left = [A[0]]
for i in range(1, N):
if rev_A[i-1] < rev_A[i]:
cnt = 0
while rev_A[i-1]
pass
elif rev_A[i-1] < rev_A[i] * 4:
now = i-1
while left[now] != -1:
now = left[now]
left[i] = now
A_left.append(A[i])
left[i] = i-1
else:
pass
ans = 10 ** 9
for i in range(N + 1):
A = AA[:]
cnt = 0
if i > 0:
A[i-1] *= -2
cnt += 1
for j in reversed(range(i-1)):
A[j] *= -2
cnt += 1
while A[j] > A[j+1]:<|fim▁hole|> cnt += 2
for j in range(i+1, N):
while A[j-1] > A[j]:
A[j] *= 4
cnt += 2
print(i, cnt, A)
ans = min(ans, cnt)
print(ans)
if __name__ == '__main__':
main()<|fim▁end|>
|
A[j] *= 4
|
<|file_name|>umbicon.directive.js<|end_file_name|><|fim▁begin|>/**
@ngdoc directive
@name umbraco.directives.directive:umbIcon
@restrict E
@scope
@description
Use this directive to show an render an umbraco backoffice svg icon. All svg icons used by this directive should use the following naming convention to keep things consistent: icon-[name of icon]. For example <pre>icon-alert.svg</pre>
<h3>Markup example</h3>
Simple icon
<pre>
<umb-icon icon="icon-alert"></umb-icon>
</pre>
Icon with additional attribute. It can be treated like any other dom element
<pre>
<umb-icon icon="icon-alert" class="another-class"></umb-icon>
</pre>
@example
**/
(function () {
"use strict";
function UmbIconDirective(iconHelper) {
var directive = {
replace: true,
transclude: true,
templateUrl: "views/components/umb-icon.html",
scope: {
icon: "@",
svgString: "=?"
},
link: function (scope, element) {
if (scope.svgString === undefined && scope.svgString !== null && scope.icon !== undefined && scope.icon !== null) {
const observer = new IntersectionObserver(_lazyRequestIcon, {rootMargin: "100px"});
const iconEl = element[0];
observer.observe(iconEl);
// make sure to disconnect the observer when the scope is destroyed
scope.$on('$destroy', function () {
observer.disconnect();
});
}
scope.$watch("icon", function (newValue, oldValue) {
if (newValue && oldValue) {
var newicon = newValue.split(" ")[0];
var oldicon = oldValue.split(" ")[0];
if (newicon !== oldicon) {
_requestIcon(newicon);
}
}<|fim▁hole|>
function _lazyRequestIcon(entries, observer) {
entries.forEach(entry => {
if (entry.isIntersecting === true) {
observer.disconnect();
var icon = scope.icon.split(" ")[0]; // Ensure that only the first part of the icon is used as sometimes the color is added too, e.g. see umbeditorheader.directive scope.openIconPicker
_requestIcon(icon);
}
});
}
function _requestIcon(icon) {
// Reset svg string before requesting new icon.
scope.svgString = null;
iconHelper.getIcon(icon)
.then(data => {
if (data && data.svgString) {
// Watch source SVG string
scope.svgString = data.svgString;
}
});
}
}
};
return directive;
}
angular.module("umbraco.directives").directive("umbIcon", UmbIconDirective);
})();<|fim▁end|>
|
});
|
<|file_name|>localization.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import webview<|fim▁hole|>
"""
This example demonstrates how to localize GUI strings used by pywebview.
"""
if __name__ == '__main__':
localization = {
'global.saveFile': u'Сохранить файл',
'cocoa.menu.about': u'О программе',
'cocoa.menu.services': u'Cлужбы',
'cocoa.menu.view': u'Вид',
'cocoa.menu.hide': u'Скрыть',
'cocoa.menu.hideOthers': u'Скрыть остальные',
'cocoa.menu.showAll': u'Показать все',
'cocoa.menu.quit': u'Завершить',
'cocoa.menu.fullscreen': u'Перейти ',
'windows.fileFilter.allFiles': u'Все файлы',
'windows.fileFilter.otherFiles': u'Остальлные файльы',
'linux.openFile': u'Открыть файл',
'linux.openFiles': u'Открыть файлы',
'linux.openFolder': u'Открыть папку',
}
window_localization_override = {
'global.saveFile': u'Save file',
}
webview.create_window(
'Localization Example',
'https://pywebview.flowrl.com/hello',
localization=window_localization_override,
)
webview.start(localization=localization)<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Support to serve the Home Assistant API as WSGI application."""
from __future__ import annotations
from ipaddress import ip_network
import logging
import os
import ssl
from typing import Any, Final, Optional, TypedDict, cast
from aiohttp import web
from aiohttp.typedefs import StrOrURL
from aiohttp.web_exceptions import HTTPMovedPermanently, HTTPRedirection
import voluptuous as vol
from homeassistant.components.network import async_get_source_ip
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, SERVER_PORT
from homeassistant.core import Event, HomeAssistant
from homeassistant.helpers import storage
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
from homeassistant.setup import async_start_setup, async_when_setup_or_start
from homeassistant.util import ssl as ssl_util
from .auth import setup_auth
from .ban import setup_bans
from .const import KEY_AUTHENTICATED, KEY_HASS, KEY_HASS_USER # noqa: F401
from .cors import setup_cors
from .forwarded import async_setup_forwarded
from .request_context import current_request, setup_request_context
from .security_filter import setup_security_filter
from .static import CACHE_HEADERS, CachingStaticResource
from .view import HomeAssistantView
from .web_runner import HomeAssistantTCPSite
DOMAIN: Final = "http"
CONF_SERVER_HOST: Final = "server_host"
CONF_SERVER_PORT: Final = "server_port"
CONF_BASE_URL: Final = "base_url"
CONF_SSL_CERTIFICATE: Final = "ssl_certificate"
CONF_SSL_PEER_CERTIFICATE: Final = "ssl_peer_certificate"
CONF_SSL_KEY: Final = "ssl_key"
CONF_CORS_ORIGINS: Final = "cors_allowed_origins"
CONF_USE_X_FORWARDED_FOR: Final = "use_x_forwarded_for"
CONF_TRUSTED_PROXIES: Final = "trusted_proxies"
CONF_LOGIN_ATTEMPTS_THRESHOLD: Final = "login_attempts_threshold"
CONF_IP_BAN_ENABLED: Final = "ip_ban_enabled"
CONF_SSL_PROFILE: Final = "ssl_profile"
SSL_MODERN: Final = "modern"
SSL_INTERMEDIATE: Final = "intermediate"
_LOGGER: Final = logging.getLogger(__name__)
DEFAULT_DEVELOPMENT: Final = "0"
# Cast to be able to load custom cards.
# My to be able to check url and version info.
DEFAULT_CORS: Final[list[str]] = ["https://cast.home-assistant.io"]
NO_LOGIN_ATTEMPT_THRESHOLD: Final = -1
MAX_CLIENT_SIZE: Final = 1024 ** 2 * 16
STORAGE_KEY: Final = DOMAIN
STORAGE_VERSION: Final = 1
SAVE_DELAY: Final = 180
HTTP_SCHEMA: Final = vol.All(
cv.deprecated(CONF_BASE_URL),
vol.Schema(
{
vol.Optional(CONF_SERVER_HOST): vol.All(
cv.ensure_list, vol.Length(min=1), [cv.string]
),
vol.Optional(CONF_SERVER_PORT, default=SERVER_PORT): cv.port,
vol.Optional(CONF_BASE_URL): cv.string,
vol.Optional(CONF_SSL_CERTIFICATE): cv.isfile,
vol.Optional(CONF_SSL_PEER_CERTIFICATE): cv.isfile,
vol.Optional(CONF_SSL_KEY): cv.isfile,
vol.Optional(CONF_CORS_ORIGINS, default=DEFAULT_CORS): vol.All(
cv.ensure_list, [cv.string]
),
vol.Inclusive(CONF_USE_X_FORWARDED_FOR, "proxy"): cv.boolean,
vol.Inclusive(CONF_TRUSTED_PROXIES, "proxy"): vol.All(
cv.ensure_list, [ip_network]
),
vol.Optional(
CONF_LOGIN_ATTEMPTS_THRESHOLD, default=NO_LOGIN_ATTEMPT_THRESHOLD
): vol.Any(cv.positive_int, NO_LOGIN_ATTEMPT_THRESHOLD),
vol.Optional(CONF_IP_BAN_ENABLED, default=True): cv.boolean,
vol.Optional(CONF_SSL_PROFILE, default=SSL_MODERN): vol.In(
[SSL_INTERMEDIATE, SSL_MODERN]
),
}
),
)
CONFIG_SCHEMA: Final = vol.Schema({DOMAIN: HTTP_SCHEMA}, extra=vol.ALLOW_EXTRA)
class ConfData(TypedDict, total=False):
"""Typed dict for config data."""
server_host: list[str]
server_port: int
base_url: str
ssl_certificate: str
ssl_peer_certificate: str
ssl_key: str
cors_allowed_origins: list[str]
use_x_forwarded_for: bool
trusted_proxies: list[str]
login_attempts_threshold: int
ip_ban_enabled: bool
ssl_profile: str
@bind_hass
async def async_get_last_config(hass: HomeAssistant) -> dict | None:
"""Return the last known working config."""
store = storage.Store(hass, STORAGE_VERSION, STORAGE_KEY)
return cast(Optional[dict], await store.async_load())
class ApiConfig:
"""Configuration settings for API server."""
def __init__(
self,
local_ip: str,
host: str,
port: int,
use_ssl: bool,
) -> None:
"""Initialize a new API config object."""
self.local_ip = local_ip
self.host = host
self.port = port
self.use_ssl = use_ssl
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the HTTP API and debug interface."""
conf: ConfData | None = config.get(DOMAIN)
if conf is None:
conf = cast(ConfData, HTTP_SCHEMA({}))
server_host = conf.get(CONF_SERVER_HOST)
server_port = conf[CONF_SERVER_PORT]
ssl_certificate = conf.get(CONF_SSL_CERTIFICATE)
ssl_peer_certificate = conf.get(CONF_SSL_PEER_CERTIFICATE)
ssl_key = conf.get(CONF_SSL_KEY)
cors_origins = conf[CONF_CORS_ORIGINS]
use_x_forwarded_for = conf.get(CONF_USE_X_FORWARDED_FOR, False)
trusted_proxies = conf.get(CONF_TRUSTED_PROXIES) or []
is_ban_enabled = conf[CONF_IP_BAN_ENABLED]
login_threshold = conf[CONF_LOGIN_ATTEMPTS_THRESHOLD]<|fim▁hole|> ssl_profile = conf[CONF_SSL_PROFILE]
server = HomeAssistantHTTP(
hass,
server_host=server_host,
server_port=server_port,
ssl_certificate=ssl_certificate,
ssl_peer_certificate=ssl_peer_certificate,
ssl_key=ssl_key,
cors_origins=cors_origins,
use_x_forwarded_for=use_x_forwarded_for,
trusted_proxies=trusted_proxies,
login_threshold=login_threshold,
is_ban_enabled=is_ban_enabled,
ssl_profile=ssl_profile,
)
async def stop_server(event: Event) -> None:
"""Stop the server."""
await server.stop()
async def start_server(*_: Any) -> None:
"""Start the server."""
with async_start_setup(hass, ["http"]):
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_server)
# We already checked it's not None.
assert conf is not None
await start_http_server_and_save_config(hass, dict(conf), server)
async_when_setup_or_start(hass, "frontend", start_server)
hass.http = server
local_ip = await async_get_source_ip(hass)
host = local_ip
if server_host is not None:
# Assume the first server host name provided as API host
host = server_host[0]
hass.config.api = ApiConfig(
local_ip, host, server_port, ssl_certificate is not None
)
return True
class HomeAssistantHTTP:
"""HTTP server for Home Assistant."""
def __init__(
self,
hass: HomeAssistant,
ssl_certificate: str | None,
ssl_peer_certificate: str | None,
ssl_key: str | None,
server_host: list[str] | None,
server_port: int,
cors_origins: list[str],
use_x_forwarded_for: bool,
trusted_proxies: list[str],
login_threshold: int,
is_ban_enabled: bool,
ssl_profile: str,
) -> None:
"""Initialize the HTTP Home Assistant server."""
app = self.app = web.Application(
middlewares=[], client_max_size=MAX_CLIENT_SIZE
)
app[KEY_HASS] = hass
# Order matters, security filters middle ware needs to go first,
# forwarded middleware needs to go second.
setup_security_filter(app)
async_setup_forwarded(app, use_x_forwarded_for, trusted_proxies)
setup_request_context(app, current_request)
if is_ban_enabled:
setup_bans(hass, app, login_threshold)
setup_auth(hass, app)
setup_cors(app, cors_origins)
self.hass = hass
self.ssl_certificate = ssl_certificate
self.ssl_peer_certificate = ssl_peer_certificate
self.ssl_key = ssl_key
self.server_host = server_host
self.server_port = server_port
self.trusted_proxies = trusted_proxies
self.is_ban_enabled = is_ban_enabled
self.ssl_profile = ssl_profile
self._handler = None
self.runner: web.AppRunner | None = None
self.site: HomeAssistantTCPSite | None = None
def register_view(self, view: HomeAssistantView) -> None:
"""Register a view with the WSGI server.
The view argument must be a class that inherits from HomeAssistantView.
It is optional to instantiate it before registering; this method will
handle it either way.
"""
if isinstance(view, type):
# Instantiate the view, if needed
view = view()
if not hasattr(view, "url"):
class_name = view.__class__.__name__
raise AttributeError(f'{class_name} missing required attribute "url"')
if not hasattr(view, "name"):
class_name = view.__class__.__name__
raise AttributeError(f'{class_name} missing required attribute "name"')
view.register(self.app, self.app.router)
def register_redirect(
self,
url: str,
redirect_to: StrOrURL,
*,
redirect_exc: type[HTTPRedirection] = HTTPMovedPermanently,
) -> None:
"""Register a redirect with the server.
If given this must be either a string or callable. In case of a
callable it's called with the url adapter that triggered the match and
the values of the URL as keyword arguments and has to return the target
for the redirect, otherwise it has to be a string with placeholders in
rule syntax.
"""
async def redirect(request: web.Request) -> web.StreamResponse:
"""Redirect to location."""
# Should be instance of aiohttp.web_exceptions._HTTPMove.
raise redirect_exc(redirect_to) # type: ignore[arg-type,misc]
self.app["allow_configured_cors"](
self.app.router.add_route("GET", url, redirect)
)
def register_static_path(
self, url_path: str, path: str, cache_headers: bool = True
) -> None:
"""Register a folder or file to serve as a static path."""
if os.path.isdir(path):
if cache_headers:
resource: CachingStaticResource | web.StaticResource = (
CachingStaticResource(url_path, path)
)
else:
resource = web.StaticResource(url_path, path)
self.app.router.register_resource(resource)
self.app["allow_configured_cors"](resource)
return
async def serve_file(request: web.Request) -> web.FileResponse:
"""Serve file from disk."""
if cache_headers:
return web.FileResponse(path, headers=CACHE_HEADERS)
return web.FileResponse(path)
self.app["allow_configured_cors"](
self.app.router.add_route("GET", url_path, serve_file)
)
async def start(self) -> None:
"""Start the aiohttp server."""
context: ssl.SSLContext | None
if self.ssl_certificate:
try:
if self.ssl_profile == SSL_INTERMEDIATE:
context = ssl_util.server_context_intermediate()
else:
context = ssl_util.server_context_modern()
await self.hass.async_add_executor_job(
context.load_cert_chain, self.ssl_certificate, self.ssl_key
)
except OSError as error:
_LOGGER.error(
"Could not read SSL certificate from %s: %s",
self.ssl_certificate,
error,
)
return
if self.ssl_peer_certificate:
context.verify_mode = ssl.CERT_REQUIRED
await self.hass.async_add_executor_job(
context.load_verify_locations, self.ssl_peer_certificate
)
else:
context = None
# Aiohttp freezes apps after start so that no changes can be made.
# However in Home Assistant components can be discovered after boot.
# This will now raise a RunTimeError.
# To work around this we now prevent the router from getting frozen
# pylint: disable=protected-access
self.app._router.freeze = lambda: None # type: ignore[assignment]
self.runner = web.AppRunner(self.app)
await self.runner.setup()
self.site = HomeAssistantTCPSite(
self.runner, self.server_host, self.server_port, ssl_context=context
)
try:
await self.site.start()
except OSError as error:
_LOGGER.error(
"Failed to create HTTP server at port %d: %s", self.server_port, error
)
_LOGGER.info("Now listening on port %d", self.server_port)
async def stop(self) -> None:
"""Stop the aiohttp server."""
if self.site is not None:
await self.site.stop()
if self.runner is not None:
await self.runner.cleanup()
async def start_http_server_and_save_config(
hass: HomeAssistant, conf: dict, server: HomeAssistantHTTP
) -> None:
"""Startup the http server and save the config."""
await server.start()
# If we are set up successful, we store the HTTP settings for safe mode.
store = storage.Store(hass, STORAGE_VERSION, STORAGE_KEY)
if CONF_TRUSTED_PROXIES in conf:
conf[CONF_TRUSTED_PROXIES] = [
str(ip.network_address) for ip in conf[CONF_TRUSTED_PROXIES]
]
store.async_delay_save(lambda: conf, SAVE_DELAY)<|fim▁end|>
| |
<|file_name|>spot_launcher.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import boto.ec2
from boto.ec2.blockdevicemapping import BlockDeviceType
from boto.ec2.blockdevicemapping import BlockDeviceMapping
import time
import copy
import argparse
import sys
import pprint
import os
import yaml
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
CONFIG_PATH = os.path.join(BASE_PATH, '../configs')
def launch_from_config(conn, instance_config_name, config_file_name):
spot_requests_config = get_config(config_file_name)
config = spot_requests_config[instance_config_name]
mapping = create_mapping(config)
print 'Launching %s instances'%(instance_config_name)
print 'Instance parameters:'
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(config)
spot_req = conn.request_spot_instances(
config['price'],
config['ami_id'],
count=config['count'],
type=config['type'],
key_name=config['key_name'],
instance_type=config['instance_type'],
placement_group=config['placement_group'],
security_group_ids=config['security_groups'],
subnet_id=config['subnet_id'],
instance_profile_name=config['instance_profile_name'],
block_device_map=mapping
)
request_ids = [req.id for req in spot_req]
print 'Waiting for fulfillment'<|fim▁hole|> if 'tags' in config:
tag_instances(conn, instance_ids, config['tags'])
return instance_ids
def get_config(config_file_name):
config_file = open(os.path.join(CONFIG_PATH, config_file_name))
config_dict = yaml.load(config_file.read())
return config_dict
def create_mapping(config):
if 'mapping' not in config:
return None
mapping = BlockDeviceMapping()
for ephemeral_name, device_path in config['mapping'].iteritems():
ephemeral = BlockDeviceType()
ephemeral.ephemeral_name = ephemeral_name
mapping[device_path] = ephemeral
return mapping
def wait_for_fulfillment(conn, request_ids, pending_request_ids):
"""Loop through all pending request ids waiting for them to be fulfilled.
If a request is fulfilled, remove it from pending_request_ids.
If there are still pending requests, sleep and check again in 10 seconds.
Only return when all spot requests have been fulfilled."""
instance_ids = []
failed_ids = []
time.sleep(10)
pending_statuses = set(['pending-evaluation', 'pending-fulfillment'])
while len(pending_request_ids) > 0:
results = conn.get_all_spot_instance_requests(
request_ids=pending_request_ids)
for result in results:
if result.status.code == 'fulfilled':
pending_request_ids.pop(pending_request_ids.index(result.id))
print '\nspot request %s fulfilled!'%result.id
instance_ids.append(result.instance_id)
elif result.status.code not in pending_statuses:
pending_request_ids.pop(pending_request_ids.index(result.id))
print '\nspot request %s could not be fulfilled. ' \
'Status code: %s'%(result.id, result.status.code)
failed_ids.append(result.id)
if len(pending_request_ids) > 0:
sys.stdout.write('.')
sys.stdout.flush()
time.sleep(10)
if len(failed_ids) > 0:
print 'The following spot requests ' \
'have failed: %s'%(', '.join(failed_ids))
else:
print 'All spot requests fulfilled!'
return instance_ids
def tag_instances(conn, instance_ids, tags):
instances = conn.get_only_instances(instance_ids=instance_ids)
for instance in instances:
for key, value in tags.iteritems():
instance.add_tag(key=key, value=value)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('instance', type=str,
help='Instance config name to launch')
parser.add_argument('-r', '--region', type=str, default='us-east-1',
help='EC2 region name')
parser.add_argument('-c', '--config-file', type=str, default='spot_requests.yml',
help='Spot requests config file name')
args = parser.parse_args()
conn = boto.ec2.connect_to_region(args.region)
config_file_name = args.config_file
instance_config_name = args.instance
launch_from_config(conn, instance_config_name, config_file_name)
if __name__ == '__main__':
main()<|fim▁end|>
|
instance_ids = wait_for_fulfillment(conn, request_ids,
copy.deepcopy(request_ids))
|
<|file_name|>DefaultPluginLoadingConfig.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2021 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*<|fim▁hole|> * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.plugin.config;
import com.navercorp.pinpoint.bootstrap.config.DefaultProfilerConfig;
import com.navercorp.pinpoint.bootstrap.config.Value;
import com.navercorp.pinpoint.common.util.StringUtils;
import java.util.Collections;
import java.util.List;
public class DefaultPluginLoadingConfig implements PluginLoadingConfig {
// ArtifactIdUtils.ARTIFACT_SEPARATOR
private static final String ARTIFACT_SEPARATOR = ";";
private List<String> pluginLoadOrder = Collections.emptyList();
private List<String> disabledPlugins = Collections.emptyList();
private List<String> importPluginIds = Collections.emptyList();
public DefaultPluginLoadingConfig() {
}
@Override
public List<String> getPluginLoadOrder() {
return pluginLoadOrder;
}
@Value("${profiler.plugin.load.order}")
public void setPluginLoadOrder(String pluginLoadOrder) {
this.pluginLoadOrder = StringUtils.tokenizeToStringList(pluginLoadOrder, ",");
}
@Override
public List<String> getDisabledPlugins() {
return disabledPlugins;
}
@Value("${profiler.plugin.disable}")
public void setDisabledPlugins(String disabledPlugins) {
this.disabledPlugins = StringUtils.tokenizeToStringList(disabledPlugins, ",");
}
@Override
public List<String> getImportPluginIds() {
return importPluginIds;
}
@Value("${" + DefaultProfilerConfig.IMPORT_PLUGIN + "}")
public void setImportPluginIds(String importPluginIds) {
this.importPluginIds = StringUtils.tokenizeToStringList(importPluginIds, ARTIFACT_SEPARATOR);
}
@Override
public String toString() {
return "DefaultPluginLoadingConfig{" +
"pluginLoadOrder=" + pluginLoadOrder +
", disabledPlugins=" + disabledPlugins +
", importPluginIds=" + importPluginIds +
'}';
}
}<|fim▁end|>
|
* http://www.apache.org/licenses/LICENSE-2.0
*
|
<|file_name|>AnimationGraphFactory.cpp<|end_file_name|><|fim▁begin|>#include "AnimationGraphFactory.h"
namespace animation
{
AnimationGraphFactory::AnimationGraphFactory()<|fim▁hole|> AnimationGraph* AnimationGraphFactory::createGraph(const std::string& _filename)
{
AnimationGraph* result = 0;
MyGUI::xml::Document doc;
if (doc.open(_filename))
{
MyGUI::xml::Element* root = doc.getRoot();
if (root)
{
result = new AnimationGraph(root->findAttribute("name"));
MyGUI::xml::ElementEnumerator data = root->getElementEnumerator();
while (data.next())
{
if (data->getName() == "Node")
{
IAnimationNode* node = mNodeFactory.createNode(data->findAttribute("type"), data->findAttribute("name"), result);
result->addNode(node);
MyGUI::xml::ElementEnumerator prop = data->getElementEnumerator();
while (prop.next("Property"))
{
std::string key = prop->findAttribute("key");
std::string value = prop->findAttribute("value");
node->setProperty(key, value);
}
}
else if (data->getName() == "Connection")
{
IAnimationNode* from_node = result->getNodeByName(data->findAttribute("from"));
IAnimationNode* to_node = result->getNodeByName(data->findAttribute("to"));
MyGUI::xml::ElementEnumerator point = data->getElementEnumerator();
while (point.next("Point"))
{
from_node->addConnection(
point->findAttribute("from"),
to_node,
point->findAttribute("to"));
}
}
}
}
}
return result;
}
} // namespace animation<|fim▁end|>
|
{
}
|
<|file_name|>spec.py<|end_file_name|><|fim▁begin|>from logger import *
# Easy Demo
"""
log_functions = [('no_negative_ret', 'no_negatives_log')]
log_function_args = []
def query():
def sqrt_filter(x):
return x[0] < 0
get_log('no_negatives_log').filter(sqrt_filter).print_log()
"""
# Intermediate Demo
"""
log_functions = [('add', 'add_log')]
log_function_args = [('mult', 'mult_log')]
def query():
print 'add log:'
get_log('add_log').print_log()
print 'mult log:'
get_log('mult_log').print_log()
"""
# Advanced Demo
"""
log_functions = []
log_function_args = [('process', 'url_log')]
def query():
import re<|fim▁hole|> r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def handle_url(urls):
for url in urls[0]:
if regex.match(url) is not None:
log('valid_url', url)
else:
log('invalid_url', url)
get_log('url_log').map(handle_url)
print 'Valid URLs:'
get_log('valid_url').print_log()
print 'Invalid URLs:'
get_log('invalid_url').print_log()
"""<|fim▁end|>
|
regex = re.compile(
|
<|file_name|>test_multipole.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
sys.path.insert(0, os.pardir)
from testing_harness import TestHarness, PyAPITestHarness
import openmc
from openmc.stats import Box
from openmc.source import Source
class MultipoleTestHarness(PyAPITestHarness):
def _build_inputs(self):
####################
# Materials
####################
moderator = openmc.Material(material_id=1)
moderator.set_density('g/cc', 1.0)
moderator.add_nuclide('H1', 2.0)
moderator.add_nuclide('O16', 1.0)
moderator.add_s_alpha_beta('c_H_in_H2O')
dense_fuel = openmc.Material(material_id=2)
dense_fuel.set_density('g/cc', 4.5)
dense_fuel.add_nuclide('U235', 1.0)
mats_file = openmc.Materials([moderator, dense_fuel])
mats_file.export_to_xml()
####################
# Geometry
####################
c1 = openmc.Cell(cell_id=1, fill=moderator)
mod_univ = openmc.Universe(universe_id=1, cells=(c1,))
r0 = openmc.ZCylinder(R=0.3)
c11 = openmc.Cell(cell_id=11, fill=dense_fuel, region=-r0)
c11.temperature = [500, 0, 700, 800]
c12 = openmc.Cell(cell_id=12, fill=moderator, region=+r0)
fuel_univ = openmc.Universe(universe_id=11, cells=(c11, c12))
lat = openmc.RectLattice(lattice_id=101)
lat.dimension = [2, 2]
lat.lower_left = [-2.0, -2.0]
lat.pitch = [2.0, 2.0]
lat.universes = [[fuel_univ]*2]*2
lat.outer = mod_univ
x0 = openmc.XPlane(x0=-3.0)
x1 = openmc.XPlane(x0=3.0)
y0 = openmc.YPlane(y0=-3.0)
y1 = openmc.YPlane(y0=3.0)
for s in [x0, x1, y0, y1]:
s.boundary_type = 'reflective'
c101 = openmc.Cell(cell_id=101, fill=lat, region=+x0 & -x1 & +y0 & -y1)
root_univ = openmc.Universe(universe_id=0, cells=(c101,))
geometry = openmc.Geometry(root_univ)
geometry.export_to_xml()
####################
# Settings
####################
sets_file = openmc.Settings()
sets_file.batches = 5
sets_file.inactive = 0
sets_file.particles = 1000
sets_file.source = Source(space=Box([-1, -1, -1], [1, 1, 1]))
sets_file.output = {'summary': True}
sets_file.temperature = {'tolerance': 1000, 'multipole': True}
sets_file.export_to_xml()
####################
# Plots
####################
plots_file = openmc.Plots()
plot = openmc.Plot(plot_id=1)
plot.basis = 'xy'
plot.color_by = 'cell'
plot.filename = 'cellplot'
plot.origin = (0, 0, 0)
plot.width = (7, 7)
plot.pixels = (400, 400)
plots_file.append(plot)
plot = openmc.Plot(plot_id=2)
plot.basis = 'xy'
plot.color_by = 'material'
plot.filename = 'matplot'
plot.origin = (0, 0, 0)
plot.width = (7, 7)
plot.pixels = (400, 400)
plots_file.append(plot)
plots_file.export_to_xml()
def execute_test(self):
if not 'OPENMC_MULTIPOLE_LIBRARY' in os.environ:
raise RuntimeError("The 'OPENMC_MULTIPOLE_LIBRARY' environment "
"variable must be specified for this test.")
else:
super(MultipoleTestHarness, self).execute_test()
def _get_results(self):
outstr = super(MultipoleTestHarness, self)._get_results()<|fim▁hole|> return outstr
def _cleanup(self):
f = os.path.join(os.getcwd(), 'plots.xml')
if os.path.exists(f):
os.remove(f)
super(MultipoleTestHarness, self)._cleanup()
if __name__ == '__main__':
harness = MultipoleTestHarness('statepoint.5.h5')
harness.main()<|fim▁end|>
|
su = openmc.Summary('summary.h5')
outstr += str(su.geometry.get_all_cells()[11])
|
<|file_name|>MyActor.java<|end_file_name|><|fim▁begin|>package pokemon.vue;
import pokemon.launcher.PokemonCore;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.Batch;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.actions.*;
public class MyActor extends Actor {
public Texture s=new Texture(Gdx.files.internal("crosshair.png"));
SpriteBatch b=new SpriteBatch();
<|fim▁hole|> public MyActor (float x, float y) {
this.x=x;
this.y=y;
System.out.print(PokemonCore.m.getCities().get(0).getX());
this.setBounds(x+PokemonCore.m.getCities().get(0).getX(),y+PokemonCore.m.getCities().get(0).getY(),s.getWidth(),s.getHeight());
/* RepeatAction action = new RepeatAction();
action.setCount(RepeatAction.FOREVER);
action.setAction(Actions.fadeOut(2f));*/
this.addAction(Actions.repeat(RepeatAction.FOREVER,Actions.sequence(Actions.fadeOut(1f),Actions.fadeIn(1f))));
//posx=this.getX();
//posy=this.getY();
//this.addAction(action);
//this.addAction(Actions.sequence(Actions.alpha(0),Actions.fadeIn(2f)));
System.out.println("Actor constructed");
b.getProjectionMatrix().setToOrtho2D(0, 0,640,360);
}
@Override
public void draw (Batch batch, float parentAlpha) {
b.begin();
Color color = getColor();
b.setColor(color.r, color.g, color.b, color.a * parentAlpha);
b.draw(s,this.getX()-15,this.getY()-15,30,30);
b.setColor(color);
b.end();
//System.out.println("Called");
//batch.draw(t,this.getX(),this.getY(),t.getWidth(),t.getHeight());
//batch.draw(s,this.getX()+Minimap.BourgPalette.getX(),this.getY()+Minimap.BourgPalette.getY(),30,30);
}
public void setPosition(float x, float y){
this.setX(x+this.x);
this.setY(y+this.y);
}
}<|fim▁end|>
|
float x,y;
|
<|file_name|>trace.hpp<|end_file_name|><|fim▁begin|>// -*- Mode:C++ -*-
/**************************************************************************************************/
/* */
/* Copyright (C) 2014 University of Hull */
/* */
/**************************************************************************************************/
/* */
/* module : support/trace.hpp */
/* project : */
/* description: */
/* */
/**************************************************************************************************/
#if !defined(UKACHULLDCS_0896X_SUPPORT_TRACE_HPP)
#define UKACHULLDCS_0896X_SUPPORT_TRACE_HPP
// includes, system
#include <boost/noncopyable.hpp> // boost::noncopyable
#include <iosfwd> // std::ostream (fwd)
#include <iostream> // std::cout
#include <string> // std::string
// includes, project
// #include <>
namespace support {
// types, exported (class, enum, struct, union, typedef)
class trace : private boost::noncopyable {
public:
explicit trace(std::string const&, std::ostream& = std::cout);
~trace();
static void enter(std::string const&, std::ostream& = std::cout);
static void leave(std::string const&, std::ostream& = std::cout);
static std::string prefix();
private:
std::string msg_;
std::ostream& os_;
};
// variables, exported (extern)
// functions, inlined (inline)
// functions, exported (extern)
<|fim▁hole|>
#define TRACE_NEVER(x)
#define TRACE_ALWAYS(x) volatile support::trace const _(x)
#define TRACE_FUNC_ALWAYS volatile support::trace const _(BOOST_CURRENT_FUNCTION)
#define TRACE_ENTER_ALWAYS(x) support::trace::enter(x)
#define TRACE_LEAVE_ALWAYS(x) support::trace::leave(x)
#endif // #if !defined(UKACHULLDCS_0896X_SUPPORT_TRACE_HPP)
// the following is intentionally outside the include guards to allow including this header
// multiple times with different settings for UKACHULLDCS_USE_TRACE
#if defined(TRACE)
# undef TRACE
#endif
#if defined(TRACE_FUNC)
# undef TRACE_FUNC
#endif
#if defined(TRACE_ENTER)
# undef TRACE_ENTER
#endif
#if defined(TRACE_LEAVE)
# undef TRACE_LEAVE
#endif
#if defined(UKACHULLDCS_ALL_TRACE)
# define UKACHULLDCS_USE_TRACE
#endif
#if defined(UKACHULLDCS_USE_TRACE)
# define TRACE(x) TRACE_ALWAYS(x)
# define TRACE_FUNC TRACE_FUNC_ALWAYS
# define TRACE_ENTER(x) TRACE_ENTER_ALWAYS(x)
# define TRACE_LEAVE(x) TRACE_LEAVE_ALWAYS(x)
#else
# define TRACE(x)
# define TRACE_FUNC
# define TRACE_ENTER(x)
# define TRACE_LEAVE(x)
#endif<|fim▁end|>
|
} // namespace support {
#include <boost/current_function.hpp> // BOOST_CURRENT_FUNCTION
|
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2007-2022 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import pytest
from hyperspy import signals
from hyperspy.misc.utils import (
is_hyperspy_signal,
parse_quantity,
slugify,
strlist2enumeration,
str2num,
swapelem,
fsdict,
closest_power_of_two,
shorten_name,
is_binned,
)
from hyperspy.exceptions import VisibleDeprecationWarning
def test_slugify():
assert slugify("a") == "a"
assert slugify("1a") == "1a"
assert slugify("1") == "1"
assert slugify("a a") == "a_a"
assert slugify(42) == "42"
assert slugify(3.14159) == "314159"
assert slugify("├── Node1") == "Node1"
assert slugify("a", valid_variable_name=True) == "a"
assert slugify("1a", valid_variable_name=True) == "Number_1a"
assert slugify("1", valid_variable_name=True) == "Number_1"
assert slugify("a", valid_variable_name=False) == "a"
assert slugify("1a", valid_variable_name=False) == "1a"
assert slugify("1", valid_variable_name=False) == "1"
def test_parse_quantity():
# From the metadata specification, the quantity is defined as
# "name (units)" without backets in the name of the quantity
assert parse_quantity("a (b)") == ("a", "b")
assert parse_quantity("a (b/(c))") == ("a", "b/(c)")
assert parse_quantity("a (c) (b/(c))") == ("a (c)", "b/(c)")
assert parse_quantity("a [b]") == ("a [b]", "")
assert parse_quantity("a [b]", opening="[", closing="]") == ("a", "b")
def test_is_hyperspy_signal():
s = signals.Signal1D(np.zeros((5, 5, 5)))
p = object()
assert is_hyperspy_signal(s) is True
assert is_hyperspy_signal(p) is False
def test_strlist2enumeration():
assert strlist2enumeration([]) == ""
assert strlist2enumeration("a") == "a"
assert strlist2enumeration(["a"]) == "a"
assert strlist2enumeration(["a", "b"]) == "a and b"
assert strlist2enumeration(["a", "b", "c"]) == "a, b and c"
def test_str2num():
assert (
str2num("2.17\t 3.14\t 42\n 1\t 2\t 3")
== np.array([[2.17, 3.14, 42.0], [1.0, 2.0, 3.0]])
).all()
def test_swapelem():
L = ["a", "b", "c"]
swapelem(L, 1, 2)
assert L == ["a", "c", "b"]
def test_fsdict():
parrot = {}
fsdict(
["This", "is", "a", "dead", "parrot"], "It has gone to meet its maker", parrot<|fim▁hole|> )
fsdict(["This", "parrot", "is", "no", "more"], "It is an ex parrot", parrot)
fsdict(
["This", "parrot", "has", "seized", "to", "be"],
"It is pushing up the daisies",
parrot,
)
fsdict([""], "I recognize a dead parrot when I see one", parrot)
assert (
parrot["This"]["is"]["a"]["dead"]["parrot"] == "It has gone to meet its maker"
)
assert parrot["This"]["parrot"]["is"]["no"]["more"] == "It is an ex parrot"
assert (
parrot["This"]["parrot"]["has"]["seized"]["to"]["be"]
== "It is pushing up the daisies"
)
assert parrot[""] == "I recognize a dead parrot when I see one"
def test_closest_power_of_two():
assert closest_power_of_two(5) == 8
assert closest_power_of_two(13) == 16
assert closest_power_of_two(120) == 128
assert closest_power_of_two(973) == 1024
def test_shorten_name():
assert (
shorten_name("And now for soemthing completely different.", 16)
== "And now for so.."
)
# Can be removed in v2.0:
def test_is_binned():
s = signals.Signal1D(np.zeros((5, 5)))
assert is_binned(s) == s.axes_manager[-1].is_binned
with pytest.warns(VisibleDeprecationWarning, match="Use of the `binned`"):
s.metadata.set_item("Signal.binned", True)
assert is_binned(s) == s.metadata.Signal.binned<|fim▁end|>
| |
<|file_name|>AsyncSemaphore.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance<|fim▁hole|> * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.distributedlog.common.concurrent;
import java.util.LinkedList;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.RejectedExecutionException;
import java.util.function.Supplier;
import javax.annotation.concurrent.GuardedBy;
import org.apache.bookkeeper.common.concurrent.FutureUtils;
import org.apache.distributedlog.common.util.Permit;
/**
* An AsyncSemaphore is a traditional semaphore but with asynchronous
* execution.
*
* <p>Grabbing a permit returns a `Future[Permit]`.
*
* <p>Basic usage:
* {{{
* val semaphore = new AsyncSemaphore(n)
* ...
* semaphore.acquireAndRun() {
* somethingThatReturnsFutureT()
* }
* }}}
*
* <p>Calls to acquire() and acquireAndRun are serialized, and tickets are
* given out fairly (in order of arrival).
*/
public class AsyncSemaphore {
private final Optional<Integer> maxWaiters;
private final Permit semaphorePermit = new Permit() {
@Override
public void release() {
releasePermit(this);
}
};
@GuardedBy("this")
private Optional<Throwable> closed = Optional.empty();
@GuardedBy("this")
private final LinkedList<CompletableFuture<Permit>> waitq;
@GuardedBy("this")
private int availablePermits;
/**
 * Constructs a semaphore with the given number of initially available permits.
 *
 * @param initialPermits number of permits available before any acquire()
 * @param maxWaiters optional upper bound on the wait queue; when present,
 *        acquire() fails with RejectedExecutionException once the queue
 *        is full
 */
public AsyncSemaphore(int initialPermits,
                      Optional<Integer> maxWaiters) {
    this.availablePermits = initialPermits;
    this.waitq = new LinkedList<>();
    this.maxWaiters = maxWaiters;
}
/**
 * Hand the released permit to the oldest queued waiter, or bank it as an
 * available permit when nobody is waiting.
 */
private synchronized void releasePermit(Permit permit) {
    CompletableFuture<Permit> waiter = waitq.pollFirst();
    if (waiter == null) {
        availablePermits += 1;
    } else {
        waiter.complete(permit);
    }
}
// Permits carry no state, so every successful acquisition can share the
// single semaphorePermit instance wrapped in an already-satisfied future.
private CompletableFuture<Permit> newFuturePermit() {
    return FutureUtils.value(semaphorePermit);
}
/**
* Acquire a [[Permit]], asynchronously.
*
* <p>Be sure to `permit.release()` in a
* - `finally` block of your `onSuccess` callback
* - `ensure` block of your future chain
*
* <p>Interrupting this future is only advisory, and will not release the permit
* if the future has already been satisfied.
*
* @note This method always return the same instance of [[Permit]].
* @return a `Future[Permit]` when the `Future` is satisfied, computation can proceed,
* or a Future.Exception[RejectedExecutionException]` if the configured maximum
* number of waiters would be exceeded.
*/
public synchronized CompletableFuture<Permit> acquire() {
    // Once fail(exc) has been called, every acquire is rejected with
    // the recorded cause.
    if (closed.isPresent()) {
        return FutureUtils.exception(closed.get());
    }
    if (availablePermits > 0) {
        availablePermits -= 1;
        return newFuturePermit();
    } else {
        if (maxWaiters.isPresent() && waitq.size() >= maxWaiters.get()) {
            return FutureUtils.exception(new RejectedExecutionException("Max waiters exceeded"));
        } else {
            CompletableFuture<Permit> future = FutureUtils.createFuture();
            // Whenever the waiter completes (satisfied by releasePermit,
            // or cancelled/failed externally), drop it from the wait
            // queue so a completed waiter is never polled again.
            future.whenComplete((value, cause) -> {
                synchronized (AsyncSemaphore.this) {
                    waitq.remove(future);
                }
            });
            waitq.addLast(future);
            return future;
        }
    }
}
/**
 * Fail the semaphore and stop it from distributing further permits. Subsequent
 * attempts to acquire a permit fail with {@code exc}. This semaphore's queued
 * waiters are also failed with {@code exc}.
 */
public synchronized void fail(Throwable exc) {
    closed = Optional.of(exc);
    // Drain the queue before completing the waiters: completing a waiter
    // fires its whenComplete callback (registered in acquire()), which
    // mutates waitq; completing while iterating waitq directly would make
    // the fail-fast iterator throw ConcurrentModificationException.
    LinkedList<CompletableFuture<Permit>> waiters = new LinkedList<>(waitq);
    waitq.clear();
    for (CompletableFuture<Permit> waiter : waiters) {
        // completeExceptionally (rather than cancel) propagates the
        // caller-supplied cause, matching the documented contract that
        // waiters are failed with exc, not a CancellationException.
        waiter.completeExceptionally(exc);
    }
}
/**
* Execute the function asynchronously when a permit becomes available.
*
* <p>If the function throws a non-fatal exception, the exception is returned as part of the Future.
* For all exceptions, the permit would be released before returning.
*
* @return a Future[T] equivalent to the return value of the input function. If the configured
* maximum value of waitq is reached, Future.Exception[RejectedExecutionException] is
* returned.
*/
public <T> CompletableFuture<T> acquireAndRun(Supplier<CompletableFuture<T>> func) {
    return acquire().thenCompose(permit -> {
        CompletableFuture<T> future;
        try {
            future = func.get();
            // Release only once the supplied future finishes, whether it
            // succeeds or fails.
            future.whenComplete((value, cause) -> permit.release());
            return future;
        } catch (Throwable cause) {
            // func threw synchronously: release immediately and rethrow so
            // the exception propagates through the composed future.
            permit.release();
            throw cause;
        }
    });
}
}<|fim▁end|>
|
* with the License. You may obtain a copy of the License at
*
|
<|file_name|>async-gen-dstr-const-obj-ptrn-id-init-fn-name-cover.js<|end_file_name|><|fim▁begin|>// This file was procedurally generated from the following sources:
// - src/dstr-binding-for-await/obj-ptrn-id-init-fn-name-cover.case
// - src/dstr-binding-for-await/default/for-await-of-async-gen-const.template
/*---
description: SingleNameBinding assigns `name` to "anonymous" functions "through" cover grammar (for-await-of statement)
esid: sec-for-in-and-for-of-statements-runtime-semantics-labelledevaluation
features: [destructuring-binding, async-iteration]
flags: [generated, async]
info: |
IterationStatement :
for await ( ForDeclaration of AssignmentExpression ) Statement
[...]
2. Return ? ForIn/OfBodyEvaluation(ForDeclaration, Statement, keyResult,
lexicalBinding, labelSet, async).
13.7.5.13 Runtime Semantics: ForIn/OfBodyEvaluation
[...]
4. Let destructuring be IsDestructuring of lhs.
[...]
6. Repeat
[...]
j. If destructuring is false, then
[...]
k. Else
i. If lhsKind is assignment, then
[...]
ii. Else if lhsKind is varBinding, then
[...]
iii. Else,
1. Assert: lhsKind is lexicalBinding.<|fim▁hole|> 2. Assert: lhs is a ForDeclaration.
3. Let status be the result of performing BindingInitialization
for lhs passing nextValue and iterationEnv as arguments.
[...]
13.3.3.7 Runtime Semantics: KeyedBindingInitialization
SingleNameBinding : BindingIdentifier Initializer_opt
[...]
6. If Initializer is present and v is undefined, then
[...]
d. If IsAnonymousFunctionDefinition(Initializer) is true, then
i. Let hasNameProperty be HasOwnProperty(v, "name").
ii. ReturnIfAbrupt(hasNameProperty).
iii. If hasNameProperty is false, perform SetFunctionName(v,
bindingId).
---*/
var iterCount = 0;
async function *fn() {
for await (const { cover = (function () {}), xCover = (0, function() {}) } of [{}]) {
assert.sameValue(cover.name, 'cover');
assert.notSameValue(xCover.name, 'xCover');
iterCount += 1;
}
}
fn().next()
.then(() => assert.sameValue(iterCount, 1, 'iteration occurred as expected'), $DONE)
.then($DONE, $DONE);<|fim▁end|>
| |
<|file_name|>a.rs<|end_file_name|><|fim▁begin|>fn main() {
// foo must be used.
foo();
}
// For this test to operate correctly, foo's body must start on exactly the same
// line and column and have the exact same length in bytes in a.rs and b.rs. In
// a.rs, the body must end on a line number which does not exist in b.rs.
// Basically, avoid modifying this file, including adding or removing whitespace!
fn foo() {
assert_eq!(1, 1);
<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import sorl.thumbnail.fields
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='WebpageSnapshot',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('url', models.URLField(db_index=True)),
('image', sorl.thumbnail.fields.ImageField(upload_to='thummer/snapshots', null=True, editable=False)),
('capture_width', models.IntegerField(default=1680, editable=False)),
('created_at', models.DateTimeField(auto_now_add=True)),
('captured_at', models.DateTimeField(null=True, editable=False)),
],<|fim▁hole|> },
),
]<|fim▁end|>
|
options={
'ordering': ['-captured_at'],
'get_latest_by': 'captured_at',
|
<|file_name|>db_ext_split.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Usage: db_ext_split.py <src> <dst> <prob>
Options:<|fim▁hole|> -h --help
"""
import os
import cv2
from glob import glob
from docopt import docopt
from mscr.split import Split, RandomSplitPredicate
from mscr.util import Crop
from mscr.data import MyProgressBar
PAD = 8
if __name__ == '__main__':
args = docopt(__doc__)
src = args['<src>']
dst = args['<dst>']
prob = float(args['<prob>'])
split = Split(RandomSplitPredicate(p=prob))
crop = Crop()
count = 0
if os.path.exists(src) and os.path.exists(dst):
filz = glob(os.path.join(src, '*.jpg'))
pbar = MyProgressBar(len(filz), 'extending db:')
for im in filz:
img = cv2.imread(im)
img = crop.run(img)
for bl in split.run(img):
out = os.path.join(dst, str(count).zfill(PAD) + '.jpg')
cv2.imwrite(out, bl.img)
count += 1
pbar.update()
pbar.finish()
else:
print 'err: dimstat.py: path doesn\'t exists'<|fim▁end|>
| |
<|file_name|>acszpt.py<|end_file_name|><|fim▁begin|>"""
This module contains a class, :class:`Query`, that was implemented to provide
users with means to programmatically query the
`ACS Zeropoints Calculator <https://acszeropoints.stsci.edu>`_.
The API works by submitting requests to the
ACS Zeropoints Calculator referenced above and hence, it is only valid for ACS
specific instruments (HRC, SBC, or WFC).
The API can be used in two ways by specifying either a
``(date, detector, filter)`` combination or just a ``(date, detector)``
combination. In the first case, the query
will return the zeropoint information for the specific filter and detector at
specified date. In the second case, the query will return the zeropoint
information for all the filters for the desired detector at the specified date.
In either case, the result will be an ``astropy.table.QTable`` where each column
is an ``astropy.units.quantity.Quantity`` object with the appropriate units attached.
Examples
--------
Retrieve the zeropoint information for all the filters on 2016-04-01 for WFC:
>>> from acstools import acszpt
>>> date = '2016-04-01'
>>> detector = 'WFC'<|fim▁hole|> Angstrom erg / (Angstrom cm2 s) mag(ST) mag mag(AB)
str6 float64 float64 float64 float64 float64
------ -------- ---------------------- ------- ------- -------
F435W 4329.2 3.148e-19 25.155 25.763 25.665
F475W 4746.2 1.827e-19 25.746 26.149 26.056
F502N 5023.0 5.259e-18 22.098 22.365 22.285
F550M 5581.5 3.99e-19 24.898 24.825 24.856
F555W 5360.9 1.963e-19 25.667 25.713 25.713
F606W 5922.0 7.811e-20 26.668 26.405 26.498
F625W 6312.0 1.188e-19 26.213 25.735 25.904
F658N 6584.0 1.97e-18 23.164 22.381 22.763
F660N 6599.4 5.156e-18 22.119 21.428 21.714
F775W 7693.2 9.954e-20 26.405 25.272 25.667
F814W 8045.0 7.046e-20 26.78 25.517 25.944
F850LP 9033.2 1.52e-19 25.945 24.332 24.858
F892N 8914.8 1.502e-18 23.458 21.905 22.4
Retrieve the zeropoint information for the F435W filter on 2016-04-01 for WFC:
>>> from acstools import acszpt
>>> date = '2016-04-01'
>>> detector = 'WFC'
>>> filt = 'F435W'
>>> q = acszpt.Query(date=date, detector=detector, filter=filt)
>>> zpt_table = q.fetch()
>>> print(zpt_table)
FILTER PHOTPLAM PHOTFLAM STmag VEGAmag ABmag
Angstrom erg / (Angstrom cm2 s) mag(ST) mag mag(AB)
------ -------- ---------------------- ------- ------- -------
F435W 4329.2 3.148e-19 25.155 25.763 25.665
Retrieve the zeropoint information for the F435W filter for WFC at multiple dates:
>>> from acstools import acszpt
>>> dates = ['2004-10-13', '2011-04-01', '2014-01-17', '2018-05-23']
>>> queries = []
>>> for date in dates:
... q = acszpt.Query(date=date, detector='WFC', filt='F435W')
... zpt_table = q.fetch()
... # Each object has a zpt_table attribute, so we save the instance
... queries.append(q)
>>> for q in queries:
... print(q.date, q.zpt_table['PHOTFLAM'][0], q.zpt_table['STmag'][0])
2004-10-13 3.074e-19 erg / (Angstrom cm2 s) 25.181 mag(ST)
2011-04-01 3.138e-19 erg / (Angstrom cm2 s) 25.158 mag(ST)
2014-01-17 3.144e-19 erg / (Angstrom cm2 s) 25.156 mag(ST)
2018-05-23 3.152e-19 erg / (Angstrom cm2 s) 25.154 mag(ST)
>>> type(queries[0].zpt_table['PHOTFLAM'])
astropy.units.quantity.Quantity
"""
import datetime as dt
import logging
import os
from urllib.request import urlopen
from urllib.error import URLError
import astropy.units as u
from astropy.table import QTable
from bs4 import BeautifulSoup
import numpy as np
__taskname__ = "acszpt"
__author__ = "Nathan Miles"
__version__ = "1.0"
__vdate__ = "22-Jan-2019"
__all__ = ['Query']
# Initialize the logger
logging.basicConfig()
LOG = logging.getLogger(f'{__taskname__}.Query')
LOG.setLevel(logging.INFO)
class Query:
"""Class used to interface with the ACS Zeropoints Calculator API.
Parameters
----------
date : str
Input date in the following ISO format, YYYY-MM-DD.
detector : {'HRC', 'SBC', 'WFC'}
One of the three channels on ACS: HRC, SBC, or WFC.
filt : str or `None`, optional
One of valid filters for the chosen detector. If no filter is supplied,
all of the filters for the chosen detector will be used:
* HRC:
F220W, F250W, F330W,
F344N, F435W, F475W,
F502N, F550M, F555W,
F606W, F625W, F658N, F660N,
F775W, F814W, F850LP, F892N
* WFC:
F435W, F475W,
F502N, F550M, F555W,
F606W, F625W, F658N, F660N,
F775W, F814W, F850LP, F892N
* SBC:
F115LP, F122M, F125LP,
F140LP, F150LP, F165LP
"""
def __init__(self, date, detector, filt=None):
    # Set the attributes
    self._date = date
    self._detector = detector.upper()
    self._filt = filt
    # Filters supported by the calculator, keyed by detector channel.
    self.valid_filters = {
        'WFC': ['F435W', 'F475W', 'F502N', 'F550M',
                'F555W', 'F606W', 'F625W', 'F658N',
                'F660N', 'F775W', 'F814W', 'F850LP', 'F892N'],
        'HRC': ['F220W', 'F250W', 'F330W', 'F344N',
                'F435W', 'F475W', 'F502N', 'F550M',
                'F555W', 'F606W', 'F625W', 'F658N',
                'F660N', 'F775W', 'F814W', 'F850LP', 'F892N'],
        'SBC': ['F115LP', 'F122M', 'F125LP',
                'F140LP', 'F150LP', 'F165LP']
    }
    self._zpt_table = None
    # Set the private attributes
    # With no filter, hit the "all filters" endpoint; otherwise the
    # single-filter endpoint with the filter normalized to upper case.
    if filt is None:
        self._url = ('https://acszeropoints.stsci.edu/results_all/?'
                     f'date={self.date}&detector={self.detector}')
    else:
        self._filt = filt.upper()
        self._url = ('https://acszeropoints.stsci.edu/results_single/?'
                     f'date1={self.date}&detector={self.detector}'
                     f'&{self.detector}_filter={self.filt}')
    # ACS Launch Date
    self._acs_installation_date = dt.datetime(2002, 3, 7)
    # The farthest date in future that the component and throughput files
    # are valid for. If input date is larger, extrapolation is not valid.
    self._extrapolation_date = dt.datetime(2021, 12, 31)
    self._msg_div = '-' * 79
    self._valid_detectors = ['HRC', 'SBC', 'WFC']
    self._response = None
    self._failed = False
    # Units attached to each results column when building the QTable.
    self._data_units = {
        'FILTER': u.dimensionless_unscaled,
        'PHOTPLAM': u.angstrom,
        'PHOTFLAM': u.erg / u.cm ** 2 / u.second / u.angstrom,
        'STmag': u.STmag,
        'VEGAmag': u.mag,
        'ABmag': u.ABmag
    }
    # The web page's table repeats in blocks of one cell per column.
    self._block_size = len(self._data_units)
@property
def date(self):
"""The user supplied date. (str)"""
return self._date
@property
def detector(self):
"""The user supplied detector. (str)"""
return self._detector
@property
def filt(self):
"""The user supplied filter, if one was given. (str or `None`)"""
return self._filt
@property
def zpt_table(self):
"""The results returned by the ACS Zeropoint Calculator. (`astropy.table.QTable`)"""
return self._zpt_table
def _check_inputs(self):
    """Check the inputs to ensure they are valid.

    Each problem found is logged individually so the user sees every
    issue at once rather than one per run.

    Returns
    -------
    status : bool
        True if all inputs are valid, False if one is not.
    """
    valid_detector = True
    valid_filter = True
    valid_date = True
    # Determine the submitted detector is valid
    if self.detector not in self._valid_detectors:
        msg = (f'{self.detector} is not a valid detector option.\n'
               'Please choose one of the following:\n'
               f'{os.linesep.join(self._valid_detectors)}\n'
               f'{self._msg_div}')
        LOG.error(msg)
        valid_detector = False
    # Determine if the submitted filter is valid; only checked when a
    # filter was given AND the detector is known, since the valid-filter
    # list is keyed by detector.
    if (self.filt is not None and valid_detector and
            self.filt not in self.valid_filters[self.detector]):
        msg = (f'{self.filt} is not a valid filter for {self.detector}\n'
               'Please choose one of the following:\n'
               f'{os.linesep.join(self.valid_filters[self.detector])}\n'
               f'{self._msg_div}')
        LOG.error(msg)
        valid_filter = False
    # Determine if the submitted date is valid
    date_check = self._check_date()
    if date_check is not None:
        LOG.error(f'{date_check}\n{self._msg_div}')
        valid_date = False
    if not valid_detector or not valid_filter or not valid_date:
        return False
    return True
def _check_date(self, fmt='%Y-%m-%d'):
"""Convenience method for determining if the input date is valid.
Parameters
----------
fmt : str
The format of the date string. The default is ``%Y-%m-%d``, which
corresponds to ``YYYY-MM-DD``.
Returns
-------
status : str or `None`
If the date is valid, returns `None`. If the date is invalid,
returns a message explaining the issue.
"""
result = None
try:
dt_obj = dt.datetime.strptime(self.date, fmt)
except ValueError:
result = f'{self.date} does not match YYYY-MM-DD format'
else:
if dt_obj < self._acs_installation_date:
result = ('The observation date cannot occur '
'before ACS was installed '
f'({self._acs_installation_date.strftime(fmt)})')
elif dt_obj > self._extrapolation_date:
result = ('The observation date cannot occur after the '
'maximum allowable date, '
f'{self._extrapolation_date.strftime(fmt)}. '
'Extrapolations of the '
'instrument throughput after this date lead to '
'high uncertainties and are therefore invalid.')
finally:
return result
def _submit_request(self):
    """Submit a request to the ACS Zeropoint Calculator.

    If an exception is raised during the request, an error message is
    given. Otherwise, the response is saved in the corresponding
    attribute.
    """
    # Reject anything that is not an http(s) URL before opening it.
    if not self._url.startswith('http'):
        raise ValueError(f'Invalid URL {self._url}')
    try:
        self._response = urlopen(self._url)  # nosec
    except URLError as e:
        msg = (f'{repr(e)}\n{self._msg_div}\nThe query failed! '
               'Please check your inputs. '
               'If the error persists, submit a ticket to the '
               'ACS Help Desk at hsthelp.stsci.edu with the error message '
               'displayed above.')
        LOG.error(msg)
        self._failed = True
    else:
        # _failed gates fetch(): results are only parsed when the
        # request succeeded.
        self._failed = False
def _parse_and_format(self):
    """ Parse and format the results returned by the ACS Zeropoint Calculator.

    Using ``beautifulsoup4``, find all the ``<tb> </tb>`` tags present in
    the response. Format the results into an astropy.table.QTable with
    corresponding units and assign it to the zpt_table attribute.
    """
    soup = BeautifulSoup(self._response.read(), 'html.parser')
    # Grab all elements in the table returned by the ZPT calc.
    td = soup.find_all('td')
    # Remove the units attached to PHOTFLAM and PHOTPLAM column names.
    td = [val.text.split(' ')[0] for val in td]
    # Turn the single list into a 2-D numpy array
    # (one row per block of _block_size cells).
    data = np.reshape(td,
                      (int(len(td) / self._block_size), self._block_size))
    # Create the QTable, note that sometimes self._response will be empty
    # even though the return was successful; hence the try/except to catch
    # any potential index errors. Provide the user with a message and
    # set the zpt_table to None.
    try:
        # The first block of cells is the header row; the rest are data.
        tab = QTable(data[1:, :],
                     names=data[0],
                     dtype=[str, float, float, float, float, float])
    except IndexError as e:
        msg = (f'{repr(e)}\n{self._msg_div}\n'
               'There was an issue parsing the request. '
               'Try resubmitting the query. If this issue persists, please '
               'submit a ticket to the Help Desk at'
               'https://stsci.service-now.com/hst')
        LOG.info(msg)
        self._zpt_table = None
    else:
        # If and only if no exception was raised, attach the units to each
        # column of the QTable. Note we skip the FILTER column because
        # Quantity objects in astropy must be numerical (i.e. not str)
        for col in tab.colnames:
            if col.lower() == 'filter':
                continue
            tab[col].unit = self._data_units[col]
        self._zpt_table = tab
def fetch(self):
"""Submit the request to the ACS Zeropoints Calculator.
This method will:
* submit the request
* parse the response
* format the results into a table with the correct units
Returns
-------
tab : `astropy.table.QTable` or `None`
If the request was successful, returns a table; otherwise, `None`.
"""
LOG.info('Checking inputs...')
valid_inputs = self._check_inputs()
if valid_inputs:
LOG.info(f'Submitting request to {self._url}')
self._submit_request()
if self._failed:
return
LOG.info('Parsing the response and formatting the results...')
self._parse_and_format()
return self.zpt_table
LOG.error('Please fix the incorrect input(s)')<|fim▁end|>
|
>>> q = acszpt.Query(date=date, detector=detector)
>>> zpt_table = q.fetch()
>>> print(zpt_table)
FILTER PHOTPLAM PHOTFLAM STmag VEGAmag ABmag
|
<|file_name|>scanner.py<|end_file_name|><|fim▁begin|>"""
Iterator based sre token scanner
"""
import sre_parse, sre_compile, sre_constants<|fim▁hole|>__all__ = ['Scanner', 'pattern']
FLAGS = (VERBOSE | MULTILINE | DOTALL)
class Scanner(object):
def __init__(self, lexicon, flags=FLAGS):
self.actions = [None]
# combine phrases into a compound pattern
s = sre_parse.Pattern()
s.flags = flags
p = []
for idx, token in enumerate(lexicon):
phrase = token.pattern
try:
subpattern = sre_parse.SubPattern(s,
[(SUBPATTERN, (idx + 1, sre_parse.parse(phrase, flags)))])
except sre_constants.error:
raise
p.append(subpattern)
self.actions.append(token)
p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
self.scanner = sre_compile.compile(p)
def iterscan(self, string, idx=0, context=None):
"""
Yield match, end_idx for each match
"""
match = self.scanner.scanner(string, idx).match
actions = self.actions
lastend = idx
end = len(string)
while True:
m = match()
if m is None:
break
matchbegin, matchend = m.span()
if lastend == matchend:
break
action = actions[m.lastindex]
if action is not None:
rval, next_pos = action(m, context)
if next_pos is not None and next_pos != matchend:
# "fast forward" the scanner
matchend = next_pos
match = self.scanner.scanner(string, matchend).match
yield rval, matchend
lastend = matchend
def pattern(pattern, flags=FLAGS):
def decorator(fn):
fn.pattern = pattern
fn.regex = re.compile(pattern, flags)
return fn
return decorator<|fim▁end|>
|
from sre_constants import BRANCH, SUBPATTERN
from re import VERBOSE, MULTILINE, DOTALL
import re
|
<|file_name|>UInt256.py<|end_file_name|><|fim▁begin|>from neo.Core.UIntBase import UIntBase
class UInt256(UIntBase):
def __init__(self, data=None):
    """Initialize a 256-bit unsigned integer backed by 32 bytes of data."""
    super(UInt256, self).__init__(num_bytes=32, data=data)
@staticmethod
def ParseString(value):
"""
Parse the input str `value` into UInt256
Raises:<|fim▁hole|> ValueError: if the input `value` length (after '0x' if present) != 64
"""
if value[0:2] == '0x':
value = value[2:]
if not len(value) == 64:
raise ValueError(f"Invalid UInt256 input: {len(value)} chars != 64 chars")
reversed_data = bytearray.fromhex(value)
reversed_data.reverse()
return UInt256(data=reversed_data)<|fim▁end|>
| |
<|file_name|>neutron.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and#
# limitations under the License.
import copy
import pprint
import ipaddr
import netaddr
from neutronclient.common import exceptions as neutron_exc
from neutronclient.v2_0 import client as neutron_client
from cloudferrylib.base import exception
from cloudferrylib.base import network
from cloudferrylib.os.identity import keystone as ksresource
from cloudferrylib.utils import cache
from cloudferrylib.utils import log
from cloudferrylib.utils import utils as utl
LOG = log.getLogger(__name__)
DEFAULT_SECGR = 'default'
@cache.Cached(getter='get_subnets_list', modifier='create_network')
@cache.Cached(getter='get_networks_list', modifier='create_network')
@cache.Cached(getter='get_ports_list', modifier='create_port')
class NeutronNetwork(network.Network):
"""
The main class for working with OpenStack Neutron client
"""
def __init__(self, config, cloud):
super(NeutronNetwork, self).__init__(config)
self.cloud = cloud
self.identity_client = cloud.resources[utl.IDENTITY_RESOURCE]
self.filter_tenant_id = None
self.ext_net_map = \
utl.read_yaml_file(self.config.migrate.ext_net_map) or {}
self.mysql_connector = cloud.mysql_connector('neutron')
@property
def neutron_client(self):
return self.proxy(self.get_client(), self.config)
def get_client(self):
return neutron_client.Client(
username=self.config.cloud.user,
password=self.config.cloud.password,
tenant_name=self.config.cloud.tenant,
auth_url=self.config.cloud.auth_url,
cacert=self.config.cloud.cacert,
insecure=self.config.cloud.insecure,
region_name=self.config.cloud.region
)
def read_info(self, **kwargs):
"""Get info about neutron resources:
:rtype: Dictionary with all necessary neutron info
"""
if kwargs.get('tenant_id'):
tenant_id = self.filter_tenant_id = kwargs['tenant_id'][0]
else:
tenant_id = ''
nets = self.get_networks(tenant_id)
subnets = self.get_subnets(tenant_id)
detached_ports = self.get_detached_ports(tenant_id)
LOG.debug('List of detached ports: %s',
repr([p['id'] for p in detached_ports]))
if self.filter_tenant_id is not None:
shared_nets = self.get_shared_networks_raw()
for net in shared_nets:
# do not include the same network twice
if net['id'] in [n['id'] for n in nets]:
continue
nets.append(self.convert_networks(net, self.cloud))
LOG.debug("Got shared network ID %s", net['id'])
# Append subnets from the shared networks
for subnet in net['subnets']:
# do not include the same subnets twice
if subnet['id'] in [sn['id'] for sn in subnets]:
continue
subnets.append(self.convert_subnets(subnet, self.cloud))
LOG.debug("Got shared subnet ID %s", subnet['id'])
full_nets_list = self.get_networks()
else:
full_nets_list = nets
# Get full list off busy segmentation IDs
used_seg_ids = get_segmentation_ids_from_net_list(full_nets_list)
routers = self.get_routers()
subnet_ids = {sn['id'] for sn in subnets}
for router in routers:
router['subnet_ids'] = [sn_id for sn_id in router['subnet_ids']
if sn_id in subnet_ids]
info = {'networks': nets,
'subnets': subnets,
'routers': routers,
'detached_ports': detached_ports,
'floating_ips': self.get_floatingips(tenant_id),
'security_groups': self.get_sec_gr_and_rules(tenant_id),
'quota': self.get_quota(tenant_id),
'meta': {
'segmentation_ids': used_seg_ids
}}
if self.config.migrate.keep_lbaas:
info['lbaas'] = dict()
info['lb_pools'] = self.get_lb_pools(tenant_id)
info['lb_monitors'] = self.get_lb_monitors(tenant_id)
info['lb_members'] = self.get_lb_members(tenant_id)
info['lb_vips'] = self.get_lb_vips(tenant_id)
return info
def show_quota(self, tenant_id=''):
return self.neutron_client.show_quota(tenant_id)
def list_quotas(self):
return self.neutron_client.list_quotas()['quotas']
def get_quota(self, tenant_id):
# return structure {'name_tenant': {'subnet': 10, ...}, ...}
tenants = {}
if not tenant_id:
tenants_obj = self.identity_client.get_tenants_list()
tenants = {t.id: t.name for t in tenants_obj}
else:
tenants[tenant_id] = self.identity_client.\
try_get_tenant_name_by_id(tenant_id)
data = {
}
if self.config.network.get_all_quota:
for t_id, t_val in tenants.iteritems():
data[t_val] = self.neutron_client.show_quota(t_id)
else:
for t in self.neutron_client.list_quotas()['quotas']:
if (not tenant_id) or (tenant_id == t['tenant_id']):
tenant_name = self.identity_client.\
try_get_tenant_name_by_id(t['tenant_id'])
data[tenant_name] = {k: v
for k, v in t.iteritems()
if k != 'tenant_id'}
return data
def upload_quota(self, quota):
identity = self.identity_client
for q_name, q_val in quota.iteritems():
tenant_id = identity.get_tenant_id_by_name(q_name)
self.neutron_client.update_quota(tenant_id, q_val)
def create_quota(self, tenant_id, quota):
return self.neutron_client.update_quota(tenant_id, quota)
def required_tenants(self, filter_tenant_id=None):
old_filter_tanant_id = self.filter_tenant_id
self.filter_tenant_id = filter_tenant_id
tenant_ids = set()
for shared_net in self.get_shared_networks_raw():
tenant_ids.add(shared_net['tenant_id'])
for router in self.get_routers_raw():
tenant_ids.add(router['tenant_id'])
self.filter_tenant_id = old_filter_tanant_id
return list(tenant_ids)
def deploy(self, info):
"""
Deploy network resources to DST
Have non trivial behavior when enabled keep_floatingip and
change_router_ips. Example:
Initial state:
src cloud with router external ip 123.0.0.5
and FloatingIP 123.0.0.4
Migrate resources:
1. Move FloatingIP to dst. On dst we have FloatingIP 123.0.0.4
2. Create FloatingIP on dst as stub for router IP.
On dst we have two FloatingIP [123.0.0.4, 123.0.0.5].
IP 123.0.0.5 exists only in OpenStack DB and not crush
src network.
3. Create router on dst. (here is the main idea) As you see above,
ips 123.0.0.4 and 123.0.0.5 already allocated,
then OpenStack must allocate another ip for router
(e.g. 123.0.0.6).
4. FloatingIP 123.0.0.5 is not needed anymore.
We use it on 1.3. step for not allow OpenStack create
router with this ip. It will be released if you enable
clean_router_ips_stub in config
After resource migration we have:
src router external ip 123.0.0.5 and FloatingIP 123.0.0.4
dst router external ip 123.0.0.6 and FloatingIP 123.0.0.4
"""
deploy_info = info
self.upload_quota(deploy_info['quota'])
self.upload_networks(deploy_info['networks'],
deploy_info['meta']['segmentation_ids'],
deploy_info['detached_ports'])
dst_router_ip_ids = None
if self.config.migrate.keep_floatingip:
self.upload_floatingips(deploy_info['networks'],
deploy_info['floating_ips'])
if self.config.migrate.change_router_ips:
subnets_map = {subnet['id']: subnet
for subnet in deploy_info['subnets']}
router_ips = self.extract_router_ips_as_floating_ips(
subnets_map, deploy_info['routers'])
dst_router_ip_ids = self.upload_floatingips(
deploy_info['networks'], router_ips)
self.upload_routers(deploy_info['networks'],
deploy_info['subnets'],
deploy_info['routers'])
if self.config.migrate.clean_router_ips_stub and dst_router_ip_ids:
for router_ip_stub in dst_router_ip_ids:
self.neutron_client.delete_floatingip(router_ip_stub)
self.upload_neutron_security_groups(deploy_info['security_groups'])
self.upload_sec_group_rules(deploy_info['security_groups'])
if self.config.migrate.keep_lbaas:
self.upload_lb_pools(deploy_info['lb_pools'],
deploy_info['subnets'])
self.upload_lb_monitors(deploy_info['lb_monitors'])
self.associate_lb_monitors(deploy_info['lb_pools'],
deploy_info['lb_monitors'])
self.upload_lb_members(deploy_info['lb_members'],
deploy_info['lb_pools'])
self.upload_lb_vips(deploy_info['lb_vips'],
deploy_info['lb_pools'],
deploy_info['subnets'])
return deploy_info
def extract_router_ips_as_floating_ips(self, subnets, routers_info):
result = []
tenant = self.config.migrate.router_ips_stub_tenant
for router_info in routers_info:
router = Router(router_info, subnets)
tenant_name = tenant if tenant else router.tenant_name
if router.ext_net_id:
result.append({'tenant_name': tenant_name,
'floating_network_id': router.ext_net_id,
'floating_ip_address': router.ext_ip})
return result
def get_mac_by_ip(self, ip_address, instance_id):
for port in self.get_ports_list(device_id=instance_id):
for fixed_ip_info in port['fixed_ips']:
if fixed_ip_info['ip_address'] == ip_address:
return port["mac_address"]
def get_instance_network_info(self, instance_id):
ports = []
for port in self.get_ports_list(device_id=instance_id):
ports.append({
'ip_addresses': [x['ip_address'] for x in port['fixed_ips']],
'mac_address': port['mac_address'],
'floatingip': self.get_port_floating_ip(port['id']),
'allowed_address_pairs': port.get('allowed_address_pairs', []),
})
return ports
def get_port_floating_ip(self, port_id):
floating_ips = self.neutron_client.list_floatingips(
port_id=port_id)['floatingips']
if floating_ips:
LOG.debug('Got %d floating IP for port %s',
len(floating_ips), port_id)
return floating_ips[0]['floating_ip_address']
else:
return None
def get_ports_list(self, **kwargs):
return self.neutron_client.list_ports(**kwargs)['ports']
def create_port(self, net_id, mac_address, ip_addresses, tenant_id,
keep_ip, sg_ids=None, allowed_address_pairs=None):
param_create_port = {'network_id': net_id,
'tenant_id': tenant_id}
if mac_address:
param_create_port['mac_address'] = mac_address
if sg_ids:
param_create_port['security_groups'] = sg_ids
if keep_ip:
param_create_port['fixed_ips'] = [{"ip_address": ip}
for ip in ip_addresses]
if allowed_address_pairs is not None:
param_create_port['allowed_address_pairs'] = allowed_address_pairs
with ksresource.AddAdminUserToNonAdminTenant(
self.identity_client.keystone_client,
self.config.cloud.user,
self.config.cloud.tenant):
LOG.debug("Creating port IP '%s', MAC '%s' on net '%s'",
param_create_port.get('fixed_ips'), mac_address, net_id)
return self.neutron_client.create_port(
{'port': param_create_port})['port']
def delete_port(self, port_id):
return self.neutron_client.delete_port(port_id)
    def get_network(self, network_info, tenant_id, keep_ip=False):
        # Find a destination network suitable for the given source network
        # description.  Three strategies, tried in order:
        #   1. keep_ip: pick any tenant-private or shared network whose
        #      subnets collectively contain every requested IP address;
        #   2. exact id match;
        #   3. exact name match.
        # Raises AbortMigrationError when nothing matches.
        if keep_ip:
            addresses = [ipaddr.IPAddress(ip)
                         for ip in network_info['ip_addresses']]
            private = self.neutron_client.list_networks(
                tenant_id=tenant_id)['networks']
            shared = self.neutron_client.list_networks(shared=True)['networks']
            for net in private + shared:
                subnets = self.neutron_client.list_subnets(
                    network_id=net['id'])['subnets']
                # Every requested IP must fall into at least one of the
                # network's subnet CIDRs.
                # NOTE(review): with an empty `ip_addresses` list all() is
                # vacuously True and the first listed network wins — confirm
                # callers never pass an empty list with keep_ip=True.
                if all(any(ipaddr.IPNetwork(subnet['cidr']).Contains(ip)
                           for subnet in subnets)
                       for ip in addresses):
                    return net
        if 'id' in network_info:
            networks = self.neutron_client.list_networks(
                id=network_info['id'])['networks']
            if len(networks) > 0:
                return networks[0]
        if 'name' in network_info:
            networks = self.neutron_client.list_networks(
                name=network_info['name'])['networks']
            if len(networks) > 0:
                return networks[0]
        LOG.error('Failed to find network %s in tenant %s; keep_ip = %s',
                  repr(network_info), tenant_id, keep_ip)
        raise exception.AbortMigrationError("Can't find suitable network")
def check_existing_port(self, network_id, mac=None, ip_address=None,
ip_addresses=None, existing_ports=None):
if ip_addresses is None:
ip_addresses = []
if ip_address is not None and ip_address not in ip_addresses:
ip_addresses.append(ip_address)
if existing_ports is None:
existing_ports = self.get_ports_list(
fields=['network_id', 'mac_address', 'id', 'fixed_ips',
'device_owner'],
network_id=network_id)
for port in existing_ports:
if port['network_id'] != network_id:
continue
if port['mac_address'] == mac:
return port
for fixed_ip in port['fixed_ips']:
if fixed_ip['ip_address'] in ip_addresses:
return port
return None
@staticmethod
def convert(neutron_object, cloud, obj_name):
"""Convert OpenStack Neutron network object to CloudFerry object.
:param neutron_object: Direct OS NeutronNetwork object to convert,
:cloud: Cloud object,
:obj_name: Name of NeutronNetwork object to convert.
List of possible values:
'network', 'subnet', 'router', 'floating_ip',
'security_group', 'rule'.
"""
obj_map = {
'network': NeutronNetwork.convert_networks,
'subnet': NeutronNetwork.convert_subnets,
'router': NeutronNetwork.convert_routers,
'floating_ip': NeutronNetwork.convert_floatingips,
'security_group': NeutronNetwork.convert_security_groups,
'rule': NeutronNetwork.convert_rules,
'lb_pool': NeutronNetwork.convert_lb_pools,
'lb_member': NeutronNetwork.convert_lb_members,
'lb_monitor': NeutronNetwork.convert_lb_monitors,
'lb_vip': NeutronNetwork.convert_lb_vips
}
return obj_map[obj_name](neutron_object, cloud)
def convert_networks(self, net, cloud):
identity_res = cloud.resources[utl.IDENTITY_RESOURCE]
net_res = cloud.resources[utl.NETWORK_RESOURCE]
get_tenant_name = identity_res.get_tenants_func()
subnets = []
subnets_hash = set()
for subnet in net['subnets']:
snet = self.convert_subnets(subnet, cloud)
subnets.append(snet)
subnets_hash.add(snet['res_hash'])
result = {
'name': net['name'],
'id': net['id'],
'admin_state_up': net['admin_state_up'],
'shared': net['shared'],
'tenant_id': net['tenant_id'],
'tenant_name': get_tenant_name(net['tenant_id']),
'subnets': subnets,
'router:external': net['router:external'],
'provider:physical_network': net['provider:physical_network'],
'provider:network_type': net['provider:network_type'],
'provider:segmentation_id': net['provider:segmentation_id'],
'subnets_hash': subnets_hash,
'meta': {},
}
res_hash = net_res.get_resource_hash(result,
'name',
'shared',
'tenant_name',
'router:external',
'admin_state_up',
'provider:physical_network',
'provider:network_type')
result['res_hash'] = res_hash
return result
@staticmethod
def convert_subnets(snet, cloud):
identity_res = cloud.resources[utl.IDENTITY_RESOURCE]
network_res = cloud.resources[utl.NETWORK_RESOURCE]
get_tenant_name = identity_res.get_tenants_func()
networks_list = network_res.get_networks_list()
net = get_network_from_list_by_id(snet['network_id'], networks_list)
cidr = str(netaddr.IPNetwork(snet['cidr']).cidr)
result = {
'name': snet['name'],
'id': snet['id'],
'enable_dhcp': snet['enable_dhcp'],
'allocation_pools': snet['allocation_pools'],
'gateway_ip': snet['gateway_ip'],
'ip_version': snet['ip_version'],
'cidr': cidr,
'network_name': net['name'],
'external': net['router:external'],
'network_id': snet['network_id'],
'tenant_name': get_tenant_name(snet['tenant_id']),
'dns_nameservers': snet['dns_nameservers'],
'meta': {},
}
res_hash = network_res.get_resource_hash(result,
'name',
'enable_dhcp',
'ip_version',
'gateway_ip',
'cidr',
'allocation_pools',
'tenant_name',
'network_name')
result['res_hash'] = res_hash
return result
@staticmethod
def convert_routers(router, cloud):
identity_res = cloud.resources[utl.IDENTITY_RESOURCE]
net_res = cloud.resources[utl.NETWORK_RESOURCE]
get_tenant_name = identity_res.get_tenants_func()
result = {
'name': router['name'],
'id': router['id'],
'admin_state_up': router['admin_state_up'],
'external_gateway_info': router['external_gateway_info'],
'tenant_name': get_tenant_name(router['tenant_id']),
'meta': {},
}
result.update(net_res.get_ports_info(router))
if router['external_gateway_info']:
networks_list = net_res.get_networks_list()
ext_id = router['external_gateway_info']['network_id']
ext_net = get_network_from_list_by_id(ext_id, networks_list)
result['ext_net_name'] = ext_net['name']
result['ext_net_tenant_name'] = get_tenant_name(
ext_net['tenant_id'])
result['ext_net_id'] = router['external_gateway_info'][
'network_id']
res_hash = net_res.get_resource_hash(result,
'name',
'tenant_name')
result['res_hash'] = res_hash
return result
@staticmethod
def convert_floatingips(floating, cloud):
identity_res = cloud.resources[utl.IDENTITY_RESOURCE]
net_res = cloud.resources[utl.NETWORK_RESOURCE]
get_tenant_name = identity_res.get_tenants_func()
networks_list = net_res.get_networks_list()
ext_id = floating['floating_network_id']
extnet = get_network_from_list_by_id(ext_id, networks_list)
result = {
'id': floating['id'],
'tenant_id': floating['tenant_id'],
'floating_network_id': ext_id,
'network_name': extnet['name'],
'ext_net_tenant_name': get_tenant_name(extnet['tenant_id']),
'tenant_name': get_tenant_name(floating['tenant_id']),
'fixed_ip_address': floating['fixed_ip_address'],
'floating_ip_address': floating['floating_ip_address'],
'port_id': floating['port_id'],
'meta': {},
}
return result
@staticmethod
def convert_rules(rule, cloud):
net_res = cloud.resources[utl.NETWORK_RESOURCE]
rule_hash = net_res.get_resource_hash(rule,
'direction',
'remote_ip_prefix',
'protocol',
'port_range_min',
'port_range_max',
'ethertype')
result = {
'remote_group_id': rule['remote_group_id'],
'direction': rule['direction'],
'remote_ip_prefix': rule['remote_ip_prefix'],
'protocol': rule['protocol'],
'port_range_min': rule['port_range_min'],
'port_range_max': rule['port_range_max'],
'ethertype': rule['ethertype'],
'security_group_id': rule['security_group_id'],
'rule_hash': rule_hash,
'meta': dict()
}
return result
@staticmethod
def convert_security_groups(sec_gr, cloud):
identity_res = cloud.resources[utl.IDENTITY_RESOURCE]
net_res = cloud.resources[utl.NETWORK_RESOURCE]
get_tenant_name = identity_res.get_tenants_func(
return_default_tenant=False)
result = {
'name': sec_gr['name'],
'id': sec_gr['id'],
'tenant_id': sec_gr['tenant_id'],
'tenant_name': get_tenant_name(sec_gr['tenant_id']),
'description': sec_gr['description'],
'security_group_rules': [NeutronNetwork.convert(gr, cloud, 'rule')
for gr in sec_gr['security_group_rules']],
'meta': {},
}
res_hash = net_res.get_resource_hash(result,
'name',
'tenant_name',
'description')
result['res_hash'] = res_hash
return result
@staticmethod
def convert_lb_pools(pool, cloud):
identity_res = cloud.resources[utl.IDENTITY_RESOURCE]
net_res = cloud.resources[utl.NETWORK_RESOURCE]
get_tenant_name = identity_res.get_tenants_func(
return_default_tenant=False)
result = {
'name': pool['name'],
'id': pool['id'],
'description': pool['description'],
'lb_method': pool['lb_method'],
'protocol': pool['protocol'],
'subnet_id': pool['subnet_id'],
'provider': pool.get('provider'),
'tenant_id': pool['tenant_id'],
'tenant_name': get_tenant_name(pool['tenant_id']),
'health_monitors': pool['health_monitors'],
'members': pool['members'],
'meta': {}
}
res_hash = net_res.get_resource_hash(result,
'name',
'tenant_name',
'lb_method',
'protocol')
result['res_hash'] = res_hash
return result
@staticmethod
def convert_lb_monitors(monitor, cloud):
identity_res = cloud.resources[utl.IDENTITY_RESOURCE]
net_res = cloud.resources[utl.NETWORK_RESOURCE]
get_tenant_name = identity_res.get_tenants_func(
return_default_tenant=False)
result = {
'id': monitor['id'],
'tenant_id': monitor['tenant_id'],
'tenant_name': get_tenant_name(monitor['tenant_id']),
'type': monitor['type'],
'delay': monitor['delay'],
'timeout': monitor['timeout'],
'max_retries': monitor['max_retries'],
'url_path': monitor.get('url_path', None),
'expected_codes': monitor.get('expected_codes', None),
'pools': monitor.get('pools'),
'meta': {}
}
res_hash = net_res.get_resource_hash(result,
'tenant_name',
'type',
'delay',
'timeout',
'max_retries')
result['res_hash'] = res_hash
return result
@staticmethod
def convert_lb_members(member, cloud):
identity_res = cloud.resources[utl.IDENTITY_RESOURCE]
net_res = cloud.resources[utl.NETWORK_RESOURCE]
get_tenant_name = identity_res.get_tenants_func(
return_default_tenant=False)
result = {
'id': member['id'],
'pool_id': member['pool_id'],
'address': member['address'],
'protocol_port': member['protocol_port'],
'weight': member['weight'],
'tenant_id': member['tenant_id'],
'tenant_name': get_tenant_name(member['tenant_id']),
'meta': {}
}
res_hash = net_res.get_resource_hash(result,
'address',
'protocol_port',
'weight',
'tenant_name')
result['res_hash'] = res_hash
return result
@staticmethod
def convert_lb_vips(vip, cloud):
identity_res = cloud.resources[utl.IDENTITY_RESOURCE]
net_res = cloud.resources[utl.NETWORK_RESOURCE]
get_tenant_name = identity_res.get_tenants_func(
return_default_tenant=False)
result = {
'name': vip['name'],
'id': vip['id'],
'description': vip['description'],
'address': vip['address'],
'protocol': vip['protocol'],
'protocol_port': vip['protocol_port'],
'pool_id': vip['pool_id'],
'connection_limit': vip['connection_limit'],
'session_persistence': vip.get('session_persistence', None),
'tenant_id': vip['tenant_id'],
'subnet_id': vip['subnet_id'],
'tenant_name': get_tenant_name(vip['tenant_id']),
'meta': {}
}
res_hash = net_res.get_resource_hash(result,
'name',
'address',
'protocol',
'protocol_port',
'tenant_name')
result['res_hash'] = res_hash
return result
def get_shared_networks_raw(self):
"""Returns list of external and shared networks in raw neutron object
format"""
external = self.get_networks_raw({'router:external': True})
shared = self.get_networks_raw({'shared': True})
return external + shared
def get_networks_raw(self, search_dict):
"""Groups networks with subnets in raw `NeutronClient` format"""
neutron = self.neutron_client
nets = neutron.list_networks(**search_dict)['networks']
subnets_list = self.get_subnets_list()
for net in nets:
subnets = []
for subnet_id in net['subnets']:
subnets.append(get_subnet_from_list_by_id(subnet_id,
subnets_list))
net['subnets'] = subnets
return nets
def get_networks(self, tenant_id=''):
LOG.info("Get networks...")
networks = self.get_networks_raw({'tenant_id': tenant_id})
networks_info = []
for net in networks:
cf_net = self.convert_networks(net, self.cloud)
LOG.debug("Getting info about network '%s' (%s):\n%s",
cf_net['name'], cf_net['id'], pprint.pformat(cf_net))
networks_info.append(cf_net)
LOG.info("Done.")
return networks_info
def get_networks_list(self, tenant_id=''):
return self.neutron_client.list_networks(
tenant_id=tenant_id)['networks']
def get_subnets_list(self, tenant_id=''):
return self.neutron_client.list_subnets(tenant_id=tenant_id)['subnets']
def get_detached_ports(self, tenant_id=''):
ports = self.neutron_client.list_ports(tenant_id=tenant_id)['ports']
return [p for p in ports if not p['device_owner']]
def get_subnets(self, tenant_id=''):
LOG.info("Get subnets...")
subnets = self.get_subnets_list(tenant_id)
subnets_info = []
for snet in subnets:
subnet = self.convert(snet, self.cloud, 'subnet')
subnets_info.append(subnet)
LOG.info("Done")
return subnets_info
def reset_subnet_dhcp(self, subnet_id, dhcp_flag):
LOG.debug('Setting enable_dhcp to %s for subnet %s',
dhcp_flag, subnet_id)
subnet_info = {
'subnet':
{
'enable_dhcp': dhcp_flag
}
}
return self.neutron_client.update_subnet(subnet_id, subnet_info)
def get_ports_info(self, router):
LOG.debug("Finding all ports connected to router '%s'", router['name'])
ports_list = self.get_ports_list()
ports = get_ports_by_device_id_from_list(router['id'], ports_list)
subnet_ids = []
ips = []
for port in ports:
for ip_info in port['fixed_ips']:
ips.append(ip_info['ip_address'])
subnet_ids.append(ip_info['subnet_id'])
return {'ips': set(ips), 'subnet_ids': set(subnet_ids)}
def get_routers_raw(self):
routers = self.neutron_client.list_routers()['routers']
if self.filter_tenant_id:
subnet_ids = {
sn['id']
for sn in self.get_subnets_list(self.filter_tenant_id)}
return [r for r in routers
if (r['tenant_id'] == self.filter_tenant_id or
subnet_ids & self.get_ports_info(r)['subnet_ids'])]
return routers
def get_routers(self):
LOG.info("Get routers")
return [self.convert_routers(r, self.cloud)
for r in self.get_routers_raw()]
def get_floatingips(self, tenant_id=''):
LOG.info("Get floatingips...")
floatings = self.neutron_client.list_floatingips(
tenant_id=tenant_id)['floatingips']
floatingips_info = []
for floating in floatings:
floatingip_info = self.convert(floating, self.cloud, 'floating_ip')
floatingips_info.append(floatingip_info)
LOG.info("Done")
return floatingips_info
def get_security_groups(self, tenant_id=''):
return self.neutron_client.list_security_groups(
tenant_id=tenant_id)['security_groups']
def get_sec_gr_and_rules(self, tenant_id=''):
LOG.info("Getting security groups and rules...")
service_tenant_name = self.config.cloud.service_tenant
service_tenant_id = \
self.identity_client.get_tenant_id_by_name(service_tenant_name)
sec_grs = self.get_security_groups(tenant_id)
sec_groups_info = []
for sec_gr in sec_grs:
if sec_gr['tenant_id'] != service_tenant_id:
sec_gr_info = self.convert(sec_gr, self.cloud,
'security_group')
if not sec_gr_info['tenant_name']:
# Skip security group from undefined tenant
LOG.warning("Security group '%s' (%s) from tenant %s "
"has been skipped.", sec_gr['name'],
sec_gr['id'], sec_gr['tenant_id'])
continue
sec_groups_info.append(sec_gr_info)
LOG.info("Done")
return sec_groups_info
def get_lb_pools(self, tenant_id=''):
LOG.info("Getting load balancer pools...")
pools = self.neutron_client.list_pools(tenant_id=tenant_id)['pools']
pools_info = []
for pool in pools:
pool_info = self.convert(pool, self.cloud, 'lb_pool')
pools_info.append(pool_info)
LOG.info("Done")
return pools_info
def get_lb_monitors(self, tenant_id=''):
LOG.info("Getting load balancer monitors...")
monitors = \
self.neutron_client.list_health_monitors(
tenant_id=tenant_id)['health_monitors']
monitors_info = []
for mon in monitors:
mon_info = self.convert(mon, self.cloud, 'lb_monitor')
monitors_info.append(mon_info)
LOG.info("Done")
return monitors_info
def get_lb_members(self, tenant_id=''):
LOG.info("Getting load balancer members...")
members = self.neutron_client.list_members(
tenant_id=tenant_id)['members']
members_info = []
for member in members:
member_info = self.convert(member, self.cloud, 'lb_member')
members_info.append(member_info)
LOG.info("Done")
return members_info
def get_lb_vips(self, tenant_id=''):
LOG.info("Getting load balancer VIPs...")
vips = self.neutron_client.list_vips(
tenant_id=tenant_id)['vips']
vips_info = []
for vip in vips:
vip_info = self.convert(vip, self.cloud, 'lb_vip')
vips_info.append(vip_info)
LOG.info("Done")
return vips_info
    def upload_lb_vips(self, vips, pools, subnets):
        """Create load balancer VIPs on the destination cloud.

        VIPs whose tenant is unknown or whose resource hash already exists
        on destination are skipped.  Source pool/subnet IDs are remapped to
        destination IDs through their resource hashes.

        :param vips: converted source VIPs
        :param pools: converted source LB pools (for ID remapping)
        :param subnets: converted source subnets (for ID remapping)
        """
        LOG.info("Creating load balancer VIPs on destination")
        existing_vips = self.get_lb_vips()
        existing_vips_hashlist = [ex_vip['res_hash']
                                  for ex_vip in existing_vips]
        existing_pools = self.get_lb_pools()
        existing_snets = self.get_subnets()
        for vip in vips:
            # No tenant name means the tenant doesn't exist on destination.
            if not vip['tenant_name']:
                continue
            if vip['res_hash'] not in existing_vips_hashlist:
                tenant_id = self.identity_client.get_tenant_id_by_name(
                    vip['tenant_name'])
                # Locate destination pool/subnet via matching resource hash.
                pool_hash = self.get_res_hash_by_id(pools, vip['pool_id'])
                dst_pool = self.get_res_by_hash(existing_pools, pool_hash)
                snet_hash = self.get_res_hash_by_id(subnets, vip['subnet_id'])
                dst_subnet = self.get_res_by_hash(existing_snets, snet_hash)
                vip_info = {
                    'vip': {
                        'name': vip['name'],
                        'description': vip['description'],
                        'address': vip['address'],
                        'protocol': vip['protocol'],
                        'protocol_port': vip['protocol_port'],
                        'connection_limit': vip['connection_limit'],
                        'pool_id': dst_pool['id'],
                        'tenant_id': tenant_id,
                        'subnet_id': dst_subnet['id']
                    }
                }
                if vip['session_persistence']:
                    vip_info['vip']['session_persistence'] = \
                        vip['session_persistence']
                # Remember the destination ID for later bookkeeping.
                vip['meta']['id'] = self.neutron_client.create_vip(
                    vip_info)['vip']['id']
            else:
                LOG.info("| Dst cloud already has the same VIP "
                         "with address %s in tenant %s",
                         vip['address'], vip['tenant_name'])
        LOG.info("Done")
    def upload_lb_members(self, members, pools):
        """Create load balancer pool members on the destination cloud.

        Members whose tenant is unknown or whose resource hash already
        exists on destination are skipped.  Source pool IDs are remapped to
        destination IDs through resource hashes.

        :param members: converted source LB members
        :param pools: converted source LB pools (for ID remapping)
        """
        LOG.info("Creating load balancer members...")
        existing_members = self.get_lb_members()
        existing_members_hashlist = \
            [ex_member['res_hash'] for ex_member in existing_members]
        existing_pools = self.get_lb_pools()
        for member in members:
            # No tenant name means the tenant doesn't exist on destination.
            if not member['tenant_name']:
                continue
            if member['res_hash'] not in existing_members_hashlist:
                tenant_id = self.identity_client.get_tenant_id_by_name(
                    member['tenant_name'])
                pool_hash = self.get_res_hash_by_id(pools, member['pool_id'])
                dst_pool = self.get_res_by_hash(existing_pools, pool_hash)
                member_info = {
                    'member': {
                        'protocol_port': member["protocol_port"],
                        'address': member['address'],
                        'pool_id': dst_pool['id'],
                        'tenant_id': tenant_id
                    }
                }
                member['meta']['id'] = self.neutron_client.create_member(
                    member_info)['member']['id']
            else:
                LOG.info("| Dst cloud already has the same member "
                         "with address %s in tenant %s",
                         member['address'], member['tenant_name'])
        LOG.info("Done")
    def upload_lb_monitors(self, monitors):
        """Create load balancer health monitors on the destination cloud.

        Monitors whose tenant is unknown or whose resource hash already
        exists on destination are skipped.  HTTP-specific attributes
        (url_path, expected_codes) are only sent when present.

        :param monitors: converted source LB health monitors
        """
        LOG.info("Creating load balancer monitors on destination...")
        existing_mons = self.get_lb_monitors()
        existing_mons_hashlist = \
            [ex_mon['res_hash'] for ex_mon in existing_mons]
        for mon in monitors:
            # No tenant name means the tenant doesn't exist on destination.
            if not mon['tenant_name']:
                continue
            if mon['res_hash'] not in existing_mons_hashlist:
                tenant_id = self.identity_client.get_tenant_id_by_name(
                    mon['tenant_name'])
                mon_info = {
                    'health_monitor':
                        {
                            'tenant_id': tenant_id,
                            'type': mon['type'],
                            'delay': mon['delay'],
                            'timeout': mon['timeout'],
                            'max_retries': mon['max_retries']
                        }
                }
                if mon['url_path']:
                    mon_info['health_monitor']['url_path'] = mon['url_path']
                    mon_info['health_monitor']['expected_codes'] = \
                        mon['expected_codes']
                mon['meta']['id'] = self.neutron_client.create_health_monitor(
                    mon_info)['health_monitor']['id']
            else:
                LOG.info("| Dst cloud already has the same healthmonitor "
                         "with type %s in tenant %s",
                         mon['type'], mon['tenant_name'])
        LOG.info("Done")
    def associate_lb_monitors(self, pools, monitors):
        """Associate health monitors with LB pools on the destination.

        For every source pool, each of its health monitors is located on
        destination via resource hash and associated with the matching
        destination pool unless the association already exists.

        :param pools: converted source LB pools
        :param monitors: converted source LB health monitors
        """
        LOG.info("Associating balancer monitors on destination...")
        existing_pools = self.get_lb_pools()
        existing_monitors = self.get_lb_monitors()
        for pool in pools:
            # No tenant name means the tenant doesn't exist on destination.
            if not pool['tenant_name']:
                continue
            pool_hash = self.get_res_hash_by_id(pools, pool['id'])
            dst_pool = self.get_res_by_hash(existing_pools, pool_hash)
            for monitor_id in pool['health_monitors']:
                monitor_hash = self.get_res_hash_by_id(monitors, monitor_id)
                dst_monitor = self.get_res_by_hash(existing_monitors,
                                                   monitor_hash)
                if dst_monitor['id'] not in dst_pool['health_monitors']:
                    dst_monitor_info = {
                        'health_monitor': {
                            'id': dst_monitor['id']
                        }
                    }
                    self.neutron_client.associate_health_monitor(
                        dst_pool['id'], dst_monitor_info)
                else:
                    LOG.info(
                        "Dst pool with name %s already has associated the "
                        "healthmonitor with id %s in tenant %s",
                        dst_pool['name'], dst_monitor['id'],
                        dst_monitor['tenant_name'])
        LOG.info("Done")
    def upload_lb_pools(self, pools, subnets):
        """Create load balancer pools on the destination cloud.

        Pools already present on destination (by resource hash) or owned by
        an unknown tenant are skipped.  Source subnet IDs are remapped to
        destination IDs through resource hashes.

        :param pools: converted source LB pools
        :param subnets: converted source subnets (for ID remapping)
        """
        LOG.info("Creating load balancer pools on destination...")
        existing_pools = self.get_lb_pools()
        existing_pools_hashlist = \
            [ex_pool['res_hash'] for ex_pool in existing_pools]
        existing_subnets = self.get_subnets()
        for pool in pools:
            if pool['res_hash'] not in existing_pools_hashlist and \
                    pool['tenant_name']:
                tenant_id = self.identity_client.get_tenant_id_by_name(
                    pool['tenant_name'])
                snet_hash = self.get_res_hash_by_id(subnets, pool['subnet_id'])
                snet_id = self.get_res_by_hash(existing_subnets,
                                               snet_hash)['id']
                pool_info = {
                    'pool': {
                        'name': pool['name'],
                        'description': pool['description'],
                        'tenant_id': tenant_id,
                        'subnet_id': snet_id,
                        'protocol': pool['protocol'],
                        'lb_method': pool['lb_method']
                    }
                }
                # Provider (e.g. haproxy) is optional and only forwarded
                # when the source pool had one.
                if pool.get('provider'):
                    pool_info['pool']['provider'] = pool.get('provider')
                LOG.debug("Creating LB pool '%s'", pool['name'])
                pool['meta']['id'] = \
                    self.neutron_client.create_pool(pool_info)['pool']['id']
            else:
                LOG.info("| Dst cloud already has the same pool "
                         "with name %s in tenant %s",
                         pool['name'], pool['tenant_name'])
        LOG.info("Done")
    def upload_neutron_security_groups(self, sec_groups):
        """Create security groups on the destination cloud.

        The 'default' group is never created (Neutron creates it per
        tenant automatically); groups whose resource hash already exists
        on destination are skipped.  Rules are uploaded separately by
        ``upload_sec_group_rules``.

        :param sec_groups: converted source security groups
        """
        LOG.info("Creating neutron security groups on destination...")
        exist_secgrs = self.get_sec_gr_and_rules()
        exis_secgrs_hashlist = [ex_sg['res_hash'] for ex_sg in exist_secgrs]
        for sec_group in sec_groups:
            if sec_group['name'] != DEFAULT_SECGR:
                if sec_group['res_hash'] not in exis_secgrs_hashlist:
                    tenant_id = \
                        self.identity_client.get_tenant_id_by_name(
                            sec_group['tenant_name']
                        )
                    sg_info = \
                        {
                            'security_group':
                            {
                                'name': sec_group['name'],
                                'tenant_id': tenant_id,
                                'description': sec_group['description']
                            }
                        }
                    sec_group['meta']['id'] = self.neutron_client.\
                        create_security_group(sg_info)['security_group']['id']
        LOG.info("Done")
    def upload_sec_group_rules(self, sec_groups):
        """Create security group rules on the destination cloud.

        For each source group, its destination counterpart is found by
        resource hash; rules already present (by rule hash) or without a
        protocol are skipped.  remote_group_id references are remapped to
        the matching destination group.

        :param sec_groups: converted source security groups with rules
        """
        LOG.info("Creating neutron security group rules on destination...")
        ex_secgrs = self.get_sec_gr_and_rules()
        for sec_gr in sec_groups:
            ex_secgr = \
                self.get_res_by_hash(ex_secgrs, sec_gr['res_hash'])
            if ex_secgr:
                exrules_hlist = \
                    [r['rule_hash'] for r in ex_secgr['security_group_rules']]
            else:
                exrules_hlist = []
            for rule in sec_gr['security_group_rules']:
                # Rules without a protocol are the implicit allow-all
                # defaults and are not re-created.
                if rule['protocol'] \
                        and (rule['rule_hash'] not in exrules_hlist):
                    rinfo = \
                        {'security_group_rule': {
                            'direction': rule['direction'],
                            'protocol': rule['protocol'],
                            'port_range_min': rule['port_range_min'],
                            'port_range_max': rule['port_range_max'],
                            'ethertype': rule['ethertype'],
                            'remote_ip_prefix': rule['remote_ip_prefix'],
                            'security_group_id': ex_secgr['id'],
                            'tenant_id': ex_secgr['tenant_id']}}
                    # Remap the referenced remote group to its destination
                    # counterpart via resource hash.
                    if rule['remote_group_id']:
                        remote_sghash = \
                            self.get_res_hash_by_id(sec_groups,
                                                    rule['remote_group_id'])
                        rem_ex_sec_gr = \
                            self.get_res_by_hash(ex_secgrs,
                                                 remote_sghash)
                        rinfo['security_group_rule']['remote_group_id'] = \
                            rem_ex_sec_gr['id']
                    # NOTE(review): the debug message says "security group"
                    # but this call creates a security group *rule*.
                    LOG.debug("Creating security group %s", rinfo)
                    new_rule = \
                        self.neutron_client.create_security_group_rule(rinfo)
                    rule['meta']['id'] = new_rule['security_group_rule']['id']
        LOG.info("Done")
    def upload_networks(self, networks, src_seg_ids, detached_ports):
        """Create source networks on the destination cloud.

        For each source network: skip it when an equivalent network already
        exists, when its tenant is missing on destination, or when external
        network migration is disabled/mapped; otherwise create the network,
        resolving segmentation-ID collisions, and then migrate the
        network's detached ports.

        :param networks: converted source networks
        :param src_seg_ids: segmentation IDs in use on the source, per
                            network type
        :param detached_ports: source ports with no device owner
        """
        LOG.info("Creating networks on destination")
        identity = self.identity_client
        existing_networks = self.get_networks()
        # we need to handle duplicates in segmentation ids
        dst_seg_ids = get_segmentation_ids_from_net_list(existing_networks)
        for src_net in networks:
            network_detached_ports = [p for p in detached_ports
                                      if p['network_id'] == src_net['id']]
            # Check network for existence on destination cloud
            dst_net = self.get_dst_net_by_src_net(existing_networks, src_net)
            if dst_net:
                LOG.info("DST cloud already has the same "
                         "network with name '%s' in tenant '%s'",
                         src_net['name'], src_net['tenant_name'])
                self.deploy_detached_ports(dst_net, network_detached_ports)
                continue
            LOG.debug("Trying to create network '%s'", src_net['name'])
            tenant_id = identity.get_tenant_id_by_name(src_net['tenant_name'])
            if tenant_id is None:
                LOG.warning("Tenant '%s' is not available on destination! "
                            "Make sure you migrated identity (keystone) "
                            "resources! Skipping network '%s'.",
                            src_net['tenant_name'], src_net['name'])
                continue
            no_extnet_migration = (
                src_net.get('router:external') and
                not self.config.migrate.migrate_extnets or
                (src_net['id'] in self.ext_net_map))
            if no_extnet_migration:
                LOG.debug("External networks migration is disabled in the "
                          "config OR external networks mapping is enabled. "
                          "Skipping external network: '%s (%s)'",
                          src_net['name'], src_net['id'])
                continue
            # create dict, representing basic info about network
            network_info = {
                'network': {
                    'tenant_id': tenant_id,
                    'admin_state_up': src_net["admin_state_up"],
                    'shared': src_net["shared"],
                    'name': src_net['name'],
                    'router:external': src_net['router:external']
                }
            }
            phys_net = src_net["provider:physical_network"]
            network_type = src_net['provider:network_type']
            seg_id = src_net["provider:segmentation_id"]
            if phys_net or (src_net['provider:network_type'] in
                            ['gre', 'vxlan']):
                # Update network info with additional arguments.
                # We need to check if we have parameter
                # "provider:physical_network" or param
                # "provider:network_type" either is 'gre' or 'vxlan'.
                # If condition is satisfied, we need to specify 2 more params:
                # "provider:network_type" and "provider:segmentation_id".
                list_update_atr = ["provider:network_type"]
                if phys_net:
                    list_update_atr.append("provider:physical_network")
                for atr in list_update_atr:
                    network_info['network'].update({atr: src_net.get(atr)})
                # Check segmentation ID for overlapping
                # If it doesn't overlap with DST, save the same segmentation ID
                # Otherwise pick free segmentation ID, which does not overlap
                # with ANY segmentation ID on SRC
                if seg_id is not None:
                    # Segmentation ID exists; Check for overlapping
                    seg_id_overlaps = (network_type in dst_seg_ids and
                                       seg_id in dst_seg_ids[network_type])
                    if seg_id_overlaps:
                        # Choose the lowest free segmentation ID, that also
                        # does not overlap with SRC
                        new_seg_id = generate_new_segmentation_id(src_seg_ids,
                                                                  dst_seg_ids,
                                                                  network_type)
                        LOG.debug("'%s' segmentation ID '%s' overlaps with "
                                  "DST. Generating new one: '%s'.",
                                  network_type, seg_id, new_seg_id)
                        # Use it for network
                        network_info['network']['provider:segmentation_id'] = (
                            new_seg_id)
                        # Update DST segmentation IDs with the just created one
                        dst_seg_ids[network_type].append(new_seg_id)
                    else:
                        # Otherwise use original segmentation ID from SRC
                        network_info['network']['provider:segmentation_id'] = (
                            seg_id)
            created_network = self.create_network(src_net, network_info)
            # NOTE(review): create_network returns None when the network
            # already existed (NeutronClientException path) — confirm
            # deploy_detached_ports tolerates that.
            self.deploy_detached_ports(created_network, network_detached_ports)
    def deploy_detached_ports(self, net, ports):
        """Recreate detached (ownerless) source ports on a destination net.

        DHCP is disabled on all subnets for the duration of the operation
        so Neutron does not hand out the IPs being migrated, then restored
        for subnets that originally had it enabled.

        :param net: destination network dict (with 'subnets')
        :param ports: source ports to recreate on this network
        :raises exception.AbortMigrationError: when a conflicting port on
            destination is owned by a non-network device
        """
        for subnet in net['subnets']:
            self.reset_subnet_dhcp(subnet['id'], False)
        existing_ports = {p['id']: p
                          for p in self.get_ports_list(network_id=net['id'])}
        for port in ports:
            ip_addresses = [fip['ip_address'] for fip in port['fixed_ips']]
            existing_port = self.check_existing_port(
                net['id'], port['mac_address'],
                ip_addresses=ip_addresses,
                existing_ports=existing_ports.values())
            if existing_port is not None:
                # Same MAC: the port was already migrated in a prior run.
                if existing_port['mac_address'] == port['mac_address']:
                    LOG.debug('Port %s already migrated to %s',
                              port['id'], existing_port['id'])
                    continue
                # IP conflict with a network-owned or ownerless port: it is
                # safe to delete and recreate.  Anything else is fatal.
                if existing_port['device_owner'].startswith('network:') or \
                        not existing_port['device_owner']:
                    LOG.debug('Deleting port %s from DST', repr(existing_port))
                    self.delete_port(existing_port['id'])
                    del existing_ports[existing_port['id']]
                else:
                    raise exception.AbortMigrationError(
                        'Can\'t migrate port %s conflict with port %s' %
                        (port['id'], existing_port['id']))
            self.create_port(net['id'], port['mac_address'], ip_addresses,
                             net['tenant_id'], True)
        for subnet in net['subnets']:
            if subnet['enable_dhcp']:
                self.reset_subnet_dhcp(subnet['id'], True)
    def create_network(self, src_net, network_info):
        """Create a network and its subnets on the destination cloud.

        :param src_net: converted source network (supplies the subnets)
        :param network_info: request body for Neutron ``create_network``
        :return: the created network dict with its created subnets
                 appended, or ``None`` when network creation fails because
                 an equivalent network already exists
        """
        try:
            LOG.debug("creating network with args: '%s'",
                      pprint.pformat(network_info))
            created_net = self.neutron_client.create_network(network_info)
            created_net = created_net['network']
            LOG.info("Created net '%s'", created_net['name'])
        except neutron_exc.NeutronClientException as e:
            # Best-effort: an already-existing network is logged and
            # skipped rather than aborting the migration.  Implicitly
            # returns None in this case.
            LOG.warning("Cannot create network on destination: %s. "
                        "Destination cloud already has the same network. May "
                        "result in port allocation errors, such as VM IP "
                        "allocation, floating IP allocation, router IP "
                        "allocation, etc.", e)
            return
        for snet in src_net['subnets']:
            subnet_info = {
                'subnet': {
                    'name': snet['name'],
                    'enable_dhcp': snet['enable_dhcp'],
                    'network_id': created_net['id'],
                    'cidr': snet['cidr'],
                    'allocation_pools': snet['allocation_pools'],
                    'gateway_ip': snet['gateway_ip'],
                    'ip_version': snet['ip_version'],
                    'dns_nameservers': snet['dns_nameservers'],
                    'tenant_id': created_net['tenant_id']
                }
            }
            try:
                created_subnet = self.neutron_client.create_subnet(subnet_info)
                created_subnet = created_subnet['subnet']
                # Record the destination subnet id on the source object.
                snet['meta']['id'] = created_subnet['id']
                LOG.info("Created subnet '%s' in net '%s'",
                         created_subnet['cidr'], created_net['name'])
                created_net['subnets'].append(created_subnet)
            except neutron_exc.NeutronClientException:
                LOG.info("Subnet '%s' (%s) already exists, skipping",
                         snet['name'], snet['cidr'])
        return created_net
    def upload_routers(self, networks, subnets, routers):
        """Create routers on the destination cloud and wire them up.

        For each source router: reuse the matching destination router (by
        resource hash) or create a new one, attach its internal subnet
        interfaces, and set the external gateway when the source had one.

        :param networks: converted source networks (for gateway remapping)
        :param subnets: converted source subnets (for interface remapping)
        :param routers: converted source routers
        """
        LOG.info("Creating routers on destination")
        existing_subnets = self.get_subnets()
        existing_routers = self.get_routers()
        for router in routers:
            tenant_id = self.identity_client.get_tenant_id_by_name(
                router['tenant_name'])
            r_info = {'router': {'name': router['name'],
                                 'tenant_id': tenant_id}}
            existing_router = self.get_res_by_hash(existing_routers,
                                                   router['res_hash'])
            if not existing_router:
                LOG.debug("Creating router %s", pprint.pformat(r_info))
                existing_router = self.convert_routers(
                    self.neutron_client.create_router(r_info)['router'],
                    self.cloud)
            router['meta']['id'] = existing_router['id']
            self.add_router_interfaces(router, existing_router, subnets,
                                       existing_subnets)
            ex_gw_info = router['external_gateway_info']
            if ex_gw_info:
                self.add_router_gateway(existing_router, router['ext_net_id'],
                                        networks,
                                        ex_gw_info.get('enable_snat'))
def add_router_gateway(self, dst_router, ext_net_id, src_nets,
set_snat=None):
"""
:param set_snat: possible values:
1. `None` - do not update, useful in cases when destination cloud does
not support SNAT for external networks (pre-icehouse);
2. `True` - enable SNAT
3. `False` - disable SNAT
"""
dst_nets = self.get_networks()
dst_net_id = self.get_new_extnet_id(ext_net_id, src_nets, dst_nets)
if dst_net_id:
info = {'network_id': dst_net_id}
if set_snat is not None:
info['enable_snat'] = set_snat
LOG.debug("Setting the external network (%s) gateway for a router "
"'%s' (%s)", dst_net_id, dst_router['name'],
dst_router['id'])
self.neutron_client.add_gateway_router(dst_router['id'], info)
else:
LOG.warning('External (%s) network is not exists on destination',
ext_net_id)
    def add_router_interfaces(self, src_router, dst_router, src_subnets,
                              dst_subnets):
        """Attach the destination router to the subnets its source twin has.

        External subnets are skipped (they are reachable via the gateway),
        as are subnets the destination router is already attached to.
        Failures on individual subnets are logged and do not abort the loop.

        :param src_router: router dict from the source cloud
        :param dst_router: matching router dict on the destination cloud
        :param src_subnets: subnets list from the source cloud
        :param dst_subnets: subnets list from the destination cloud
        """
        for subnet_id in src_router['subnet_ids']:
            # Subnets are matched between clouds by resource hash.
            subnet_hash = self.get_res_hash_by_id(src_subnets, subnet_id)
            src_subnet = self.get_res_by_hash(src_subnets, subnet_hash)
            if src_subnet['external']:
                LOG.debug("NOT connecting subnet '%s' to router '%s' because "
                          "it's connected to external network", subnet_id,
                          dst_router['name'])
                continue
            existing_subnet = self.get_res_by_hash(dst_subnets, subnet_hash)
            if existing_subnet['id'] in dst_router['subnet_ids']:
                # Interface already present on the destination router.
                continue
            LOG.debug("Adding subnet '%s' to router '%s'", subnet_id,
                      dst_router['name'])
            try:
                self.neutron_client.add_interface_router(
                    dst_router['id'],
                    {"subnet_id": existing_subnet['id']})
            except neutron_exc.NeutronClientException as e:
                # Best effort: log and continue with the remaining subnets.
                LOG.debug(e, exc_info=True)
                LOG.warning("Couldn't add interface to subnet %s to router %s:"
                            "\n%s", existing_subnet['id'], dst_router['id'], e)
    def upload_floatingips(self, networks, src_floats):
        """Creates floating IPs on destination

        Process:
        1. Create floating IP on destination using neutron APIs in particular
           tenant. This allocates first IP address available in external
           network.
        2. If keep_floating_ips option is set:
           2.1. Modify IP address of a floating IP to be the same as on
                destination. This is done from the DB level.
           2.2. Else - do not modify floating IP address
        3. Return list of ID of new floating IPs

        :param networks: networks list used to map external net ids
        :param src_floats: floating IP dicts from the source cloud
        :return: list of destination floating IP IDs
        """
        LOG.info("Uploading floating IPs...")
        existing_networks = self.get_networks()
        new_floating_ids = []
        fips_dst = self.neutron_client.list_floatingips()['floatingips']
        # Map of already-present destination addresses -> their ids.
        ipfloatings = {fip['floating_ip_address']: fip['id']
                       for fip in fips_dst}
        for fip in src_floats:
            ip = fip['floating_ip_address']
            if ip in ipfloatings:
                # Already migrated -- just record the destination id.
                new_floating_ids.append(ipfloatings[ip])
                continue
            # Temporarily grant the admin user access to the tenant so the
            # floating IP is created under the right ownership.
            with ksresource.AddAdminUserToNonAdminTenant(
                    self.identity_client.keystone_client,
                    self.config.cloud.user,
                    fip['tenant_name']):
                ext_net_id = self.get_new_extnet_id(
                    fip['floating_network_id'], networks, existing_networks)
                if ext_net_id is None:
                    LOG.info("No external net for floating IP, make sure all "
                             "external networks migrated. Skipping floating "
                             "IP '%s'", fip['floating_ip_address'])
                    continue
                tenant = self.identity_client.keystone_client.tenants.find(
                    name=fip['tenant_name'])
                new_fip = {
                    'floatingip': {
                        'floating_network_id': ext_net_id,
                        'tenant_id': tenant.id
                    }
                }
                created_fip = self.create_floatingip(new_fip)
                if created_fip is None:
                    continue
                fip_id = created_fip['id']
                new_floating_ids.append(fip_id)
                # Rewrite the auto-allocated address so it matches the source
                # IP; the API offers no way to pick the address, so this is
                # done directly in the neutron DB, and the availability range
                # cache is invalidated for the affected pool.
                sqls = [('UPDATE IGNORE floatingips '
                         'SET floating_ip_address = "{ip}" '
                         'WHERE id = "{fip_id}"').format(ip=ip, fip_id=fip_id),
                        ('UPDATE IGNORE ipallocations '
                         'SET ip_address = "{ip}" '
                         'WHERE port_id = ('
                         'SELECT floating_port_id '
                         'FROM floatingips '
                         'WHERE id = "{fip_id}")').format(
                            ip=ip, fip_id=fip_id),
                        ('DELETE FROM ipavailabilityranges '
                         'WHERE allocation_pool_id in ( '
                         'SELECT id '
                         'FROM ipallocationpools '
                         'WHERE subnet_id = ( '
                         'SELECT subnet_id '
                         'FROM ipallocations '
                         'WHERE port_id = ( '
                         'SELECT floating_port_id '
                         'FROM floatingips '
                         'WHERE id = "{fip_id}")))').format(
                            fip_id=fip_id)]
                LOG.debug(sqls)
                dst_mysql = self.mysql_connector
                dst_mysql.batch_execute(sqls)
        LOG.info("Done")
        return new_floating_ids
def create_floatingip(self, fip):
try:
LOG.debug("Creating FIP on net '%s'",
fip['floatingip']['floating_network_id'])
created = self.neutron_client.create_floatingip(fip)
return created['floatingip']
except neutron_exc.NeutronClientException as e:
LOG.warning("Unable to create floating IP on destination: '%s'", e)
def update_floatingip(self, floatingip_id, port_id=None):
update_dict = {'floatingip': {'port_id': port_id}}
LOG.debug("Associating floating IP '%s' with port '%s'",
floatingip_id, port_id)
return self.neutron_client.update_floatingip(floatingip_id,
update_dict)
@staticmethod
def get_res_by_hash(existing_resources, resource_hash):
for resource in existing_resources:
if resource['res_hash'] == resource_hash:
return resource
@staticmethod
def get_res_hash_by_id(resources, resource_id):
for resource in resources:
if resource['id'] == resource_id:
return resource['res_hash']
@staticmethod
def get_resource_hash(neutron_resource, *args):
net_res = copy.deepcopy(neutron_resource)
list_info = list()
for arg in args:
if not isinstance(net_res[arg], list):
list_info.append(net_res[arg])
else:
if arg == 'allocation_pools':
pools = net_res[arg]
net_res[arg] = [ip for pl in pools for ip in pl.values()]
for argitem in net_res[arg]:
if isinstance(argitem, basestring):
argitem = argitem.lower()
list_info.append(argitem)
hash_list = \
[info.lower() if isinstance(info, basestring) else info
for info in list_info]
hash_list.sort()
return hash(tuple(hash_list))
def get_new_extnet_id(self, src_net_id, src_nets, dst_nets):
"""
Get ID of similar external network form DST.
:param src_net_id: External network ID from SRC cloud,
:param src_nets: Networks list from SRC cloud,
:param dst_nets: Networks list from DST cloud,
:return unicode: External network ID from DST, that matches with the
similar network from SRC.
"""
if src_net_id in self.ext_net_map:
dst_net_id = self.ext_net_map[src_net_id]
else:
src_net = get_network_from_list_by_id(src_net_id, src_nets)
dst_net = self.get_dst_net_by_src_net(dst_nets, src_net)
if not dst_net:
return
dst_net_id = dst_net['id']
return dst_net_id
@staticmethod
def get_dst_net_by_src_net(existing_networks, src_net):
"""
Get the same Network object from DST cloud.
:param existing_networks: Existing networks list on DST cloud,
:param src_net: Network object from SRC,
:return dict: Network object from DST, that matches with the same
network from SRC.
"""
for net in existing_networks:
if (net['res_hash'] == src_net['res_hash'] and
net['subnets_hash'] == src_net['subnets_hash']):
return net
class Router(object):
    """
    Represents router_info, extract external ip.

    Router_info contain list of ips only in different order. Impossible to
    define external router ip.
    """

    def __init__(self, router_info, subnets):
        # id / external net id / tenant as reported by the source cloud.
        self.id = router_info['id']
        self.ext_net_id = router_info.get('ext_net_id', None)
        # CIDRs of internal (non-external) subnets attached to the router.
        self.int_cidr = []
        self.tenant_name = router_info['tenant_name']
        if self.ext_net_id:
            subnet_ids = router_info['subnet_ids']
            for subnet_id in subnet_ids:
                subnet = subnets[subnet_id]
                if subnet['network_id'] == self.ext_net_id:
                    # NOTE(review): assumes at least one subnet belongs to the
                    # external network; if several do, the last one wins, and
                    # if none do, ipaddr.IPNetwork(self.ext_cidr) below raises
                    # AttributeError -- confirm callers guarantee a match.
                    self.ext_cidr = subnet['cidr']
                    self.ext_subnet_id = subnet_id
                else:
                    self.int_cidr.append(subnet['cidr'])
            ext_network = ipaddr.IPNetwork(self.ext_cidr)
            for ip in router_info['ips']:
                # The external ip is the one falling inside the external CIDR.
                if ext_network.Contains(ipaddr.IPAddress(ip)):
                    self.ext_ip = ip
                    break
def get_network_from_list_by_id(network_id, networks_list):
    """Get Neutron network by id from provided networks list.

    :param network_id: Neutron network ID
    :param networks_list: list of Neutron networks to search
    :return: the matching network dict, or None (a warning is logged)
    """
    match = next((net for net in networks_list if net['id'] == network_id),
                 None)
    if match is not None:
        return match
    LOG.warning("Cannot obtain network with id='%s' from provided networks "
                "list", network_id)
def get_subnet_from_list_by_id(subnet_id, subnets_list):
"""Get Neutron subnet by id from provided subnets list.
:param subnet_id: Neutron subnet ID
:param subnets_list: List of Neutron subnets, where target subnet should
be searched
"""
<|fim▁hole|>
LOG.warning("Cannot obtain subnet with id='%s' from provided subnets "
"list", subnet_id)
def get_ports_by_device_id_from_list(device_id, ports_list):
    """Get Neutron ports by device ID from provided ports list.

    :param device_id: Port device ID
    :param ports_list: list of Neutron ports to search
    :result: list of ports belonging to the device (possibly empty; an
        empty result is logged at debug level)
    """
    ports = [port for port in ports_list if port['device_id'] == device_id]
    if not ports:
        LOG.debug("There are no ports with device_id='%s' in provided list",
                  device_id)
    return ports
def get_network_from_list(ip, tenant_id, networks_list, subnets_list):
    """Get Neutron network by parameters from provided list.

    :param ip: IP address of VM from this network
    :param tenant_id: tenant ID of the VM in this network
    :param networks_list: Neutron networks to search
    :param subnets_list: Neutron subnets to search
    :return: the network whose subnet contains ``ip`` and is visible to
        the tenant (owned by it or shared), or None
    """
    instance_ip = ipaddr.IPAddress(ip)
    for subnet in subnets_list:
        network_id = subnet['network_id']
        net = get_network_from_list_by_id(network_id, networks_list)
        visible = subnet['tenant_id'] == tenant_id or net['shared']
        if visible and ipaddr.IPNetwork(subnet['cidr']).Contains(instance_ip):
            return net
    return None
def get_segmentation_ids_from_net_list(networks):
    """Get busy segmentation IDs from provided networks list.

    Neutron validates network types differently: 'gre' and 'vxlan'
    networks carry a segmentation id without needing the
    'provider:physical_network' attribute (traffic is encapsulated in L3
    packets), while other types such as 'vlan' require it.

    :result: dict mapping network type to its list of busy segmentation
        IDs, e.g. {"gre": [1, 2, ...], "vlan": [1, 2, ...]}
    """
    used_seg_ids = {}
    for net in networks:
        net_type = net["provider:network_type"]
        has_seg_id = (net["provider:physical_network"] or
                      net_type in ('gre', 'vxlan'))
        if not has_seg_id:
            continue
        busy = used_seg_ids.setdefault(net_type, [])
        seg_id = net["provider:segmentation_id"]
        if seg_id is not None:
            busy.append(seg_id)
    return used_seg_ids
def generate_new_segmentation_id(src_seg_ids, dst_seg_ids, network_type):
    """Generate new segmentation ID based on provided info with busy ones.

    Search for the lowest free segmentation ID. IDs '0' and '1' are reserved
    in most of network types, so start searching from '2'.
    For 'vlan' network type ID '4095' is the last one in available range and
    besides also reserved. Raise AbortMigrationError if reach this ID.

    :param src_seg_ids: Dictionary with busy segmentation IDs on SRC
    :param dst_seg_ids: Dictionary with busy segmentation IDs on DST
    :param network_type: Network type ('vlan', 'vxlan' or 'gre')

    :result int: New generated free segmentation ID
    """
    src_seg_ids = set(src_seg_ids.get(network_type, []))
    dst_seg_ids = set(dst_seg_ids.get(network_type, []))
    busy_seg_ids = src_seg_ids | dst_seg_ids

    # Linear scan upward from 2 for the first id unused on either cloud.
    free_seg_id = None
    counter = 2
    while free_seg_id is None:
        if counter not in busy_seg_ids:
            free_seg_id = counter
        counter += 1

    # 4095 is reserved for vlan; reaching it means the range is exhausted.
    if free_seg_id >= 4095 and network_type == 'vlan':
        raise exception.AbortMigrationError("Segmentation IDs limit for 'vlan'"
                                            " network type has been exceeded")

    return free_seg_id<|fim▁end|>
|
for subnet in subnets_list:
if subnet['id'] == subnet_id:
return subnet
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>"""
URLs used in the unit tests for django-registration.
You should not attempt to use these URLs in any sort of real or
development environment; instead, use
``registration/backends/default/urls.py``. This URLconf includes those
URLs, and also adds several additional URLs which serve no purpose
other than to test that optional keyword arguments are properly
handled.
"""
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
from ..views import activate
from ..views import register
urlpatterns = patterns('',
# Test the 'activate' view with custom template
# name.
url(r'^activate-with-template-name/(?P<activation_key>\w+)/$',
activate,
{'template_name': 'registration/test_template_name.html',
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_test_activate_template_name'),
# Test the 'activate' view with
# extra_context_argument.
url(r'^activate-extra-context/(?P<activation_key>\w+)/$',
activate,
{'extra_context': {'foo': 'bar', 'callable': lambda: 'called'},
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_test_activate_extra_context'),
# Test the 'activate' view with success_url argument.
url(r'^activate-with-success-url/(?P<activation_key>\w+)/$',
activate,
{'success_url': 'registration_test_custom_success_url',
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_test_activate_success_url'),
# Test the 'register' view with custom template
# name.
url(r'^register-with-template-name/$',
register,
{'template_name': 'registration/test_template_name.html',
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_test_register_template_name'),
# Test the'register' view with extra_context
# argument.
url(r'^register-extra-context/$',
register,
{'extra_context': {'foo': 'bar', 'callable': lambda: 'called'},
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_test_register_extra_context'),
# Test the 'register' view with custom URL for
# closed registration.
url(r'^register-with-disallowed-url/$',
register,<|fim▁hole|> name='registration_test_register_disallowed_url'),
# Set up a pattern which will correspond to the
# custom 'disallowed_url' above.
url(r'^custom-disallowed/$',
direct_to_template,
{'template': 'registration/registration_closed.html'},
name='registration_test_custom_disallowed'),
# Test the 'register' view with custom redirect
# on successful registration.
url(r'^register-with-success_url/$',
register,
{'success_url': 'registration_test_custom_success_url',
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_test_register_success_url'
),
# Pattern for custom redirect set above.
url(r'^custom-success/$',
direct_to_template,
{'template': 'registration/test_template_name.html'},
name='registration_test_custom_success_url'),
(r'', include('registration.backends.default.urls')),
)<|fim▁end|>
|
{'disallowed_url': 'registration_test_custom_disallowed',
'backend': 'registration.backends.default.DefaultBackend'},
|
<|file_name|>StringEdgePrinter.java<|end_file_name|><|fim▁begin|>package io.github.jhg543.mellex.operation;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import io.github.jhg543.mellex.ASTHelper.*;
import io.github.jhg543.mellex.antlrparser.DefaultSQLBaseListener;
import io.github.jhg543.mellex.antlrparser.DefaultSQLLexer;
import io.github.jhg543.mellex.antlrparser.DefaultSQLParser;
import io.github.jhg543.mellex.antlrparser.DefaultSQLParser.Sql_stmtContext;
import io.github.jhg543.mellex.inputsource.BasicTableDefinitionProvider;
import io.github.jhg543.mellex.inputsource.TableDefinitionProvider;
import io.github.jhg543.mellex.listeners.ColumnDataFlowListener;
import io.github.jhg543.mellex.util.Misc;
import io.github.jhg543.nyallas.graphmodel.*;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Predicate;
public class StringEdgePrinter {
private static final Logger log = LoggerFactory.getLogger(StringEdgePrinter.class);
private static int ERR_NOSQL = 1;
private static int ERR_PARSE = 2;
private static int ERR_SEMANTIC = 3;
private static int ERR_OK = 0;
private static int printSingleFile(Path srcdir, Path dstdir, int scriptNumber, TableDefinitionProvider tp) {
// generate a hash to mark vt table names
String srcHash = Integer.toHexString(srcdir.hashCode());
// create destination dir
try {
Files.createDirectories(dstdir);
} catch (IOException e) {
throw new RuntimeException(e);
}
try (PrintWriter err = new PrintWriter(dstdir.resolve("log").toAbsolutePath().toString(), "utf-8")) {
// trim perl code
String sql = Misc.trimPerlScript(srcdir, StandardCharsets.UTF_8);
if (sql == null) {
err.println("Can't extract sql from file " + srcdir.toString());
return ERR_NOSQL;
}
// log actual sql statement ( for corrent line number ..)
try (PrintWriter writer = new PrintWriter(dstdir.resolve("sql").toAbsolutePath().toString(), "utf-8")) {
writer.append(sql);
}
// antlr parse
AtomicInteger errorCount = new AtomicInteger();
ANTLRInputStream in = new ANTLRInputStream(sql);
DefaultSQLLexer lexer = new DefaultSQLLexer(in);
CommonTokenStream tokens = new CommonTokenStream(lexer);
DefaultSQLParser parser = new DefaultSQLParser(tokens);
parser.removeErrorListeners();
parser.addErrorListener(new BaseErrorListener() {
@Override
public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine,
String msg, RecognitionException e) {
err.println("line" + line + ":" + charPositionInLine + "at" + offendingSymbol + ":" + msg);
errorCount.incrementAndGet();
}
});
err.println("-------Parse start---------");
ParseTree tree = null;
try {
tree = parser.parse();
if (errorCount.get() > 0) {
return ERR_PARSE;
}
} catch (Exception e) {
e.printStackTrace(err);
return ERR_PARSE;
}
err.println("-------Parse OK, Semantic Analysis start --------");
ParseTreeWalker w = new ParseTreeWalker();
try {
ColumnDataFlowListener s = new ColumnDataFlowListener(tp, tokens);
w.walk(s, tree);
} catch (Exception e) {
e.printStackTrace(err);
return ERR_SEMANTIC;
}
err.println("-------Semantic OK, Writing result --------");
// Remove volatile tables
VolatileTableRemover graph = new VolatileTableRemover();
// DAG dag = new DAG();
// ZeroBasedStringIdGenerator ids = new
// ZeroBasedStringIdGenerator();
Map<String, Vertex<String, Integer>> vmap = new HashMap<>();
// Output result and initialize volatile tables removal process
try (PrintWriter out = new PrintWriter(dstdir.resolve("out").toAbsolutePath().toString(), "utf-8")) {
out.println("ScriptID StmtID StmtType DestCol SrcCol ConnectionType");
String template = "%d %d %s %s.%s %s.%s %d\n";
DefaultSQLBaseListener pr = new DefaultSQLBaseListener() {
int stmtNumber = 0;
@Override
public void exitSql_stmt(Sql_stmtContext ctx) {
super.exitSql_stmt(ctx);
String stmtType = null;
SubQuery q = null;
if (ctx.insert_stmt() != null) {
stmtType = "I";
q = ctx.insert_stmt().stmt;
}
if (ctx.create_table_stmt() != null) {
if (ctx.create_table_stmt().insert != null) {
stmtType = "C";
q = ctx.create_table_stmt().insert;
}
}
if (ctx.create_view_stmt() != null) {
stmtType = "V";
q = ctx.create_view_stmt().insert;
}
if (ctx.update_stmt() != null) {
stmtType = "U";
q = ctx.update_stmt().q;
}
if (q != null) {
// what's vt's scope?
Set<String> vts = tp.getVolatileTables().keySet();
String dstTable = q.dbobj.toDotString();
boolean isDstVT = vts.contains(dstTable);
if (isDstVT) {
dstTable = "VT_" + srcHash + "_" + dstTable;
}
for (ResultColumn c : q.columns) {
for (InfSource source : c.inf.getSources()) {
ObjectName srcname = source.getSourceObject();
String srcTable = srcname.toDotStringExceptLast();
boolean isSrcVT = vts.contains(srcTable);
if (isSrcVT) {
srcTable = "VT_" + srcHash + "_" + srcTable;
}
out.append(String.format(template, scriptNumber, stmtNumber, stmtType, dstTable, c.name,
srcTable, srcname.toDotStringLast(), source.getConnectionType().getMarker()));
// collapse volatile table
String dst = dstTable + "." + c.name;
String src = srcTable + "." + srcname.toDotStringLast();
// Integer dstnum = ids.queryNumber(dst);
// Integer srcnum = ids.queryNumber(src);
Vertex<String, Integer> srcv;
srcv = vmap.get(src);
if (srcv == null) {
srcv = graph.addVertex(BasicVertex::new);
vmap.put(src, srcv);
srcv.setVertexData(src);
if (isSrcVT) {
srcv.setMarker(0);
}
}
Vertex<String, Integer> dstv;
dstv = vmap.get(dst);
if (dstv == null) {
dstv = graph.addVertex(BasicVertex::new);
vmap.put(dst, dstv);
dstv.setVertexData(dst);
if (isDstVT) {
dstv.setMarker(0);
}
}
Edge<String, Integer> edge = new BasicEdge<String, Integer>(srcv, dstv);
edge.setEdgeData(source.getConnectionType().getMarker());
graph.addEdge(edge);
}
}
} else {
// log.warn("query null for sm " + stmtNumber);
}
stmtNumber++;
}
};
w.walk(pr, tree);
}
// Int2ObjectMap<Node> collapsed = dag.collapse(scriptNumber);
graph.remove();
// write result (with volatile tables removed)
try (PrintWriter out = new PrintWriter(dstdir.resolve("novt").toAbsolutePath().toString(), "utf-8")) {
out.println("scriptid,dstsch,dsttbl,dstcol,srcsch,srctbl,srccol,contype");
String template = "%d,%s,%s,%s,%s,%s,%s,%d\n";
for (Vertex<String, Integer> v : graph.getVertexes()) {
for (Edge<String, Integer> e : v.getOutgoingEdges()) {
String dst = e.getTarget().getVertexData();
String src = e.getSource().getVertexData();
List<String> t1 = Splitter.on('.').splitToList(dst);
if (t1.size() == 2) {
t1 = new ArrayList<String>(t1);
t1.add(0, "3X_NOSCHEMA_" + scriptNumber);
}
List<String> t2 = Splitter.on('.').splitToList(src);
if (t2.size() == 2) {
t2 = new ArrayList<String>(t1);
t2.add(0, "3X_NOSCHEMA_" + scriptNumber);
}
out.append(String.format(template, scriptNumber, t1.get(0), t1.get(1), t1.get(2), t2.get(0), t2.get(1),
t2.get(2), e.getEdgeData()));
}
}
}
tp.clearVolatileTables();
err.println("-------Success --------");
return 0;
} catch (FileNotFoundException | UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
    /**
     * Walks every script under {@code srcdir} accepted by {@code filefilter},
     * processes each with {@link #printSingleFile} and writes global files
     * into {@code dstdir}: "stats" (failures + totals per return code),
     * "cols" (all permanent table columns) and "number" (script-id mapping).
     *
     * @param srcdir            root directory to scan (must exist)
     * @param dstdir            output directory (created if absent)
     * @param filefilter        predicate selecting which files to process
     * @param scriptNumberStart first script sequence number to assign
     * @param caseSensitive     identifier case sensitivity for the parser
     * @return per-return-code counters indexed by the ERR_* constants
     */
    public static int[] printStringEdge(Path srcdir, Path dstdir, Predicate<Path> filefilter, int scriptNumberStart,
            boolean caseSensitive) {
        // ensure directories exist
        Preconditions.checkState(Files.isDirectory(srcdir));
        try {
            Files.createDirectories(dstdir);
        } catch (IOException e1) {
            throw new RuntimeException(e1);
        }
        // set up variables
        GlobalSettings.setCaseSensitive(caseSensitive);
        AtomicInteger scriptNumber = new AtomicInteger(scriptNumberStart);
        TableDefinitionProvider tp = new BasicTableDefinitionProvider(Misc::nameSym);
        int[] stats = new int[10];
        // open global output files
        try (PrintWriter out = new PrintWriter(dstdir.resolve("stats").toAbsolutePath().toString(), "utf-8");
                PrintWriter cols = new PrintWriter(dstdir.resolve("cols").toAbsolutePath().toString(), "utf-8");
                PrintWriter numbers = new PrintWriter(dstdir.resolve("number").toAbsolutePath().toString(), "utf-8")) {
            // for each file
            Files.walk(srcdir).filter(filefilter).sorted().forEach(path -> {
                int sn = scriptNumber.getAndIncrement();
                numbers.println("" + sn + " " + path.toString());
                String srcHash = Integer.toHexString(path.hashCode());
                Path workdir = dstdir.resolve(path.getFileName()).resolve(srcHash);
                // deal with single files.
                int retcode = printSingleFile(path, workdir, sn, tp);
                if (retcode > 0) {
                    out.println(String.format("%s %d %d", path.toString(), retcode, sn));
                }
                stats[retcode]++;
            });
            out.println("OK=" + stats[ERR_OK]);
            out.println("NOSQL=" + stats[ERR_NOSQL]);
            out.println("PARSE=" + stats[ERR_PARSE]);
            out.println("SEMANTIC=" + stats[ERR_SEMANTIC]);
            // Dump every known permanent column as "table.column".
            tp.getPermanentTables().forEach((name, stmt) -> {
                stmt.columns.forEach(colname -> cols.println(name + "." + colname.name));
            });
            return stats;<|fim▁hole|>
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
    /**
     * Ad-hoc entry point: processes the hard-coded debug workspace.
     * Accepts only *.sql / *.pl files located directly under a BIN directory
     * (note the Windows-style "BIN\\" path suffix check).
     */
    public static void main(String[] args) throws Exception {
        Predicate<Path> filefilter = x -> Files.isRegularFile(x)
                && (x.getFileName().toString().toLowerCase().endsWith(".sql") || x.getFileName().toString().toLowerCase()
                        .endsWith(".pl"))
                && x.toString().toUpperCase().endsWith("BIN\\" + x.getFileName().toString().toUpperCase());
        // printStringEdge(Paths.get("d:/dataflow/work1/script/mafixed"),
        // Paths.get("d:/dataflow/work2/mares"), filefilter, 0, false);
        printStringEdge(Paths.get("d:/dataflow/work1/debug"), Paths.get("d:/dataflow/work2/debugres"), filefilter, 0, false);
        // printStringEdge(Paths.get("d:/dataflow/work1/f1/sor"),
        // Paths.get("d:/dataflow/work2/result2/sor"), filefilter, 0, false);
    }
}<|fim▁end|>
| |
<|file_name|>webpack.js<|end_file_name|><|fim▁begin|>const webpack = require('webpack');
const path = require('path');
const {debugMode, pkg} = require('./env');
const cwd = process.cwd();
const srcPath = path.join(cwd, pkg.src.js);
const distPath = path.join(cwd, pkg.dist.js);
// Babel loaders
// Babel loaders: a single loader string carrying the preset/plugin query.
const babelLoaders = (function () {
  const presets = ['react', 'es2015', 'stage-0'];
  const plugins = ['react-html-attrs', 'transform-decorators-legacy', 'transform-class-properties'];
  // presets[]=… entries first, then plugins[]=…, comma-separated.
  const query = presets.map(p => `presets[]=${p}`)
    .concat(plugins.map(p => `plugins[]=${p}`))
    .join(',');
  return [`babel?${query}`];
}());
// In debug builds wrap the babel loader with hot-reload helpers:
// react-hot must run first, the hot-accept shim last.
if (debugMode) {
  // Only hot load on debug mode
  babelLoaders.push('webpack-module-hot-accept');
  babelLoaders.unshift('react-hot');
}
// Plugin list: hot-reload helpers in debug builds, minification plus
// compile-time defines in production builds.
const plugins = (function () {
  const dedup = new webpack.optimize.DedupePlugin();
  const occurenceOrder = new webpack.optimize.OccurenceOrderPlugin();
  const noErrors = new webpack.NoErrorsPlugin();
  const hotModuleReplacement = new webpack.HotModuleReplacementPlugin();
  const uglifyJS = new webpack.optimize.UglifyJsPlugin({
    compress: { warnings: false },
    output: { comments: false },
    mangle: false,
    sourcemap: false,
  });
  // Constants injected into the bundle at compile time.
  const define = new webpack.DefinePlugin({
    'process.env': {
      'NODE_ENV': JSON.stringify(debugMode ? 'debug' : 'production'),
    },
    '__PRODUCTION_MODE': JSON.stringify(!debugMode),
    '__DEBUG_MODE': JSON.stringify(debugMode),
    '__APP_NAME': JSON.stringify(pkg.name),
    '__APP_VERSION': JSON.stringify(pkg.version),
  });
  if (debugMode) {
    return [occurenceOrder, hotModuleReplacement, noErrors];
  }
  return [dedup, occurenceOrder, uglifyJS, define];
}());
const webpackSettings = {
debug: debugMode,
plugins,
entry: {
app: [
path.join(srcPath, './app.jsx'),
],
},
output: {
path: distPath,
publicPath: '/js/',
filename: '[name].js',
},
resolve: {
extensions: ['', '.js', '.jsx'],
},
<|fim▁hole|> exclude: /(?:node_modules|bower_components)/,
include: [srcPath],
loaders: babelLoaders,
}],
},
};
// debug mode settings
if (debugMode) {
webpackSettings.devtool = 'inline-sourcemap';
for (const key in webpackSettings.entry) {
if (webpackSettings.entry.hasOwnProperty(key)) {
webpackSettings.entry[key].unshift('webpack-hot-middleware/client');
webpackSettings.entry[key].unshift('webpack/hot/dev-server');
}
}
}
module.exports = webpackSettings;<|fim▁end|>
|
module: {
loaders: [{
test: /\.jsx$/,
|
<|file_name|>day_7.rs<|end_file_name|><|fim▁begin|>pub fn compress(src: &str) -> String {
if src.is_empty() {
src.to_owned()
} else {
let mut compressed = String::new();
let mut chars = src.chars().peekable();
while let Some(c) = chars.peek().cloned() {
let mut counter = 0;
while let Some(n) = chars.peek().cloned() {
if c == n {
counter += 1;
chars.next();
} else {
break;
}
}
compressed.push_str(counter.to_string().as_str());
compressed.push(c);
}
compressed
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]<|fim▁hole|> #[test]
fn compress_unique_chars_string() {
assert_eq!(compress("abc"), "1a1b1c");
}
#[test]
fn compress_doubled_chars_string() {
assert_eq!(compress("aabbcc"), "2a2b2c");
}
}<|fim▁end|>
|
fn compress_empty_string() {
assert_eq!(compress(""), "");
}
|
<|file_name|>_knn.py<|end_file_name|><|fim▁begin|># Authors: Ashim Bhattarai <[email protected]>
# Thomas J Fan <[email protected]>
# License: BSD 3 clause
import numpy as np
from ._base import _BaseImputer
from ..utils.validation import FLOAT_DTYPES
from ..metrics import pairwise_distances_chunked
from ..metrics.pairwise import _NAN_METRICS
from ..neighbors._base import _get_weights
from ..neighbors._base import _check_weights
from ..utils import is_scalar_nan
from ..utils._mask import _get_mask
from ..utils.validation import check_is_fitted
class KNNImputer(_BaseImputer):
"""Imputation for completing missing values using k-Nearest Neighbors.
Each sample's missing values are imputed using the mean value from
`n_neighbors` nearest neighbors found in the training set. Two samples are
close if the features that neither is missing are close.
Read more in the :ref:`User Guide <knnimpute>`.
.. versionadded:: 0.22
Parameters
----------
missing_values : int, float, str, np.nan or None, default=np.nan
The placeholder for the missing values. All occurrences of
`missing_values` will be imputed. For pandas' dataframes with
nullable integer dtypes with missing values, `missing_values`
should be set to np.nan, since `pd.NA` will be converted to np.nan.
n_neighbors : int, default=5
Number of neighboring samples to use for imputation.
weights : {'uniform', 'distance'} or callable, default='uniform'
Weight function used in prediction. Possible values:
- 'uniform' : uniform weights. All points in each neighborhood are
weighted equally.
- 'distance' : weight points by the inverse of their distance.
in this case, closer neighbors of a query point will have a
greater influence than neighbors which are further away.
- callable : a user-defined function which accepts an
array of distances, and returns an array of the same shape
containing the weights.
metric : {'nan_euclidean'} or callable, default='nan_euclidean'
Distance metric for searching neighbors. Possible values:
- 'nan_euclidean'
- callable : a user-defined function which conforms to the definition
of ``_pairwise_callable(X, Y, metric, **kwds)``. The function
accepts two arrays, X and Y, and a `missing_values` keyword in
`kwds` and returns a scalar distance value.
copy : bool, default=True
If True, a copy of X will be created. If False, imputation will
be done in-place whenever possible.
add_indicator : bool, default=False
If True, a :class:`MissingIndicator` transform will stack onto the
output of the imputer's transform. This allows a predictive estimator
to account for missingness despite imputation. If a feature has no
missing values at fit/train time, the feature won't appear on the
missing indicator even if there are missing values at transform/test
time.
Attributes
----------
indicator_ : :class:`~sklearn.impute.MissingIndicator`
Indicator used to add binary indicators for missing values.
``None`` if add_indicator is False.
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
References
----------
* Olga Troyanskaya, Michael Cantor, Gavin Sherlock, Pat Brown, Trevor
Hastie, Robert Tibshirani, David Botstein and Russ B. Altman, Missing
value estimation methods for DNA microarrays, BIOINFORMATICS Vol. 17
no. 6, 2001 Pages 520-525.
Examples
--------
>>> import numpy as np
>>> from sklearn.impute import KNNImputer
>>> X = [[1, 2, np.nan], [3, 4, 3], [np.nan, 6, 5], [8, 8, 7]]
>>> imputer = KNNImputer(n_neighbors=2)
>>> imputer.fit_transform(X)
array([[1. , 2. , 4. ],
[3. , 4. , 3. ],
[5.5, 6. , 5. ],
[8. , 8. , 7. ]])
"""
def __init__(self, *, missing_values=np.nan, n_neighbors=5,
weights="uniform", metric="nan_euclidean", copy=True,
add_indicator=False):
super().__init__(
missing_values=missing_values,
add_indicator=add_indicator
)
self.n_neighbors = n_neighbors
self.weights = weights
self.metric = metric
self.copy = copy
    def _calc_impute(self, dist_pot_donors, n_neighbors,
                     fit_X_col, mask_fit_X_col):
        """Helper function to impute a single column.

        Parameters
        ----------
        dist_pot_donors : ndarray of shape (n_receivers, n_potential_donors)
            Distance matrix between the receivers and potential donors from
            training set. There must be at least one non-nan distance between
            a receiver and a potential donor.

        n_neighbors : int
            Number of neighbors to consider.

        fit_X_col : ndarray of shape (n_potential_donors,)
            Column of potential donors from training set.

        mask_fit_X_col : ndarray of shape (n_potential_donors,)
            Missing mask for fit_X_col.

        Returns
        -------
        imputed_values: ndarray of shape (n_receivers,)
            Imputed values for receiver.
        """
        # Get donors: per row, indices of the n_neighbors smallest distances
        # (argpartition avoids a full sort).
        donors_idx = np.argpartition(dist_pot_donors, n_neighbors - 1,
                                     axis=1)[:, :n_neighbors]

        # Get weight matrix from from distance matrix
        donors_dist = dist_pot_donors[
            np.arange(donors_idx.shape[0])[:, None], donors_idx]

        # None for 'uniform' weighting; otherwise per-donor weights.
        weight_matrix = _get_weights(donors_dist, self.weights)

        # fill nans with zeros
        if weight_matrix is not None:
            weight_matrix[np.isnan(weight_matrix)] = 0.0

        # Retrieve donor values and calculate kNN average; the mask keeps
        # donors' own missing entries out of the average.
        donors = fit_X_col.take(donors_idx)
        donors_mask = mask_fit_X_col.take(donors_idx)
        donors = np.ma.array(donors, mask=donors_mask)

        return np.ma.average(donors, axis=1, weights=weight_matrix).data
def fit(self, X, y=None):
    """Fit the imputer on X.

    Parameters
    ----------
    X : array-like shape of (n_samples, n_features)
        Input data, where `n_samples` is the number of samples and
        `n_features` is the number of features.

    Returns
    -------
    self : object
    """
    # NaN markers must survive validation; any other sentinel keeps the
    # strict all-finite check.
    force_all_finite = (
        True if not is_scalar_nan(self.missing_values) else "allow-nan"
    )

    # Only NaN-aware metrics (or a user-supplied callable) are usable.
    metric_supported = (self.metric in _NAN_METRICS
                        or callable(self.metric))
    if not metric_supported:
        raise ValueError(
            "The selected metric does not support NaN values")
    if self.n_neighbors <= 0:
        raise ValueError(
            "Expected n_neighbors > 0. Got {}".format(self.n_neighbors))

    X = self._validate_data(X, accept_sparse=False, dtype=FLOAT_DTYPES,
                            force_all_finite=force_all_finite,
                            copy=self.copy)
    _check_weights(self.weights)

    # Cache the training matrix and its missing-value mask; transform()
    # draws donors from these.
    self._fit_X = X
    self._mask_fit_X = _get_mask(self._fit_X, self.missing_values)

    super()._fit_indicator(self._mask_fit_X)

    return self
def transform(self, X):
    """Impute all missing values in X.

    Parameters
    ----------
    X : array-like of shape (n_samples, n_features)
        The input data to complete.

    Returns
    -------
    X : array-like of shape (n_samples, n_output_features)
        The imputed dataset. `n_output_features` is the number of features
        that is not always missing during `fit`.
    """
    check_is_fitted(self)
    # Mirror the validation settings chosen in fit().
    if not is_scalar_nan(self.missing_values):
        force_all_finite = True
    else:
        force_all_finite = "allow-nan"
    X = self._validate_data(X, accept_sparse=False, dtype=FLOAT_DTYPES,
                            force_all_finite=force_all_finite,
                            copy=self.copy, reset=False)

    mask = _get_mask(X, self.missing_values)
    mask_fit_X = self._mask_fit_X
    # Columns that were entirely missing during fit have no donors and
    # are dropped from the output.
    valid_mask = ~np.all(mask_fit_X, axis=0)

    X_indicator = super()._transform_indicator(mask)

    # Removes columns where the training data is all nan
    if not np.any(mask):
        # No missing values in X
        # Remove columns where the training data is all nan
        return X[:, valid_mask]

    row_missing_idx = np.flatnonzero(mask.any(axis=1))

    non_missing_fix_X = np.logical_not(mask_fit_X)

    # Maps from indices from X to indices in dist matrix
    dist_idx_map = np.zeros(X.shape[0], dtype=int)
    dist_idx_map[row_missing_idx] = np.arange(row_missing_idx.shape[0])

    def process_chunk(dist_chunk, start):
        # Handles one chunk of the (rows-with-missing x train-rows)
        # distance matrix; `start` is the chunk offset into
        # row_missing_idx. Mutates the outer X in place.
        row_missing_chunk = row_missing_idx[start:start + len(dist_chunk)]

        # Find and impute missing by column
        for col in range(X.shape[1]):
            if not valid_mask[col]:
                # column was all missing during training
                continue

            col_mask = mask[row_missing_chunk, col]
            if not np.any(col_mask):
                # column has no missing values
                continue

            potential_donors_idx, = np.nonzero(non_missing_fix_X[:, col])

            # receivers_idx are indices in X
            receivers_idx = row_missing_chunk[np.flatnonzero(col_mask)]

            # distances for samples that needed imputation for column
            dist_subset = (dist_chunk[dist_idx_map[receivers_idx] - start]
                           [:, potential_donors_idx])

            # receivers with all nan distances impute with mean
            all_nan_dist_mask = np.isnan(dist_subset).all(axis=1)
            all_nan_receivers_idx = receivers_idx[all_nan_dist_mask]

            if all_nan_receivers_idx.size:
                # Fallback: column mean of the observed training values
                # when no donor has a usable distance.
                col_mean = np.ma.array(self._fit_X[:, col],
                                       mask=mask_fit_X[:, col]).mean()
                X[all_nan_receivers_idx, col] = col_mean

                if len(all_nan_receivers_idx) == len(receivers_idx):
                    # all receivers imputed with mean
                    continue

                # receivers with at least one defined distance
                receivers_idx = receivers_idx[~all_nan_dist_mask]
                dist_subset = (dist_chunk[dist_idx_map[receivers_idx]
                                          - start]
                               [:, potential_donors_idx])

            n_neighbors = min(self.n_neighbors, len(potential_donors_idx))
            value = self._calc_impute(
                dist_subset,
                n_neighbors,
                self._fit_X[potential_donors_idx, col],
                mask_fit_X[potential_donors_idx, col])
            X[receivers_idx, col] = value

    # process in fixed-memory chunks
    gen = pairwise_distances_chunked(
        X[row_missing_idx, :],
        self._fit_X,
        metric=self.metric,<|fim▁hole|> missing_values=self.missing_values,
        force_all_finite=force_all_finite,
        reduce_func=process_chunk)
    for chunk in gen:
        # process_chunk modifies X in place. No return value.
        pass

    return super()._concatenate_indicator(X[:, valid_mask], X_indicator)
| |
<|file_name|>4263e76758123044.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
a === b
|
<|file_name|>BaseUtils.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*-
'''
基本工具
Created on 2014年5月14日
@author: Exp
'''
''' 获取系统时间 '''
def getSysTime(format = "%Y-%m-%d %H:%M:%S"):
    """Return the current local system time rendered with the given
    strftime pattern (defaults to YYYY-MM-DD HH:MM:SS)."""
    from time import strftime
    return strftime(format)
# End Fun getSysTime()
''' 判断是否为本地运行环境,否则为SAE运行环境 '''
def isLocalEnvironment():
    """Return True when the SAE APP_NAME variable is absent/empty,
    i.e. the code is running in a local environment."""
    import os
    # SAE (Sina App Engine) sets APP_NAME; missing or empty means local.
    return not os.environ.get("APP_NAME", "")
# End Fun isLocalEnvironment()
''' 加密字符串 '''
def encrypt(plaintext):
    """Obfuscate a string via base64 encoding (NOT real encryption --
    trivially reversible with decrypt())."""
    import base64
    # NOTE(review): base64.encodestring is the Python 2 API; it was removed
    # in Python 3.9 (base64.encodebytes replaces it) -- confirm the target
    # runtime before reusing this module under Python 3.
    return base64.encodestring(plaintext)
# End Fun encrypt()
''' 解密字符串 '''
def decrypt(ciphertext):
    """Reverse encrypt(): base64-decode the obfuscated string."""
    import base64
    # NOTE(review): base64.decodestring is the Python 2 API; it was removed
    # in Python 3.9 (base64.decodebytes replaces it) -- confirm the target
    # runtime before reusing this module under Python 3.
    return base64.decodestring(ciphertext)
# End Fun decrypt()
''' 简单编码转换,把未知编码的orgStr转码为aimCharset,其中orgStr的源编码由系统自动判断 '''
def simpleTranscoding(orgStr, aimCharset):
    """Re-encode orgStr into aimCharset, auto-detecting the source
    encoding with chardet (detection may be wrong for short inputs)."""
    import chardet
    orgCharset = chardet.detect(orgStr)['encoding'] # auto-detect the source encoding
    <|fim▁hole|> return transcoding(orgStr, orgCharset, aimCharset)
# End Fun simpleTranscoding()
''' 编码转换,把源编码为orgCharset的orgStr,转码为aimCharset '''
def transcoding(orgStr, orgCharset, aimCharset):
    """Convert orgStr from orgCharset to aimCharset by round-tripping
    through an intermediate unicode string."""
    # Decode with the source charset, then re-encode with the target one.
    return orgStr.decode(orgCharset).encode(aimCharset)
# End Fun transcoding()
| |
<|file_name|>flyby.js<|end_file_name|><|fim▁begin|>(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
module.exports = { "default": require("core-js/library/fn/get-iterator"), __esModule: true };
},{"core-js/library/fn/get-iterator":6}],2:[function(require,module,exports){
module.exports = { "default": require("core-js/library/fn/object/define-property"), __esModule: true };
},{"core-js/library/fn/object/define-property":7}],3:[function(require,module,exports){
module.exports = { "default": require("core-js/library/fn/object/keys"), __esModule: true };
},{"core-js/library/fn/object/keys":8}],4:[function(require,module,exports){
"use strict";
exports.__esModule = true;
exports.default = function (instance, Constructor) {
if (!(instance instanceof Constructor)) {
throw new TypeError("Cannot call a class as a function");
}
};
},{}],5:[function(require,module,exports){
"use strict";
exports.__esModule = true;
var _defineProperty = require("../core-js/object/define-property");
var _defineProperty2 = _interopRequireDefault(_defineProperty);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.default = function () {
function defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
descriptor.enumerable = descriptor.enumerable || false;
descriptor.configurable = true;
if ("value" in descriptor) descriptor.writable = true;
(0, _defineProperty2.default)(target, descriptor.key, descriptor);
}
}
return function (Constructor, protoProps, staticProps) {
if (protoProps) defineProperties(Constructor.prototype, protoProps);
if (staticProps) defineProperties(Constructor, staticProps);
return Constructor;
};
}();
},{"../core-js/object/define-property":2}],6:[function(require,module,exports){
require('../modules/web.dom.iterable');
require('../modules/es6.string.iterator');
module.exports = require('../modules/core.get-iterator');
},{"../modules/core.get-iterator":57,"../modules/es6.string.iterator":61,"../modules/web.dom.iterable":62}],7:[function(require,module,exports){
require('../../modules/es6.object.define-property');
var $Object = require('../../modules/_core').Object;
module.exports = function defineProperty(it, key, desc){
return $Object.defineProperty(it, key, desc);
};
},{"../../modules/_core":15,"../../modules/es6.object.define-property":59}],8:[function(require,module,exports){
require('../../modules/es6.object.keys');
module.exports = require('../../modules/_core').Object.keys;
},{"../../modules/_core":15,"../../modules/es6.object.keys":60}],9:[function(require,module,exports){
module.exports = function(it){
if(typeof it != 'function')throw TypeError(it + ' is not a function!');
return it;
};
},{}],10:[function(require,module,exports){
module.exports = function(){ /* empty */ };
},{}],11:[function(require,module,exports){
var isObject = require('./_is-object');
module.exports = function(it){
if(!isObject(it))throw TypeError(it + ' is not an object!');
return it;
};
},{"./_is-object":29}],12:[function(require,module,exports){
// false -> Array#indexOf
// true -> Array#includes
var toIObject = require('./_to-iobject')
, toLength = require('./_to-length')
, toIndex = require('./_to-index');
module.exports = function(IS_INCLUDES){
return function($this, el, fromIndex){
var O = toIObject($this)
, length = toLength(O.length)
, index = toIndex(fromIndex, length)
, value;
// Array#includes uses SameValueZero equality algorithm
if(IS_INCLUDES && el != el)while(length > index){
value = O[index++];
if(value != value)return true;
// Array#toIndex ignores holes, Array#includes - not
} else for(;length > index; index++)if(IS_INCLUDES || index in O){
if(O[index] === el)return IS_INCLUDES || index || 0;
} return !IS_INCLUDES && -1;
};
};
},{"./_to-index":48,"./_to-iobject":50,"./_to-length":51}],13:[function(require,module,exports){
// getting tag from 19.1.3.6 Object.prototype.toString()
var cof = require('./_cof')
, TAG = require('./_wks')('toStringTag')
// ES3 wrong here
, ARG = cof(function(){ return arguments; }()) == 'Arguments';
// fallback for IE11 Script Access Denied error
var tryGet = function(it, key){
try {
return it[key];
} catch(e){ /* empty */ }
};
module.exports = function(it){
var O, T, B;
return it === undefined ? 'Undefined' : it === null ? 'Null'
// @@toStringTag case
: typeof (T = tryGet(O = Object(it), TAG)) == 'string' ? T
// builtinTag case
: ARG ? cof(O)
// ES3 arguments fallback
: (B = cof(O)) == 'Object' && typeof O.callee == 'function' ? 'Arguments' : B;
};
},{"./_cof":14,"./_wks":55}],14:[function(require,module,exports){
var toString = {}.toString;
module.exports = function(it){
return toString.call(it).slice(8, -1);
};
},{}],15:[function(require,module,exports){
var core = module.exports = {version: '2.4.0'};
if(typeof __e == 'number')__e = core; // eslint-disable-line no-undef
},{}],16:[function(require,module,exports){
// optional / simple context binding
var aFunction = require('./_a-function');
module.exports = function(fn, that, length){
aFunction(fn);
if(that === undefined)return fn;
switch(length){
case 1: return function(a){
return fn.call(that, a);
};
case 2: return function(a, b){
return fn.call(that, a, b);
};
case 3: return function(a, b, c){
return fn.call(that, a, b, c);
};
}
return function(/* ...args */){
return fn.apply(that, arguments);
};
};
},{"./_a-function":9}],17:[function(require,module,exports){
// 7.2.1 RequireObjectCoercible(argument)
module.exports = function(it){
if(it == undefined)throw TypeError("Can't call method on " + it);
return it;
};
},{}],18:[function(require,module,exports){
// Thank's IE8 for his funny defineProperty
module.exports = !require('./_fails')(function(){
return Object.defineProperty({}, 'a', {get: function(){ return 7; }}).a != 7;
});
},{"./_fails":22}],19:[function(require,module,exports){
var isObject = require('./_is-object')
, document = require('./_global').document
// in old IE typeof document.createElement is 'object'
, is = isObject(document) && isObject(document.createElement);
module.exports = function(it){
return is ? document.createElement(it) : {};
};
},{"./_global":23,"./_is-object":29}],20:[function(require,module,exports){
// IE 8- don't enum bug keys
module.exports = (
'constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf'
).split(',');
},{}],21:[function(require,module,exports){
var global = require('./_global')
, core = require('./_core')
, ctx = require('./_ctx')
, hide = require('./_hide')
, PROTOTYPE = 'prototype';
var $export = function(type, name, source){
var IS_FORCED = type & $export.F
, IS_GLOBAL = type & $export.G
, IS_STATIC = type & $export.S
, IS_PROTO = type & $export.P
, IS_BIND = type & $export.B
, IS_WRAP = type & $export.W
, exports = IS_GLOBAL ? core : core[name] || (core[name] = {})
, expProto = exports[PROTOTYPE]
, target = IS_GLOBAL ? global : IS_STATIC ? global[name] : (global[name] || {})[PROTOTYPE]
, key, own, out;
if(IS_GLOBAL)source = name;
for(key in source){
// contains in native
own = !IS_FORCED && target && target[key] !== undefined;
if(own && key in exports)continue;
// export native or passed
out = own ? target[key] : source[key];
// prevent global pollution for namespaces
exports[key] = IS_GLOBAL && typeof target[key] != 'function' ? source[key]
// bind timers to global for call from export context
: IS_BIND && own ? ctx(out, global)
// wrap global constructors for prevent change them in library
: IS_WRAP && target[key] == out ? (function(C){
var F = function(a, b, c){
if(this instanceof C){
switch(arguments.length){
case 0: return new C;
case 1: return new C(a);
case 2: return new C(a, b);
} return new C(a, b, c);
} return C.apply(this, arguments);
};
F[PROTOTYPE] = C[PROTOTYPE];
return F;
// make static versions for prototype methods
})(out) : IS_PROTO && typeof out == 'function' ? ctx(Function.call, out) : out;
// export proto methods to core.%CONSTRUCTOR%.methods.%NAME%
if(IS_PROTO){
(exports.virtual || (exports.virtual = {}))[key] = out;
// export proto methods to core.%CONSTRUCTOR%.prototype.%NAME%
if(type & $export.R && expProto && !expProto[key])hide(expProto, key, out);
}
}
};
// type bitmap
$export.F = 1; // forced
$export.G = 2; // global
$export.S = 4; // static
$export.P = 8; // proto
$export.B = 16; // bind
$export.W = 32; // wrap
$export.U = 64; // safe
$export.R = 128; // real proto method for `library`
module.exports = $export;
},{"./_core":15,"./_ctx":16,"./_global":23,"./_hide":25}],22:[function(require,module,exports){
module.exports = function(exec){
try {
return !!exec();
} catch(e){
return true;
}
};
},{}],23:[function(require,module,exports){
// https://github.com/zloirock/core-js/issues/86#issuecomment-115759028
var global = module.exports = typeof window != 'undefined' && window.Math == Math
? window : typeof self != 'undefined' && self.Math == Math ? self : Function('return this')();
if(typeof __g == 'number')__g = global; // eslint-disable-line no-undef
},{}],24:[function(require,module,exports){
var hasOwnProperty = {}.hasOwnProperty;
module.exports = function(it, key){
return hasOwnProperty.call(it, key);
};
},{}],25:[function(require,module,exports){
var dP = require('./_object-dp')
, createDesc = require('./_property-desc');
module.exports = require('./_descriptors') ? function(object, key, value){
return dP.f(object, key, createDesc(1, value));
} : function(object, key, value){
object[key] = value;
return object;
};
},{"./_descriptors":18,"./_object-dp":36,"./_property-desc":42}],26:[function(require,module,exports){
module.exports = require('./_global').document && document.documentElement;
},{"./_global":23}],27:[function(require,module,exports){
module.exports = !require('./_descriptors') && !require('./_fails')(function(){
return Object.defineProperty(require('./_dom-create')('div'), 'a', {get: function(){ return 7; }}).a != 7;
});
},{"./_descriptors":18,"./_dom-create":19,"./_fails":22}],28:[function(require,module,exports){
// fallback for non-array-like ES3 and non-enumerable old V8 strings
var cof = require('./_cof');
module.exports = Object('z').propertyIsEnumerable(0) ? Object : function(it){
return cof(it) == 'String' ? it.split('') : Object(it);
};
},{"./_cof":14}],29:[function(require,module,exports){
module.exports = function(it){
return typeof it === 'object' ? it !== null : typeof it === 'function';
};
},{}],30:[function(require,module,exports){
'use strict';
var create = require('./_object-create')
, descriptor = require('./_property-desc')
, setToStringTag = require('./_set-to-string-tag')
, IteratorPrototype = {};
// 25.1.2.1.1 %IteratorPrototype%[@@iterator]()
require('./_hide')(IteratorPrototype, require('./_wks')('iterator'), function(){ return this; });
module.exports = function(Constructor, NAME, next){
Constructor.prototype = create(IteratorPrototype, {next: descriptor(1, next)});
setToStringTag(Constructor, NAME + ' Iterator');
};
},{"./_hide":25,"./_object-create":35,"./_property-desc":42,"./_set-to-string-tag":44,"./_wks":55}],31:[function(require,module,exports){
'use strict';
var LIBRARY = require('./_library')
, $export = require('./_export')
, redefine = require('./_redefine')
, hide = require('./_hide')
, has = require('./_has')
, Iterators = require('./_iterators')
, $iterCreate = require('./_iter-create')
, setToStringTag = require('./_set-to-string-tag')
, getPrototypeOf = require('./_object-gpo')
, ITERATOR = require('./_wks')('iterator')
, BUGGY = !([].keys && 'next' in [].keys()) // Safari has buggy iterators w/o `next`
, FF_ITERATOR = '@@iterator'
, KEYS = 'keys'
, VALUES = 'values';
var returnThis = function(){ return this; };
module.exports = function(Base, NAME, Constructor, next, DEFAULT, IS_SET, FORCED){
$iterCreate(Constructor, NAME, next);
var getMethod = function(kind){
if(!BUGGY && kind in proto)return proto[kind];
switch(kind){
case KEYS: return function keys(){ return new Constructor(this, kind); };
case VALUES: return function values(){ return new Constructor(this, kind); };
} return function entries(){ return new Constructor(this, kind); };
};
var TAG = NAME + ' Iterator'
, DEF_VALUES = DEFAULT == VALUES
, VALUES_BUG = false
, proto = Base.prototype
, $native = proto[ITERATOR] || proto[FF_ITERATOR] || DEFAULT && proto[DEFAULT]
, $default = $native || getMethod(DEFAULT)
, $entries = DEFAULT ? !DEF_VALUES ? $default : getMethod('entries') : undefined
, $anyNative = NAME == 'Array' ? proto.entries || $native : $native
, methods, key, IteratorPrototype;
// Fix native
if($anyNative){
IteratorPrototype = getPrototypeOf($anyNative.call(new Base));
if(IteratorPrototype !== Object.prototype){
// Set @@toStringTag to native iterators
setToStringTag(IteratorPrototype, TAG, true);
// fix for some old engines
if(!LIBRARY && !has(IteratorPrototype, ITERATOR))hide(IteratorPrototype, ITERATOR, returnThis);
}
}
// fix Array#{values, @@iterator}.name in V8 / FF
if(DEF_VALUES && $native && $native.name !== VALUES){
VALUES_BUG = true;
$default = function values(){ return $native.call(this); };
}
// Define iterator
if((!LIBRARY || FORCED) && (BUGGY || VALUES_BUG || !proto[ITERATOR])){
hide(proto, ITERATOR, $default);
}
// Plug for library
Iterators[NAME] = $default;
Iterators[TAG] = returnThis;
if(DEFAULT){
methods = {
values: DEF_VALUES ? $default : getMethod(VALUES),
keys: IS_SET ? $default : getMethod(KEYS),
entries: $entries
};
if(FORCED)for(key in methods){
if(!(key in proto))redefine(proto, key, methods[key]);
} else $export($export.P + $export.F * (BUGGY || VALUES_BUG), NAME, methods);
}
return methods;
};
},{"./_export":21,"./_has":24,"./_hide":25,"./_iter-create":30,"./_iterators":33,"./_library":34,"./_object-gpo":38,"./_redefine":43,"./_set-to-string-tag":44,"./_wks":55}],32:[function(require,module,exports){
module.exports = function(done, value){
return {value: value, done: !!done};
};
},{}],33:[function(require,module,exports){
module.exports = {};
},{}],34:[function(require,module,exports){
module.exports = true;
},{}],35:[function(require,module,exports){
// 19.1.2.2 / 15.2.3.5 Object.create(O [, Properties])
var anObject = require('./_an-object')
, dPs = require('./_object-dps')
, enumBugKeys = require('./_enum-bug-keys')
, IE_PROTO = require('./_shared-key')('IE_PROTO')
, Empty = function(){ /* empty */ }
, PROTOTYPE = 'prototype';
// Create object with fake `null` prototype: use iframe Object with cleared prototype
var createDict = function(){
// Thrash, waste and sodomy: IE GC bug
var iframe = require('./_dom-create')('iframe')
, i = enumBugKeys.length
, lt = '<'
, gt = '>'
, iframeDocument;
iframe.style.display = 'none';
require('./_html').appendChild(iframe);
iframe.src = 'javascript:'; // eslint-disable-line no-script-url
// createDict = iframe.contentWindow.Object;
// html.removeChild(iframe);
iframeDocument = iframe.contentWindow.document;
iframeDocument.open();<|fim▁hole|> iframeDocument.write(lt + 'script' + gt + 'document.F=Object' + lt + '/script' + gt);
iframeDocument.close();
createDict = iframeDocument.F;
while(i--)delete createDict[PROTOTYPE][enumBugKeys[i]];
return createDict();
};
module.exports = Object.create || function create(O, Properties){
var result;
if(O !== null){
Empty[PROTOTYPE] = anObject(O);
result = new Empty;
Empty[PROTOTYPE] = null;
// add "__proto__" for Object.getPrototypeOf polyfill
result[IE_PROTO] = O;
} else result = createDict();
return Properties === undefined ? result : dPs(result, Properties);
};
},{"./_an-object":11,"./_dom-create":19,"./_enum-bug-keys":20,"./_html":26,"./_object-dps":37,"./_shared-key":45}],36:[function(require,module,exports){
var anObject = require('./_an-object')
, IE8_DOM_DEFINE = require('./_ie8-dom-define')
, toPrimitive = require('./_to-primitive')
, dP = Object.defineProperty;
exports.f = require('./_descriptors') ? Object.defineProperty : function defineProperty(O, P, Attributes){
anObject(O);
P = toPrimitive(P, true);
anObject(Attributes);
if(IE8_DOM_DEFINE)try {
return dP(O, P, Attributes);
} catch(e){ /* empty */ }
if('get' in Attributes || 'set' in Attributes)throw TypeError('Accessors not supported!');
if('value' in Attributes)O[P] = Attributes.value;
return O;
};
},{"./_an-object":11,"./_descriptors":18,"./_ie8-dom-define":27,"./_to-primitive":53}],37:[function(require,module,exports){
var dP = require('./_object-dp')
, anObject = require('./_an-object')
, getKeys = require('./_object-keys');
module.exports = require('./_descriptors') ? Object.defineProperties : function defineProperties(O, Properties){
anObject(O);
var keys = getKeys(Properties)
, length = keys.length
, i = 0
, P;
while(length > i)dP.f(O, P = keys[i++], Properties[P]);
return O;
};
},{"./_an-object":11,"./_descriptors":18,"./_object-dp":36,"./_object-keys":40}],38:[function(require,module,exports){
// 19.1.2.9 / 15.2.3.2 Object.getPrototypeOf(O)
var has = require('./_has')
, toObject = require('./_to-object')
, IE_PROTO = require('./_shared-key')('IE_PROTO')
, ObjectProto = Object.prototype;
module.exports = Object.getPrototypeOf || function(O){
O = toObject(O);
if(has(O, IE_PROTO))return O[IE_PROTO];
if(typeof O.constructor == 'function' && O instanceof O.constructor){
return O.constructor.prototype;
} return O instanceof Object ? ObjectProto : null;
};
},{"./_has":24,"./_shared-key":45,"./_to-object":52}],39:[function(require,module,exports){
var has = require('./_has')
, toIObject = require('./_to-iobject')
, arrayIndexOf = require('./_array-includes')(false)
, IE_PROTO = require('./_shared-key')('IE_PROTO');
module.exports = function(object, names){
var O = toIObject(object)
, i = 0
, result = []
, key;
for(key in O)if(key != IE_PROTO)has(O, key) && result.push(key);
// Don't enum bug & hidden keys
while(names.length > i)if(has(O, key = names[i++])){
~arrayIndexOf(result, key) || result.push(key);
}
return result;
};
},{"./_array-includes":12,"./_has":24,"./_shared-key":45,"./_to-iobject":50}],40:[function(require,module,exports){
// 19.1.2.14 / 15.2.3.14 Object.keys(O)
var $keys = require('./_object-keys-internal')
, enumBugKeys = require('./_enum-bug-keys');
module.exports = Object.keys || function keys(O){
return $keys(O, enumBugKeys);
};
},{"./_enum-bug-keys":20,"./_object-keys-internal":39}],41:[function(require,module,exports){
// most Object methods by ES6 should accept primitives
var $export = require('./_export')
, core = require('./_core')
, fails = require('./_fails');
module.exports = function(KEY, exec){
var fn = (core.Object || {})[KEY] || Object[KEY]
, exp = {};
exp[KEY] = exec(fn);
$export($export.S + $export.F * fails(function(){ fn(1); }), 'Object', exp);
};
},{"./_core":15,"./_export":21,"./_fails":22}],42:[function(require,module,exports){
module.exports = function(bitmap, value){
return {
enumerable : !(bitmap & 1),
configurable: !(bitmap & 2),
writable : !(bitmap & 4),
value : value
};
};
},{}],43:[function(require,module,exports){
module.exports = require('./_hide');
},{"./_hide":25}],44:[function(require,module,exports){
var def = require('./_object-dp').f
, has = require('./_has')
, TAG = require('./_wks')('toStringTag');
module.exports = function(it, tag, stat){
if(it && !has(it = stat ? it : it.prototype, TAG))def(it, TAG, {configurable: true, value: tag});
};
},{"./_has":24,"./_object-dp":36,"./_wks":55}],45:[function(require,module,exports){
var shared = require('./_shared')('keys')
, uid = require('./_uid');
module.exports = function(key){
return shared[key] || (shared[key] = uid(key));
};
},{"./_shared":46,"./_uid":54}],46:[function(require,module,exports){
var global = require('./_global')
, SHARED = '__core-js_shared__'
, store = global[SHARED] || (global[SHARED] = {});
module.exports = function(key){
return store[key] || (store[key] = {});
};
},{"./_global":23}],47:[function(require,module,exports){
var toInteger = require('./_to-integer')
, defined = require('./_defined');
// true -> String#at
// false -> String#codePointAt
module.exports = function(TO_STRING){
return function(that, pos){
var s = String(defined(that))
, i = toInteger(pos)
, l = s.length
, a, b;
if(i < 0 || i >= l)return TO_STRING ? '' : undefined;
a = s.charCodeAt(i);
return a < 0xd800 || a > 0xdbff || i + 1 === l || (b = s.charCodeAt(i + 1)) < 0xdc00 || b > 0xdfff
? TO_STRING ? s.charAt(i) : a
: TO_STRING ? s.slice(i, i + 2) : (a - 0xd800 << 10) + (b - 0xdc00) + 0x10000;
};
};
},{"./_defined":17,"./_to-integer":49}],48:[function(require,module,exports){
var toInteger = require('./_to-integer')
, max = Math.max
, min = Math.min;
module.exports = function(index, length){
index = toInteger(index);
return index < 0 ? max(index + length, 0) : min(index, length);
};
},{"./_to-integer":49}],49:[function(require,module,exports){
// 7.1.4 ToInteger
var ceil = Math.ceil
, floor = Math.floor;
module.exports = function(it){
return isNaN(it = +it) ? 0 : (it > 0 ? floor : ceil)(it);
};
},{}],50:[function(require,module,exports){
// to indexed object, toObject with fallback for non-array-like ES3 strings
var IObject = require('./_iobject')
, defined = require('./_defined');
module.exports = function(it){
return IObject(defined(it));
};
},{"./_defined":17,"./_iobject":28}],51:[function(require,module,exports){
// 7.1.15 ToLength
var toInteger = require('./_to-integer')
, min = Math.min;
module.exports = function(it){
return it > 0 ? min(toInteger(it), 0x1fffffffffffff) : 0; // pow(2, 53) - 1 == 9007199254740991
};
},{"./_to-integer":49}],52:[function(require,module,exports){
// 7.1.13 ToObject(argument)
var defined = require('./_defined');
module.exports = function(it){
return Object(defined(it));
};
},{"./_defined":17}],53:[function(require,module,exports){
// 7.1.1 ToPrimitive(input [, PreferredType])
var isObject = require('./_is-object');
// instead of the ES6 spec version, we didn't implement @@toPrimitive case
// and the second argument - flag - preferred type is a string
module.exports = function(it, S){
if(!isObject(it))return it;
var fn, val;
if(S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it)))return val;
if(typeof (fn = it.valueOf) == 'function' && !isObject(val = fn.call(it)))return val;
if(!S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it)))return val;
throw TypeError("Can't convert object to primitive value");
};
},{"./_is-object":29}],54:[function(require,module,exports){
var id = 0
, px = Math.random();
module.exports = function(key){
return 'Symbol('.concat(key === undefined ? '' : key, ')_', (++id + px).toString(36));
};
},{}],55:[function(require,module,exports){
var store = require('./_shared')('wks')
, uid = require('./_uid')
, Symbol = require('./_global').Symbol
, USE_SYMBOL = typeof Symbol == 'function';
var $exports = module.exports = function(name){
return store[name] || (store[name] =
USE_SYMBOL && Symbol[name] || (USE_SYMBOL ? Symbol : uid)('Symbol.' + name));
};
$exports.store = store;
},{"./_global":23,"./_shared":46,"./_uid":54}],56:[function(require,module,exports){
var classof = require('./_classof')
, ITERATOR = require('./_wks')('iterator')
, Iterators = require('./_iterators');
module.exports = require('./_core').getIteratorMethod = function(it){
if(it != undefined)return it[ITERATOR]
|| it['@@iterator']
|| Iterators[classof(it)];
};
},{"./_classof":13,"./_core":15,"./_iterators":33,"./_wks":55}],57:[function(require,module,exports){
var anObject = require('./_an-object')
, get = require('./core.get-iterator-method');
module.exports = require('./_core').getIterator = function(it){
var iterFn = get(it);
if(typeof iterFn != 'function')throw TypeError(it + ' is not iterable!');
return anObject(iterFn.call(it));
};
},{"./_an-object":11,"./_core":15,"./core.get-iterator-method":56}],58:[function(require,module,exports){
'use strict';
var addToUnscopables = require('./_add-to-unscopables')
, step = require('./_iter-step')
, Iterators = require('./_iterators')
, toIObject = require('./_to-iobject');
// 22.1.3.4 Array.prototype.entries()
// 22.1.3.13 Array.prototype.keys()
// 22.1.3.29 Array.prototype.values()
// 22.1.3.30 Array.prototype[@@iterator]()
module.exports = require('./_iter-define')(Array, 'Array', function(iterated, kind){
this._t = toIObject(iterated); // target
this._i = 0; // next index
this._k = kind; // kind
// 22.1.5.2.1 %ArrayIteratorPrototype%.next()
}, function(){
var O = this._t
, kind = this._k
, index = this._i++;
if(!O || index >= O.length){
this._t = undefined;
return step(1);
}
if(kind == 'keys' )return step(0, index);
if(kind == 'values')return step(0, O[index]);
return step(0, [index, O[index]]);
}, 'values');
// argumentsList[@@iterator] is %ArrayProto_values% (9.4.4.6, 9.4.4.7)
Iterators.Arguments = Iterators.Array;
addToUnscopables('keys');
addToUnscopables('values');
addToUnscopables('entries');
},{"./_add-to-unscopables":10,"./_iter-define":31,"./_iter-step":32,"./_iterators":33,"./_to-iobject":50}],59:[function(require,module,exports){
var $export = require('./_export');
// 19.1.2.4 / 15.2.3.6 Object.defineProperty(O, P, Attributes)
$export($export.S + $export.F * !require('./_descriptors'), 'Object', {defineProperty: require('./_object-dp').f});
},{"./_descriptors":18,"./_export":21,"./_object-dp":36}],60:[function(require,module,exports){
// 19.1.2.14 Object.keys(O)
var toObject = require('./_to-object')
, $keys = require('./_object-keys');
require('./_object-sap')('keys', function(){
return function keys(it){
return $keys(toObject(it));
};
});
},{"./_object-keys":40,"./_object-sap":41,"./_to-object":52}],61:[function(require,module,exports){
'use strict';
var $at = require('./_string-at')(true);
// 21.1.3.27 String.prototype[@@iterator]()
require('./_iter-define')(String, 'String', function(iterated){
this._t = String(iterated); // target
this._i = 0; // next index
// 21.1.5.2.1 %StringIteratorPrototype%.next()
}, function(){
var O = this._t
, index = this._i
, point;
if(index >= O.length)return {value: undefined, done: true};
point = $at(O, index);
this._i += point.length;
return {value: point, done: false};
});
},{"./_iter-define":31,"./_string-at":47}],62:[function(require,module,exports){
require('./es6.array.iterator');
var global = require('./_global')
, hide = require('./_hide')
, Iterators = require('./_iterators')
, TO_STRING_TAG = require('./_wks')('toStringTag');
for(var collections = ['NodeList', 'DOMTokenList', 'MediaList', 'StyleSheetList', 'CSSRuleList'], i = 0; i < 5; i++){
var NAME = collections[i]
, Collection = global[NAME]
, proto = Collection && Collection.prototype;
if(proto && !proto[TO_STRING_TAG])hide(proto, TO_STRING_TAG, NAME);
Iterators[NAME] = Iterators.Array;
}
},{"./_global":23,"./_hide":25,"./_iterators":33,"./_wks":55,"./es6.array.iterator":58}],63:[function(require,module,exports){
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
var _keys = require("babel-runtime/core-js/object/keys");
var _keys2 = _interopRequireDefault(_keys);
var _getIterator2 = require("babel-runtime/core-js/get-iterator");
var _getIterator3 = _interopRequireDefault(_getIterator2);
var _classCallCheck2 = require("babel-runtime/helpers/classCallCheck");
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _createClass2 = require("babel-runtime/helpers/createClass");
var _createClass3 = _interopRequireDefault(_createClass2);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* Emitter
*/
/**
 * Emitter
 * Minimal name -> subscriber registry used as the event bus between
 * Flyby and its Subscriber instances. Every stored value is expected to
 * expose run(method, context) — see the Subscriber class.
 */
var Emitter = function () {
  function Emitter() {
    (0, _classCallCheck3.default)(this, Emitter);
    // name -> subscriber map
    this._data = {};
  }
  (0, _createClass3.default)(Emitter, [{
    key: "on",
    // Register (or replace) the subscriber stored under `name`.
    // (The original pre-initialized this._data[name] = {} and then
    // immediately overwrote it; that dead branch has been removed.)
    value: function on(name, data) {
      if (!name) return;
      this._data[name] = data;
    }
  }, {
    key: "emit",
    // Invoke `method` on the subscriber registered under `name`.
    // Bug fix: unknown names are now ignored instead of throwing a
    // TypeError from dereferencing undefined.
    value: function emit(name, method, context) {
      if (!name || !method) return;
      var subscriber = this._data[name];
      if (subscriber) {
        subscriber.run(method, context);
      }
    }
  }, {
    key: "broadcast",
    // Invoke `method` on every registered subscriber.
    value: function broadcast(method, context) {
      var _this = this;
      if (!method) return;
      (0, _keys2.default)(this._data).forEach(function (name) {
        _this._data[name].run(method, context);
      });
    }
  }, {
    key: "keys",
    // Names of all registered subscribers.
    value: function keys() {
      return (0, _keys2.default)(this._data);
    }
  }, {
    key: "is",
    // True when a subscriber is registered under `name`.
    value: function is(name) {
      return !!this._data[name];
    }
  }, {
    key: "get",
    // The subscriber registered under `name` (undefined when absent).
    value: function get(name) {
      return this._data[name];
    }
  }]);
  return Emitter;
}();
exports.default = Emitter;
},{"babel-runtime/core-js/get-iterator":1,"babel-runtime/core-js/object/keys":3,"babel-runtime/helpers/classCallCheck":4,"babel-runtime/helpers/createClass":5}],64:[function(require,module,exports){
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _keys = require('babel-runtime/core-js/object/keys');
var _keys2 = _interopRequireDefault(_keys);
var _getIterator2 = require('babel-runtime/core-js/get-iterator');
var _getIterator3 = _interopRequireDefault(_getIterator2);
var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _createClass2 = require('babel-runtime/helpers/createClass');
var _createClass3 = _interopRequireDefault(_createClass2);
var _Emitter = require('../class/_Emitter');
var _Emitter2 = _interopRequireDefault(_Emitter);
var _Subscriber = require('../class/_Subscriber');
var _Subscriber2 = _interopRequireDefault(_Subscriber);
var _RequestAnim = require('../class/_RequestAnim');
var _RequestAnim2 = _interopRequireDefault(_RequestAnim);
var _calc = require('../module/_calc');
var _calc2 = _interopRequireDefault(_calc);
var _help = require('../module/_help');
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var Flyby = function () {
function Flyby(options) {
(0, _classCallCheck3.default)(this, Flyby);
this.enabled = false;
this.mode = '';
this.painting = false;
this.startX = 0;
this.startY = 0;
this.ghostX = 0;
this.ghostY = 0;
this.currentX = 0;
this.currentY = 0;
this.distanceX = 0;
this.distanceY = 0;
this.distanceAngle = 0;
this.invertX = 0;
this.invertY = 0;
this.originX = 0;
this.originY = 0;
this.between = 0;
this.betweenX = 0;
this.betweenY = 0;
this.betweenAngle = 0;
this.previousX = 0;
this.previousY = 0;
this.direction = '';
this.directionX = '';
this.directionY = '';
this.targetElement = null; // <event.target>
this.startTime = 0; // timestamp by OnStart
this.elapsed = 0; // <endTime> - <startTime>
this.ghostTime = 0; // previously <startTime>
this.onStart = this.onStart.bind(this);
this.onMove = this.onMove.bind(this);
this.onEnd = this.onEnd.bind(this);
this.update = this.update.bind(this);
// In this element, that enabled to mouse and touch events.
this.rangeElement = options.rangeElement || document;
this._addEventListeners();
// Browser painting update
// call requestAnimationFrame
this.paint = new _RequestAnim2.default();
this.paint.tick = this.update;
this.first = false;
this.pinchOnDesktop = false;
this.flickTime = 250;
// event emitter
this.emitter = new _Emitter2.default();
this._initSubscribe(options.subscribers);
// emit configuration
this.emitter.broadcast('onConfig', this);
this.subscriber = null;
}
(0, _createClass3.default)(Flyby, [{
key: 'onStart',
value: function onStart(event) {
// set the flags to initialize
this.enabled = true;
this.painting = true;
this.touched = event.touches ? true : false;
this.first = true;
this.pinchOnDesktop = false;
// set the position to initalize
this.startTime = Date.now();
this.startX = (0, _help.pageX)(event);
this.startY = (0, _help.pageY)(event);
this.distanceX = 0;
this.distanceY = 0;
this.distanceAngle = 0;
this.distance = 0;
this.currentX = this.startX;
this.currentY = this.startY;
this.invertX = this.startX;
this.invertY = this.startY;
this.previousX = this.startX;
this.previousY = this.startY;
this.between = 0;
this.betweenX = 0;
this.betweenY = 0;
this.betweenAngle = 0;
this.direction = '';
this.directionX = '';
this.directionY = '';
this.targetElement = event.target;
if (!this.touched) {
if (this.startTime - this.ghostTime < 750) {
this.pinchOnDesktop = true;
}
}
// find a target & subscriber
this._find(event.target);
// subscribe
this._emit('onStart');
// paint
this.paint.cancel();
this.paint.play();
this.ghostTime = this.startTime;
event.preventDefault();
}
}, {
key: 'onMove',
value: function onMove(event) {
if (!this.enabled) return;
if ((0, _help.hasTouches)(event, 2) || this.pinchOnDesktop) {
this.mode = 'pinch';
} else {
this.mode = 'swipe';
}
this.currentX = (0, _help.pageX)(event);
this.currentY = (0, _help.pageY)(event);
if (this.mode === 'pinch') {
this._invert(event);
this._between();
}
this._distance();
this.originX = this.currentX - parseInt(this.betweenX / 2, 10);
this.originY = this.currentY - parseInt(this.betweenY / 2, 10);
this._direction();
this.targetElement = event.target;
// check ignore list to subscribe
if (!this._inIgnore(this.mode)) {
if (this.first) {
// do of only once on `onMove` method.
this.direction = _calc2.default.which(this.distanceAngle);
}
if (this.mode !== 'swipe' || !this._inIgnore(this.direction)) {
if (this.first) {
this._emit('onOnce');
} else {
this._emit('on' + (0, _help.camel)(this.mode));
}
event.preventDefault();
}
}
// to next step
this.first = false;
this.previousX = this.currentX;
this.previousY = this.currentY;
}
}, {
key: 'onEnd',
value: function onEnd(event) {
this.enabled = false;
this.touched = false;
this.elapsed = Date.now() - this.startTime;
this.ghostX = this.startX;
this.ghostY = this.startY;
this.targetElement = event.target;
if (this._isFlick(this.elapsed)) {
this._emit('onFlick');
}
// element & subscriber
this._emit('onEnd');
}
/**
* update
* Call in requestAnimationFrame
* Related Paiting Dom is here.
*/
}, {
key: 'update',
value: function update() {
if (!this.painting) return;
this.paint.play();
// subscribe
this._emit('onUpdate');
}
/**
* invert
* The posision is another one that is B.
* @private
* @param {Object} event<EventObject>
*/
}, {
key: '_invert',
value: function _invert(event) {
if (this.touched && event.touches[1]) {
this.currentX = event.touches[0].pageX;
this.currentY = event.touches[0].pageY;
this.invertX = event.touches[1].pageX;
this.invertY = event.touches[1].pageY;
} else {
this.invertX = this.ghostX;
this.invertY = this.ghostY;
}
}
/**
* distance
* @private
*/
}, {
key: '_distance',
value: function _distance() {
this.distanceX = this.currentX - this.startX;
this.distanceY = this.currentY - this.startY;
this.distance = _calc2.default.diagonal(this.distanceX, this.distanceY);
this.distanceAngle = _calc2.default.angle(this.distanceY * -1, this.distanceX, true);
}
/**
* between
* Distance of between A and B
* @private
*/
}, {
key: '_between',
value: function _between() {
this.betweenX = this.currentX - this.invertX;
this.betweenY = this.currentY - this.invertY;
this.between = _calc2.default.diagonal(this.betweenX, this.betweenY);
this.betweenAngle = _calc2.default.angle(this.betweenY * -1, this.betweenX, true);
}
/**
* direction
* @private
*/
}, {
key: '_direction',
value: function _direction() {
if (this.currentX > this.previousX) {
this.directionX = 'to right';
} else if (this.currentX < this.previousX) {
this.directionX = 'to left';
}
if (this.currentY > this.previousY) {
this.directionY = 'to bottom';
} else if (this.currentY < this.previousY) {
this.directionY = 'to top';
}
}
/**
* emit
* @private
* @param {String} suffix
* 指定された文字列をキャメルケースに変換し, on と結合してメソッド名にする
* subscriber 内のメソッドを実行する
*/
}, {
key: '_emit',
value: function _emit(method) {
if (!method || !this.subscriber) return;
this.emitter.emit(this.subscriber.selector, method, this);
}
/**
* find
* @private
* @param {Object} node<HTMLElement>
* 今回実行するべき subscriber を探します
*/
}, {
key: '_find',
value: function _find(el) {
var found = false;
this.subscriber = null;
// nodeTree を上方向に探索します
while (el && !found) {
if (el === (0, _help.documentElement)()) {
return found;
}
var _iteratorNormalCompletion = true;
var _didIteratorError = false;
var _iteratorError = undefined;
try {
for (var _iterator = (0, _getIterator3.default)(this.emitter.keys()), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
var name = _step.value;
var prefix = name.slice(0, 1);
var selector = name.substr(1);
if ((0, _help.matchElement)(el, selector)) {
if (this.emitter.is(name)) {
this.subscriber = this.emitter.get(name);
this.subscriber.el = el;
found = true;
return found;
}
}
}
} catch (err) {
_didIteratorError = true;
_iteratorError = err;
} finally {
try {
if (!_iteratorNormalCompletion && _iterator.return) {
_iterator.return();
}
} finally {
if (_didIteratorError) {
throw _iteratorError;
}
}
}
el = el.parentNode;
}
return found;
}
/**
* initSubscribe
* @private
*/
}, {
key: '_initSubscribe',
value: function _initSubscribe(subscribers) {
if (subscribers) {
var _iteratorNormalCompletion2 = true;
var _didIteratorError2 = false;
var _iteratorError2 = undefined;
try {
for (var _iterator2 = (0, _getIterator3.default)((0, _keys2.default)(subscribers)), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
var name = _step2.value;
this.emitter.on(name, new _Subscriber2.default(name, subscribers[name]));
}
} catch (err) {
_didIteratorError2 = true;
_iteratorError2 = err;
} finally {
try {
if (!_iteratorNormalCompletion2 && _iterator2.return) {
_iterator2.return();
}
} finally {
if (_didIteratorError2) {
throw _iteratorError2;
}
}
}
}
}
/**
* The `str` includes in ignore list.
* @param {String} str
* @return {Boolean}
*/
}, {
key: '_inIgnore',
value: function _inIgnore(str) {
return this.subscriber.ignore.includes(str);
}
/**
* is Flick?
* @return {Boolean}
*/
}, {
key: '_isFlick',
value: function _isFlick(elapsed) {
return elapsed < this.flickTime && elapsed > 50;
}
/**
* addEventListeners
* @private
*/
}, {
key: '_addEventListeners',
value: function _addEventListeners() {
this.rangeElement.addEventListener('mousedown', this.onStart);
this.rangeElement.addEventListener('mousemove', this.onMove);
this.rangeElement.addEventListener('mouseup', this.onEnd);
this.rangeElement.addEventListener('touchstart', this.onStart);
this.rangeElement.addEventListener('touchmove', this.onMove);
this.rangeElement.addEventListener('touchend', this.onEnd);
}
}]);
return Flyby;
}();
exports.default = Flyby;
},{"../class/_Emitter":63,"../class/_RequestAnim":65,"../class/_Subscriber":66,"../module/_calc":68,"../module/_help":69,"babel-runtime/core-js/get-iterator":1,"babel-runtime/core-js/object/keys":3,"babel-runtime/helpers/classCallCheck":4,"babel-runtime/helpers/createClass":5}],65:[function(require,module,exports){
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
var _classCallCheck2 = require("babel-runtime/helpers/classCallCheck");
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _createClass2 = require("babel-runtime/helpers/createClass");
var _createClass3 = _interopRequireDefault(_createClass2);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var requestAnimationFrame = window.requestAnimationFrame || window.mozRequestAnimationFrame || window.setTimeout;
var cancelAnimationFrame = window.cancelAnimationFrame || window.mozCancelAnimationFrame || window.clearTimeout;
window.requestAnimationFrame = requestAnimationFrame;
window.cancelAnimationFrame = cancelAnimationFrame;
/**
 * RequestAnim
 * Thin wrapper around requestAnimationFrame that keeps the pending frame
 * id and a persistent per-frame callback installed via the `tick` setter.
 * (Internal name fixed from the misspelled `ReqestAnim`; the public
 * surface — the default export — is unchanged.)
 */
var RequestAnim = function () {
  function RequestAnim() {
    (0, _classCallCheck3.default)(this, RequestAnim);
    this._id = null;   // pending animation-frame id (null when none pending)
    this._tick = null; // per-frame callback assigned through the `tick` setter
  }
  (0, _createClass3.default)(RequestAnim, [{
    key: "play",
    // Schedule the next frame. `callback`, when given, runs before the
    // persistent tick callback.
    value: function play(callback) {
      var _this = this;
      this.id = requestAnimationFrame(function () {
        if (callback) callback();
        // Bug fix: the original called _this._tick() unconditionally and
        // threw when play() ran before a tick callback was assigned.
        if (_this._tick) _this._tick();
      });
    }
  }, {
    key: "cancel",
    // Cancel the pending frame, if any, and forget its id so a stale id
    // is never cancelled twice.
    value: function cancel() {
      if (this._id != null) {
        cancelAnimationFrame(this._id);
        this._id = null;
      }
    }
  }, {
    key: "id",
    set: function set(id) {
      this._id = id;
    },
    get: function get() {
      return this._id;
    }
  }, {
    key: "tick",
    set: function set(callback) {
      this._tick = callback;
    }
  }]);
  return RequestAnim;
}();
exports.default = RequestAnim;
},{"babel-runtime/helpers/classCallCheck":4,"babel-runtime/helpers/createClass":5}],66:[function(require,module,exports){
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _keys = require('babel-runtime/core-js/object/keys');
var _keys2 = _interopRequireDefault(_keys);
var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _createClass2 = require('babel-runtime/helpers/createClass');
var _createClass3 = _interopRequireDefault(_createClass2);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* Target
* @member {String} _name
* @member {HTMLElement} _el
* @member {Object} _methods
* @member {Array} _ignore
*/
/**
 * Subscriber
 * Wraps one registered handler table ("body") for a selector such as
 * '#id' or '.class'.
 * @member {String} _name         selector with the leading '#'/'.' stripped
 * @member {String} _selector     selector exactly as registered
 * @member {HTMLElement} _element element matched when the gesture started
 * @member {Object} _methods      onXxx handlers (unspecified ones are no-ops)
 * @member {Array} _ignore        modes/axes to ignore ('swipe', 'pinch', 'x', 'y')
 * @member {Object} _scope        per-subscriber scratch object passed to handlers
 */
var Subscriber = function () {
  function Subscriber(name, body) {
    (0, _classCallCheck3.default)(this, Subscriber);
    this._name = '';
    this._selector = '';
    this._element = null;
    this._methods = this._defaultMethods();
    this._ignore = [];
    this._scope = {};
    this.name = name.replace(/^[.#]/, '');
    this.selector = name;
    this.ignore = body.ignore;
    this.methods = body;
  }
  (0, _createClass3.default)(Subscriber, [{
    key: 'run',
    // Invoke the handler called `methodName` with the Flyby instance
    // bound as `this`, passing (scope, element).
    // Bug fix: the element argument was previously read from the
    // misspelled `this._elementement` (always undefined) instead of
    // `this._element`.
    value: function run(methodName, context) {
      if (this._methods[methodName]) {
        this._methods[methodName].bind(context)(this._scope, this._element);
      }
    }
  }, {
    key: '_defaultMethods',
    // Build the default handler table: onConfig/onStart/... all no-ops,
    // so run() can call any known method without existence checks.
    value: function _defaultMethods() {
      var noop = function noop() {};
      var methods = ['Config', 'Start', 'Once', 'Swipe', 'Pinch', 'Flick', 'End', 'Update'];
      return function (res) {
        methods.forEach(function (name) {
          var key = ['on', name].join('');
          res[key] = noop;
        });
        return res;
      }({});
    }
  }, {
    key: 'name',
    set: function set(str) {
      this._name = str;
    },
    get: function get() {
      return this._name;
    }
  }, {
    key: 'selector',
    set: function set(str) {
      this._selector = str;
    },
    get: function get() {
      return this._selector;
    }
  }, {
    key: 'methods',
    // Copy only function-valued entries from the registered body over
    // the defaults.
    set: function set(obj) {
      var _this = this;
      (0, _keys2.default)(obj).forEach(function (key) {
        if (typeof obj[key] === 'function') {
          _this._methods[key] = obj[key];
        }
      });
    },
    get: function get() {
      return this._methods;
    }
  }, {
    key: 'ignore',
    set: function set(array) {
      this._ignore = array || [];
    },
    get: function get() {
      return this._ignore;
    }
  }, {
    key: 'el',
    set: function set(element) {
      this._element = element;
    },
    get: function get() {
      return this._element;
    }
  }]);
  return Subscriber;
}();
exports.default = Subscriber;
},{"babel-runtime/core-js/object/keys":3,"babel-runtime/helpers/classCallCheck":4,"babel-runtime/helpers/createClass":5}],67:[function(require,module,exports){
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _Flyby = require('./class/_Flyby');
var _Flyby2 = _interopRequireDefault(_Flyby);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.default = _Flyby2.default; /**
 * NOTE: Position declarations
 * Start    - start point <x, y>
 * End      - end point <x, y> **maybe unnecessary?**
 * Current  - current position <x, y>
 * Distance - distance from Start to Current <x, y> [swipe]
 * Invert   - second start point (second finger) <x, y> [pinch]
 * Between  - distance between Start and Invert <d> [pinch]
 * Origin   - midpoint of Start and Invert at gesture start <x, y> [pinch]
 * Offset   - coordinates of the target node <x, y> **handled by the caller**
 */
/**
 * NOTE: API methods
 * onConfig(scope)
 * onStart(scope, element)
 * onEnd(scope, element)
 * onFirstStep(scope, element)  // NOTE(review): code emits 'onOnce' and the
 *                              // Subscriber defaults include 'Once' — this
 *                              // name looks stale; confirm against Flyby.onMove
 * onPinch(scope, element)
 * onSwipe(scope, element)
 * onFlick(scope, element)
 * onUpdate(scope, element)
 * ignore [x, y, swipe, pinch]
 */
},{"./class/_Flyby":64}],68:[function(require,module,exports){
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _help = require('./_help');
// Public surface of the calc module: angle/axis helpers shared by the
// gesture code, frozen into one object at load time.
exports.default = function () {
  return { degree: degree, radian: radian, angle: angle, diagonal: diagonal, which: which };
}();
/**
 * diagonal
 * Integer length of the hypotenuse: floor(sqrt(x^2 + y^2)).
 * Bug fix: the original truncated with a radix-less parseInt, which
 * stringifies the Number first and mis-parses exponential notation
 * (e.g. parseInt(1e21) === 1). Math.floor truncates the non-negative
 * length directly. (Math.sqrt/Math.pow are the very functions the
 * _help aliases point at — see _help.js.)
 * @param {Number} x
 * @param {Number} y
 * @return {Number} d
 */
function diagonal(x, y) {
  return Math.floor(Math.sqrt(Math.pow(x, 2) + Math.pow(y, 2)));
}
/**
 * Wrapped `Math.atan2`.
 * @private
 * @param {Number} y
 * @param {Number} x
 * @param {Boolean} [toDegree=false] when true, return degrees instead of radians
 * @return {Number}
 */
function angle(y, x) {
  var toDegree = arguments.length <= 2 || arguments[2] === undefined ? false : arguments[2];
  var rad = Math.atan2(y, x);
  return toDegree ? rad * 180 / Math.PI : rad;
}
/**
 * which
 * Detect the axis of a swipe from its angle (degrees, -180..180).
 * Angles within 10 degrees of 0, or within 5 degrees of +/-180,
 * are horizontal ('x'); everything else is vertical ('y').
 * @return {String} 'x' | 'y'
 */
function which(angle) {
  var a = Math.abs(angle);
  return a < 10 || (a > 175 && a < 185) ? 'x' : 'y';
}
/**
 * degree
 * Convert radians to degrees.
 * @private
 * @param {Number} rad angle in radians
 * @return {Number} angle in degrees
 */
function degree(rad) {
  return rad * 180 / Math.PI;
}
/**
 * radian
 * Convert degrees to radians.
 * @private
 * @param {Number} deg angle in degrees
 * @return {Number} angle in radians
 */
function radian(deg) {
  return deg * Math.PI / 180;
}
},{"./_help":69}],69:[function(require,module,exports){
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.camel = camel;
exports.hasClass = hasClass;
exports.hasId = hasId;
exports.matchElement = matchElement;
exports.documentElement = documentElement;
exports.pageX = pageX;
exports.pageY = pageY;
exports.touches = touches;
exports.hasTouches = hasTouches;
exports.noop = noop;
/**
 * help.js
 * Grab-bag of small DOM/event helpers plus Math aliases shared by the
 * gesture modules (the calc module reads PI/sqrt/atan2/abs/pow from here).
 */
// Local aliases for the Math builtins, re-exported below.
var PI = Math.PI;
var sqrt = Math.sqrt;
var atan2 = Math.atan2;
var abs = Math.abs;
var pow = Math.pow;
exports.PI = PI;
exports.sqrt = sqrt;
exports.atan2 = atan2;
exports.abs = abs;
exports.pow = pow;
// 'swipe' -> 'Swipe'; used to build handler names such as onSwipe.
function camel(str) {
  return str.charAt(0).toUpperCase() + str.slice(1);
}
// True when `el` carries the given class name.
function hasClass(el, selector) {
  return el.classList.contains(selector);
}
// True when `el`'s id attribute equals `selector`.
function hasId(el, selector) {
  return el.getAttribute('id') === selector;
}
// True when `el` matches `selector` either as a class name or as an id
// (selectors are stored with the leading '.'/'#' stripped — see Subscriber).
function matchElement(el, selector) {
  return hasClass(el, selector) || hasId(el, selector);
}
// The <html> element; used as the stop condition when walking up the tree
// in Flyby._find.
function documentElement() {
  return document.documentElement;
}
/**
 * pageX
 * X coordinate of a mouse event, or of the first touch point.
 * @private
 * @param {Object} event<EventObject>
 * @return {Number}
 */
function pageX(event) {
  return event.pageX != null ? event.pageX : event.touches[0].pageX;
}
/**
 * pageY
 * Y coordinate of a mouse event, or of the first touch point.
 * @private
 * @param {Object} event<EventObject>
 * @return {Number}
 */
function pageY(event) {
  return event.pageY != null ? event.pageY : event.touches[0].pageY;
}
/**
 * touches
 * Extract the touch list from a (possibly jQuery-wrapped) event object.
 * Wrapped events keep the native event on `originalEvent`.
 * @param {Object} event<EventObject>
 * @return {Object|undefined} the touch list (not a Boolean)
 */
function touches(event) {
  if (event.originalEvent) {
    return event.originalEvent.touches;
  }
  return event.touches;
}
/**
 * hasTouches
 * True when the event carries exactly `length` touch points.
 * @private
 * @param {Object} event<EventObject>
 * @param {Number} length
 * @return {Boolean}
 */
function hasTouches(event, length) {
  var list = touches(event);
  return list != null && list.length === length;
}
/**
 * No operation — shared default/placeholder callback.
 */
function noop() {}
},{}]},{},[67]);<|fim▁end|>
| |
<|file_name|>f32.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Operations and constants for 32-bits floats (`f32` type)
#![stable(feature = "rust1", since = "1.0.0")]
#![allow(missing_docs)]
#![allow(unsigned_negation)]
#![doc(primitive = "f32")]
use prelude::v1::*;
use core::num;
use intrinsics;
use libc::c_int;
use num::{FpCategory, ParseFloatError};
pub use core::f32::{RADIX, MANTISSA_DIGITS, DIGITS, EPSILON};
pub use core::f32::{MIN_EXP, MAX_EXP, MIN_10_EXP};
pub use core::f32::{MAX_10_EXP, NAN, INFINITY, NEG_INFINITY};
pub use core::f32::{MIN, MIN_POSITIVE, MAX};
pub use core::f32::consts;
#[allow(dead_code)]
mod cmath {
    //! Raw FFI bindings to the platform C math library (`libm`); the `f32`
    //! methods above delegate here for operations without LLVM intrinsics.
    use libc::{c_float, c_int};
    extern {
        pub fn acosf(n: c_float) -> c_float;
        pub fn asinf(n: c_float) -> c_float;
        pub fn atanf(n: c_float) -> c_float;
        pub fn atan2f(a: c_float, b: c_float) -> c_float;
        pub fn cbrtf(n: c_float) -> c_float;
        pub fn coshf(n: c_float) -> c_float;
        pub fn erff(n: c_float) -> c_float;
        pub fn erfcf(n: c_float) -> c_float;
        pub fn expm1f(n: c_float) -> c_float;
        pub fn fdimf(a: c_float, b: c_float) -> c_float;
        pub fn fmaxf(a: c_float, b: c_float) -> c_float;
        pub fn fminf(a: c_float, b: c_float) -> c_float;
        pub fn fmodf(a: c_float, b: c_float) -> c_float;
        pub fn nextafterf(x: c_float, y: c_float) -> c_float;
        pub fn logbf(n: c_float) -> c_float;
        pub fn log1pf(n: c_float) -> c_float;
        pub fn ilogbf(n: c_float) -> c_int;
        pub fn modff(n: c_float, iptr: &mut c_float) -> c_float;
        pub fn sinhf(n: c_float) -> c_float;
        pub fn tanf(n: c_float) -> c_float;
        pub fn tanhf(n: c_float) -> c_float;
        pub fn tgammaf(n: c_float) -> c_float;
        // MSVC's CRT exports these under different symbol names.
        #[cfg_attr(all(windows, target_env = "msvc"), link_name = "__lgammaf_r")]
        pub fn lgammaf_r(n: c_float, sign: &mut c_int) -> c_float;
        #[cfg_attr(all(windows, target_env = "msvc"), link_name = "_hypotf")]
        pub fn hypotf(x: c_float, y: c_float) -> c_float;
        // Not provided by the MSVC CRT at all; emulated below via the f64
        // routines when targeting msvc.
        #[cfg(any(unix, all(windows, not(target_env = "msvc"))))]
        pub fn frexpf(n: c_float, value: &mut c_int) -> c_float;
        #[cfg(any(unix, all(windows, not(target_env = "msvc"))))]
        pub fn ldexpf(x: c_float, n: c_int) -> c_float;
    }
    // Shim: compute x * 2^n through the double-precision routine.
    #[cfg(all(windows, target_env = "msvc"))]
    pub unsafe fn ldexpf(x: c_float, n: c_int) -> c_float {
        f64::ldexp(x as f64, n as isize) as c_float
    }
    // Shim: decompose x into (mantissa, exponent) via the f64 routine.
    #[cfg(all(windows, target_env = "msvc"))]
    pub unsafe fn frexpf(x: c_float, value: &mut c_int) -> c_float {
        let (a, b) = f64::frexp(x as f64);
        *value = b as c_int;
        a as c_float
    }
}
#[cfg(not(test))]
#[lang = "f32"]
#[stable(feature = "rust1", since = "1.0.0")]
impl f32 {
    /// Parses a float from a string `s` interpreted in the given `radix`.
    ///
    /// # Errors
    ///
    /// Returns a `ParseFloatError` when `s` is not a valid floating-point
    /// representation in that radix.
    ///
    /// NOTE(review): delegates to `num::Float::from_str_radix`; the exact
    /// set of supported radices is defined there — confirm before relying
    /// on non-decimal input.
    #[unstable(feature = "float_from_str_radix", reason = "recently moved API")]
    pub fn from_str_radix(s: &str, radix: u32) -> Result<f32, ParseFloatError> {
        num::Float::from_str_radix(s, radix)
    }
/// Returns `true` if this value is `NaN` and false otherwise.
///
/// ```
/// use std::f32;
///
/// let nan = f32::NAN;
/// let f = 7.0_f32;
///
/// assert!(nan.is_nan());
/// assert!(!f.is_nan());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_nan(self) -> bool { num::Float::is_nan(self) }
/// Returns `true` if this value is positive infinity or negative infinity and
/// false otherwise.
///
/// ```
/// use std::f32;
///
/// let f = 7.0f32;
/// let inf = f32::INFINITY;
/// let neg_inf = f32::NEG_INFINITY;
/// let nan = f32::NAN;
///
/// assert!(!f.is_infinite());
/// assert!(!nan.is_infinite());
///
/// assert!(inf.is_infinite());
/// assert!(neg_inf.is_infinite());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_infinite(self) -> bool { num::Float::is_infinite(self) }
/// Returns `true` if this number is neither infinite nor `NaN`.
///
/// ```
/// use std::f32;
///
/// let f = 7.0f32;
/// let inf = f32::INFINITY;
/// let neg_inf = f32::NEG_INFINITY;
/// let nan = f32::NAN;
///
/// assert!(f.is_finite());
///
/// assert!(!nan.is_finite());
/// assert!(!inf.is_finite());
/// assert!(!neg_inf.is_finite());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_finite(self) -> bool { num::Float::is_finite(self) }
/// Returns `true` if the number is neither zero, infinite,
/// [subnormal][subnormal], or `NaN`.
///
/// ```
/// use std::f32;
///
/// let min = f32::MIN_POSITIVE; // 1.17549435e-38f32
/// let max = f32::MAX;
/// let lower_than_min = 1.0e-40_f32;
/// let zero = 0.0_f32;
///
/// assert!(min.is_normal());
/// assert!(max.is_normal());
///
/// assert!(!zero.is_normal());
/// assert!(!f32::NAN.is_normal());
/// assert!(!f32::INFINITY.is_normal());
/// // Values between `0` and `min` are Subnormal.
/// assert!(!lower_than_min.is_normal());
/// ```
/// [subnormal]: http://en.wikipedia.org/wiki/Denormal_number
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_normal(self) -> bool { num::Float::is_normal(self) }
/// Returns the floating point category of the number. If only one property
/// is going to be tested, it is generally faster to use the specific
/// predicate instead.
///
/// ```
/// use std::num::FpCategory;
/// use std::f32;
///
/// let num = 12.4_f32;
/// let inf = f32::INFINITY;
///
/// assert_eq!(num.classify(), FpCategory::Normal);
/// assert_eq!(inf.classify(), FpCategory::Infinite);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn classify(self) -> FpCategory { num::Float::classify(self) }
/// Returns the mantissa, base 2 exponent, and sign as integers, respectively.
/// The original number can be recovered by `sign * mantissa * 2 ^ exponent`.
/// The floating point encoding is documented in the [Reference][floating-point].
///
/// ```
/// # #![feature(std_misc)]
/// use std::f32;
///
/// let num = 2.0f32;
///
/// // (8388608, -22, 1)
/// let (mantissa, exponent, sign) = num.integer_decode();
/// let sign_f = sign as f32;
/// let mantissa_f = mantissa as f32;
/// let exponent_f = num.powf(exponent as f32);
///
/// // 1 * 8388608 * 2^(-22) == 2
/// let abs_difference = (sign_f * mantissa_f * exponent_f - num).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
/// [floating-point]: ../../../../../reference.html#machine-types
#[unstable(feature = "std_misc", reason = "signature is undecided")]
#[inline]
pub fn integer_decode(self) -> (u64, i16, i8) { num::Float::integer_decode(self) }
/// Returns the largest integer less than or equal to a number.
///
/// ```
/// let f = 3.99_f32;
/// let g = 3.0_f32;
///
/// assert_eq!(f.floor(), 3.0);
/// assert_eq!(g.floor(), 3.0);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn floor(self) -> f32 { num::Float::floor(self) }
/// Returns the smallest integer greater than or equal to a number.
///
/// ```
/// let f = 3.01_f32;
/// let g = 4.0_f32;
///
/// assert_eq!(f.ceil(), 4.0);
/// assert_eq!(g.ceil(), 4.0);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn ceil(self) -> f32 { num::Float::ceil(self) }
/// Returns the nearest integer to a number. Round half-way cases away from
/// `0.0`.
///
/// ```
/// let f = 3.3_f32;
/// let g = -3.3_f32;
///
/// assert_eq!(f.round(), 3.0);
/// assert_eq!(g.round(), -3.0);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn round(self) -> f32 { num::Float::round(self) }
/// Returns the integer part of a number.
///
/// ```
/// let f = 3.3_f32;
/// let g = -3.7_f32;
///
/// assert_eq!(f.trunc(), 3.0);
/// assert_eq!(g.trunc(), -3.0);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn trunc(self) -> f32 { num::Float::trunc(self) }
/// Returns the fractional part of a number.
///
/// ```
/// use std::f32;
///
/// let x = 3.5_f32;
/// let y = -3.5_f32;
/// let abs_difference_x = (x.fract() - 0.5).abs();
/// let abs_difference_y = (y.fract() - (-0.5)).abs();
///
/// assert!(abs_difference_x <= f32::EPSILON);
/// assert!(abs_difference_y <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn fract(self) -> f32 { num::Float::fract(self) }
/// Computes the absolute value of `self`. Returns `NAN` if the
/// number is `NAN`.
///
/// ```
/// use std::f32;
///
/// let x = 3.5_f32;
/// let y = -3.5_f32;
///
/// let abs_difference_x = (x.abs() - x).abs();
/// let abs_difference_y = (y.abs() - (-y)).abs();
///
/// assert!(abs_difference_x <= f32::EPSILON);
/// assert!(abs_difference_y <= f32::EPSILON);
///
/// assert!(f32::NAN.abs().is_nan());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn abs(self) -> f32 { num::Float::abs(self) }
/// Returns a number that represents the sign of `self`.
///
/// - `1.0` if the number is positive, `+0.0` or `INFINITY`
/// - `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY`
/// - `NAN` if the number is `NAN`
///
/// ```
/// use std::f32;
///
/// let f = 3.5_f32;
///
/// assert_eq!(f.signum(), 1.0);
/// assert_eq!(f32::NEG_INFINITY.signum(), -1.0);
///
/// assert!(f32::NAN.signum().is_nan());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn signum(self) -> f32 { num::Float::signum(self) }
/// Returns `true` if `self`'s sign bit is positive, including
/// `+0.0` and `INFINITY`.
///
/// ```
/// use std::f32;
///
/// let nan = f32::NAN;
/// let f = 7.0_f32;
/// let g = -7.0_f32;
///
/// assert!(f.is_sign_positive());
/// assert!(!g.is_sign_positive());
/// // Requires both tests to determine if is `NaN`
/// assert!(!nan.is_sign_positive() && !nan.is_sign_negative());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_sign_positive(self) -> bool { num::Float::is_positive(self) }
/// Returns `true` if `self`'s sign is negative, including `-0.0`
/// and `NEG_INFINITY`.
///
/// ```
/// use std::f32;
///
/// let nan = f32::NAN;
/// let f = 7.0f32;
/// let g = -7.0f32;
///
/// assert!(!f.is_sign_negative());
/// assert!(g.is_sign_negative());
/// // Requires both tests to determine if is `NaN`.
/// assert!(!nan.is_sign_positive() && !nan.is_sign_negative());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_sign_negative(self) -> bool { num::Float::is_negative(self) }
/// Fused multiply-add. Computes `(self * a) + b` with only one rounding
/// error. This produces a more accurate result with better performance than
/// a separate multiplication operation followed by an add.
///
/// ```
/// use std::f32;
///
/// let m = 10.0_f32;
/// let x = 4.0_f32;
/// let b = 60.0_f32;
///
/// // 100.0
/// let abs_difference = (m.mul_add(x, b) - (m*x + b)).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn mul_add(self, a: f32, b: f32) -> f32 { num::Float::mul_add(self, a, b) }
/// Takes the reciprocal (inverse) of a number, `1/x`.
///
/// ```
/// use std::f32;
///
/// let x = 2.0_f32;
/// let abs_difference = (x.recip() - (1.0/x)).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn recip(self) -> f32 { num::Float::recip(self) }
/// Raises a number to an integer power.
///
/// Using this function is generally faster than using `powf`
///
/// ```
/// use std::f32;
///
/// let x = 2.0_f32;
/// let abs_difference = (x.powi(2) - x*x).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn powi(self, n: i32) -> f32 { num::Float::powi(self, n) }
/// Raises a number to a floating point power.
///
/// ```
/// use std::f32;
///
/// let x = 2.0_f32;
/// let abs_difference = (x.powf(2.0) - x*x).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn powf(self, n: f32) -> f32 { num::Float::powf(self, n) }
/// Takes the square root of a number.
///
/// Returns NaN if `self` is a negative number.
///
/// ```
/// use std::f32;
///
/// let positive = 4.0_f32;
/// let negative = -4.0_f32;
///
/// let abs_difference = (positive.sqrt() - 2.0).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// assert!(negative.sqrt().is_nan());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn sqrt(self) -> f32 { num::Float::sqrt(self) }
/// Returns `e^(self)`, (the exponential function).
///
/// ```
/// use std::f32;
///
/// let one = 1.0f32;
/// // e^1
/// let e = one.exp();
///
/// // ln(e) - 1 == 0
/// let abs_difference = (e.ln() - 1.0).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn exp(self) -> f32 { num::Float::exp(self) }
/// Returns `2^(self)`.
///
/// ```
/// use std::f32;
///
/// let f = 2.0f32;
///
/// // 2^2 - 4 == 0
/// let abs_difference = (f.exp2() - 4.0).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn exp2(self) -> f32 { num::Float::exp2(self) }
/// Returns the natural logarithm of the number.
///
/// ```
/// use std::f32;
///
/// let one = 1.0f32;
/// // e^1
/// let e = one.exp();
///
/// // ln(e) - 1 == 0
/// let abs_difference = (e.ln() - 1.0).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn ln(self) -> f32 { num::Float::ln(self) }
/// Returns the logarithm of the number with respect to an arbitrary base.
///
/// ```
/// use std::f32;
///
/// let ten = 10.0f32;
/// let two = 2.0f32;
///
/// // log10(10) - 1 == 0
/// let abs_difference_10 = (ten.log(10.0) - 1.0).abs();
///
/// // log2(2) - 1 == 0
/// let abs_difference_2 = (two.log(2.0) - 1.0).abs();
///
/// assert!(abs_difference_10 <= f32::EPSILON);
/// assert!(abs_difference_2 <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn log(self, base: f32) -> f32 { num::Float::log(self, base) }
/// Returns the base 2 logarithm of the number.
///
/// ```
/// use std::f32;
///
/// let two = 2.0f32;
///
/// // log2(2) - 1 == 0
/// let abs_difference = (two.log2() - 1.0).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn log2(self) -> f32 { num::Float::log2(self) }
/// Returns the base 10 logarithm of the number.
///
/// ```
/// use std::f32;
///
/// let ten = 10.0f32;
///
/// // log10(10) - 1 == 0
/// let abs_difference = (ten.log10() - 1.0).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn log10(self) -> f32 { num::Float::log10(self) }
/// Converts radians to degrees.
///
/// ```
/// # #![feature(std_misc)]
/// use std::f32::{self, consts};
///
/// let angle = consts::PI;
///
/// let abs_difference = (angle.to_degrees() - 180.0).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[unstable(feature = "std_misc", reason = "desirability is unclear")]
#[inline]
pub fn to_degrees(self) -> f32 { num::Float::to_degrees(self) }
/// Converts degrees to radians.
///
/// ```
/// # #![feature(std_misc)]
/// use std::f32::{self, consts};
///
/// let angle = 180.0f32;
///
/// let abs_difference = (angle.to_radians() - consts::PI).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[unstable(feature = "std_misc", reason = "desirability is unclear")]
#[inline]
pub fn to_radians(self) -> f32 { num::Float::to_radians(self) }
/// Constructs a floating point number of `x*2^exp`.
///
/// ```
/// # #![feature(std_misc)]
/// use std::f32;
/// // 3*2^2 - 12 == 0
/// let abs_difference = (f32::ldexp(3.0, 2) - 12.0).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[unstable(feature = "std_misc",
reason = "pending integer conventions")]
#[inline]
pub fn ldexp(x: f32, exp: isize) -> f32 {
// NOTE(review): `exp as c_int` truncates on targets where isize is wider
// than c_int. Any f32-meaningful exponent fits comfortably in c_int, but
// confirm a huge |exp| cannot wrap back into the representable range.
unsafe { cmath::ldexpf(x, exp as c_int) }
}
/// Breaks the number into a normalized fraction and a base-2 exponent,
/// satisfying:
///
/// * `self = x * 2^exp`
/// * `0.5 <= abs(x) < 1.0`
///
/// ```
/// # #![feature(std_misc)]
/// use std::f32;
///
/// let x = 4.0f32;
///
/// // (1/2)*2^3 -> 1 * 8/2 -> 4.0
/// let f = x.frexp();
/// let abs_difference_0 = (f.0 - 0.5).abs();
/// let abs_difference_1 = (f.1 as f32 - 3.0).abs();
///
/// assert!(abs_difference_0 <= f32::EPSILON);
/// assert!(abs_difference_1 <= f32::EPSILON);
/// ```
#[unstable(feature = "std_misc",
reason = "pending integer conventions")]
#[inline]
pub fn frexp(self) -> (f32, isize) {
unsafe {
// C's frexpf returns the fraction and writes the base-2 exponent
// through the out-pointer; repackage both into a tuple.
let mut exp = 0;
let x = cmath::frexpf(self, &mut exp);
(x, exp as isize)
}
}
/// Returns the next representable floating-point value in the direction of
/// `other`.
///
/// ```
/// # #![feature(std_misc)]
/// use std::f32;
///
/// let x = 1.0f32;
///
/// let abs_diff = (x.next_after(2.0) - 1.00000011920928955078125_f32).abs();
///
/// assert!(abs_diff <= f32::EPSILON);
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
#[inline]
pub fn next_after(self, other: f32) -> f32 {
unsafe { cmath::nextafterf(self, other) }
}
/// Returns the maximum of the two numbers.
///
/// ```
/// let x = 1.0f32;
/// let y = 2.0f32;
///
/// assert_eq!(x.max(y), y);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn max(self, other: f32) -> f32 {
unsafe { cmath::fmaxf(self, other) }
}
/// Returns the minimum of the two numbers.
///
/// ```
/// let x = 1.0f32;
/// let y = 2.0f32;
///
/// assert_eq!(x.min(y), x);
/// ```<|fim▁hole|> unsafe { cmath::fminf(self, other) }
}
/// The positive difference of two numbers.
///
/// * If `self <= other`: `0.0`
/// * Else: `self - other`
///
/// ```
/// use std::f32;
///
/// let x = 3.0f32;
/// let y = -3.0f32;
///
/// let abs_difference_x = (x.abs_sub(1.0) - 2.0).abs();
/// let abs_difference_y = (y.abs_sub(1.0) - 0.0).abs();
///
/// assert!(abs_difference_x <= f32::EPSILON);
/// assert!(abs_difference_y <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn abs_sub(self, other: f32) -> f32 {
// Delegates to the C library's fdimf ("positive difference").
unsafe { cmath::fdimf(self, other) }
}
/// Takes the cubic root of a number.
///
/// ```
/// use std::f32;
///
/// let x = 8.0f32;
///
/// // x^(1/3) - 2 == 0
/// let abs_difference = (x.cbrt() - 2.0).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn cbrt(self) -> f32 {
unsafe { cmath::cbrtf(self) }
}
/// Calculates the length of the hypotenuse of a right-angle triangle given
/// legs of length `x` and `y`.
///
/// ```
/// use std::f32;
///
/// let x = 2.0f32;
/// let y = 3.0f32;
///
/// // sqrt(x^2 + y^2)
/// let abs_difference = (x.hypot(y) - (x.powi(2) + y.powi(2)).sqrt()).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn hypot(self, other: f32) -> f32 {
unsafe { cmath::hypotf(self, other) }
}
/// Computes the sine of a number (in radians).
///
/// ```
/// use std::f32;
///
/// let x = f32::consts::PI/2.0;
///
/// let abs_difference = (x.sin() - 1.0).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn sin(self) -> f32 {
unsafe { intrinsics::sinf32(self) }
}
/// Computes the cosine of a number (in radians).
///
/// ```
/// use std::f32;
///
/// let x = 2.0*f32::consts::PI;
///
/// let abs_difference = (x.cos() - 1.0).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn cos(self) -> f32 {
unsafe { intrinsics::cosf32(self) }
}
/// Computes the tangent of a number (in radians).
///
/// ```
/// use std::f64;
///
/// let x = f64::consts::PI/4.0;
/// let abs_difference = (x.tan() - 1.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn tan(self) -> f32 {
unsafe { cmath::tanf(self) }
}
/// Computes the arcsine of a number. Return value is in radians in
/// the range [-pi/2, pi/2] or NaN if the number is outside the range
/// [-1, 1].
///
/// ```
/// use std::f32;
///
/// let f = f32::consts::PI / 2.0;
///
/// // asin(sin(pi/2))
/// let abs_difference = f.sin().asin().abs_sub(f32::consts::PI / 2.0);
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn asin(self) -> f32 {
unsafe { cmath::asinf(self) }
}
/// Computes the arccosine of a number. Return value is in radians in
/// the range [0, pi] or NaN if the number is outside the range
/// [-1, 1].
///
/// ```
/// use std::f32;
///
/// let f = f32::consts::PI / 4.0;
///
/// // acos(cos(pi/4))
/// let abs_difference = f.cos().acos().abs_sub(f32::consts::PI / 4.0);
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn acos(self) -> f32 {
unsafe { cmath::acosf(self) }
}
/// Computes the arctangent of a number. Return value is in radians in the
/// range [-pi/2, pi/2];
///
/// ```
/// use std::f32;
///
/// let f = 1.0f32;
///
/// // atan(tan(1))
/// let abs_difference = f.tan().atan().abs_sub(1.0);
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn atan(self) -> f32 {
unsafe { cmath::atanf(self) }
}
/// Computes the four quadrant arctangent of `self` (`y`) and `other` (`x`).
///
/// * `x = 0`, `y = 0`: `0`
/// * `x >= 0`: `arctan(y/x)` -> `[-pi/2, pi/2]`
/// * `y >= 0`, `x < 0`: `arctan(y/x) + pi` -> `(pi/2, pi]`
/// * `y < 0`, `x < 0`: `arctan(y/x) - pi` -> `(-pi, -pi/2)`
///
/// ```
/// use std::f32;
///
/// let pi = f32::consts::PI;
/// // All angles from horizontal right (+x)
/// // 45 deg clockwise (atan2 result is -pi/4, checked below)
/// let x1 = 3.0f32;
/// let y1 = -3.0f32;
///
/// // 135 deg counter-clockwise (atan2 result is 3*pi/4, checked below)
/// let x2 = -3.0f32;
/// let y2 = 3.0f32;
///
/// let abs_difference_1 = (y1.atan2(x1) - (-pi/4.0)).abs();
/// let abs_difference_2 = (y2.atan2(x2) - 3.0*pi/4.0).abs();
///
/// assert!(abs_difference_1 <= f32::EPSILON);
/// assert!(abs_difference_2 <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn atan2(self, other: f32) -> f32 {
unsafe { cmath::atan2f(self, other) }
}
/// Simultaneously computes the sine and cosine of the number, `x`. Returns
/// `(sin(x), cos(x))`.
///
/// ```
/// use std::f32;
///
/// let x = f32::consts::PI/4.0;
/// let f = x.sin_cos();
///
/// let abs_difference_0 = (f.0 - x.sin()).abs();
/// let abs_difference_1 = (f.1 - x.cos()).abs();
///
/// assert!(abs_difference_0 <= f32::EPSILON);
/// assert!(abs_difference_1 <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn sin_cos(self) -> (f32, f32) {
(self.sin(), self.cos())
}
/// Returns `e^(self) - 1` in a way that is accurate even if the
/// number is close to zero.
///
/// ```
/// let x = 7.0f64;
///
/// // e^(ln(7)) - 1
/// let abs_difference = x.ln().exp_m1().abs_sub(6.0);
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn exp_m1(self) -> f32 {
unsafe { cmath::expm1f(self) }
}
/// Returns `ln(1+n)` (natural logarithm) more accurately than if
/// the operations were performed separately.
///
/// ```
/// use std::f32;
///
/// let x = f32::consts::E - 1.0;
///
/// // ln(1 + (e - 1)) == ln(e) == 1
/// let abs_difference = (x.ln_1p() - 1.0).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn ln_1p(self) -> f32 {
unsafe { cmath::log1pf(self) }
}
/// Hyperbolic sine function.
///
/// ```
/// use std::f32;
///
/// let e = f32::consts::E;
/// let x = 1.0f32;
///
/// let f = x.sinh();
/// // Solving sinh() at 1 gives `(e^2-1)/(2e)`
/// let g = (e*e - 1.0)/(2.0*e);
/// let abs_difference = (f - g).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn sinh(self) -> f32 {
unsafe { cmath::sinhf(self) }
}
/// Hyperbolic cosine function.
///
/// ```
/// use std::f32;
///
/// let e = f32::consts::E;
/// let x = 1.0f32;
/// let f = x.cosh();
/// // Solving cosh() at 1 gives this result
/// let g = (e*e + 1.0)/(2.0*e);
/// let abs_difference = f.abs_sub(g);
///
/// // Same result
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn cosh(self) -> f32 {
unsafe { cmath::coshf(self) }
}
/// Hyperbolic tangent function.
///
/// ```
/// use std::f32;
///
/// let e = f32::consts::E;
/// let x = 1.0f32;
///
/// let f = x.tanh();
/// // Solving tanh() at 1 gives `(1 - e^(-2))/(1 + e^(-2))`
/// let g = (1.0 - e.powi(-2))/(1.0 + e.powi(-2));
/// let abs_difference = (f - g).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn tanh(self) -> f32 {
unsafe { cmath::tanhf(self) }
}
/// Inverse hyperbolic sine function.
///
/// ```
/// use std::f32;
///
/// let x = 1.0f32;
/// let f = x.sinh().asinh();
///
/// let abs_difference = (f - x).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn asinh(self) -> f32 {
match self {
NEG_INFINITY => NEG_INFINITY,
x => (x + ((x * x) + 1.0).sqrt()).ln(),
}
}
/// Inverse hyperbolic cosine function.
///
/// ```
/// use std::f32;
///
/// let x = 1.0f32;
/// let f = x.cosh().acosh();
///
/// let abs_difference = (f - x).abs();
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn acosh(self) -> f32 {
    // acosh is real-valued only for inputs >= 1. A NaN input fails the
    // comparison and falls through to the formula, which propagates NaN.
    if self < 1.0 {
        ::f32::NAN
    } else {
        // acosh(x) = ln(x + sqrt(x^2 - 1))
        (self + ((self * self) - 1.0).sqrt()).ln()
    }
}
/// Inverse hyperbolic tangent function.
///
/// ```
/// use std::f32;
///
/// let e = f32::consts::E;
/// let f = e.tanh().atanh();
///
/// let abs_difference = f.abs_sub(e);
///
/// assert!(abs_difference <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn atanh(self) -> f32 {
// atanh(x) = 0.5 * ln((1 + x) / (1 - x)), algebraically rewritten as
// 0.5 * ln_1p(2x / (1 - x)) so the result stays accurate for x near zero.
0.5 * ((2.0 * self) / (1.0 - self)).ln_1p()
}
}
#[cfg(test)]
mod tests {
use f32;
use f32::*;
use num::*;
use num::FpCategory as Fp;
#[test]
fn test_num_f32() {
test_num(10f32, 2f32);
}
#[test]
fn test_min_nan() {
assert_eq!(NAN.min(2.0), 2.0);
assert_eq!(2.0f32.min(NAN), 2.0);
}
#[test]
fn test_max_nan() {
assert_eq!(NAN.max(2.0), 2.0);
assert_eq!(2.0f32.max(NAN), 2.0);
}
#[test]
fn test_nan() {
let nan: f32 = f32::NAN;
assert!(nan.is_nan());
assert!(!nan.is_infinite());
assert!(!nan.is_finite());
assert!(!nan.is_normal());
assert!(!nan.is_sign_positive());
assert!(!nan.is_sign_negative());
assert_eq!(Fp::Nan, nan.classify());
}
#[test]
fn test_infinity() {
let inf: f32 = f32::INFINITY;
assert!(inf.is_infinite());
assert!(!inf.is_finite());
assert!(inf.is_sign_positive());
assert!(!inf.is_sign_negative());
assert!(!inf.is_nan());
assert!(!inf.is_normal());
assert_eq!(Fp::Infinite, inf.classify());
}
#[test]
fn test_neg_infinity() {
let neg_inf: f32 = f32::NEG_INFINITY;
assert!(neg_inf.is_infinite());
assert!(!neg_inf.is_finite());
assert!(!neg_inf.is_sign_positive());
assert!(neg_inf.is_sign_negative());
assert!(!neg_inf.is_nan());
assert!(!neg_inf.is_normal());
assert_eq!(Fp::Infinite, neg_inf.classify());
}
#[test]
fn test_zero() {
let zero: f32 = 0.0f32;
assert_eq!(0.0, zero);
assert!(!zero.is_infinite());
assert!(zero.is_finite());
assert!(zero.is_sign_positive());
assert!(!zero.is_sign_negative());
assert!(!zero.is_nan());
assert!(!zero.is_normal());
assert_eq!(Fp::Zero, zero.classify());
}
#[test]
fn test_neg_zero() {
let neg_zero: f32 = -0.0;
assert_eq!(0.0, neg_zero);
assert!(!neg_zero.is_infinite());
assert!(neg_zero.is_finite());
assert!(!neg_zero.is_sign_positive());
assert!(neg_zero.is_sign_negative());
assert!(!neg_zero.is_nan());
assert!(!neg_zero.is_normal());
assert_eq!(Fp::Zero, neg_zero.classify());
}
#[test]
fn test_one() {
let one: f32 = 1.0f32;
assert_eq!(1.0, one);
assert!(!one.is_infinite());
assert!(one.is_finite());
assert!(one.is_sign_positive());
assert!(!one.is_sign_negative());
assert!(!one.is_nan());
assert!(one.is_normal());
assert_eq!(Fp::Normal, one.classify());
}
#[test]
fn test_is_nan() {
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
assert!(nan.is_nan());
assert!(!0.0f32.is_nan());
assert!(!5.3f32.is_nan());
assert!(!(-10.732f32).is_nan());
assert!(!inf.is_nan());
assert!(!neg_inf.is_nan());
}
#[test]
fn test_is_infinite() {
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
assert!(!nan.is_infinite());
assert!(inf.is_infinite());
assert!(neg_inf.is_infinite());
assert!(!0.0f32.is_infinite());
assert!(!42.8f32.is_infinite());
assert!(!(-109.2f32).is_infinite());
}
#[test]
fn test_is_finite() {
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
assert!(!nan.is_finite());
assert!(!inf.is_finite());
assert!(!neg_inf.is_finite());
assert!(0.0f32.is_finite());
assert!(42.8f32.is_finite());
assert!((-109.2f32).is_finite());
}
#[test]
fn test_is_normal() {
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
let zero: f32 = 0.0f32;
let neg_zero: f32 = -0.0;
assert!(!nan.is_normal());
assert!(!inf.is_normal());
assert!(!neg_inf.is_normal());
assert!(!zero.is_normal());
assert!(!neg_zero.is_normal());
assert!(1f32.is_normal());
assert!(1e-37f32.is_normal());
assert!(!1e-38f32.is_normal());
}
#[test]
fn test_classify() {
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
let zero: f32 = 0.0f32;
let neg_zero: f32 = -0.0;
assert_eq!(nan.classify(), Fp::Nan);
assert_eq!(inf.classify(), Fp::Infinite);
assert_eq!(neg_inf.classify(), Fp::Infinite);
assert_eq!(zero.classify(), Fp::Zero);
assert_eq!(neg_zero.classify(), Fp::Zero);
assert_eq!(1f32.classify(), Fp::Normal);
assert_eq!(1e-37f32.classify(), Fp::Normal);
assert_eq!(1e-38f32.classify(), Fp::Subnormal);
}
#[test]
fn test_integer_decode() {
assert_eq!(3.14159265359f32.integer_decode(), (13176795, -22, 1));
assert_eq!((-8573.5918555f32).integer_decode(), (8779358, -10, -1));
assert_eq!(2f32.powf(100.0).integer_decode(), (8388608, 77, 1));
assert_eq!(0f32.integer_decode(), (0, -150, 1));
assert_eq!((-0f32).integer_decode(), (0, -150, -1));
assert_eq!(INFINITY.integer_decode(), (8388608, 105, 1));
assert_eq!(NEG_INFINITY.integer_decode(), (8388608, 105, -1));
assert_eq!(NAN.integer_decode(), (12582912, 105, 1));
}
#[test]
fn test_floor() {
assert_approx_eq!(1.0f32.floor(), 1.0f32);
assert_approx_eq!(1.3f32.floor(), 1.0f32);
assert_approx_eq!(1.5f32.floor(), 1.0f32);
assert_approx_eq!(1.7f32.floor(), 1.0f32);
assert_approx_eq!(0.0f32.floor(), 0.0f32);
assert_approx_eq!((-0.0f32).floor(), -0.0f32);
assert_approx_eq!((-1.0f32).floor(), -1.0f32);
assert_approx_eq!((-1.3f32).floor(), -2.0f32);
assert_approx_eq!((-1.5f32).floor(), -2.0f32);
assert_approx_eq!((-1.7f32).floor(), -2.0f32);
}
#[test]
fn test_ceil() {
assert_approx_eq!(1.0f32.ceil(), 1.0f32);
assert_approx_eq!(1.3f32.ceil(), 2.0f32);
assert_approx_eq!(1.5f32.ceil(), 2.0f32);
assert_approx_eq!(1.7f32.ceil(), 2.0f32);
assert_approx_eq!(0.0f32.ceil(), 0.0f32);
assert_approx_eq!((-0.0f32).ceil(), -0.0f32);
assert_approx_eq!((-1.0f32).ceil(), -1.0f32);
assert_approx_eq!((-1.3f32).ceil(), -1.0f32);
assert_approx_eq!((-1.5f32).ceil(), -1.0f32);
assert_approx_eq!((-1.7f32).ceil(), -1.0f32);
}
#[test]
fn test_round() {
assert_approx_eq!(1.0f32.round(), 1.0f32);
assert_approx_eq!(1.3f32.round(), 1.0f32);
assert_approx_eq!(1.5f32.round(), 2.0f32);
assert_approx_eq!(1.7f32.round(), 2.0f32);
assert_approx_eq!(0.0f32.round(), 0.0f32);
assert_approx_eq!((-0.0f32).round(), -0.0f32);
assert_approx_eq!((-1.0f32).round(), -1.0f32);
assert_approx_eq!((-1.3f32).round(), -1.0f32);
assert_approx_eq!((-1.5f32).round(), -2.0f32);
assert_approx_eq!((-1.7f32).round(), -2.0f32);
}
#[test]
fn test_trunc() {
assert_approx_eq!(1.0f32.trunc(), 1.0f32);
assert_approx_eq!(1.3f32.trunc(), 1.0f32);
assert_approx_eq!(1.5f32.trunc(), 1.0f32);
assert_approx_eq!(1.7f32.trunc(), 1.0f32);
assert_approx_eq!(0.0f32.trunc(), 0.0f32);
assert_approx_eq!((-0.0f32).trunc(), -0.0f32);
assert_approx_eq!((-1.0f32).trunc(), -1.0f32);
assert_approx_eq!((-1.3f32).trunc(), -1.0f32);
assert_approx_eq!((-1.5f32).trunc(), -1.0f32);
assert_approx_eq!((-1.7f32).trunc(), -1.0f32);
}
#[test]
fn test_fract() {
assert_approx_eq!(1.0f32.fract(), 0.0f32);
assert_approx_eq!(1.3f32.fract(), 0.3f32);
assert_approx_eq!(1.5f32.fract(), 0.5f32);
assert_approx_eq!(1.7f32.fract(), 0.7f32);
assert_approx_eq!(0.0f32.fract(), 0.0f32);
assert_approx_eq!((-0.0f32).fract(), -0.0f32);
assert_approx_eq!((-1.0f32).fract(), -0.0f32);
assert_approx_eq!((-1.3f32).fract(), -0.3f32);
assert_approx_eq!((-1.5f32).fract(), -0.5f32);
assert_approx_eq!((-1.7f32).fract(), -0.7f32);
}
#[test]
fn test_abs() {
assert_eq!(INFINITY.abs(), INFINITY);
assert_eq!(1f32.abs(), 1f32);
assert_eq!(0f32.abs(), 0f32);
assert_eq!((-0f32).abs(), 0f32);
assert_eq!((-1f32).abs(), 1f32);
assert_eq!(NEG_INFINITY.abs(), INFINITY);
assert_eq!((1f32/NEG_INFINITY).abs(), 0f32);
assert!(NAN.abs().is_nan());
}
#[test]
fn test_signum() {
assert_eq!(INFINITY.signum(), 1f32);
assert_eq!(1f32.signum(), 1f32);
assert_eq!(0f32.signum(), 1f32);
assert_eq!((-0f32).signum(), -1f32);
assert_eq!((-1f32).signum(), -1f32);
assert_eq!(NEG_INFINITY.signum(), -1f32);
assert_eq!((1f32/NEG_INFINITY).signum(), -1f32);
assert!(NAN.signum().is_nan());
}
#[test]
fn test_is_sign_positive() {
assert!(INFINITY.is_sign_positive());
assert!(1f32.is_sign_positive());
assert!(0f32.is_sign_positive());
assert!(!(-0f32).is_sign_positive());
assert!(!(-1f32).is_sign_positive());
assert!(!NEG_INFINITY.is_sign_positive());
assert!(!(1f32/NEG_INFINITY).is_sign_positive());
assert!(!NAN.is_sign_positive());
}
#[test]
fn test_is_sign_negative() {
assert!(!INFINITY.is_sign_negative());
assert!(!1f32.is_sign_negative());
assert!(!0f32.is_sign_negative());
assert!((-0f32).is_sign_negative());
assert!((-1f32).is_sign_negative());
assert!(NEG_INFINITY.is_sign_negative());
assert!((1f32/NEG_INFINITY).is_sign_negative());
assert!(!NAN.is_sign_negative());
}
#[test]
fn test_mul_add() {
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
assert_approx_eq!(12.3f32.mul_add(4.5, 6.7), 62.05);
assert_approx_eq!((-12.3f32).mul_add(-4.5, -6.7), 48.65);
assert_approx_eq!(0.0f32.mul_add(8.9, 1.2), 1.2);
assert_approx_eq!(3.4f32.mul_add(-0.0, 5.6), 5.6);
assert!(nan.mul_add(7.8, 9.0).is_nan());
assert_eq!(inf.mul_add(7.8, 9.0), inf);
assert_eq!(neg_inf.mul_add(7.8, 9.0), neg_inf);
assert_eq!(8.9f32.mul_add(inf, 3.2), inf);
assert_eq!((-3.2f32).mul_add(2.4, neg_inf), neg_inf);
}
#[test]
fn test_recip() {
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
assert_eq!(1.0f32.recip(), 1.0);
assert_eq!(2.0f32.recip(), 0.5);
assert_eq!((-0.4f32).recip(), -2.5);
assert_eq!(0.0f32.recip(), inf);
assert!(nan.recip().is_nan());
assert_eq!(inf.recip(), 0.0);
assert_eq!(neg_inf.recip(), 0.0);
}
#[test]
fn test_powi() {
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
assert_eq!(1.0f32.powi(1), 1.0);
assert_approx_eq!((-3.1f32).powi(2), 9.61);
assert_approx_eq!(5.9f32.powi(-2), 0.028727);
assert_eq!(8.3f32.powi(0), 1.0);
assert!(nan.powi(2).is_nan());
assert_eq!(inf.powi(3), inf);
assert_eq!(neg_inf.powi(2), inf);
}
#[test]
fn test_powf() {
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
assert_eq!(1.0f32.powf(1.0), 1.0);
assert_approx_eq!(3.4f32.powf(4.5), 246.408218);
assert_approx_eq!(2.7f32.powf(-3.2), 0.041652);
assert_approx_eq!((-3.1f32).powf(2.0), 9.61);
assert_approx_eq!(5.9f32.powf(-2.0), 0.028727);
assert_eq!(8.3f32.powf(0.0), 1.0);
assert!(nan.powf(2.0).is_nan());
assert_eq!(inf.powf(2.0), inf);
assert_eq!(neg_inf.powf(3.0), neg_inf);
}
#[test]
fn test_sqrt_domain() {
assert!(NAN.sqrt().is_nan());
assert!(NEG_INFINITY.sqrt().is_nan());
assert!((-1.0f32).sqrt().is_nan());
assert_eq!((-0.0f32).sqrt(), -0.0);
assert_eq!(0.0f32.sqrt(), 0.0);
assert_eq!(1.0f32.sqrt(), 1.0);
assert_eq!(INFINITY.sqrt(), INFINITY);
}
#[test]
fn test_exp() {
assert_eq!(1.0, 0.0f32.exp());
assert_approx_eq!(2.718282, 1.0f32.exp());
assert_approx_eq!(148.413162, 5.0f32.exp());
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
let nan: f32 = f32::NAN;
assert_eq!(inf, inf.exp());
assert_eq!(0.0, neg_inf.exp());
assert!(nan.exp().is_nan());
}
#[test]
fn test_exp2() {
assert_eq!(32.0, 5.0f32.exp2());
assert_eq!(1.0, 0.0f32.exp2());
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
let nan: f32 = f32::NAN;
assert_eq!(inf, inf.exp2());
assert_eq!(0.0, neg_inf.exp2());
assert!(nan.exp2().is_nan());
}
#[test]
fn test_ln() {
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
assert_approx_eq!(1.0f32.exp().ln(), 1.0);
assert!(nan.ln().is_nan());
assert_eq!(inf.ln(), inf);
assert!(neg_inf.ln().is_nan());
assert!((-2.3f32).ln().is_nan());
assert_eq!((-0.0f32).ln(), neg_inf);
assert_eq!(0.0f32.ln(), neg_inf);
assert_approx_eq!(4.0f32.ln(), 1.386294);
}
#[test]
fn test_log() {
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
assert_eq!(10.0f32.log(10.0), 1.0);
assert_approx_eq!(2.3f32.log(3.5), 0.664858);
assert_eq!(1.0f32.exp().log(1.0f32.exp()), 1.0);
assert!(1.0f32.log(1.0).is_nan());
assert!(1.0f32.log(-13.9).is_nan());
assert!(nan.log(2.3).is_nan());
assert_eq!(inf.log(10.0), inf);
assert!(neg_inf.log(8.8).is_nan());
assert!((-2.3f32).log(0.1).is_nan());
assert_eq!((-0.0f32).log(2.0), neg_inf);
assert_eq!(0.0f32.log(7.0), neg_inf);
}
#[test]
fn test_log2() {
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
assert_approx_eq!(10.0f32.log2(), 3.321928);
assert_approx_eq!(2.3f32.log2(), 1.201634);
assert_approx_eq!(1.0f32.exp().log2(), 1.442695);
assert!(nan.log2().is_nan());
assert_eq!(inf.log2(), inf);
assert!(neg_inf.log2().is_nan());
assert!((-2.3f32).log2().is_nan());
assert_eq!((-0.0f32).log2(), neg_inf);
assert_eq!(0.0f32.log2(), neg_inf);
}
#[test]
fn test_log10() {
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
assert_eq!(10.0f32.log10(), 1.0);
assert_approx_eq!(2.3f32.log10(), 0.361728);
assert_approx_eq!(1.0f32.exp().log10(), 0.434294);
assert_eq!(1.0f32.log10(), 0.0);
assert!(nan.log10().is_nan());
assert_eq!(inf.log10(), inf);
assert!(neg_inf.log10().is_nan());
assert!((-2.3f32).log10().is_nan());
assert_eq!((-0.0f32).log10(), neg_inf);
assert_eq!(0.0f32.log10(), neg_inf);
}
#[test]
fn test_to_degrees() {
let pi: f32 = consts::PI;
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
assert_eq!(0.0f32.to_degrees(), 0.0);
assert_approx_eq!((-5.8f32).to_degrees(), -332.315521);
assert_eq!(pi.to_degrees(), 180.0);
assert!(nan.to_degrees().is_nan());
assert_eq!(inf.to_degrees(), inf);
assert_eq!(neg_inf.to_degrees(), neg_inf);
}
#[test]
fn test_to_radians() {
let pi: f32 = consts::PI;
let nan: f32 = f32::NAN;
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
assert_eq!(0.0f32.to_radians(), 0.0);
assert_approx_eq!(154.6f32.to_radians(), 2.698279);
assert_approx_eq!((-332.31f32).to_radians(), -5.799903);
assert_eq!(180.0f32.to_radians(), pi);
assert!(nan.to_radians().is_nan());
assert_eq!(inf.to_radians(), inf);
assert_eq!(neg_inf.to_radians(), neg_inf);
}
#[test]
fn test_ldexp() {
// We have to use from_str until base-2 exponents
// are supported in floating-point literals
let f1: f32 = f32::from_str_radix("1p-123", 16).unwrap();
let f2: f32 = f32::from_str_radix("1p-111", 16).unwrap();
let f3: f32 = f32::from_str_radix("1.Cp-12", 16).unwrap();
assert_eq!(f32::ldexp(1f32, -123), f1);
assert_eq!(f32::ldexp(1f32, -111), f2);
assert_eq!(f32::ldexp(1.75f32, -12), f3);
assert_eq!(f32::ldexp(0f32, -123), 0f32);
assert_eq!(f32::ldexp(-0f32, -123), -0f32);
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
let nan: f32 = f32::NAN;
assert_eq!(f32::ldexp(inf, -123), inf);
assert_eq!(f32::ldexp(neg_inf, -123), neg_inf);
assert!(f32::ldexp(nan, -123).is_nan());
}
#[test]
fn test_frexp() {
// We have to use from_str until base-2 exponents
// are supported in floating-point literals
let f1: f32 = f32::from_str_radix("1p-123", 16).unwrap();
let f2: f32 = f32::from_str_radix("1p-111", 16).unwrap();
let f3: f32 = f32::from_str_radix("1.Cp-123", 16).unwrap();
let (x1, exp1) = f1.frexp();
let (x2, exp2) = f2.frexp();
let (x3, exp3) = f3.frexp();
assert_eq!((x1, exp1), (0.5f32, -122));
assert_eq!((x2, exp2), (0.5f32, -110));
assert_eq!((x3, exp3), (0.875f32, -122));
assert_eq!(f32::ldexp(x1, exp1), f1);
assert_eq!(f32::ldexp(x2, exp2), f2);
assert_eq!(f32::ldexp(x3, exp3), f3);
assert_eq!(0f32.frexp(), (0f32, 0));
assert_eq!((-0f32).frexp(), (-0f32, 0));
}
#[test] #[cfg_attr(windows, ignore)] // FIXME #8755
fn test_frexp_nowin() {
    let inf: f32 = f32::INFINITY;
    let neg_inf: f32 = f32::NEG_INFINITY;
    let nan: f32 = f32::NAN;
    // Only the fraction component matters for non-finite inputs, so the
    // exponent half of each tuple is deliberately ignored.
    assert_eq!(inf.frexp().0, inf);
    assert_eq!(neg_inf.frexp().0, neg_inf);
    assert!(nan.frexp().0.is_nan())
}
#[test]
fn test_abs_sub() {
assert_eq!((-1f32).abs_sub(1f32), 0f32);
assert_eq!(1f32.abs_sub(1f32), 0f32);
assert_eq!(1f32.abs_sub(0f32), 1f32);
assert_eq!(1f32.abs_sub(-1f32), 2f32);
assert_eq!(NEG_INFINITY.abs_sub(0f32), 0f32);
assert_eq!(INFINITY.abs_sub(1f32), INFINITY);
assert_eq!(0f32.abs_sub(NEG_INFINITY), INFINITY);
assert_eq!(0f32.abs_sub(INFINITY), 0f32);
}
// The `_nowin` suffix marks tests that must be skipped on Windows (see
// `test_frexp_nowin` above); the ignore attribute was missing here, so the
// test still ran on Windows despite its name. Restore the sibling's pattern.
#[test] #[cfg_attr(windows, ignore)] // FIXME #8755
fn test_abs_sub_nowin() {
    // NaN must propagate through abs_sub from either operand.
    assert!(NAN.abs_sub(-1f32).is_nan());
    assert!(1f32.abs_sub(NAN).is_nan());
}
#[test]
fn test_asinh() {
assert_eq!(0.0f32.asinh(), 0.0f32);
assert_eq!((-0.0f32).asinh(), -0.0f32);
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
let nan: f32 = f32::NAN;
assert_eq!(inf.asinh(), inf);
assert_eq!(neg_inf.asinh(), neg_inf);
assert!(nan.asinh().is_nan());
assert_approx_eq!(2.0f32.asinh(), 1.443635475178810342493276740273105f32);
assert_approx_eq!((-2.0f32).asinh(), -1.443635475178810342493276740273105f32);
}
#[test]
fn test_acosh() {
assert_eq!(1.0f32.acosh(), 0.0f32);
assert!(0.999f32.acosh().is_nan());
let inf: f32 = f32::INFINITY;
let neg_inf: f32 = f32::NEG_INFINITY;
let nan: f32 = f32::NAN;
assert_eq!(inf.acosh(), inf);
assert!(neg_inf.acosh().is_nan());
assert!(nan.acosh().is_nan());
assert_approx_eq!(2.0f32.acosh(), 1.31695789692481670862504634730796844f32);
assert_approx_eq!(3.0f32.acosh(), 1.76274717403908605046521864995958461f32);
}
#[test]
fn test_atanh() {
    assert_eq!(0.0f32.atanh(), 0.0f32);
    assert_eq!((-0.0f32).atanh(), -0.0f32);

    let inf: f32 = f32::INFINITY;
    let neg_inf: f32 = f32::NEG_INFINITY;
    let nan: f32 = f32::NAN;
    // atanh is only defined on (-1, 1); the endpoints diverge to +/- infinity.
    assert_eq!(1.0f32.atanh(), inf);
    assert_eq!((-1.0f32).atanh(), neg_inf);
    // Outside the domain the result is NaN. This is the f32 suite, so test
    // f32 values (previously these used `2f64` and misnamed `inf64` bindings,
    // exercising the f64 implementation instead).
    assert!(2f32.atanh().atanh().is_nan());
    assert!((-2f32).atanh().atanh().is_nan());
    assert!(inf.atanh().is_nan());
    assert!(neg_inf.atanh().is_nan());
    assert!(nan.atanh().is_nan());
    assert_approx_eq!(0.5f32.atanh(), 0.54930614433405484569762261846126285f32);
    assert_approx_eq!((-0.5f32).atanh(), -0.54930614433405484569762261846126285f32);
}
#[test]
fn test_real_consts() {
    // Cross-check every named f32 constant against the expression it is
    // supposed to equal, computed at runtime from PI / E / sqrt / ln.
    use super::consts;
    let pi: f32 = consts::PI;
    // PI_2 denotes 2*pi here (verified by the first assertion below).
    let two_pi: f32 = consts::PI_2;
    let frac_pi_2: f32 = consts::FRAC_PI_2;
    let frac_pi_3: f32 = consts::FRAC_PI_3;
    let frac_pi_4: f32 = consts::FRAC_PI_4;
    let frac_pi_6: f32 = consts::FRAC_PI_6;
    let frac_pi_8: f32 = consts::FRAC_PI_8;
    let frac_1_pi: f32 = consts::FRAC_1_PI;
    let frac_2_pi: f32 = consts::FRAC_2_PI;
    let frac_2_sqrtpi: f32 = consts::FRAC_2_SQRT_PI;
    let sqrt2: f32 = consts::SQRT_2;
    let frac_1_sqrt2: f32 = consts::FRAC_1_SQRT_2;
    let e: f32 = consts::E;
    let log2_e: f32 = consts::LOG2_E;
    let log10_e: f32 = consts::LOG10_E;
    let ln_2: f32 = consts::LN_2;
    let ln_10: f32 = consts::LN_10;
    assert_approx_eq!(two_pi, 2f32 * pi);
    assert_approx_eq!(frac_pi_2, pi / 2f32);
    assert_approx_eq!(frac_pi_3, pi / 3f32);
    assert_approx_eq!(frac_pi_4, pi / 4f32);
    assert_approx_eq!(frac_pi_6, pi / 6f32);
    assert_approx_eq!(frac_pi_8, pi / 8f32);
    assert_approx_eq!(frac_1_pi, 1f32 / pi);
    assert_approx_eq!(frac_2_pi, 2f32 / pi);
    assert_approx_eq!(frac_2_sqrtpi, 2f32 / pi.sqrt());
    assert_approx_eq!(sqrt2, 2f32.sqrt());
    assert_approx_eq!(frac_1_sqrt2, 1f32 / 2f32.sqrt());
    assert_approx_eq!(log2_e, e.log2());
    assert_approx_eq!(log10_e, e.log10());
    assert_approx_eq!(ln_2, 2f32.ln());
    assert_approx_eq!(ln_10, 10f32.ln());
}
}<|fim▁end|>
|
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn min(self, other: f32) -> f32 {
|
<|file_name|>mainapp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from flask import Flask, jsonify, request, abort, make_response
from futu_server_api import *
from db import save_update_token
from db import delete_tokens
from db import list_cards
import logging
import logging.config
import json
# Flask application and module-wide logger.  Log handlers/levels come from
# ./conf/log.ini, which must exist relative to the working directory at
# startup (fileConfig raises otherwise).
app = Flask(__name__)
logging.config.fileConfig('./conf/log.ini')
no_db_logger = logging.getLogger()
def check_parameters(pjson):
    """Validate the request payload and build a futu API client from it.

    Aborts the request with HTTP 400 when the payload is missing or lacks
    any of the mandatory keys (app_account, card, appid).
    """
    required = ('app_account', 'card', 'appid')
    if not pjson or any(key not in pjson for key in required):
        no_db_logger.info('No Parameter')
        abort(400)
    return client(pjson['app_account'], pjson['card'], pjson['appid'])
def log_handler(myjson, mytitle):
    """Render a one-line log message for an API response dict.

    A ClientWarning takes precedence; otherwise result_code 0 maps to
    'SUCCESS' and anything else to a FAIL line carrying the error message
    and the request payload.  (mytitle is currently unused by callers'
    formatting -- kept for interface compatibility.)
    """
    if 'ClientWarning' in myjson:
        return '%s' % myjson['ClientWarning']
    if myjson['result_code'] == 0:
        return 'SUCCESS'
    return 'FAIL ,REASON OF FAILURE:%s ,PARAMETER:%s' % (myjson['error_msg'], request.json)
@app.route('/')
def hello_world():
    # Simple liveness endpoint; logs a startup marker on each hit.
    no_db_logger.info('server start#####')
    return 'hello 22222222 world!'
@app.route('/api/v1/tradetoken', methods=['POST'])
def trade_token():
    # Exchange the trade password for a trade token and persist it.
    trade_pswd = request.json['trade_pswd']
    account = request.json['app_account']
    card = request.json['card']
    appid = request.json['appid']
    cc = check_parameters(request.json)
    message = cc.get_trade_token(trade_pswd)
    # Distinct code 2: the upstream access token is missing, so the caller
    # must (re-)authorize before a trade token can be issued.
    if message['result_code'] != 0 and message['error_msg'] == 'didn\'t get accesstoken':
        no_db_logger.info('didn\'t get accesstoken')
        return json.dumps({'result_code':2,'error_msg':'didn\'t get accesstoken'}, ensure_ascii=False)
    if message['result_code'] == 0:
        token = message['data']['trade_token']
        # market is passed as None and the final flag as True here -- presumably
        # "update existing row's trade token"; TODO confirm against
        # save_update_token's signature in db.py.
        save_update_token(account, appid, None, token, card, True)
    return jsonify(**message)
@app.route('/api/v1/account', methods=['POST'])
def get_account_detail():
    """Return the account detail for the client described by the payload."""
    api_client = check_parameters(request.json)
    response = api_client.get_account_detail()
    no_db_logger.info(log_handler(response, '获取账户信息'))
    return json.dumps(response, ensure_ascii=False)
@app.route('/api/v1/account/cash', methods=['POST'])
def get_account_cash():
    """Return the cash position of the account described by the payload.

    Mirrors the other account endpoints: validate the payload, call the
    futu API, log the outcome, and return the raw response as JSON.
    """
    # This binding was missing (a stray data-corruption token sat in its
    # place), so `cc` was referenced before assignment below.
    cc = check_parameters(request.json)
    message = cc.get_account_cash()
    logtext = log_handler(message, '获取账户现金')
    no_db_logger.info(logtext)
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/account/portfolio', methods=['POST'])
def get_account_portfolio():
    """Return the holdings/portfolio of the requesting account."""
    api_client = check_parameters(request.json)
    response = api_client.get_account_portfolio()
    no_db_logger.info(log_handler(response, '获取账户持仓'))
    return json.dumps(response, ensure_ascii=False)
@app.route('/api/v1/get_list_orders', methods=['POST'])
def get_list_orders():
    # NOTE(review): date_begin/date_end are read from the payload but never
    # passed to cc.get_list_orders() -- confirm whether the upstream API is
    # supposed to be filtered by this date range or whether these two
    # fields can be dropped from the contract.
    date_begin = request.json['date_begin']
    date_end = request.json['date_end']
    cc = check_parameters(request.json)
    message = cc.get_list_orders()
    logtext = log_handler(message, '获取订单列表')
    no_db_logger.info(logtext)
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/get_list_trades', methods=['POST'])
def get_list_trades():
    """Return the trade (execution) list for the requesting account."""
    api_client = check_parameters(request.json)
    response = api_client.get_list_trades()
    no_db_logger.info(log_handler(response, '获取交易列表'))
    return json.dumps(response, ensure_ascii=False)
@app.route('/api/v1/place_order', methods=['POST'])
def place_order():
    """Submit a new order built from the JSON payload.

    Field access happens before client validation, matching the original
    behavior (a missing order field surfaces as a KeyError, not a 400).
    """
    payload = request.json
    code = payload['code']
    quantity = payload['quantity']
    price = payload['price']
    side = payload['side']
    order_type = payload['type']
    api_client = check_parameters(payload)
    response = api_client.place_order(code, quantity, price, side, order_type)
    no_db_logger.info(log_handler(response, '下单'))
    return json.dumps(response, ensure_ascii=False)
@app.route('/api/v1/change_order', methods=['POST'])
def change_order():
    """Modify quantity and/or price of an existing order."""
    payload = request.json
    order_id = payload['order_id']
    quantity = payload['quantity']
    price = payload['price']
    api_client = check_parameters(payload)
    response = api_client.change_order(order_id, quantity, price)
    no_db_logger.info(log_handler(response, '改单'))
    return json.dumps(response, ensure_ascii=False)
@app.route('/api/v1/cancle_order', methods=['POST'])
def cancle_order():
    # NOTE(review): "cancle" is a typo for "cancel", but the route (and the
    # endpoint name derived from this function) is a published interface --
    # renaming it would break existing clients.
    order_id = request.json['order_id']
    cc = check_parameters(request.json)
    message = cc.cancel_order(order_id)
    logtext = log_handler(message, '撤单')
    no_db_logger.info(logtext)
    return json.dumps(message, ensure_ascii=False)
# NOTE(review): the route says '/ap1/v1/...' -- almost certainly a typo for
# '/api/v1/...', but existing clients may already depend on it; fix only in
# coordination with consumers.
@app.route('/ap1/v1/save_token', methods=['POST'])
def save_token():
    # Persist a (market) token for an account/appid/card triple.
    account = request.json['app_account']
    appid = request.json['appid']
    market = request.json['market']
    token = request.json['token']
    card = request.json['card']
    card_desc = request.json['text']
    # False flag: insert a new row rather than update -- TODO confirm against
    # save_update_token's signature in db.py.
    DB_result = save_update_token(account, appid, market, token, card, False, card_desc)
    if DB_result == 'success':
        no_db_logger.info('token save success')
        return json.dumps({'result_code':0,'error_msg':''}, ensure_ascii=False)
    else:
        no_db_logger.info('token save fail')
        return json.dumps({'result_code':1,'error_msg':'token保存失败'}, ensure_ascii=False)
@app.route('/api/v1/delete_token', methods=['POST'])
def delete_token():
    """Delete all stored tokens for the given account/appid pair."""
    payload = request.json
    appid = payload['appid']
    account = payload['app_account']
    if delete_tokens(account, appid) == 'success':
        no_db_logger.info('token delete success')
        return json.dumps({'result_code':0,'error_msg':''}, ensure_ascii=False)
    no_db_logger.info('token delete fail')
    return json.dumps({'result_code':1,'error_msg':'token删除失败'}, ensure_ascii=False)
@app.route('/api/v1/list_card', methods=['POST'])
def list_card():
    # List the cards registered for an account/appid pair.
    appid = request.json['appid']
    account = request.json['app_account']
    cards = list_cards(account, appid)
    message = dict(cards=cards)
    # list_cards apparently signals failure by returning a non-list value --
    # TODO confirm its error contract in db.py.
    if isinstance(cards, list):
        no_db_logger.info('list cards success')
        return json.dumps({'result_code':0,'error_msg':'','data':message}, ensure_ascii=False)
    else:
        no_db_logger.info('list cards fail')
        return json.dumps({'result_code':1,'error_msg':'查询账户卡号失败'}, ensure_ascii=False)
if __name__ == '__main__':
    # Development entry point; run behind a proper WSGI server in production.
    app.run()
|
cc = check_parameters(request.json)
|
<|file_name|>teldrassil.cpp<|end_file_name|><|fim▁begin|>/**
* ScriptDev2 is an extension for mangos providing enhanced features for
* area triggers, creatures, game objects, instances, items, and spells beyond
* the default database scripting in mangos.
*
* Copyright (C) 2006-2013 ScriptDev2 <http://www.scriptdev2.com/>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* World of Warcraft, and all World of Warcraft or Warcraft art, images,
* and lore are copyrighted by Blizzard Entertainment, Inc.
*/
/**
* ScriptData
* SDName: Teldrassil
* SD%Complete: 100
* SDComment: Quest support: 938
* SDCategory: Teldrassil
* EndScriptData
*/
/**
* ContentData
* npc_mist
* EndContentData
*/
#include "precompiled.h"
#include "follower_ai.h"

/*####
# npc_mist
####*/
enum
{
    SAY_AT_HOME = -1000323,        // broadcast text spoken by Arynia when Mist arrives
    EMOTE_AT_HOME = -1000324,      // emote performed by Mist on arrival
    QUEST_MIST = 938,              // the escort quest "Mist"
    NPC_ARYNIA = 3519,             // destination NPC that completes the escort
    FACTION_DARNASSUS = 79         // faction Mist adopts while following the player
};
// Escort AI for Mist: follows the quest taker until she comes within 10 yd
// of Arynia, at which point the quest is credited and the follow ends.
struct npc_mistAI : public FollowerAI
{
    npc_mistAI(Creature* pCreature) : FollowerAI(pCreature) { Reset(); }

    void Reset() override { }

    void MoveInLineOfSight(Unit* pWho) override
    {
        // Let the base follower logic run first (presumably handles
        // aggro/ooc-los bookkeeping -- see FollowerAI).
        FollowerAI::MoveInLineOfSight(pWho);

        // Only complete while still following, out of combat, and when the
        // sighted unit is Arynia herself.
        if (!m_creature->getVictim() && !HasFollowState(STATE_FOLLOW_COMPLETE) && pWho->GetEntry() == NPC_ARYNIA)
        {
            if (m_creature->IsWithinDistInMap(pWho, 10.0f))
            {
                DoScriptText(SAY_AT_HOME, pWho);
                DoComplete();
            }
        }
    }

    // Credit the quest for the following player and end the escort.
    void DoComplete()
    {
        DoScriptText(EMOTE_AT_HOME, m_creature);
        if (Player* pPlayer = GetLeaderForFollower())
        {
            if (pPlayer->GetQuestStatus(QUEST_MIST) == QUEST_STATUS_INCOMPLETE)
            {
                pPlayer->GroupEventHappens(QUEST_MIST, m_creature);
            }
        }

        // The follow is over (and for later development, run off to the woods before really end)
        SetFollowComplete();
    }

    // call not needed here, no known abilities
    /*void UpdateFollowerAI(const uint32 uiDiff) override
    {
        if (!m_creature->SelectHostileTarget() || !m_creature->getVictim())
            return;

        DoMeleeAttackIfReady();
    }*/
};
// Factory hooked into the script registry; ownership of the AI object is
// taken by the creature core.
CreatureAI* GetAI_npc_mist(Creature* pCreature)
{
    return new npc_mistAI(pCreature);
}
// Quest-accept hook: when the escort quest "Mist" is taken, start the
// follow behaviour under the Darnassus faction.  Always reports handled.
bool QuestAccept_npc_mist(Player* pPlayer, Creature* pCreature, const Quest* pQuest)
{
    if (pQuest->GetQuestId() != QUEST_MIST)
    {
        return true;
    }

    npc_mistAI* pMistAI = dynamic_cast<npc_mistAI*>(pCreature->AI());
    if (pMistAI)
    {
        pMistAI->StartFollow(pPlayer, FACTION_DARNASSUS, pQuest);
    }
    return true;
}
// Registers every script in this file with the script registry; called once
// at ScriptDev2 load time.  Script objects are owned by the registry after
// RegisterSelf().
void AddSC_teldrassil()
{
    Script* pNewScript;

    pNewScript = new Script;
    pNewScript->Name = "npc_mist";
    pNewScript->GetAI = &GetAI_npc_mist;
    pNewScript->pQuestAcceptNPC = &QuestAccept_npc_mist;
    pNewScript->RegisterSelf();
}
|
#include "follower_ai.h"
/*####
# npc_mist
|
<|file_name|>SpliteratorTraversingAndSplittingTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2013, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/**
* @test
* @summary Spliterator traversing and splitting tests
* @library ../stream/bootlib
* @build java.base/java.util.SpliteratorOfIntDataBuilder
* java.base/java.util.SpliteratorTestHelper
* @run testng SpliteratorTraversingAndSplittingTest
* @bug 8020016 8071477 8072784 8169838
*/
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.nio.CharBuffer;
import java.util.AbstractCollection;
import java.util.AbstractList;
import java.util.AbstractSet;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.RandomAccess;
import java.util.Set;
import java.util.SortedSet;
import java.util.Spliterator;
import java.util.SpliteratorOfIntDataBuilder;
import java.util.SpliteratorTestHelper;
import java.util.Spliterators;
import java.util.Stack;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Vector;
import java.util.WeakHashMap;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.LinkedTransferQueue;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.function.Consumer;
import java.util.function.DoubleConsumer;
import java.util.function.Function;
import java.util.function.IntConsumer;
import java.util.function.LongConsumer;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;
public class SpliteratorTraversingAndSplittingTest extends SpliteratorTestHelper {
    // Element counts exercised by every collection-backed data set.
    private static final List<Integer> SIZES = Arrays.asList(0, 1, 10, 42);

    // Surrogate building blocks: a lone low surrogate, a lone high surrogate,
    // a well-formed pair, and pairs flanked by BMP chars.  These make the
    // chars() and codePoints() traversals of the same string differ.
    private static final String LOW = new String(new char[] {Character.MIN_LOW_SURROGATE});
    private static final String HIGH = new String(new char[] {Character.MIN_HIGH_SURROGATE});
    private static final String HIGH_LOW = HIGH + LOW;
    private static final String CHAR_HIGH_LOW = "A" + HIGH_LOW;
    private static final String HIGH_LOW_CHAR = HIGH_LOW + "A";
    private static final String CHAR_HIGH_LOW_CHAR = "A" + HIGH_LOW + "A";
    private static final List<String> STRINGS = generateTestStrings();
private static List<String> generateTestStrings() {
List<String> strings = new ArrayList<>();
for (int n : Arrays.asList(1, 2, 3, 16, 17)) {
strings.add(generate("A", n));
strings.add(generate(LOW, n));
strings.add(generate(HIGH, n));
strings.add(generate(HIGH_LOW, n));
strings.add(generate(CHAR_HIGH_LOW, n));
strings.add(generate(HIGH_LOW_CHAR, n));
strings.add(generate(CHAR_HIGH_LOW_CHAR, n));
}
return strings;
}
private static String generate(String s, int n) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < n; i++) {
sb.append(s);
}
return sb.toString();
}
    /**
     * Accumulates {@code (description, expected, spliterator-supplier)} rows
     * for the {@code Spliterator<Integer>} data provider from one expected
     * element list (and a derived identity map for the Map-based sources).
     */
    private static class SpliteratorDataBuilder<T> {
        List<Object[]> data;

        List<T> exp;

        Map<T, T> mExp;

        SpliteratorDataBuilder(List<Object[]> data, List<T> exp) {
            this.data = data;
            this.exp = exp;
            this.mExp = createMap(exp);
        }

        Map<T, T> createMap(List<T> l) {
            // Identity map t -> t; LinkedHashMap keeps encounter order
            // aligned with the expected list.
            Map<T, T> m = new LinkedHashMap<>();
            for (T t : l) {
                m.put(t, t);
            }
            return m;
        }

        void add(String description, Collection<?> expected, Supplier<Spliterator<?>> s) {
            description = joiner(description).toString();
            data.add(new Object[]{description, expected, s});
        }

        void add(String description, Supplier<Spliterator<?>> s) {
            add(description, exp, s);
        }

        void addCollection(Function<Collection<T>, ? extends Collection<T>> c) {
            add("new " + c.apply(Collections.<T>emptyList()).getClass().getName() + ".spliterator()",
                () -> c.apply(exp).spliterator());
        }

        void addList(Function<Collection<T>, ? extends List<T>> l) {
            // Cover both the list itself and a full-range subList view.
            addCollection(l);
            addCollection(l.andThen(list -> list.subList(0, list.size())));
        }

        void addMap(Function<Map<T, T>, ? extends Map<T, T>> m) {
            String description = "new " + m.apply(Collections.<T, T>emptyMap()).getClass().getName();
            addMap(m, description);
        }

        void addMap(Function<Map<T, T>, ? extends Map<T, T>> m, String description) {
            // One row per view: keys, values, and entries.
            add(description + ".keySet().spliterator()", () -> m.apply(mExp).keySet().spliterator());
            add(description + ".values().spliterator()", () -> m.apply(mExp).values().spliterator());
            add(description + ".entrySet().spliterator()", mExp.entrySet(), () -> m.apply(mExp).entrySet().spliterator());
        }

        StringBuilder joiner(String description) {
            return new StringBuilder(description).
                    append(" {").
                    append("size=").append(exp.size()).
                    append("}");
        }
    }
    // Cached so the (large) data set is built only once per test run.
    static Object[][] spliteratorDataProvider;

    /**
     * Builds the {@code Spliterator<Integer>} data set: for each size in
     * SIZES, rows for direct Spliterators factories, default-method
     * implementations over abstract collections, every JDK collection and
     * map type, and the Collections synchronized/unmodifiable/checked
     * wrappers.  Each row is (description, expected elements, supplier).
     */
    @DataProvider(name = "Spliterator<Integer>")
    public static Object[][] spliteratorDataProvider() {
        if (spliteratorDataProvider != null) {
            return spliteratorDataProvider;
        }

        List<Object[]> data = new ArrayList<>();
        for (int size : SIZES) {
            List<Integer> exp = listIntRange(size);
            SpliteratorDataBuilder<Integer> db = new SpliteratorDataBuilder<>(data, exp);

            // Direct spliterator methods
            db.add("Spliterators.spliterator(Collection, ...)",
                   () -> Spliterators.spliterator(exp, 0));

            db.add("Spliterators.spliterator(Iterator, ...)",
                   () -> Spliterators.spliterator(exp.iterator(), exp.size(), 0));

            db.add("Spliterators.spliteratorUnknownSize(Iterator, ...)",
                   () -> Spliterators.spliteratorUnknownSize(exp.iterator(), 0));

            db.add("Spliterators.spliterator(Spliterators.iteratorFromSpliterator(Spliterator ), ...)",
                   () -> Spliterators.spliterator(Spliterators.iterator(exp.spliterator()), exp.size(), 0));

            db.add("Spliterators.spliterator(T[], ...)",
                   () -> Spliterators.spliterator(exp.toArray(new Integer[0]), 0));

            db.add("Arrays.spliterator(T[], ...)",
                   () -> Arrays.spliterator(exp.toArray(new Integer[0])));

            class SpliteratorFromIterator extends Spliterators.AbstractSpliterator<Integer> {
                Iterator<Integer> it;

                SpliteratorFromIterator(Iterator<Integer> it, long est) {
                    super(est, Spliterator.SIZED);
                    this.it = it;
                }

                @Override
                public boolean tryAdvance(Consumer<? super Integer> action) {
                    if (action == null)
                        throw new NullPointerException();
                    if (it.hasNext()) {
                        action.accept(it.next());
                        return true;
                    }
                    else {
                        return false;
                    }
                }
            }
            db.add("new Spliterators.AbstractSpliterator()",
                   () -> new SpliteratorFromIterator(exp.iterator(), exp.size()));

            // Collections

            // default method implementations
            class AbstractCollectionImpl extends AbstractCollection<Integer> {
                Collection<Integer> c;

                AbstractCollectionImpl(Collection<Integer> c) {
                    this.c = c;
                }

                @Override
                public Iterator<Integer> iterator() {
                    return c.iterator();
                }

                @Override
                public int size() {
                    return c.size();
                }
            }
            db.addCollection(
                    c -> new AbstractCollectionImpl(c));

            class AbstractListImpl extends AbstractList<Integer> {
                List<Integer> l;

                AbstractListImpl(Collection<Integer> c) {
                    this.l = new ArrayList<>(c);
                }

                @Override
                public Integer get(int index) {
                    return l.get(index);
                }

                @Override
                public int size() {
                    return l.size();
                }
            }
            db.addCollection(
                    c -> new AbstractListImpl(c));

            class AbstractSetImpl extends AbstractSet<Integer> {
                Set<Integer> s;

                AbstractSetImpl(Collection<Integer> c) {
                    this.s = new HashSet<>(c);
                }

                @Override
                public Iterator<Integer> iterator() {
                    return s.iterator();
                }

                @Override
                public int size() {
                    return s.size();
                }
            }
            db.addCollection(
                    c -> new AbstractSetImpl(c));

            class AbstractSortedSetImpl extends AbstractSet<Integer> implements SortedSet<Integer> {
                SortedSet<Integer> s;

                AbstractSortedSetImpl(Collection<Integer> c) {
                    this.s = new TreeSet<>(c);
                }

                @Override
                public Iterator<Integer> iterator() {
                    return s.iterator();
                }

                @Override
                public int size() {
                    return s.size();
                }

                @Override
                public Comparator<? super Integer> comparator() {
                    return s.comparator();
                }

                @Override
                public SortedSet<Integer> subSet(Integer fromElement, Integer toElement) {
                    return s.subSet(fromElement, toElement);
                }

                @Override
                public SortedSet<Integer> headSet(Integer toElement) {
                    return s.headSet(toElement);
                }

                @Override
                public SortedSet<Integer> tailSet(Integer fromElement) {
                    return s.tailSet(fromElement);
                }

                @Override
                public Integer first() {
                    return s.first();
                }

                @Override
                public Integer last() {
                    return s.last();
                }

                @Override
                public Spliterator<Integer> spliterator() {
                    // Exercises the SortedSet default spliterator.
                    return SortedSet.super.spliterator();
                }
            }
            db.addCollection(
                    c -> new AbstractSortedSetImpl(c));

            class IterableWrapper implements Iterable<Integer> {
                final Iterable<Integer> it;

                IterableWrapper(Iterable<Integer> it) {
                    this.it = it;
                }

                @Override
                public Iterator<Integer> iterator() {
                    return it.iterator();
                }
            }
            db.add("new Iterable.spliterator()",
                   () -> new IterableWrapper(exp).spliterator());

            //
            db.add("Arrays.asList().spliterator()",
                   () -> Spliterators.spliterator(Arrays.asList(exp.toArray(new Integer[0])), 0));

            db.addList(ArrayList::new);

            db.addList(LinkedList::new);

            db.addList(Vector::new);

            class AbstractRandomAccessListImpl extends AbstractList<Integer> implements RandomAccess {
                Integer[] ia;

                AbstractRandomAccessListImpl(Collection<Integer> c) {
                    this.ia = c.toArray(new Integer[c.size()]);
                }

                @Override
                public Integer get(int index) {
                    return ia[index];
                }

                @Override
                public int size() {
                    return ia.length;
                }
            }
            db.addList(AbstractRandomAccessListImpl::new);

            class RandomAccessListImpl implements List<Integer>, RandomAccess {
                Integer[] ia;
                List<Integer> l;

                RandomAccessListImpl(Collection<Integer> c) {
                    this.ia = c.toArray(new Integer[c.size()]);
                    this.l = Arrays.asList(ia);
                }

                @Override
                public Integer get(int index) {
                    return ia[index];
                }

                @Override
                public Integer set(int index, Integer element) {
                    throw new UnsupportedOperationException();
                }

                @Override
                public void add(int index, Integer element) {
                    throw new UnsupportedOperationException();
                }

                @Override
                public Integer remove(int index) {
                    throw new UnsupportedOperationException();
                }

                @Override
                public int indexOf(Object o) {
                    return l.indexOf(o);
                }

                @Override
                public int lastIndexOf(Object o) {
                    return Arrays.asList(ia).lastIndexOf(o);
                }

                @Override
                public ListIterator<Integer> listIterator() {
                    return l.listIterator();
                }

                @Override
                public ListIterator<Integer> listIterator(int index) {
                    return l.listIterator(index);
                }

                @Override
                public List<Integer> subList(int fromIndex, int toIndex) {
                    return l.subList(fromIndex, toIndex);
                }

                @Override
                public int size() {
                    return ia.length;
                }

                @Override
                public boolean isEmpty() {
                    // NOTE(review): inverted relative to the usual contract
                    // (returns true when non-empty); harmless for this test's
                    // traversal paths but worth confirming upstream.
                    return size() != 0;
                }

                @Override
                public boolean contains(Object o) {
                    return l.contains(o);
                }

                @Override
                public Iterator<Integer> iterator() {
                    return l.iterator();
                }

                @Override
                public Object[] toArray() {
                    return l.toArray();
                }

                @Override
                public <T> T[] toArray(T[] a) {
                    return l.toArray(a);
                }

                @Override
                public boolean add(Integer integer) {
                    throw new UnsupportedOperationException();
                }

                @Override
                public boolean remove(Object o) {
                    throw new UnsupportedOperationException();
                }

                @Override
                public boolean containsAll(Collection<?> c) {
                    return l.containsAll(c);
                }

                @Override
                public boolean addAll(Collection<? extends Integer> c) {
                    throw new UnsupportedOperationException();
                }

                @Override
                public boolean addAll(int index, Collection<? extends Integer> c) {
                    throw new UnsupportedOperationException();
                }

                @Override
                public boolean removeAll(Collection<?> c) {
                    throw new UnsupportedOperationException();
                }

                @Override
                public boolean retainAll(Collection<?> c) {
                    throw new UnsupportedOperationException();
                }

                @Override
                public void clear() {
                    throw new UnsupportedOperationException();
                }
            }
            db.addList(RandomAccessListImpl::new);

            db.addCollection(HashSet::new);

            db.addCollection(LinkedHashSet::new);

            db.addCollection(TreeSet::new);


            db.addCollection(c -> { Stack<Integer> s = new Stack<>(); s.addAll(c); return s;});

            db.addCollection(PriorityQueue::new);

            db.addCollection(ArrayDeque::new);

            db.addCollection(ConcurrentSkipListSet::new);

            if (size > 0) {
                db.addCollection(c -> {
                    ArrayBlockingQueue<Integer> abq = new ArrayBlockingQueue<>(size);
                    abq.addAll(c);
                    return abq;
                });
            }

            db.addCollection(PriorityBlockingQueue::new);

            db.addCollection(LinkedBlockingQueue::new);

            db.addCollection(LinkedTransferQueue::new);

            db.addCollection(ConcurrentLinkedQueue::new);

            db.addCollection(LinkedBlockingDeque::new);

            db.addCollection(CopyOnWriteArrayList::new);

            db.addCollection(CopyOnWriteArraySet::new);

            if (size == 0) {
                db.addCollection(c -> Collections.<Integer>emptySet());
                db.addList(c -> Collections.<Integer>emptyList());
            }
            else if (size == 1) {
                db.addCollection(c -> Collections.singleton(exp.get(0)));
                db.addCollection(c -> Collections.singletonList(exp.get(0)));
            }

            {
                Integer[] ai = new Integer[size];
                Arrays.fill(ai, 1);
                db.add(String.format("Collections.nCopies(%d, 1)", exp.size()),
                       Arrays.asList(ai),
                       () -> Collections.nCopies(exp.size(), 1).spliterator());
            }

            // Collections.synchronized/unmodifiable/checked wrappers
            db.addCollection(Collections::unmodifiableCollection);
            db.addCollection(c -> Collections.unmodifiableSet(new HashSet<>(c)));
            db.addCollection(c -> Collections.unmodifiableSortedSet(new TreeSet<>(c)));
            db.addList(c -> Collections.unmodifiableList(new ArrayList<>(c)));
            db.addMap(Collections::unmodifiableMap);
            db.addMap(m -> Collections.unmodifiableSortedMap(new TreeMap<>(m)));

            db.addCollection(Collections::synchronizedCollection);
            db.addCollection(c -> Collections.synchronizedSet(new HashSet<>(c)));
            db.addCollection(c -> Collections.synchronizedSortedSet(new TreeSet<>(c)));
            db.addList(c -> Collections.synchronizedList(new ArrayList<>(c)));
            db.addMap(Collections::synchronizedMap);
            db.addMap(m -> Collections.synchronizedSortedMap(new TreeMap<>(m)));

            db.addCollection(c -> Collections.checkedCollection(c, Integer.class));
            db.addCollection(c -> Collections.checkedQueue(new ArrayDeque<>(c), Integer.class));
            db.addCollection(c -> Collections.checkedSet(new HashSet<>(c), Integer.class));
            db.addCollection(c -> Collections.checkedSortedSet(new TreeSet<>(c), Integer.class));
            db.addList(c -> Collections.checkedList(new ArrayList<>(c), Integer.class));
            db.addMap(c -> Collections.checkedMap(c, Integer.class, Integer.class));
            db.addMap(m -> Collections.checkedSortedMap(new TreeMap<>(m), Integer.class, Integer.class));

            // Maps

            db.addMap(HashMap::new);

            db.addMap(m -> {
                // Create a Map ensuring that for large sizes
                // buckets will contain 2 or more entries
                HashMap<Integer, Integer> cm = new HashMap<>(1, m.size() + 1);
                // Don't use putAll which inflates the table by
                // m.size() * loadFactor, thus creating a very sparse
                // map for 1000 entries defeating the purpose of this test,
                // in addition it will cause the split until null test to fail
                // because the number of valid splits is larger than the
                // threshold
                for (Map.Entry<Integer, Integer> e : m.entrySet())
                    cm.put(e.getKey(), e.getValue());
                return cm;
            }, "new java.util.HashMap(1, size + 1)");

            db.addMap(LinkedHashMap::new);

            db.addMap(IdentityHashMap::new);

            db.addMap(WeakHashMap::new);

            db.addMap(m -> {
                // Create a Map ensuring that for large sizes
                // buckets will be consist of 2 or more entries
                WeakHashMap<Integer, Integer> cm = new WeakHashMap<>(1, m.size() + 1);
                for (Map.Entry<Integer, Integer> e : m.entrySet())
                    cm.put(e.getKey(), e.getValue());
                return cm;
            }, "new java.util.WeakHashMap(1, size + 1)");

            // @@@ Descending maps etc
            db.addMap(TreeMap::new);

            db.addMap(ConcurrentHashMap::new);

            db.addMap(ConcurrentSkipListMap::new);

            if (size == 0) {
                db.addMap(m -> Collections.<Integer, Integer>emptyMap());
            }
            else if (size == 1) {
                db.addMap(m -> Collections.singletonMap(exp.get(0), exp.get(0)));
            }
        }

        return spliteratorDataProvider = data.toArray(new Object[0][]);
    }
private static List<Integer> listIntRange(int upTo) {
List<Integer> exp = new ArrayList<>();
for (int i = 0; i < upTo; i++)
exp.add(i);
return Collections.unmodifiableList(exp);
}
    // Each @Test below delegates to a SpliteratorTestHelper routine from the
    // base class, exercising one traversal/splitting contract over every row
    // of the "Spliterator<Integer>" data provider.

    // Null handlers must be rejected eagerly with NPE.
    @Test(dataProvider = "Spliterator<Integer>")
    public void testNullPointerException(String description, Collection<Integer> exp, Supplier<Spliterator<Integer>> s) {
        executeAndCatch(NullPointerException.class, () -> s.get().forEachRemaining(null));
        executeAndCatch(NullPointerException.class, () -> s.get().tryAdvance(null));
    }

    @Test(dataProvider = "Spliterator<Integer>")
    public void testForEach(String description, Collection<Integer> exp, Supplier<Spliterator<Integer>> s) {
        testForEach(exp, s, UnaryOperator.identity());
    }

    @Test(dataProvider = "Spliterator<Integer>")
    public void testTryAdvance(String description, Collection<Integer> exp, Supplier<Spliterator<Integer>> s) {
        testTryAdvance(exp, s, UnaryOperator.identity());
    }

    @Test(dataProvider = "Spliterator<Integer>")
    public void testMixedTryAdvanceForEach(String description, Collection<Integer> exp, Supplier<Spliterator<Integer>> s) {
        testMixedTryAdvanceForEach(exp, s, UnaryOperator.identity());
    }

    @Test(dataProvider = "Spliterator<Integer>")
    public void testMixedTraverseAndSplit(String description, Collection<Integer> exp, Supplier<Spliterator<Integer>> s) {
        testMixedTraverseAndSplit(exp, s, UnaryOperator.identity());
    }

    // After a full traversal, trySplit() must return null.
    @Test(dataProvider = "Spliterator<Integer>")
    public void testSplitAfterFullTraversal(String description, Collection<Integer> exp, Supplier<Spliterator<Integer>> s) {
        testSplitAfterFullTraversal(s, UnaryOperator.identity());
    }

    @Test(dataProvider = "Spliterator<Integer>")
    public void testSplitOnce(String description, Collection<Integer> exp, Supplier<Spliterator<Integer>> s) {
        testSplitOnce(exp, s, UnaryOperator.identity())<|fim_suffix|>;
    }

    @Test(dataProvider = "Spliterator<Integer>")
    public void testSplitSixDeep(String description, Collection<Integer> exp, Supplier<Spliterator<Integer>> s) {
        testSplitSixDeep(exp, s, UnaryOperator.identity());
    }

    @Test(dataProvider = "Spliterator<Integer>")
    public void testSplitUntilNull(String description, Collection<Integer> exp, Supplier<Spliterator<Integer>> s) {
        testSplitUntilNull(exp, s, UnaryOperator.identity());
    }
//
    /**
     * Builds Spliterator.OfInt data rows from one test string: one row for
     * its chars() stream and one for its codePoints() stream, each paired
     * with the expected int values computed by {@link #transform}.
     */
    private static class SpliteratorOfIntCharDataBuilder {
        List<Object[]> data;

        String s;

        List<Integer> expChars;

        List<Integer> expCodePoints;

        SpliteratorOfIntCharDataBuilder(List<Object[]> data, String s) {
            this.data = data;
            this.s = s;
            this.expChars = transform(s, false);
            this.expCodePoints = transform(s, true);
        }

        static List<Integer> transform(String s, boolean toCodePoints) {
            List<Integer> l = new ArrayList<>();

            if (!toCodePoints) {
                // Char view: one int per UTF-16 code unit, surrogates included.
                for (int i = 0; i < s.length(); i++) {
                    l.add((int) s.charAt(i));
                }
            }
            else {
                // Code-point view: combine well-formed surrogate pairs; an
                // unpaired surrogate passes through as its own value.
                for (int i = 0; i < s.length();) {
                    char c1 = s.charAt(i++);
                    int cp = c1;
                    if (Character.isHighSurrogate(c1) && i < s.length()) {
                        char c2 = s.charAt(i);
                        if (Character.isLowSurrogate(c2)) {
                            i++;
                            cp = Character.toCodePoint(c1, c2);
                        }
                    }
                    l.add(cp);
                }
            }
            return l;
        }

        void add(String description, Function<String, CharSequence> f) {
            description = description.replace("%s", s);
            {
                Supplier<Spliterator.OfInt> supplier = () -> f.apply(s).chars().spliterator();
                data.add(new Object[]{description + ".chars().spliterator()", expChars, supplier});
            }
            {
                Supplier<Spliterator.OfInt> supplier = () -> f.apply(s).codePoints().spliterator();
                data.add(new Object[]{description + ".codePoints().spliterator()", expCodePoints, supplier});
            }
        }
    }
    // Cached so the data set is built only once per test run.
    static Object[][] spliteratorOfIntDataProvider;

    /**
     * Builds the Spliterator.OfInt data set: int-array-backed spliterators
     * for each size in SIZES, plus chars()/codePoints() spliterators over
     * every test string for several CharSequence implementations.
     */
    @DataProvider(name = "Spliterator.OfInt")
    public static Object[][] spliteratorOfIntDataProvider() {
        if (spliteratorOfIntDataProvider != null) {
            return spliteratorOfIntDataProvider;
        }

        List<Object[]> data = new ArrayList<>();
        for (int size : SIZES) {
            int exp[] = arrayIntRange(size);
            SpliteratorOfIntDataBuilder db = new SpliteratorOfIntDataBuilder(data, listIntRange(size));

            db.add("Spliterators.spliterator(int[], ...)",
                   () -> Spliterators.spliterator(exp, 0));

            db.add("Arrays.spliterator(int[], ...)",
                   () -> Arrays.spliterator(exp));

            db.add("Spliterators.spliterator(PrimitiveIterator.OfInt, ...)",
                   () -> Spliterators.spliterator(Spliterators.iterator(Arrays.spliterator(exp)), exp.length, 0));

            db.add("Spliterators.spliteratorUnknownSize(PrimitiveIterator.OfInt, ...)",
                   () -> Spliterators.spliteratorUnknownSize(Spliterators.iterator(Arrays.spliterator(exp)), 0));

            class IntSpliteratorFromArray extends Spliterators.AbstractIntSpliterator {
                int[] a;
                int index = 0;

                IntSpliteratorFromArray(int[] a) {
                    super(a.length, Spliterator.SIZED);
                    this.a = a;
                }

                @Override
                public boolean tryAdvance(IntConsumer action) {
                    if (action == null)
                        throw new NullPointerException();
                    if (index < a.length) {
                        action.accept(a[index++]);
                        return true;
                    }
                    else {
                        return false;
                    }
                }
            }
            db.add("new Spliterators.AbstractIntAdvancingSpliterator()",
                   () -> new IntSpliteratorFromArray(exp));
        }

        // Class for testing default methods
        class CharSequenceImpl implements CharSequence {
            final String s;

            public CharSequenceImpl(String s) {
                this.s = s;
            }

            @Override
            public int length() {
                return s.length();
            }

            @Override
            public char charAt(int index) {
                return s.charAt(index);
            }

            @Override
            public CharSequence subSequence(int start, int end) {
                return s.subSequence(start, end);
            }

            @Override
            public String toString() {
                return s;
            }
        }

        for (String string : STRINGS) {
            SpliteratorOfIntCharDataBuilder cdb = new SpliteratorOfIntCharDataBuilder(data, string);
            cdb.add("\"%s\"", s -> s);
            cdb.add("new CharSequenceImpl(\"%s\")", CharSequenceImpl::new);
            cdb.add("new StringBuilder(\"%s\")", StringBuilder::new);
            cdb.add("new StringBuffer(\"%s\")", StringBuffer::new);
            cdb.add("CharBuffer.wrap(\"%s\".toCharArray())", s -> CharBuffer.wrap(s.toCharArray()));
        }

        return spliteratorOfIntDataProvider = data.toArray(new Object[0][]);
    }
private static int[] arrayIntRange(int upTo) {
int[] exp = new int[upTo];
for (int i = 0; i < upTo; i++)
exp[i] = i;
return exp;
}
@Test(dataProvider = "Spliterator.OfInt")
public void testIntNullPointerException(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
executeAndCatch(NullPointerException.class, () -> s.get().forEachRemaining((IntConsumer) null));
executeAndCatch(NullPointerException.class, () -> s.get().tryAdvance((IntConsumer) null));
}
@Test(dataProvider = "Spliterator.OfInt")
public void testIntForEach(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
testForEach(exp, s, intBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfInt")
public void testIntTryAdvance(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
testTryAdvance(exp, s, intBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfInt")
public void testIntMixedTryAdvanceForEach(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
testMixedTryAdvanceForEach(exp, s, intBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfInt")
public void testIntMixedTraverseAndSplit(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
testMixedTraverseAndSplit(exp, s, intBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfInt")
public void testIntSplitAfterFullTraversal(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
testSplitAfterFullTraversal(s, intBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfInt")
public void testIntSplitOnce(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
testSplitOnce(exp, s, intBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfInt")
public void testIntSplitSixDeep(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
testSplitSixDeep(exp, s, intBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfInt")
public void testIntSplitUntilNull(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
testSplitUntilNull(exp, s, intBoxingConsumer());
}
//
private static class SpliteratorOfLongDataBuilder {
List<Object[]> data;
List<Long> exp;
SpliteratorOfLongDataBuilder(List<Object[]> data, List<Long> exp) {
this.data = data;
this.exp = exp;
}
void add(String description, List<Long> expected, Supplier<Spliterator.OfLong> s) {
description = joiner(description).toString();
data.add(new Object[]{description, expected, s});
}
void add(String description, Supplier<Spliterator.OfLong> s) {
add(description, exp, s);
}
StringBuilder joiner(String description) {
return new StringBuilder(description).
append(" {").
append("size=").append(exp.size()).
append("}");
}
}
static Object[][] spliteratorOfLongDataProvider;
@DataProvider(name = "Spliterator.OfLong")
public static Object[][] spliteratorOfLongDataProvider() {
if (spliteratorOfLongDataProvider != null) {
return spliteratorOfLongDataProvider;
}
List<Object[]> data = new ArrayList<>();
for (int size : SIZES) {
long exp[] = arrayLongRange(size);
SpliteratorOfLongDataBuilder db = new SpliteratorOfLongDataBuilder(data, listLongRange(size));
db.add("Spliterators.spliterator(long[], ...)",
() -> Spliterators.spliterator(exp, 0));
db.add("Arrays.spliterator(long[], ...)",
() -> Arrays.spliterator(exp));
db.add("Spliterators.spliterator(PrimitiveIterator.OfLong, ...)",
() -> Spliterators.spliterator(Spliterators.iterator(Arrays.spliterator(exp)), exp.length, 0));
db.add("Spliterators.spliteratorUnknownSize(PrimitiveIterator.OfLong, ...)",
() -> Spliterators.spliteratorUnknownSize(Spliterators.iterator(Arrays.spliterator(exp)), 0));
class LongSpliteratorFromArray extends Spliterators.AbstractLongSpliterator {
long[] a;
int index = 0;
LongSpliteratorFromArray(long[] a) {
super(a.length, Spliterator.SIZED);
this.a = a;
}
@Override
public boolean tryAdvance(LongConsumer action) {
if (action == null)
throw new NullPointerException();
if (index < a.length) {
action.accept(a[index++]);
return true;
}
else {
return false;
}
}
}
db.add("new Spliterators.AbstractLongAdvancingSpliterator()",
() -> new LongSpliteratorFromArray(exp));
}
return spliteratorOfLongDataProvider = data.toArray(new Object[0][]);
}
private static List<Long> listLongRange(int upTo) {
List<Long> exp = new ArrayList<>();
for (long i = 0; i < upTo; i++)
exp.add(i);
return Collections.unmodifiableList(exp);
}
private static long[] arrayLongRange(int upTo) {
long[] exp = new long[upTo];
for (int i = 0; i < upTo; i++)
exp[i] = i;
return exp;
}
@Test(dataProvider = "Spliterator.OfLong")
public void testLongNullPointerException(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
executeAndCatch(NullPointerException.class, () -> s.get().forEachRemaining((LongConsumer) null));
executeAndCatch(NullPointerException.class, () -> s.get().tryAdvance((LongConsumer) null));
}
@Test(dataProvider = "Spliterator.OfLong")
public void testLongForEach(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
testForEach(exp, s, longBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfLong")
public void testLongTryAdvance(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
testTryAdvance(exp, s, longBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfLong")
public void testLongMixedTryAdvanceForEach(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
testMixedTryAdvanceForEach(exp, s, longBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfLong")
public void testLongMixedTraverseAndSplit(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
testMixedTraverseAndSplit(exp, s, longBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfLong")
public void testLongSplitAfterFullTraversal(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
testSplitAfterFullTraversal(s, longBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfLong")
public void testLongSplitOnce(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
testSplitOnce(exp, s, longBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfLong")
public void testLongSplitSixDeep(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
testSplitSixDeep(exp, s, longBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfLong")
public void testLongSplitUntilNull(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
testSplitUntilNull(exp, s, longBoxingConsumer());
}
//
private static class SpliteratorOfDoubleDataBuilder {
List<Object[]> data;
List<Double> exp;
SpliteratorOfDoubleDataBuilder(List<Object[]> data, List<Double> exp) {
this.data = data;
this.exp = exp;
}
void add(String description, List<Double> expected, Supplier<Spliterator.OfDouble> s) {
description = joiner(description).toString();
data.add(new Object[]{description, expected, s});
}
void add(String description, Supplier<Spliterator.OfDouble> s) {
add(description, exp, s);
}
StringBuilder joiner(String description) {
return new StringBuilder(description).
append(" {").
append("size=").append(exp.size()).
append("}");
}
}
static Object[][] spliteratorOfDoubleDataProvider;
@DataProvider(name = "Spliterator.OfDouble")
public static Object[][] spliteratorOfDoubleDataProvider() {
if (spliteratorOfDoubleDataProvider != null) {
return spliteratorOfDoubleDataProvider;
}
List<Object[]> data = new ArrayList<>();
for (int size : SIZES) {
double exp[] = arrayDoubleRange(size);
SpliteratorOfDoubleDataBuilder db = new SpliteratorOfDoubleDataBuilder(data, listDoubleRange(size));
db.add("Spliterators.spliterator(double[], ...)",
() -> Spliterators.spliterator(exp, 0));
db.add("Arrays.spliterator(double[], ...)",
() -> Arrays.spliterator(exp));
db.add("Spliterators.spliterator(PrimitiveIterator.OfDouble, ...)",
() -> Spliterators.spliterator(Spliterators.iterator(Arrays.spliterator(exp)), exp.length, 0));
db.add("Spliterators.spliteratorUnknownSize(PrimitiveIterator.OfDouble, ...)",
() -> Spliterators.spliteratorUnknownSize(Spliterators.iterator(Arrays.spliterator(exp)), 0));
class DoubleSpliteratorFromArray extends Spliterators.AbstractDoubleSpliterator {
double[] a;
int index = 0;
DoubleSpliteratorFromArray(double[] a) {
super(a.length, Spliterator.SIZED);
this.a = a;
}
@Override
public boolean tryAdvance(DoubleConsumer action) {
if (action == null)
throw new NullPointerException();
if (index < a.length) {
action.accept(a[index++]);
return true;
}
else {
return false;
}
}
}
db.add("new Spliterators.AbstractDoubleAdvancingSpliterator()",
() -> new DoubleSpliteratorFromArray(exp));
}
return spliteratorOfDoubleDataProvider = data.toArray(new Object[0][]);
}
private static List<Double> listDoubleRange(int upTo) {
List<Double> exp = new ArrayList<>();
for (double i = 0; i < upTo; i++)
exp.add(i);
return Collections.unmodifiableList(exp);
}
private static double[] arrayDoubleRange(int upTo) {
double[] exp = new double[upTo];
for (int i = 0; i < upTo; i++)
exp[i] = i;
return exp;
}
@Test(dataProvider = "Spliterator.OfDouble")
public void testDoubleNullPointerException(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
executeAndCatch(NullPointerException.class, () -> s.get().forEachRemaining((DoubleConsumer) null));
executeAndCatch(NullPointerException.class, () -> s.get().tryAdvance((DoubleConsumer) null));
}
@Test(dataProvider = "Spliterator.OfDouble")
public void testDoubleForEach(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
testForEach(exp, s, doubleBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfDouble")
public void testDoubleTryAdvance(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
testTryAdvance(exp, s, doubleBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfDouble")
public void testDoubleMixedTryAdvanceForEach(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
testMixedTryAdvanceForEach(exp, s, doubleBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfDouble")
public void testDoubleMixedTraverseAndSplit(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
testMixedTraverseAndSplit(exp, s, doubleBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfDouble")
public void testDoubleSplitAfterFullTraversal(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
testSplitAfterFullTraversal(s, doubleBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfDouble")
public void testDoubleSplitOnce(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
testSplitOnce(exp, s, doubleBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfDouble")
public void testDoubleSplitSixDeep(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
testSplitSixDeep(exp, s, doubleBoxingConsumer());
}
@Test(dataProvider = "Spliterator.OfDouble")
public void testDoubleSplitUntilNull(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
testSplitUntilNull(exp, s, doubleBoxingConsumer());
}
}<|fim▁end|>
|
@Test(dataProvider = "Spliterator<Integer>")
public void testTryAdvance(String description, Collection<Integer> exp, Supplier<Spliterator<Integer>> s) {
testTryAdvance(exp, s, UnaryOperator.identity());
}
|
<|file_name|>PrimaryModelFactoryTest.java<|end_file_name|><|fim▁begin|>package com.carbon108.tilde;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* @author K Moroz
* @version 1.0
*/
public class PrimaryModelFactoryTest {
private PrimaryModelFactory factory;
@Before
public void setUp() {
factory = new PrimaryModelFactory();
}
@Rule
public final ExpectedException exception = ExpectedException.none();
@Test<|fim▁hole|> Collection<String> factoryIDs = factory.getIDs();
assertEquals(2, factoryIDs.size());
assertEquals(true, factoryIDs.contains(ModelID.LINEAR));
assertEquals(true, factoryIDs.contains(ModelID.CONSTANT));
// check immutability
exception.expect(UnsupportedOperationException.class);
factoryIDs.add("someString");
}
@Test
public void makeValidModelIDGetsModel() {
TildeModel model1 = factory.make(ModelID.LINEAR);
TildeModel model2 = factory.make(ModelID.CONSTANT);
assertEquals(ModelID.LINEAR, model1.getID());
assertEquals(ModelID.CONSTANT, model2.getID());
}
@Test
public void makeInvalidIDGetsNullModel() {
TildeModel m1null = factory.make(null);
TildeModel m1blank = factory.make("");
TildeModel m2invalid = factory.make("invalidModelID");
assertTrue(m1null.isNullModel());
assertTrue(m1blank.isNullModel());
assertTrue(m2invalid.isNullModel());
}
@Test
public void makeAll() {
Collection<TildeModel> models = factory.makeAll();
assertEquals(2, models.size());
assertEquals(true, models.contains(new LinearModel()));
assertEquals(true, models.contains(new ConstantModel()));
}
}<|fim▁end|>
|
public void getIDsGetsAllValidModelIDs() {
|
<|file_name|>test_config.py<|end_file_name|><|fim▁begin|># coding=utf-8
"""Test configuration of toolbox."""
<|fim▁hole|>import importlib
import os
import pytest
from snntoolbox.bin.utils import update_setup
from snntoolbox.utils.utils import import_configparser
with open(os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..', 'requirements.txt'))) as f:
requirements = []
for s in f.readlines():
requirements.append(s.rstrip('\n').split('==')[0])
@pytest.mark.parametrize('required_module', requirements)
def test_imports_from_requirements(required_module):
assert importlib.import_module(required_module)
# Todo: Add configuration that is expected to pass.
_in_and_out = [
({}, False),
({'paths': {'path_wd': os.path.dirname(__file__),
'dataset_path': os.path.dirname(__file__),
'filename_ann': '98.96'}}, False)
]
@pytest.mark.parametrize('params, expect_pass', _in_and_out)
def test_updating_settings(params, expect_pass, _path_wd):
configparser = import_configparser()
config = configparser.ConfigParser()
config.read_dict(params)
configpath = os.path.join(str(_path_wd), 'config')
with open(configpath, 'w') as file:
config.write(file)
if expect_pass:
assert update_setup(configpath)
else:
pytest.raises(AssertionError, update_setup, configpath)<|fim▁end|>
| |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md><|fim▁hole|><|fim▁end|>
|
fn main() {
println!("cargo:rustc-flags=-l wow32");
}
|
<|file_name|>test_astype.py<|end_file_name|><|fim▁begin|>from datetime import timedelta
import numpy as np
import pytest
import pandas as pd
from pandas import (
Float64Index, Index, Int64Index, NaT, Timedelta, TimedeltaIndex,
timedelta_range)
import pandas.util.testing as tm
class TestTimedeltaIndex(object):
def test_astype_object(self):
idx = timedelta_range(start='1 days', periods=4, freq='D', name='idx')
expected_list = [Timedelta('1 days'), Timedelta('2 days'),
Timedelta('3 days'), Timedelta('4 days')]
result = idx.astype(object)
expected = Index(expected_list, dtype=object, name='idx')
tm.assert_index_equal(result, expected)
assert idx.tolist() == expected_list
def test_astype_object_with_nat(self):
idx = TimedeltaIndex([timedelta(days=1), timedelta(days=2), NaT,
timedelta(days=4)], name='idx')
expected_list = [Timedelta('1 days'), Timedelta('2 days'), NaT,
Timedelta('4 days')]
result = idx.astype(object)
expected = Index(expected_list, dtype=object, name='idx')
tm.assert_index_equal(result, expected)
assert idx.tolist() == expected_list
def test_astype(self):
# GH 13149, GH 13209
idx = TimedeltaIndex([1e14, 'NaT', NaT, np.NaN])
result = idx.astype(object)
expected = Index([Timedelta('1 days 03:46:40')] + [NaT] * 3,
dtype=object)
tm.assert_index_equal(result, expected)
result = idx.astype(int)
expected = Int64Index([100000000000000] + [-9223372036854775808] * 3,
dtype=np.int64)
tm.assert_index_equal(result, expected)
result = idx.astype(str)
expected = Index(str(x) for x in idx)
tm.assert_index_equal(result, expected)
rng = timedelta_range('1 days', periods=10)
result = rng.astype('i8')
tm.assert_index_equal(result, Index(rng.asi8))
tm.assert_numpy_array_equal(rng.asi8, result.values)
def test_astype_uint(self):
arr = timedelta_range('1H', periods=2)
expected = pd.UInt64Index(
np.array([3600000000000, 90000000000000], dtype="uint64")
)
tm.assert_index_equal(arr.astype("uint64"), expected)
tm.assert_index_equal(arr.astype("uint32"), expected)
def test_astype_timedelta64(self):
# GH 13149, GH 13209
idx = TimedeltaIndex([1e14, 'NaT', NaT, np.NaN])
result = idx.astype('timedelta64')
expected = Float64Index([1e+14] + [np.NaN] * 3, dtype='float64')
tm.assert_index_equal(result, expected)
result = idx.astype('timedelta64[ns]')
tm.assert_index_equal(result, idx)
assert result is not idx
result = idx.astype('timedelta64[ns]', copy=False)
tm.assert_index_equal(result, idx)
assert result is idx
@pytest.mark.parametrize('dtype', [
float, 'datetime64', 'datetime64[ns]'])
def test_astype_raises(self, dtype):
# GH 13149, GH 13209
idx = TimedeltaIndex([1e14, 'NaT', NaT, np.NaN])
msg = 'Cannot cast TimedeltaArray to dtype'
with pytest.raises(TypeError, match=msg):
idx.astype(dtype)
def test_astype_category(self):
obj = pd.timedelta_range("1H", periods=2, freq='H')
result = obj.astype('category')
expected = pd.CategoricalIndex([pd.Timedelta('1H'),
pd.Timedelta('2H')])
tm.assert_index_equal(result, expected)
result = obj._data.astype('category')
expected = expected.values
tm.assert_categorical_equal(result, expected)
def test_astype_array_fallback(self):<|fim▁hole|>
result = obj._data.astype(bool)
expected = np.array([True, True])
tm.assert_numpy_array_equal(result, expected)<|fim▁end|>
|
obj = pd.timedelta_range("1H", periods=2)
result = obj.astype(bool)
expected = pd.Index(np.array([True, True]))
tm.assert_index_equal(result, expected)
|
<|file_name|>fracture_configurator_factory.cpp<|end_file_name|><|fim▁begin|>#include "fracture_configurator_factory.h"
#include "regional_uniform_fracture_configurator.h"
#include "settings.h"
#include "uniform_fracture_configurator.h"
#include <TensorVariable.h>
#include <string>
using namespace std;
namespace csmp {
namespace tperm {
FractureConfiguratorFactory::FractureConfiguratorFactory()
: ConfiguratorFactory() {}
FractureConfiguratorFactory::~FractureConfiguratorFactory() {}
/**
Returns nullptr if settings incorrect.
*/
std::unique_ptr<Configurator>
FractureConfiguratorFactory::configurator(const Settings &s) const {
std::unique_ptr<Configurator> pConf(nullptr);
const string c = s.json["configuration"].get<string>();
if (c == string("uniform")) {
const double am = s.json["mechanical aperture"].get<double>();
if (s.json["hydraulic aperture"].size() == 9) // tensor
pConf.reset(new UniformFractureConfigurator(
tensor("hydraulic aperture", s), tensor("permeability", s),
tensor("conductivity", s), am));
else // scalar
pConf.reset(new UniformFractureConfigurator(
s.json["hydraulic aperture"].get<double>(), am));
} else if (c == string("regional uniform")) {
const auto ams = s.json["mechanical aperture"].get<vector<double>>();
const auto frnames = s.json["fracture regions"].get<vector<string>>();
if (s.json.count("permeability")) { // tensor
vector<string> props = {"hydraulic aperture", "permeability",
"conductivity"};
vector<vector<TensorVariable<3>>> vals(
props.size(),
vector<TensorVariable<3>>(ams.size(), TensorVariable<3>()));
for (size_t i(0); i < props.size(); ++i) {
auto jvals = s.json[props[i].c_str()].get<vector<vector<double>>>();
for (size_t j(0); j < ams.size(); ++j)
vals[i][j] = tensor(jvals.at(j));
}
pConf.reset(new RegionalUniformFractureConfigurator(
vals.at(0), vals.at(1), vals.at(2), ams, frnames));
} else { // scalar
const auto ahs = s.json["hydraulic aperture"].get<vector<double>>();
pConf.reset(new RegionalUniformFractureConfigurator(ahs, ams, frnames));<|fim▁hole|> return pConf;
}
} // !tperm
} // ! csmp<|fim▁end|>
|
}
}
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
import string
from future_builtins import map
from calibre.utils.config import JSONConfig
from calibre.spell.dictionary import Dictionaries, parse_lang_code
tprefs = JSONConfig('tweak_book_gui')
d = tprefs.defaults
d['editor_theme'] = None
d['editor_font_family'] = None
d['editor_font_size'] = 12
d['editor_line_wrap'] = True
d['editor_tab_stop_width'] = 2
d['editor_show_char_under_cursor'] = True
d['replace_entities_as_typed'] = True
d['preview_refresh_time'] = 2
d['choose_tweak_fmt'] = True
d['tweak_fmt_order'] = ['EPUB', 'AZW3']
d['update_metadata_from_calibre'] = True
d['nestable_dock_widgets'] = False
d['dock_top_left'] = 'horizontal'
d['dock_top_right'] = 'horizontal'
d['dock_bottom_left'] = 'horizontal'
d['dock_bottom_right'] = 'horizontal'
d['preview_serif_family'] = 'Liberation Serif'
d['preview_sans_family'] = 'Liberation Sans'
d['preview_mono_family'] = 'Liberation Mono'
d['preview_standard_font_family'] = 'serif'
d['preview_base_font_size'] = 18
d['preview_mono_font_size'] = 14
d['preview_minimum_font_size'] = 8
d['remove_existing_links_when_linking_sheets'] = True
d['charmap_favorites'] = list(map(ord, '\xa0\u2002\u2003\u2009\xad' '‘’“”‹›«»‚„' '—–§¶†‡©®™' '→⇒•·°±−×÷¼½½¾' '…µ¢£€¿¡¨´¸ˆ˜' 'ÀÁÂÃÄÅÆÇÈÉÊË' 'ÌÍÎÏÐÑÒÓÔÕÖØ' 'ŒŠÙÚÛÜÝŸÞßàá' 'âãäåæçèéêëìí' 'îïðñòóôõöøœš' 'ùúûüýÿþªºαΩ∞')) # noqa
d['folders_for_types'] = {'style':'styles', 'image':'images', 'font':'fonts', 'audio':'audio', 'video':'video'}
d['pretty_print_on_open'] = False
d['disable_completion_popup_for_search'] = False
d['saved_searches'] = []
d['insert_tag_mru'] = ['p', 'div', 'li', 'h1', 'h2', 'h3', 'h4', 'em', 'strong', 'td', 'tr']
d['spell_check_case_sensitive_sort'] = False
d['inline_spell_check'] = True
d['custom_themes'] = {}
d['remove_unused_classes'] = False
d['global_book_toolbar'] = [
'new-file', 'open-book', 'save-book', None, 'global-undo', 'global-redo', 'create-checkpoint', None, 'donate', 'user-manual']
d['global_tools_toolbar'] = ['check-book', 'spell-check-book', 'edit-toc', 'insert-character', 'manage-fonts', 'smarten-punctuation', 'remove-unused-css']
d['editor_css_toolbar'] = ['pretty-current', 'insert-image']
d['editor_xml_toolbar'] = ['pretty-current', 'insert-tag']
d['editor_html_toolbar'] = ['fix-html-current', 'pretty-current', 'insert-image', 'insert-hyperlink', 'insert-tag', 'change-paragraph']
d['editor_format_toolbar'] = [('format-text-' + x) if x else x for x in (
'bold', 'italic', 'underline', 'strikethrough', 'subscript', 'superscript',
None, 'color', 'background-color', None, 'justify-left', 'justify-center',
'justify-right', 'justify-fill')]
d['spell_check_case_sensitive_search'] = False
d['add_cover_preserve_aspect_ratio'] = False
del d
ucase_map = {l:string.ascii_uppercase[i] for i, l in enumerate(string.ascii_lowercase)}
def capitalize(x):
return ucase_map[x[0]] + x[1:]
_current_container = None
def current_container():
return _current_container
def set_current_container(container):
global _current_container
_current_container = container
class NonReplaceDict(dict):
def __setitem__(self, k, v):
if k in self:
raise ValueError('The key %s is already present' % k)
dict.__setitem__(self, k, v)
actions = NonReplaceDict()
editors = NonReplaceDict()
toolbar_actions = NonReplaceDict()
editor_toolbar_actions = {
'format':NonReplaceDict(), 'html':NonReplaceDict(), 'xml':NonReplaceDict(), 'css':NonReplaceDict()}
TOP = object()
dictionaries = Dictionaries()
def editor_name(editor):
for n, ed in editors.iteritems():<|fim▁hole|> dictionaries.initialize()
try:
dictionaries.default_locale = parse_lang_code(lang)
if dictionaries.default_locale.langcode == 'und':
raise ValueError('')
except ValueError:
dictionaries.default_locale = dictionaries.ui_locale
from calibre.gui2.tweak_book.editor.syntax.html import refresh_spell_check_status
refresh_spell_check_status()
def verify_link(url, name=None):
if _current_container is None or name is None:
return None
target = _current_container.href_to_name(url, name)
if _current_container.has_name(target):
return True
if url.startswith('#'):
return True
if url.partition(':')[0] in {'http', 'https', 'mailto'}:
return True
return False<|fim▁end|>
|
if ed is editor:
return n
def set_book_locale(lang):
|
<|file_name|>ScrollPanesAPICssTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package test.css.controls.api;
import org.junit.Test;
import client.test.Keywords;
import client.test.Smoke;
import org.junit.BeforeClass;
import org.junit.Before;
import test.javaclient.shared.TestBase;
import static test.css.controls.ControlPage.ScrollPanes;
import test.javaclient.shared.screenshots.ScreenshotUtils;
/**
* Generated test
*/<|fim▁hole|> {
ScreenshotUtils.setComparatorDistance(0.003f);
}
@BeforeClass
public static void runUI() {
test.css.controls.api.APIStylesApp.main(null);
}
@Before
public void createPage () {
((test.css.controls.api.APIStylesApp)getApplication()).open(ScrollPanes);
}
/**
* test ScrollPane with css: -fx-border-color
*/
@Test
public void ScrollPanes_BORDER_COLOR() throws Exception {
testAdditionalAction(ScrollPanes.name(), "BORDER-COLOR", true);
}
/**
* test ScrollPane with css: -fx-border-width
*/
@Test
public void ScrollPanes_BORDER_WIDTH() throws Exception {
testAdditionalAction(ScrollPanes.name(), "BORDER-WIDTH", true);
}
/**
* test ScrollPane with css: -fx-border-width-dotted
*/
@Test
public void ScrollPanes_BORDER_WIDTH_dotted() throws Exception {
testAdditionalAction(ScrollPanes.name(), "BORDER-WIDTH-dotted", true);
}
/**
* test ScrollPane with css: -fx-border-width-dashed
*/
@Test
public void ScrollPanes_BORDER_WIDTH_dashed() throws Exception {
testAdditionalAction(ScrollPanes.name(), "BORDER-WIDTH-dashed", true);
}
/**
* test ScrollPane with css: -fx-border-inset
*/
@Test
public void ScrollPanes_BORDER_INSET() throws Exception {
testAdditionalAction(ScrollPanes.name(), "BORDER-INSET", true);
}
/**
* test ScrollPane with css: -fx-border-style-dashed
*/
@Test
public void ScrollPanes_BORDER_STYLE_DASHED() throws Exception {
testAdditionalAction(ScrollPanes.name(), "BORDER-STYLE-DASHED", true);
}
/**
* test ScrollPane with css: -fx-border-style-dotted
*/
@Test
public void ScrollPanes_BORDER_STYLE_DOTTED() throws Exception {
testAdditionalAction(ScrollPanes.name(), "BORDER-STYLE-DOTTED", true);
}
/**
* test ScrollPane with css: -fx-image-border
*/
@Test
public void ScrollPanes_IMAGE_BORDER() throws Exception {
testAdditionalAction(ScrollPanes.name(), "IMAGE-BORDER", true);
}
/**
* test ScrollPane with css: -fx-image-border-insets
*/
@Test
public void ScrollPanes_IMAGE_BORDER_INSETS() throws Exception {
testAdditionalAction(ScrollPanes.name(), "IMAGE-BORDER-INSETS", true);
}
/**
* test ScrollPane with css: -fx-image-border-no-repeat
*/
@Test
public void ScrollPanes_IMAGE_BORDER_NO_REPEAT() throws Exception {
testAdditionalAction(ScrollPanes.name(), "IMAGE-BORDER-NO-REPEAT", true);
}
/**
* test ScrollPane with css: -fx-image-border-repeat-x
*/
@Test
public void ScrollPanes_IMAGE_BORDER_REPEAT_X() throws Exception {
testAdditionalAction(ScrollPanes.name(), "IMAGE-BORDER-REPEAT-X", true);
}
/**
* test ScrollPane with css: -fx-image-border-repeat-y
*/
@Test
public void ScrollPanes_IMAGE_BORDER_REPEAT_Y() throws Exception {
testAdditionalAction(ScrollPanes.name(), "IMAGE-BORDER-REPEAT-Y", true);
}
/**
* test ScrollPane with css: -fx-image-border-round
*/
@Test
public void ScrollPanes_IMAGE_BORDER_ROUND() throws Exception {
testAdditionalAction(ScrollPanes.name(), "IMAGE-BORDER-ROUND", true);
}
/**
* test ScrollPane with css: -fx-image-border-space
*/
@Test
public void ScrollPanes_IMAGE_BORDER_SPACE() throws Exception {
testAdditionalAction(ScrollPanes.name(), "IMAGE-BORDER-SPACE", true);
}
public String getName() {
return "ControlCss";
}
}<|fim▁end|>
|
public class ScrollPanesAPICssTest extends TestBase {
|
<|file_name|>persona.go<|end_file_name|><|fim▁begin|>package persona
import (
"encoding/json"
"errors"
"io/ioutil"
"net/http"
"net/url"
)
type personaResponse struct {
Status string `json:"status"`
Email string `json:"email"`
}
func assert(audience, assertion string) (string, error) {
params := url.Values{}
params.Add("assertion", assertion)
params.Add("audience", audience)
resp, err := http.PostForm("https://verifier.login.persona.org/verify", params)
if err != nil {
return "", err
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
var f personaResponse
err = json.Unmarshal(body, &f)
<|fim▁hole|> if err != nil {
return "", err
}
if f.Status != "okay" {
return "", errors.New("Status not okay")
}
return f.Email, nil
}<|fim▁end|>
| |
<|file_name|>match-nowrap.rs<|end_file_name|><|fim▁begin|>// rustfmt-wrap_match_arms: false
// Match expressions, no unwrapping of block arms or wrapping of multiline
// expressions.
fn foo() {
match x {<|fim▁hole|> a => foo(),
b => (
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
),
}
}<|fim▁end|>
| |
<|file_name|>tomap.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> * Transforms object or iterable to map. Iterable needs to be in the format acceptable by the `Map` constructor.
*
* map = toMap( { 'foo': 1, 'bar': 2 } );
* map = toMap( [ [ 'foo', 1 ], [ 'bar', 2 ] ] );
* map = toMap( anotherMap );
*
*/
export default function toMap<T>(data: Record<string, T> | Array<[string, T]> | Map<string, T>): Map<string, T>;<|fim▁end|>
|
/**
|
<|file_name|>backup.ts<|end_file_name|><|fim▁begin|>import $copy from './copy'<|fim▁hole|>import $source from './source'
import $wrapList from './wrapList'
// function
const main = async (
source: string | string[],
): Promise<void> => {
const msg = `backed up ${$wrapList(source)}`
await Promise.all((await $source(source)).map(sub))
$info('backup', msg)
}
const sub = async (
src: string,
): Promise<void> => {
const suffix = $getExtname(src)
const extname = '.bak'
await $info.whisper($copy(src, '', { extname, suffix }))
}
// export
export default main<|fim▁end|>
|
import $getExtname from './getExtname'
import $info from './info'
|
<|file_name|>fm_demod.py<|end_file_name|><|fim▁begin|>#
# Copyright 2006,2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, optfir
from gnuradio.blks2impl.fm_emph import fm_deemph
from math import pi
class fm_demod_cf(gr.hier_block2):
"""
Generalized FM demodulation block with deemphasis and audio
filtering.<|fim▁hole|> produces an output float strem in the range of [-1.0, +1.0].
@param channel_rate: incoming sample rate of the FM baseband
@type sample_rate: integer
@param deviation: maximum FM deviation (default = 5000)
@type deviation: float
@param audio_decim: input to output decimation rate
@type audio_decim: integer
@param audio_pass: audio low pass filter passband frequency
@type audio_pass: float
@param audio_stop: audio low pass filter stop frequency
@type audio_stop: float
@param gain: gain applied to audio output (default = 1.0)
@type gain: float
@param tau: deemphasis time constant (default = 75e-6), specify 'None'
to prevent deemphasis
"""
def __init__(self, channel_rate, audio_decim, deviation,
audio_pass, audio_stop, gain=1.0, tau=75e-6):
gr.hier_block2.__init__(self, "fm_demod_cf",
gr.io_signature(1, 1, gr.sizeof_gr_complex), # Input signature
gr.io_signature(1, 1, gr.sizeof_float)) # Output signature
k = channel_rate/(2*pi*deviation)
QUAD = gr.quadrature_demod_cf(k)
audio_taps = optfir.low_pass(gain, # Filter gain
channel_rate, # Sample rate
audio_pass, # Audio passband
audio_stop, # Audio stopband
0.1, # Passband ripple
60) # Stopband attenuation
LPF = gr.fir_filter_fff(audio_decim, audio_taps)
if tau is not None:
DEEMPH = fm_deemph(channel_rate, tau)
self.connect(self, QUAD, DEEMPH, LPF, self)
else:
self.connect(self, QUAD, LPF, self)
class demod_20k0f3e_cf(fm_demod_cf):
"""
NBFM demodulation block, 20 KHz channels
This block demodulates a complex, downconverted, narrowband FM
channel conforming to 20K0F3E emission standards, outputting
floats in the range [-1.0, +1.0].
@param sample_rate: incoming sample rate of the FM baseband
@type sample_rate: integer
@param audio_decim: input to output decimation rate
@type audio_decim: integer
"""
def __init__(self, channel_rate, audio_decim):
fm_demod_cf.__init__(self, channel_rate, audio_decim,
5000, # Deviation
3000, # Audio passband frequency
4500) # Audio stopband frequency
class demod_200kf3e_cf(fm_demod_cf):
"""
WFM demodulation block, mono.
This block demodulates a complex, downconverted, wideband FM
channel conforming to 200KF3E emission standards, outputting
floats in the range [-1.0, +1.0].
@param sample_rate: incoming sample rate of the FM baseband
@type sample_rate: integer
@param audio_decim: input to output decimation rate
@type audio_decim: integer
"""
def __init__(self, channel_rate, audio_decim):
fm_demod_cf.__init__(self, channel_rate, audio_decim,
75000, # Deviation
15000, # Audio passband
16000, # Audio stopband
20.0) # Audio gain<|fim▁end|>
|
This block demodulates a band-limited, complex down-converted FM
channel into the the original baseband signal, optionally applying
deemphasis. Low pass filtering is done on the resultant signal. It
|
<|file_name|>GElektra.py<|end_file_name|><|fim▁begin|>from ..module import get_introspection_module
from ..overrides import override
import warnings
GElektra = get_introspection_module('GElektra')
def __func_alias(klass, old, new):
    # Bind klass.<old> under the additional name klass.<new>; the old
    # name remains available.
    func = getattr(klass, old)
    setattr(klass, new, func)
def __func_rename(klass, old, new):
    # Move klass.<old> to klass.<new>: alias it first, then remove the
    # old attribute so only the new name survives.
    __func_alias(klass, old, new)
    delattr(klass, old)
__all__ = []
## make the enums global
# Re-export uppercase GElektra.KeySwitch members as module-level KEY_*
# constants (mirrors the C API naming, e.g. KEY_END, KEY_VALUE, KEY_META).
for n in GElektra.KeySwitch.__dict__:
    if n.isupper():
        globals()['KEY_' + n] = getattr(GElektra.KeySwitch, n)
        __all__.append('KEY_' + n)
# Likewise uppercase KdbOptions members become KDB_O_* constants.
for n in GElektra.KdbOptions.__dict__:
    if n.isupper():
        globals()['KDB_O_' + n] = getattr(GElektra.KdbOptions, n)
        __all__.append('KDB_O_' + n)
# Sentinel terminating a KeySet(...) argument list (counterpart of C's KS_END).
KS_END = None
__all__.append('KS_END')
# exceptions
class Exception(Exception):
    # Base exception for the Elektra bindings. NOTE: deliberately shadows
    # the builtin inside this module; the base-class reference still
    # resolves to the builtin at class-creation time.
    def __init__(self, args = "Exception thrown by Elektra"):
        super().__init__(args)

class KeyException(Exception):
    # Raised by Key operations, typically when invoked on a null key.
    def __init__(self, args = "Exception thrown by a Key, typically "
            "because you called a method on a null key. "
            "Make sure to check this with !key first"):
        super().__init__(args)

class KeyInvalidName(KeyException):
    # Raised when a key name lacks a valid namespace prefix.
    def __init__(self, args = "Invalid Keyname: keyname needs to start "
            "with user/ or system/"):
        super().__init__(args)
__all__.extend([ 'Exception', 'KeyException', 'KeyInvalidName' ])
## Key
# rename gi-specific functions
__func_rename(GElektra.Key, 'gi_init', '_init')
__func_rename(GElektra.Key, 'gi_make', '_make')
__func_rename(GElektra.Key, 'gi_getstring', '_getstring')
__func_rename(GElektra.Key, 'gi_getbinary', '_getbinary')
# Python API convenience
__func_rename(GElektra.Key, 'cmp', '__cmp__')
__func_rename(GElektra.Key, 'setname', '_setname')
__func_rename(GElektra.Key, 'setbasename', '_setbasename')
__func_rename(GElektra.Key, 'getnamesize', '_getnamesize')
__func_rename(GElektra.Key, 'getbasenamesize', '_getbasenamesize')
__func_rename(GElektra.Key, 'getfullnamesize', '_getfullnamesize')
__func_rename(GElektra.Key, 'setstring', '_setstring')
__func_rename(GElektra.Key, 'setbinary', '_setbinary')
__func_rename(GElektra.Key, 'getvaluesize', '_getvaluesize')
__func_rename(GElektra.Key, 'rewindmeta', '_rewindmeta')
__func_rename(GElektra.Key, 'nextmeta', '_nextmeta')
__func_rename(GElektra.Key, 'currentmeta', '_currentmeta')
class Key(GElektra.Key):
def __new__(cls, *args):
# copy constructor
if len(args) == 1 and isinstance(args[0], cls):
return super()._make(args[0])
return super().__new__(cls, args)
def __init__(self, *args):
super().__init__()
if len(args) == 0:
return
arg0, *args = args
# copy constructor has been used, no init needed
if isinstance(arg0, self.__class__):
return
flags = 0
value = None
meta = {}
args = iter(args)
for arg in args:
if arg == KEY_END:
break
elif arg == KEY_SIZE:
# ignore value
next(args)
elif arg == KEY_VALUE:
value = next(args)
elif arg == KEY_FUNC:
raise TypeError("Unsupported meta type")
elif arg == KEY_FLAGS:
flags = next(args)
elif arg == KEY_META:
k = next(args)
meta[k] = next(args)
elif isinstance(arg, GElektra.KeySwitch):
warnings.warn("Deprecated option in keyNew: {0}".format(arg),
DeprecationWarning)
flags |= arg
else:
warnings.warn("Unknown option in keyNew: {0}".format(arg),
RuntimeWarning)
# _init clears our key
if isinstance(value, bytes):
super()._init(arg0, flags | KEY_BINARY, None, value)
else:
super()._init(arg0, flags & ~KEY_BINARY, value, None)
for k in meta:
self.setmeta(k, meta[k])
def _setname(self, name):
ret = super()._setname(name)
if ret < 0:
raise KeyInvalidName()
return ret
def _setbasename(self, name):
ret = super()._setbasename(name)
if ret < 0:
raise KeyInvalidName()
return ret
def addbasename(self, name):
ret = super().addbasename(name)
if ret < 0:
raise KeyInvalidName()
return ret
def get(self):
"""returns the keys value"""
if self.isbinary():
return self._getbinary()
return self._getstring()
def set(self, value):
"""set the keys value. Can be either string or binary"""
if isinstance(value, bytes):
return self._setbinary(value)
return self._setstring(str(value))
def getmeta(self, name = None):
"""returns a metakey given by name. Name can be either string or Key.
If no metakey is found None is returned.
If name is omitted an iterator object is returned.
"""
if name is not None:
meta = super().getmeta(name)
return meta if meta else None
return self.__metaIter()
def setmeta(self, name, value):
"""set a new metakey consisting of name and value"""
if isinstance(value, str):
return super().setmeta(name, value)
raise TypeError("Unsupported value type")
def __metaIter(self):
self._rewindmeta()
meta = self._nextmeta()
while meta:
yield meta
meta = self._nextmeta()
def __str__(self):
return self.name
def __bool__(self):
return not self.isnull();
def __eq__(self, o):
return self.__cmp__(o) == 0
def __ne__(self, o):
return self.__cmp__(o) != 0
def __gt__(self, o):
return self.__cmp__(o) > 0
def __ge__(self, o):
return self.__cmp__(o) >= 0
def __lt__(self, o):
return self.__cmp__(o) < 0
def __le__(self, o):
return self.__cmp__(o) <= 0
name = property(lambda self: self.get_property('name'), _setname)
value = property(get, set, None, "Key value")
basename = property(lambda self: self.get_property('basename'), _setbasename)
fullname = property(lambda self: self.get_property('fullname'))
Key = override(Key)
__all__.append('Key')
## KeySet
# rename gi-specific functions
__func_rename(GElektra.KeySet, 'gi_append', 'append')
__func_rename(GElektra.KeySet, 'gi_append_keyset', '_append_keyset')
# Python API convenience
__func_rename(GElektra.KeySet, 'len', '__len__')
__func_rename(GElektra.KeySet, 'lookup_byname', '_lookup_byname')
__func_rename(GElektra.KeySet, 'rewind', '_rewind')
__func_rename(GElektra.KeySet, 'next', '_next')
__func_rename(GElektra.KeySet, 'current', '_current')
__func_rename(GElektra.KeySet, 'atcursor', '_atcursor')
class KeySet(GElektra.KeySet):
def __new__(cls, *args):
if len(args) == 1 and isinstance(args[0], __class__):
return super().dup(args[0])
return super().__new__(cls, args)
def __init__(self, *args):
super().__init__()
if len(args) == 0:
return
arg0, *args = args
if isinstance(arg0, __class__):
return
self.resize(arg0)
for arg in args:
if arg is KS_END:
break
self.append(arg)
def lookup(self, name):
"""Lookup a key by name. Name can be either string, Key or indexes.
If index is negative, search starts at the end.
Returns None if no key is found.
"""
if isinstance(name, Key):
key = super().lookup(name, KDB_O_NONE)
elif isinstance(name, str):
key = self._lookup_byname(name, KDB_O_NONE)
elif isinstance(name, int):
key = self._atcursor(name)
else:
raise TypeError("Unsupported type")
return key if key else None
def append(self, data):
if isinstance(data, __class__):
return self._append_keyset(data)
return super().append(data)
def __getitem__(self, key):
"""See lookup(...) for details.
Slices and negative indexes are supported as well.
"""
if isinstance(key, slice):
return [ self[k] for k in range(*key.indices(len(self))) ]
elif isinstance(key, ( int )):
item = self.lookup(key)
if item is None:
raise IndexError("index out of range")
return item
elif isinstance(key, ( str, Key )):
item = self.lookup(key)
if item is None:
raise KeyError(str(key))
return item
raise TypeError("Invalid argument type")
def __contains__(self, item):
"""See lookup(...) for details"""<|fim▁hole|> raise TypeError("Invalid argument type")
def __iter__(self):
i = 0
key = self.lookup(i)
while key:
yield key
i = i + 1
key = self.lookup(i)
KeySet = override(KeySet)
__all__.append('KeySet')
## Kdb
# rename gi-specific functions
__func_rename(GElektra.Kdb, 'gi_open', 'open')
class Kdb(GElektra.Kdb):
    """Pythonic wrapper around GElektra.Kdb (a key database handle).

    The database is opened on construction; the object can be used as a
    context manager, which closes the handle on exit.
    """

    def __init__(self, *args):
        # Optional single argument: the error key handed to open().
        super().__init__()
        self.open(args[0] if len(args) else Key())

    def get(self, ks, parent):
        # Convenience: accept a plain string as the parent key name.
        if isinstance(parent, str):
            parent = Key(parent)
        return super().get(ks, parent)

    def set(self, ks, parent):
        # Convenience: accept a plain string as the parent key name.
        if isinstance(parent, str):
            parent = Key(parent)
        super().set(ks, parent)

    def __enter__(self):
        """Internal method for usage with context managers"""
        return self

    def __exit__(self, type, value, tb):
        """Internal method for usage with context managers.
        Closes the database.
        """
        # Best-effort close: teardown errors are deliberately swallowed so
        # they do not mask an exception raised inside the with-block.
        try:
            self.close(Key())
        except:
            pass
KDB = Kdb
__all__.extend([ 'Kdb', 'KDB' ])<|fim▁end|>
|
if isinstance(item, ( str, Key )):
key = self.lookup(item)
return True if key else False
|
<|file_name|>stream_blob00.rs<|end_file_name|><|fim▁begin|>#![cfg(all(test, feature = "test_e2e"))]
use azure_sdk_core::prelude::*;
use azure_sdk_core::{range::Range, DeleteSnapshotsMethod};
use azure_sdk_storage_blob::prelude::*;
use azure_sdk_storage_core::prelude::*;
use futures::stream::StreamExt;
#[tokio::test]
async fn create_blob_and_stream_back() {
code().await.unwrap();
}<|fim▁hole|>
// First we retrieve the account name and master key from environment variables.
let account =
std::env::var("STORAGE_ACCOUNT").expect("Set env variable STORAGE_ACCOUNT first!");
let master_key =
std::env::var("STORAGE_MASTER_KEY").expect("Set env variable STORAGE_MASTER_KEY first!");
let client = client::with_access_key(&account, &master_key);
if client
.list_containers()
.finalize()
.await?
.incomplete_vector
.iter()
.find(|x| x.name == container_name)
.is_none()
{
client
.create_container()
.with_container_name(container_name)
.with_public_access(PublicAccess::Blob)
.finalize()
.await?;
}
let string = "0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF";
client
.put_block_blob()
.with_container_name(&container_name)
.with_blob_name(file_name)
.with_content_type("text/plain")
.with_body(string.as_ref())
.finalize()
.await?;
println!("{}/{} blob created!", container_name, file_name);
for dropped_suffix_len in &[3usize, 2, 1, 0] {
// this is how you stream data from azure blob. Notice that you have
// to specify the range requested. Also make sure to specify how big
// a chunk is going to be. Bigger chunks are of course more efficient as the
// http overhead will be less but it also means you will have to wait for more
// time before receiving anything. In this example we use an awkward value
// just to make the test worthwile.
let slice_range = 0..(string.len() - dropped_suffix_len);
let expected_string = &string[slice_range.clone()];
let range: Range = slice_range.into();
let chunk_size: usize = 4;
let mut stream = Box::pin(
client
.stream_blob()
.with_container_name(&container_name)
.with_blob_name(file_name)
.with_range(&range)
.with_chunk_size(chunk_size as u64)
.finalize(),
);
let result = std::rc::Rc::new(std::cell::RefCell::new(Vec::new()));
{
let mut res_closure = result.borrow_mut();
while let Some(value) = stream.next().await {
let mut value = value?;
assert!(value.len() <= chunk_size);
println!("received {:?} bytes", value.len());
res_closure.append(&mut value);
}
}
let returned_string = {
let rlock = result.borrow();
String::from_utf8(rlock.to_vec())?
};
println!(
"dropped_suffix_len == {} returned_string == {}",
dropped_suffix_len, returned_string
);
assert_eq!(expected_string, returned_string);
}
client
.delete_blob()
.with_container_name(&container_name)
.with_blob_name(file_name)
.with_delete_snapshots_method(DeleteSnapshotsMethod::Include)
.finalize()
.await?;
println!("{}/{} blob deleted!", container_name, file_name);
Ok(())
}<|fim▁end|>
|
async fn code() -> Result<(), Box<dyn std::error::Error>> {
let container_name = "azuresdkforrust";
let file_name = "azure_sdk_for_rust_stream_test.txt";
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.