prompt: large_string, lengths 70 to 991k
completion: large_string, lengths 0 to 1.02k
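Each row below is one fill-in-the-middle sample: the prompt carries a file name and the code surrounding a hole, and the completion is the text that fills the hole. A minimal sketch of reassembling a row, assuming the marker strings exactly as they appear in the samples (the helper is illustrative, not a dataset API):

```python
# A minimal sketch of reassembling one row (marker strings are taken from
# the rows below; `split_fim_sample` is my name, not a dataset API):
def split_fim_sample(prompt: str, completion: str):
    header, _, body = prompt.partition("<|end_file_name|>")
    file_name = header.removeprefix("<|file_name|>")
    prefix, _, rest = body.partition("<|fim▁hole|>")
    prefix = prefix.removeprefix("<|fim▁begin|>")
    suffix = rest.removesuffix("<|fim▁end|>")
    full_file = prefix + completion + suffix  # the reconstructed source file
    return file_name, full_file
```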

<|file_name|>lib.rs<|end_file_name|>
<|fim▁begin|>// Copyright 2015 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#![deny(missing_docs)]

//! An efficient, low-level, bindless graphics API for Rust. See [the
//! blog](http://gfx-rs.github.io/) for explanations and annotated examples.
<|fim▁hole|>
extern crate draw_state;
extern crate gfx_core;

/// public re-exported traits
pub mod traits {
    pub use gfx_core::{Device, Factory, DeviceFence};
    pub use factory::FactoryExt;
}

// draw state re-exports
pub use draw_state::{preset, state};
pub use draw_state::target::*;

// public re-exports
pub use gfx_core as core;
pub use gfx_core::{Device, Resources, Primitive};
pub use gfx_core::{VertexCount, InstanceCount};
pub use gfx_core::{ShaderSet, VertexShader, HullShader, DomainShader, GeometryShader,
                   PixelShader};
pub use gfx_core::{format, handle, tex};
pub use gfx_core::factory::{Factory, Usage, Bind, MapAccess,
                            ResourceViewError, TargetViewError,
                            BufferRole, BufferInfo, BufferError, BufferUpdateError,
                            CombinedError,
                            RENDER_TARGET, DEPTH_STENCIL, SHADER_RESOURCE, UNORDERED_ACCESS,
                            cast_slice};
pub use gfx_core::draw::{CommandBuffer, InstanceOption};
pub use gfx_core::shade::{ProgramInfo, UniformValue};
pub use encoder::{Encoder, UpdateError};
pub use factory::PipelineStateError;
pub use mesh::{Slice, ToIndexSlice, SliceKind};
pub use pso::{PipelineState};
pub use pso::buffer::{VertexBuffer, InstanceBuffer, RawVertexBuffer,
                      ConstantBuffer, Global};
pub use pso::resource::{ShaderResource, RawShaderResource, UnorderedAccess,
                        Sampler, TextureSampler};
pub use pso::target::{DepthStencilTarget, DepthTarget, StencilTarget,
                      RenderTarget, RawRenderTarget, BlendTarget, BlendRef, Scissor};

/// Render commands encoder
mod encoder;
/// Factory extensions
mod factory;
/// Meshes
mod mesh;
/// Pipeline states
pub mod pso;
/// Shaders
pub mod shade;
/// Convenience macros
pub mod macros;<|fim▁end|>
#[macro_use] extern crate log;

<|file_name|>ResultAssociator.java<|end_file_name|>
<|fim▁begin|>package com.exabilan.interfaces;

import java.util.List;

import com.exabilan.types.exalang.Answer;
import com.exabilan.types.exalang.ExaLang;
import com.exabilan.types.exalang.Question;

public interface ResultAssociator {

    /**
     * Finds the question corresponding to a question number for a given version of Exalang
     */
    Question getQuestion(ExaLang exalang, int questionNumber);
<|fim▁hole|>
    /**
     * Parses the answers written in Exalang specific storage files
     */
    List<Answer> parseAnswer(String result);
}<|fim▁end|>

<|file_name|>arrayShift.js<|end_file_name|>
<|fim▁begin|>/**
 * Created by zad on 17/4/20.
 */

/** to left shift an Array
 * @param {Array} arr
 * @param {Number} num
 * @return {Array}
 */
function leftShift(arr, num) {
  const result = arr.concat();<|fim▁hole|>
    return rightShift(arr, -num);
  }
  while (num > 0) {
    result.push(result.shift());
    num--;
  }
  return result;
}

/** to right shift an Array
 * @param {Array} arr
 * @param {Number} num
 * @return {Array}
 */
function rightShift(arr, num) {
  return leftShift(arr, arr.length - num);
}

export {leftShift, rightShift};<|fim▁end|>
if (num < 0) {
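The pair of functions above define one rotation in terms of the other: a right shift by `num` is a left shift by `arr.length - num`, and the completion's `if (num < 0)` guard funnels negative counts through the mirrored function. The same identity in a short Python sketch (function names are mine, not from the dataset):

```python
# A minimal Python sketch of the rotation identity the JS sample encodes:
def left_shift(arr, num):
    if num < 0:                       # mirrors the line the completion fills in
        return right_shift(arr, -num)
    if arr:
        num %= len(arr)               # slicing replaces the pop/push loop
    return arr[num:] + arr[:num]

def right_shift(arr, num):
    return left_shift(arr, len(arr) - num)

assert left_shift([1, 2, 3, 4], 1) == [2, 3, 4, 1]
assert right_shift([1, 2, 3, 4], 1) == [4, 1, 2, 3]
```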

<|file_name|>modules_handler.py<|end_file_name|>
<|fim▁begin|>#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A handler that displays servers and their instances."""

from google.appengine.tools.devappserver2.admin import admin_request_handler


class ModulesHandler(admin_request_handler.AdminRequestHandler):<|fim▁hole|><|fim▁end|>
  def get(self):
    values = {'modules': self.dispatcher.modules}
    self.response.write(self.render('modules.html', values))

<|file_name|>cache.py<|end_file_name|>
<|fim▁begin|>from __future__ import annotations
import abc
import shutil
import functools
from pathlib import Path
import urllib.parse<|fim▁hole|>
import logging

from edgar_code.types import PathLike, Serializer, UserDict
from edgar_code.util.picklable_threading import RLock


logger = logging.getLogger(__name__)


CacheKey = TypeVar('CacheKey')
CacheReturn = TypeVar('CacheReturn')
CacheFunc = TypeVar('CacheFunc', bound=Callable[..., Any])


class Cache:
    @classmethod
    def decor(
            cls,
            obj_store: Callable[[str], ObjectStore[CacheKey, CacheReturn]],
            hit_msg: bool = False,
            miss_msg: bool = False,
            suffix: str = '',
    ) -> Callable[[CacheFunc], CacheFunc]:
        '''Decorator that creates a cached function

            >>> @Cache.decor(ObjectStore())
            >>> def foo():
            ...     pass

        '''
        def decor_(function: CacheFunc) -> CacheFunc:
            return cast(
                CacheFunc,
                functools.wraps(function)(
                    cls(obj_store, function, hit_msg, miss_msg, suffix)
                )
            )
        return decor_

    disabled: bool

    #pylint: disable=too-many-arguments
    def __init__(
            self,
            obj_store: Callable[[str], ObjectStore[CacheKey, CacheReturn]],
            function: CacheFunc,
            hit_msg: bool = False,
            miss_msg: bool = False,
            suffix: str = ''
    ) -> None:
        '''Cache a function.

        Note this uses `function.__qualname__` to determine the file
        name. If this is not unique within your program, define
        suffix.

        Note this uses `function.version` when defined, so objects of
        the same functions of different versions will not collide.

        '''
        self.function = function
        self.name = '-'.join(filter(bool, [
            self.function.__qualname__,
            suffix,
            getattr(self.function, 'version', ''),
        ]))
        self.obj_store = obj_store(self.name)
        self.hit_msg = hit_msg
        self.miss_msg = miss_msg
        self.sem = RLock()
        self.__qualname__ = f'Cache({self.name})'
        self.disabled = False

    def __call__(self, *pos_args: Any, **kwargs: Any) -> Any:
        if self.disabled:
            return self.function(*pos_args, **kwargs)
        else:
            with self.sem:
                args_key = self.obj_store.args2key(pos_args, kwargs)
                if args_key in self.obj_store:
                    if self.hit_msg:
                        logger.info('hit %s with %s, %s',
                                    self.name, pos_args, kwargs)
                    res = self.obj_store[args_key]
                else:
                    if self.miss_msg:
                        logger.info('miss %s with %s, %s',
                                    self.name, pos_args, kwargs)
                    res = self.function(*pos_args, **kwargs)
                    self.obj_store[args_key] = res
                return res

    def clear(self) -> None:
        '''Removes all cached items'''
        self.obj_store.clear()

    def __str__(self) -> str:
        store_type = type(self.obj_store).__name__
        return f'Cache of {self.name} with {store_type}'


ObjectStoreKey = TypeVar('ObjectStoreKey')
ObjectStoreValue = TypeVar('ObjectStoreValue')


class ObjectStore(UserDict[ObjectStoreKey, ObjectStoreValue], abc.ABC):
    @classmethod
    def create(
            cls, *args: Any, **kwargs: Any
    ) -> Callable[[str], ObjectStore[ObjectStoreKey, ObjectStoreValue]]:
        '''Curried init. Name will be applied later.'''
        @functools.wraps(cls)
        def create_(name: str) -> ObjectStore[ObjectStoreKey, ObjectStoreValue]:
            return cls(*args, name=name, **kwargs)  # type: ignore
        return create_

    def __init__(self, name: str) -> None:
        super().__init__()
        self.name = name

    @abc.abstractmethod
    def args2key(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> ObjectStoreKey:
        # pylint: disable=unused-argument,no-self-use
        ...


class MemoryStore(ObjectStore[Hashable, Any]):
    def __init__(self, name: str):
        # pylint: disable=non-parent-init-called
        ObjectStore.__init__(self, name)

    def args2key(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Hashable:
        # pylint: disable=no-self-use
        return to_hashable((args, kwargs))


class FileStore(MemoryStore):
    '''An obj_store that persists at ./${CACHE_PATH}/${FUNCTION_NAME}_cache.pickle'''

    def __init__(
            self, cache_path: PathLike, name: str,
            serializer: Optional[Serializer] = None,
    ):
        # pylint: disable=non-parent-init-called,super-init-not-called
        ObjectStore.__init__(self, name)
        if serializer is None:
            import pickle
            self.serializer = cast(Serializer, pickle)
        else:
            self.serializer = serializer
        self.cache_path = pathify(cache_path) / (self.name + '_cache.pickle')
        self.loaded = False
        self.data = {}

    def load_if_not_loaded(self) -> None:
        if not self.loaded:
            self.loaded = True
            if self.cache_path.exists():
                with self.cache_path.open('rb') as fil:
                    self.data = self.serializer.load(fil)
            else:
                self.cache_path.parent.mkdir(parents=True, exist_ok=True)
                self.data = {}

    def args2key(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Hashable:
        # pylint: disable=no-self-use
        return to_hashable((args, kwargs))

    def commit(self) -> None:
        self.load_if_not_loaded()
        if self.data:
            with self.cache_path.open('wb') as fil:
                self.serializer.dump(self.data, fil)
        else:
            if self.cache_path.exists():
                print('deleting ', self.cache_path)
                self.cache_path.unlink()

    def __setitem__(self, key: Hashable, obj: Any) -> None:
        self.load_if_not_loaded()
        super().__setitem__(key, obj)
        self.commit()

    def __delitem__(self, key: Hashable) -> None:
        self.load_if_not_loaded()
        super().__delitem__(key)
        self.commit()

    def clear(self) -> None:
        self.load_if_not_loaded()
        super().clear()
        self.commit()


class DirectoryStore(ObjectStore[PathLike, Any]):
    '''Stores objects at ./${CACHE_PATH}/${FUNCTION_NAME}/${urlencode(args)}.pickle'''

    def __init__(
            self, object_path: PathLike, name: str,
            serializer: Optional[Serializer] = None
    ) -> None:
        # pylint: disable=non-parent-init-called
        ObjectStore.__init__(self, name)
        if serializer is None:
            import pickle
            self.serializer = cast(Serializer, pickle)
        else:
            self.serializer = serializer
        self.cache_path = pathify(object_path) / self.name

    def args2key(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> PathLike:
        if kwargs:
            args = args + (kwargs,)
        fname = urllib.parse.quote(f'{safe_str(args)}.pickle', safe='')
        return self.cache_path / fname

    def __setitem__(self, path: PathLike, obj: Any) -> None:
        path.parent.mkdir(parents=True, exist_ok=True)
        with path.open('wb') as fil:
            self.serializer.dump(obj, fil)

    def __delitem__(self, path: PathLike) -> None:
        path.unlink()

    def __getitem__(self, path: PathLike) -> Any:
        with path.open('rb') as fil:
            return self.serializer.load(fil)

    def __contains__(self, path: Any) -> bool:
        if hasattr(path, 'exists'):
            return bool(path.exists())
        else:
            return False

    def clear(self) -> None:
        print('deleting')
        if hasattr(self.cache_path, 'rmtree'):
            cast(Any, self.cache_path).rmtree()
        else:
            shutil.rmtree(str(self.cache_path))


def to_hashable(obj: Any) -> Hashable:
    '''Converts args and kwargs into a hashable type (overridable)'''
    try:
        hash(obj)
    except TypeError:
        if hasattr(obj, 'items'):
            # turn dictionaries into frozenset((key, val))
            # sorting is necessary to make equal dictionaries map to equal things
            # sorted(..., key=hash)
            return tuple(sorted(
                [(keyf, to_hashable(val)) for keyf, val in obj.items()],
                key=hash
            ))
        elif hasattr(obj, '__iter__'):
            # turn iterables into tuples
            return tuple(to_hashable(val) for val in obj)
        else:
            raise TypeError(f"I don't know how to hash {obj} ({type(obj)})")
    else:
        return cast(Hashable, obj)


def safe_str(obj: Any) -> str:
    '''
    Safe names are compact, unique, urlsafe, and equal when the objects are equal

    str does not work because x == y does not imply str(x) == str(y).

        >>> a = dict(d=1, e=1)
        >>> b = dict(e=1, d=1)
        >>> a == b
        True
        >>> str(a) == str(b)
        False
        >>> safe_str(a) == safe_str(b)
        True

    '''
    if isinstance(obj, int):
        ret = str(obj)
    elif isinstance(obj, float):
        ret = str(round(obj, 3))
    elif isinstance(obj, str):
        ret = repr(obj)
    elif isinstance(obj, list):
        ret = '[' + ','.join(map(safe_str, obj)) + ']'
    elif isinstance(obj, tuple):
        ret = '(' + ','.join(map(safe_str, obj)) + ')'
    elif isinstance(obj, dict):
        ret = '{' + ','.join(sorted(
            safe_str(key) + ':' + safe_str(val)
            for key, val in obj.items()
        )) + '}'
    else:
        raise TypeError()
    return urllib.parse.quote(ret, safe='')


def pathify(obj: Union[str, PathLike]) -> PathLike:
    if isinstance(obj, str):
        return Path(obj)
    else:
        return obj<|fim▁end|>
from typing import (
    Callable, Any, TypeVar, cast, Tuple, Dict, Optional, Union, Hashable,
)
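The docstring in `Cache.decor` only hints at usage; here is a minimal sketch of the decorator in action, assuming the `Cache`, `FileStore`, and `ObjectStore.create` definitions reproduced above (the directory and function names are illustrative):

```python
# Hypothetical usage of the sample's Cache + FileStore (path and function
# names are illustrative, not from the dataset):
@Cache.decor(FileStore.create('build/cache'), miss_msg=True)
def parse_filing(path: str) -> int:
    with open(path) as fil:
        return len(fil.read())

# The first call logs a miss, runs the function, and persists the result in
# build/cache/parse_filing_cache.pickle; a repeat call with the same
# argument is answered from the pickle without running parse_filing again.
```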

<|file_name|>ojrunnerlinux.py<|end_file_name|>
<|fim▁begin|>import lorun
import os
import codecs
import random
import subprocess
import config
import sys

RESULT_MAP = [
    2, 10, 5, 4, 3, 6, 11, 7, 12
]


class Runner:
    def __init__(self):
        return

    def compile(self, judger, srcPath, outPath):
        cmd = config.langCompile[judger.lang] % {'root': sys.path[0], 'src': srcPath, 'target': outPath}
        p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                             stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
        retval = p.wait()
        return (retval, p.stdout.read())

    def judge(self, judger, srcPath, outPath, inFile, ansFile, memlimit, timelimit):
        cmd = config.langRun[judger.lang] % {'src': srcPath, 'target': outPath}
        fout_path = "".join([sys.path[0], "/", "%s/%d.out" % (config.dataPath["tempPath"], random.randint(0, 65536))])
        if os.path.exists(fout_path):
            os.remove(fout_path)
        fin = open(inFile, 'rU')
        fout = open(fout_path, 'w')<|fim▁hole|>
        runcfg = {
            'args': cmd.split(" "),
            'fd_in': fin.fileno(),
            'fd_out': fout.fileno(),
            'timelimit': int(timelimit),
            'memorylimit': int(memlimit)
        }
        rst = lorun.run(runcfg)
        fin.close()
        fout.close()
        if rst['result'] == 0:
            fans = open(ansFile, 'rU')
            fout = open(fout_path, 'rU')
            crst = lorun.check(fans.fileno(), fout.fileno())
            fout.close()
            fans.close()
            return (RESULT_MAP[crst], int(rst['memoryused']), int(rst['timeused']))
        return (RESULT_MAP[rst['result']], 0, 0)<|fim▁end|>

<|file_name|>value.rs<|end_file_name|>
<|fim▁begin|>// Copyright 2015 The Delix Project Authors. See the AUTHORS file at the top level directory.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
<|fim▁hole|>#[derive(Clone, Debug, PartialEq, PartialOrd)]
pub enum Value {
    Counter(usize),
    Gauge(isize),
}<|fim▁end|>

<|file_name|>mpunreachnlri.py<|end_file_name|>
<|fim▁begin|># Copyright 2015-2017 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""BGP Attribute MP_UNREACH_NLRI
"""

import struct

from yabgp.message.attribute import Attribute
from yabgp.message.attribute import AttributeFlag
from yabgp.message.attribute import AttributeID
from yabgp.message.attribute.nlri.ipv4_mpls_vpn import IPv4MPLSVPN
from yabgp.message.attribute.nlri.ipv6_mpls_vpn import IPv6MPLSVPN
from yabgp.message.attribute.nlri.ipv4_flowspec import IPv4FlowSpec
from yabgp.message.attribute.nlri.ipv6_unicast import IPv6Unicast
from yabgp.message.attribute.nlri.labeled_unicast.ipv4 import IPv4LabeledUnicast
from yabgp.message.attribute.nlri.evpn import EVPN
from yabgp.message.attribute.nlri.linkstate import BGPLS
from yabgp.message.attribute.nlri.ipv4_srte import IPv4SRTE
from yabgp.common import afn
from yabgp.common import safn
from yabgp.common import exception as excep
from yabgp.common import constants as bgp_cons


class MpUnReachNLRI(Attribute):
    """
        This is an optional non-transitive attribute that can be used for the
    purpose of withdrawing multiple unfeasible routes from service.
        An UPDATE message that contains the MP_UNREACH_NLRI is not required
    to carry any other path attributes.

    MP_UNREACH_NLRI coding information
    +---------------------------------------------------------+
    | Address Family Identifier (2 octets)                    |
    +---------------------------------------------------------+
    | Subsequent Address Family Identifier (1 octet)          |
    +---------------------------------------------------------+
    | Withdrawn Routes (variable)                             |
    +---------------------------------------------------------+
    """
    ID = AttributeID.MP_UNREACH_NLRI
    FLAG = AttributeFlag.OPTIONAL + AttributeFlag.EXTENDED_LENGTH

    @classmethod
    def parse(cls, value):
        try:
            afi, safi = struct.unpack('!HB', value[0:3])
        except Exception:
            raise excep.UpdateMessageError(sub_error=bgp_cons.ERR_MSG_UPDATE_ATTR_LEN,
                                           data='')
        nlri_bin = value[3:]

        # for IPv4
        if afi == afn.AFNUM_INET:
            # VPNv4
            if safi == safn.SAFNUM_LAB_VPNUNICAST:
                nlri = IPv4MPLSVPN.parse(nlri_bin, iswithdraw=True)
                return dict(afi_safi=(afi, safi), withdraw=nlri)
            # BGP flow spec
            elif safi == safn.SAFNUM_FSPEC_RULE:
                # if nlri length is greater than 240 bytes, it is encoded over 2 bytes
                withdraw_list = []
                while nlri_bin:
                    length = ord(nlri_bin[0:1])
                    if length >> 4 == 0xf and len(nlri_bin) > 2:
                        length = struct.unpack('!H', nlri_bin[:2])[0]
                        nlri_tmp = nlri_bin[2: length + 2]
                        nlri_bin = nlri_bin[length + 2:]
                    else:
                        nlri_tmp = nlri_bin[1: length + 1]
                        nlri_bin = nlri_bin[length + 1:]
                    nlri = IPv4FlowSpec.parse(nlri_tmp)
                    if nlri:
                        withdraw_list.append(nlri)
                return dict(afi_safi=(afi, safi), withdraw=withdraw_list)
            else:
                return dict(afi_safi=(afn.AFNUM_INET, safi), withdraw=repr(nlri_bin))
        # for ipv6
        elif afi == afn.AFNUM_INET6:
            # for ipv6 unicast
            if safi == safn.SAFNUM_UNICAST:
                return dict(afi_safi=(afi, safi), withdraw=IPv6Unicast.parse(nlri_data=nlri_bin))
            elif safi == safn.SAFNUM_LAB_VPNUNICAST:
                return dict(afi_safi=(afi, safi), withdraw=IPv6MPLSVPN.parse(value=nlri_bin, iswithdraw=True))
            else:
                return dict(afi_safi=(afi, safi), withdraw=repr(nlri_bin))
        # for l2vpn
        elif afi == afn.AFNUM_L2VPN:
            # for evpn
            if safi == safn.SAFNUM_EVPN:
                return dict(afi_safi=(afi, safi), withdraw=EVPN.parse(nlri_data=nlri_bin))
            else:
                return dict(afi_safi=(afi, safi), withdraw=repr(nlri_bin))
        # BGP LS
        elif afi == afn.AFNUM_BGPLS:
            if safi == safn.SAFNUM_BGPLS:
                withdraw = BGPLS.parse(nlri_bin)
                return dict(afi_safi=(afi, safi), withdraw=withdraw)
            else:
                pass
        else:<|fim▁hole|>
    def construct(cls, value):
        """Construct a attribute

        :param value: python dictionary
            {'afi_safi': (1,128),
             'withdraw': []
        """
        afi, safi = value['afi_safi']
        if afi == afn.AFNUM_INET:
            if safi == safn.SAFNUM_LAB_VPNUNICAST:  # MPLS VPN
                nlri = IPv4MPLSVPN.construct(value['withdraw'], iswithdraw=True)
                if nlri:
                    attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri
                    return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
                        + struct.pack('!H', len(attr_value)) + attr_value
                else:
                    return None
            elif safi == safn.SAFNUM_FSPEC_RULE:
                try:
                    nlri_list = value.get('withdraw') or []
                    if not nlri_list:
                        return None
                    nlri_hex = b''
                    nlri_hex += IPv4FlowSpec.construct(value=nlri_list)
                    attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri_hex
                    return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
                        + struct.pack('!H', len(attr_value)) + attr_value
                except Exception:
                    raise excep.ConstructAttributeFailed(
                        reason='failed to construct attributes',
                        data=value
                    )
            elif safi == safn.SAFNUM_SRTE:
                try:
                    nlri_list = value.get('withdraw') or {}
                    if not nlri_list:
                        return None
                    nlri_hex = b''
                    nlri_hex += IPv4SRTE.construct(data=value['withdraw'])
                    attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri_hex
                    return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
                        + struct.pack('!H', len(attr_value)) + attr_value
                except Exception:
                    raise excep.ConstructAttributeFailed(
                        reason='failed to construct attributes',
                        data=value
                    )
            elif safi == safn.SAFNUM_MPLS_LABEL:
                try:
                    nlri_list = value.get('withdraw') or []
                    if not nlri_list:
                        return None
                    nlri_hex = b''
                    flag = 'withdraw'
                    nlri_hex += IPv4LabeledUnicast.construct(nlri_list, flag)
                    attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri_hex
                    return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
                        + struct.pack('!H', len(attr_value)) + attr_value
                except Exception:
                    raise excep.ConstructAttributeFailed(
                        reason='failed to construct attributes',
                        data=value
                    )
            else:
                raise excep.ConstructAttributeFailed(
                    reason='unsupport this sub address family',
                    data=value)
        elif afi == afn.AFNUM_INET6:
            if safi == safn.SAFNUM_UNICAST:
                nlri = IPv6Unicast.construct(nlri_list=value['withdraw'])
                if nlri:
                    attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri
                    return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
                        + struct.pack('!H', len(attr_value)) + attr_value
            elif safi == safn.SAFNUM_LAB_VPNUNICAST:
                nlri = IPv6MPLSVPN.construct(value=value['withdraw'], iswithdraw=True)
                if nlri:
                    attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri
                    return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
                        + struct.pack('!H', len(attr_value)) + attr_value
                else:
                    return None
        # for l2vpn
        elif afi == afn.AFNUM_L2VPN:
            # for evpn
            if safi == safn.SAFNUM_EVPN:
                nlri = EVPN.construct(nlri_list=value['withdraw'])
                if nlri:
                    attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri
                    return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
                        + struct.pack('!H', len(attr_value)) + attr_value
                else:
                    return None
        else:
            raise excep.ConstructAttributeFailed(
                reason='unsupport this sub address family',
                data=value)<|fim▁end|>
            return dict(afi_safi=(afi, safi), withdraw=repr(nlri_bin))

    @classmethod
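The docstring's byte layout and the `construct` branches fix the wire format: a 2-octet AFI, a 1-octet SAFI, then the withdrawn routes, all wrapped in a flag/type/length attribute header. A standalone sketch of just that framing (the flag and type-code values are inlined for illustration; the sample reads them from `AttributeFlag`/`AttributeID`):

```python
import struct

def pack_mp_unreach_attr(flag: int, type_id: int, afi: int, safi: int,
                         withdrawn: bytes) -> bytes:
    # attribute value = AFI (2 octets) + SAFI (1 octet) + withdrawn routes
    attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + withdrawn
    # header = flag (1) + type (1) + extended length (2), as in construct()
    return (struct.pack('!B', flag) + struct.pack('!B', type_id)
            + struct.pack('!H', len(attr_value)) + attr_value)

def unpack_body(value: bytes):
    afi, safi = struct.unpack('!HB', value[:3])  # same slice parse() takes
    return afi, safi, value[3:]

# 0x90 = OPTIONAL | EXTENDED_LENGTH, 15 = MP_UNREACH_NLRI (RFC 4760);
# AFI 2 / SAFI 1 is the IPv6-unicast pair.
attr = pack_mp_unreach_attr(0x90, 15, 2, 1, b'')
assert unpack_body(attr[4:]) == (2, 1, b'')
```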

<|file_name|>mod.rs<|end_file_name|>
<|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Some code that abstracts away much of the boilerplate of writing
//! `derive` instances for traits. Among other things it manages getting
//! access to the fields of the 4 different sorts of structs and enum
//! variants, as well as creating the method and impl ast instances.
//!
//! Supported features (fairly exhaustive):
//!
//! - Methods taking any number of parameters of any type, and returning
//!   any type, other than vectors, bottom and closures.
//! - Generating `impl`s for types with type parameters and lifetimes
//!   (e.g. `Option<T>`), the parameters are automatically given the
//!   current trait as a bound. (This includes separate type parameters
//!   and lifetimes for methods.)
//! - Additional bounds on the type parameters (`TraitDef.additional_bounds`)
//!
//! The most important thing for implementers is the `Substructure` and
//! `SubstructureFields` objects. The latter groups 5 possibilities of the
//! arguments:
//!
//! - `Struct`, when `Self` is a struct (including tuple structs, e.g
//!   `struct T(i32, char)`).
//! - `EnumMatching`, when `Self` is an enum and all the arguments are the
//!   same variant of the enum (e.g. `Some(1)`, `Some(3)` and `Some(4)`)
//! - `EnumNonMatchingCollapsed` when `Self` is an enum and the arguments
//!   are not the same variant (e.g. `None`, `Some(1)` and `None`).
//! - `StaticEnum` and `StaticStruct` for static methods, where the type
//!   being derived upon is either an enum or struct respectively. (Any
//!   argument with type Self is just grouped among the non-self
//!   arguments.)
//!
//! In the first two cases, the values from the corresponding fields in
//! all the arguments are grouped together. For `EnumNonMatchingCollapsed`
//! this isn't possible (different variants have different fields), so the
//! fields are inaccessible. (Previous versions of the deriving infrastructure
//! had a way to expand into code that could access them, at the cost of
//! generating exponential amounts of code; see issue #15375). There are no
//! fields with values in the static cases, so these are treated entirely
//! differently.
//!
//! The non-static cases have `Option<ident>` in several places associated
//! with field `expr`s. This represents the name of the field it is
//! associated with. It is only not `None` when the associated field has
//! an identifier in the source code. For example, the `x`s in the
//! following snippet
//!
//! ```rust
//! struct A { x : i32 }
//!
//! struct B(i32);
//!
//! enum C {
//!     C0(i32),
//!     C1 { x: i32 }
//! }
//! ```
//!
//! The `i32`s in `B` and `C0` don't have an identifier, so the
//! `Option<ident>`s would be `None` for them.
//!
//! In the static cases, the structure is summarised, either into the just
//! spans of the fields or a list of spans and the field idents (for tuple
//! structs and record structs, respectively), or a list of these, for
//! enums (one for each variant). For empty struct and empty enum
//! variants, it is represented as a count of 0.
//!
//! # "`cs`" functions
//!
//! The `cs_...` functions ("combine substructure) are designed to
//! make life easier by providing some pre-made recipes for common
//! threads; mostly calling the function being derived on all the
//! arguments and then combining them back together in some way (or
//! letting the user chose that). They are not meant to be the only
//! way to handle the structures that this code creates.
//!
//! # Examples
//!
//! The following simplified `PartialEq` is used for in-code examples:
//!
//! ```rust
//! trait PartialEq {
//!     fn eq(&self, other: &Self);
//! }
//! impl PartialEq for i32 {
//!     fn eq(&self, other: &i32) -> bool {
//!         *self == *other
//!     }
//! }
//! ```
//!
//! Some examples of the values of `SubstructureFields` follow, using the
//! above `PartialEq`, `A`, `B` and `C`.
//!
//! ## Structs
//!
//! When generating the `expr` for the `A` impl, the `SubstructureFields` is
//!
//! ```{.text}
//! Struct(vec![FieldInfo {
//!     span: <span of x>
//!     name: Some(<ident of x>),
//!     self_: <expr for &self.x>,
//!     other: vec![<expr for &other.x]
//! }])
//! ```
//!
//! For the `B` impl, called with `B(a)` and `B(b)`,
//!
//! ```{.text}
//! Struct(vec![FieldInfo {
//!     span: <span of `i32`>,
//!     name: None,
//!     self_: <expr for &a>
//!     other: vec![<expr for &b>]
//! }])
//! ```
//!
//! ## Enums
//!
//! When generating the `expr` for a call with `self == C0(a)` and `other
//! == C0(b)`, the SubstructureFields is
//!
//! ```{.text}
//! EnumMatching(0, <ast::Variant for C0>,
//!              vec![FieldInfo {
//!                 span: <span of i32>
//!                 name: None,
//!                 self_: <expr for &a>,
//!                 other: vec![<expr for &b>]
//!              }])
//! ```
//!
//! For `C1 {x}` and `C1 {x}`,
//!
//! ```{.text}
//! EnumMatching(1, <ast::Variant for C1>,
//!              vec![FieldInfo {
//!                 span: <span of x>
//!                 name: Some(<ident of x>),
//!                 self_: <expr for &self.x>,
//!                 other: vec![<expr for &other.x>]
//!              }])
//! ```
//!
//! For `C0(a)` and `C1 {x}` ,
//!
//! ```{.text}
//! EnumNonMatchingCollapsed(
//!     vec![<ident of self>, <ident of __arg_1>],
//!     &[<ast::Variant for C0>, <ast::Variant for C1>],
//!     &[<ident for self index value>, <ident of __arg_1 index value>])
//! ```
//!
//! It is the same for when the arguments are flipped to `C1 {x}` and
//! `C0(a)`; the only difference is what the values of the identifiers
//! <ident for self index value> and <ident of __arg_1 index value> will
//! be in the generated code.
//!
//! `EnumNonMatchingCollapsed` deliberately provides far less information
//! than is generally available for a given pair of variants; see #15375
//! for discussion.
//!
//! ## Static
//!
//! A static method on the types above would result in,
//!
//! ```{.text}
//! StaticStruct(<ast::StructDef of A>, Named(vec![(<ident of x>, <span of x>)]))
//!
//! StaticStruct(<ast::StructDef of B>, Unnamed(vec![<span of x>]))
//!
//! StaticEnum(<ast::EnumDef of C>,
//!            vec![(<ident of C0>, <span of C0>, Unnamed(vec![<span of i32>])),
//!                 (<ident of C1>, <span of C1>, Named(vec![(<ident of x>, <span of x>)]))])
//! ```

pub use self::StaticFields::*;
pub use self::SubstructureFields::*;

use self::StructType::*;

use std::cell::RefCell;
use std::vec;

use abi::Abi;
use abi;
use ast;
use ast::{EnumDef, Expr, Ident, Generics, StructDef};
use ast_util;
use attr;
use attr::AttrMetaMethods;
use ext::base::{ExtCtxt, Annotatable};
use ext::build::AstBuilder;
use codemap::{self, DUMMY_SP};
use codemap::Span;
use diagnostic::SpanHandler;
use fold::MoveMap;
use owned_slice::OwnedSlice;
use parse::token::InternedString;
use parse::token::special_idents;
use ptr::P;

use self::ty::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty};

pub mod ty;

pub struct TraitDef<'a> {
    /// The span for the current #[derive(Foo)] header.
    pub span: Span,

    pub attributes: Vec<ast::Attribute>,

    /// Path of the trait, including any type parameters
    pub path: Path<'a>,

    /// Additional bounds required of any type parameters of the type,
    /// other than the current trait
    pub additional_bounds: Vec<Ty<'a>>,

    /// Any extra lifetimes and/or bounds, e.g. `D: serialize::Decoder`
    pub generics: LifetimeBounds<'a>,

    pub methods: Vec<MethodDef<'a>>,

    pub associated_types: Vec<(ast::Ident, Ty<'a>)>,
}


pub struct MethodDef<'a> {
    /// name of the method
    pub name: &'a str,
    /// List of generics, e.g. `R: rand::Rng`
    pub generics: LifetimeBounds<'a>,

    /// Whether there is a self argument (outer Option) i.e. whether
    /// this is a static function, and whether it is a pointer (inner
    /// Option)
    pub explicit_self: Option<Option<PtrTy<'a>>>,

    /// Arguments other than the self argument
    pub args: Vec<Ty<'a>>,

    /// Return type
    pub ret_ty: Ty<'a>,

    pub attributes: Vec<ast::Attribute>,

    // Is it an `unsafe fn`?
    pub is_unsafe: bool,

    pub combine_substructure: RefCell<CombineSubstructureFunc<'a>>,
}

/// All the data about the data structure/method being derived upon.
pub struct Substructure<'a> {
    /// ident of self
    pub type_ident: Ident,
    /// ident of the method
    pub method_ident: Ident,
    /// dereferenced access to any `Self_` or `Ptr(Self_, _)` arguments
    pub self_args: &'a [P<Expr>],
    /// verbatim access to any other arguments
    pub nonself_args: &'a [P<Expr>],
    pub fields: &'a SubstructureFields<'a>
}

/// Summary of the relevant parts of a struct/enum field.
pub struct FieldInfo<'a> {
    pub span: Span,
    /// None for tuple structs/normal enum variants, Some for normal
    /// structs/struct enum variants.
    pub name: Option<Ident>,
    /// The expression corresponding to this field of `self`
    /// (specifically, a reference to it).
    pub self_: P<Expr>,
    /// The expressions corresponding to references to this field in
    /// the other `Self` arguments.
    pub other: Vec<P<Expr>>,
    /// The attributes on the field
    pub attrs: &'a [ast::Attribute],
}

/// Fields for a static method
pub enum StaticFields {
    /// Tuple structs/enum variants like this.
    Unnamed(Vec<Span>),
    /// Normal structs/struct variants.
    Named(Vec<(Ident, Span)>),
}

/// A summary of the possible sets of fields.
pub enum SubstructureFields<'a> {
    Struct(Vec<FieldInfo<'a>>),
    /// Matching variants of the enum: variant index, ast::Variant,
    /// fields: the field name is only non-`None` in the case of a struct
    /// variant.
    EnumMatching(usize, &'a ast::Variant, Vec<FieldInfo<'a>>),

    /// Non-matching variants of the enum, but with all state hidden from
    /// the consequent code. The first component holds `Ident`s for all of
    /// the `Self` arguments; the second component is a slice of all of the
    /// variants for the enum itself, and the third component is a list of
    /// `Ident`s bound to the variant index values for each of the actual
    /// input `Self` arguments.
    EnumNonMatchingCollapsed(Vec<Ident>, &'a [P<ast::Variant>], &'a [Ident]),

    /// A static method where `Self` is a struct.
    StaticStruct(&'a ast::StructDef, StaticFields),
    /// A static method where `Self` is an enum.
    StaticEnum(&'a ast::EnumDef, Vec<(Ident, Span, StaticFields)>),
}

/// Combine the values of all the fields together. The last argument is
/// all the fields of all the structures.
pub type CombineSubstructureFunc<'a> =
    Box<FnMut(&mut ExtCtxt, Span, &Substructure) -> P<Expr> + 'a>;

/// Deal with non-matching enum variants.  The tuple is a list of
/// identifiers (one for each `Self` argument, which could be any of the
/// variants since they have been collapsed together) and the identifiers
/// holding the variant index value for each of the `Self` arguments.  The
/// last argument is all the non-`Self` args of the method being derived.
pub type EnumNonMatchCollapsedFunc<'a> =
    Box<FnMut(&mut ExtCtxt, Span, (&[Ident], &[Ident]), &[P<Expr>]) -> P<Expr> + 'a>;

pub fn combine_substructure<'a>(f: CombineSubstructureFunc<'a>)
    -> RefCell<CombineSubstructureFunc<'a>> {
    RefCell::new(f)
}

/// This method helps to extract all the type parameters referenced from a
/// type. For a type parameter `<T>`, it looks for either a `TyPath` that
/// is not global and starts with `T`, or a `TyQPath`.
fn find_type_parameters(ty: &ast::Ty, ty_param_names: &[ast::Name]) -> Vec<P<ast::Ty>> {
    use visit;

    struct Visitor<'a> {
        ty_param_names: &'a [ast::Name],
        types: Vec<P<ast::Ty>>,
    }

    impl<'a> visit::Visitor<'a> for Visitor<'a> {
        fn visit_ty(&mut self, ty: &'a ast::Ty) {
            match ty.node {
                ast::TyPath(_, ref path) if !path.global => {
                    match path.segments.first() {
                        Some(segment) => {
                            if self.ty_param_names.contains(&segment.identifier.name) {
                                self.types.push(P(ty.clone()));
                            }
                        }
                        None => {}
                    }
                }
                _ => {}
            }

            visit::walk_ty(self, ty)
        }
    }

    let mut visitor = Visitor {
        ty_param_names: ty_param_names,
        types: Vec::new(),
    };

    visit::Visitor::visit_ty(&mut visitor, ty);

    visitor.types
}

impl<'a> TraitDef<'a> {
    pub fn expand(&self,
                  cx: &mut ExtCtxt,
                  mitem: &ast::MetaItem,
                  item: &'a Annotatable,
                  push: &mut FnMut(Annotatable))
    {
        match *item {
            Annotatable::Item(ref item) => {
                let newitem = match item.node {
                    ast::ItemStruct(ref struct_def, ref generics) => {
                        self.expand_struct_def(cx,
                                               &struct_def,
                                               item.ident,
                                               generics)
                    }
                    ast::ItemEnum(ref enum_def, ref generics) => {
                        self.expand_enum_def(cx,
                                             enum_def,
                                             &item.attrs,
                                             item.ident,
                                             generics)
                    }
                    _ => {
                        cx.span_err(mitem.span,
                                    "`derive` may only be applied to structs and enums");
                        return;
                    }
                };
                // Keep the lint attributes of the previous item to control how the
                // generated implementations are linted
                let mut attrs = newitem.attrs.clone();
                attrs.extend(item.attrs.iter().filter(|a| {
                    match &a.name()[..] {
                        "allow" | "warn" | "deny" | "forbid" => true,
                        _ => false,
                    }
                }).cloned());
                push(Annotatable::Item(P(ast::Item {
                    attrs: attrs,
                    ..(*newitem).clone()
                })))
            }
            _ => {
                cx.span_err(mitem.span,
                            "`derive` may only be applied to structs and enums");
            }
        }
    }

    /// Given that we are deriving a trait `DerivedTrait` for a type like:
    ///
    /// ```ignore
    /// struct Struct<'a, ..., 'z, A, B: DeclaredTrait, C, ..., Z> where C: WhereTrait {
    ///     a: A,
    ///     b: B::Item,
    ///     b1: <B as DeclaredTrait>::Item,
    ///     c1: <C as WhereTrait>::Item,
    ///     c2: Option<<C as WhereTrait>::Item>,
    ///     ...
    /// }
    /// ```
    ///
    /// create an impl like:
    ///
    /// ```ignore
    /// impl<'a, ..., 'z, A, B: DeclaredTrait, C, ... Z> where
    ///     C: WhereTrait,
    ///     A: DerivedTrait + B1 + ... + BN,
    ///     B: DerivedTrait + B1 + ... + BN,
    ///     C: DerivedTrait + B1 + ... + BN,
    ///     B::Item: DerivedTrait + B1 + ... + BN,
    ///     <C as WhereTrait>::Item: DerivedTrait + B1 + ... + BN,
    ///     ...
    /// {
    ///     ...
    /// }
    /// ```
    ///
    /// where B1, ..., BN are the bounds given by `bounds_paths`.'. Z is a phantom type, and
    /// therefore does not get bound by the derived trait.
    fn create_derived_impl(&self,
                           cx: &mut ExtCtxt,
                           type_ident: Ident,
                           generics: &Generics,
                           field_tys: Vec<P<ast::Ty>>,
                           methods: Vec<P<ast::ImplItem>>) -> P<ast::Item> {
        let trait_path = self.path.to_path(cx, self.span, type_ident, generics);

        // Transform associated types from `deriving::ty::Ty` into `ast::ImplItem`
        let associated_types = self.associated_types.iter().map(|&(ident, ref type_def)| {
            P(ast::ImplItem {
                id: ast::DUMMY_NODE_ID,
                span: self.span,
                ident: ident,
                vis: ast::Inherited,
                attrs: Vec::new(),
                node: ast::TypeImplItem(type_def.to_ty(cx,
                    self.span,
                    type_ident,
                    generics
                )),
            })
        });

        let Generics { mut lifetimes, ty_params, mut where_clause } =
            self.generics.to_generics(cx, self.span, type_ident, generics);
        let mut ty_params = ty_params.into_vec();

        // Copy the lifetimes
        lifetimes.extend(generics.lifetimes.iter().cloned());

        // Create the type parameters.
        ty_params.extend(generics.ty_params.iter().map(|ty_param| {
            // I don't think this can be moved out of the loop, since
            // a TyParamBound requires an ast id
            let mut bounds: Vec<_> =
                // extra restrictions on the generics parameters to the type being derived upon
                self.additional_bounds.iter().map(|p| {
                    cx.typarambound(p.to_path(cx, self.span, type_ident, generics))
                }).collect();

            // require the current trait
            bounds.push(cx.typarambound(trait_path.clone()));

            // also add in any bounds from the declaration
            for declared_bound in ty_param.bounds.iter() {
                bounds.push((*declared_bound).clone());
            }

            cx.typaram(self.span,
                       ty_param.ident,
                       OwnedSlice::from_vec(bounds),
                       None)
        }));

        // and similarly for where clauses
        where_clause.predicates.extend(generics.where_clause.predicates.iter().map(|clause| {
            match *clause {
                ast::WherePredicate::BoundPredicate(ref wb) => {
                    ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate {
                        span: self.span,
                        bound_lifetimes: wb.bound_lifetimes.clone(),
                        bounded_ty: wb.bounded_ty.clone(),
                        bounds: OwnedSlice::from_vec(wb.bounds.iter().cloned().collect())
                    })
                }
                ast::WherePredicate::RegionPredicate(ref rb) => {
                    ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate {
                        span: self.span,
                        lifetime: rb.lifetime,
                        bounds: rb.bounds.iter().cloned().collect()
                    })
                }
                ast::WherePredicate::EqPredicate(ref we) => {
                    ast::WherePredicate::EqPredicate(ast::WhereEqPredicate {
                        id: ast::DUMMY_NODE_ID,
                        span: self.span,
                        path: we.path.clone(),
                        ty: we.ty.clone()
                    })
                }
            }
        }));

        if !ty_params.is_empty() {
            let ty_param_names: Vec<ast::Name> = ty_params.iter()
                .map(|ty_param| ty_param.ident.name)
                .collect();<|fim▁hole|>
            for field_ty in field_tys {
                let tys = find_type_parameters(&*field_ty, &ty_param_names);

                for ty in tys {
                    let mut bounds: Vec<_> = self.additional_bounds.iter().map(|p| {
                        cx.typarambound(p.to_path(cx, self.span, type_ident, generics))
                    }).collect();

                    // require the current trait
                    bounds.push(cx.typarambound(trait_path.clone()));

                    let predicate = ast::WhereBoundPredicate {
                        span: self.span,
                        bound_lifetimes: vec![],
                        bounded_ty: ty,
                        bounds: OwnedSlice::from_vec(bounds),
                    };

                    let predicate = ast::WherePredicate::BoundPredicate(predicate);
                    where_clause.predicates.push(predicate);
                }
            }
        }

        let trait_generics = Generics {
            lifetimes: lifetimes,
            ty_params: OwnedSlice::from_vec(ty_params),
            where_clause: where_clause
        };

        // Create the reference to the trait.
        let trait_ref = cx.trait_ref(trait_path);

        // Create the type parameters on the `self` path.
        let self_ty_params = generics.ty_params.map(|ty_param| {
            cx.ty_ident(self.span, ty_param.ident)
        });

        let self_lifetimes: Vec<ast::Lifetime> =
            generics.lifetimes
            .iter()
            .map(|ld| ld.lifetime)
            .collect();

        // Create the type of `self`.
        let self_type = cx.ty_path(
            cx.path_all(self.span, false, vec!( type_ident ), self_lifetimes,
                        self_ty_params.into_vec(), Vec::new()));

        let attr = cx.attribute(
            self.span,
            cx.meta_word(self.span,
                         InternedString::new("automatically_derived")));
        // Just mark it now since we know that it'll end up used downstream
        attr::mark_used(&attr);
        let opt_trait_ref = Some(trait_ref);
        let ident = ast_util::impl_pretty_name(&opt_trait_ref, Some(&*self_type));
        let mut a = vec![attr];
        a.extend(self.attributes.iter().cloned());
        cx.item(
            self.span,
            ident,
            a,
            ast::ItemImpl(ast::Unsafety::Normal,
                          ast::ImplPolarity::Positive,
                          trait_generics,
                          opt_trait_ref,
                          self_type,
                          methods.into_iter().chain(associated_types).collect()))
    }

    fn expand_struct_def(&self,
                         cx: &mut ExtCtxt,
                         struct_def: &'a StructDef,
                         type_ident: Ident,
                         generics: &Generics) -> P<ast::Item> {
        let field_tys: Vec<P<ast::Ty>> = struct_def.fields.iter()
            .map(|field| field.node.ty.clone())
            .collect();

        let methods = self.methods.iter().map(|method_def| {
            let (explicit_self, self_args, nonself_args, tys) =
                method_def.split_self_nonself_args(
                    cx, self, type_ident, generics);

            let body = if method_def.is_static() {
                method_def.expand_static_struct_method_body(
                    cx,
                    self,
                    struct_def,
                    type_ident,
                    &self_args[..],
                    &nonself_args[..])
            } else {
                method_def.expand_struct_method_body(cx,
                                                     self,
                                                     struct_def,
                                                     type_ident,
                                                     &self_args[..],
                                                     &nonself_args[..])
            };

            method_def.create_method(cx,
                                     self,
                                     type_ident,
                                     generics,
                                     abi::Rust,
                                     explicit_self,
                                     tys,
                                     body)
        }).collect();

        self.create_derived_impl(cx, type_ident, generics, field_tys, methods)
    }

    fn expand_enum_def(&self,
                       cx: &mut ExtCtxt,
                       enum_def: &'a EnumDef,
                       type_attrs: &[ast::Attribute],
                       type_ident: Ident,
                       generics: &Generics) -> P<ast::Item> {
        let mut field_tys = Vec::new();

        for variant in &enum_def.variants {
            match variant.node.kind {
                ast::VariantKind::TupleVariantKind(ref args) => {
                    field_tys.extend(args.iter()
                        .map(|arg| arg.ty.clone()));
                }
                ast::VariantKind::StructVariantKind(ref args) => {
                    field_tys.extend(args.fields.iter()
                        .map(|field| field.node.ty.clone()));
                }
            }
        }

        let methods = self.methods.iter().map(|method_def| {
            let (explicit_self, self_args, nonself_args, tys) =
                method_def.split_self_nonself_args(cx, self,
                                                   type_ident, generics);

            let body = if method_def.is_static() {
                method_def.expand_static_enum_method_body(
                    cx,
                    self,
                    enum_def,
                    type_ident,
                    &self_args[..],
                    &nonself_args[..])
            } else {
                method_def.expand_enum_method_body(cx,
                                                   self,
                                                   enum_def,
                                                   type_attrs,
                                                   type_ident,
                                                   self_args,
                                                   &nonself_args[..])
            };

            method_def.create_method(cx,
                                     self,
                                     type_ident,
                                     generics,
                                     abi::Rust,
                                     explicit_self,
                                     tys,
                                     body)
        }).collect();

        self.create_derived_impl(cx, type_ident, generics, field_tys, methods)
    }
}

fn find_repr_type_name(diagnostic: &SpanHandler,
                       type_attrs: &[ast::Attribute]) -> &'static str {
    let mut repr_type_name = "i32";
    for a in type_attrs {
        for r in &attr::find_repr_attrs(diagnostic, a) {
            repr_type_name = match *r {
                attr::ReprAny | attr::ReprPacked => continue,
                attr::ReprExtern => "i32",

                attr::ReprInt(_, attr::SignedInt(ast::TyIs)) => "isize",
                attr::ReprInt(_, attr::SignedInt(ast::TyI8)) => "i8",
                attr::ReprInt(_, attr::SignedInt(ast::TyI16)) => "i16",
                attr::ReprInt(_, attr::SignedInt(ast::TyI32)) => "i32",
                attr::ReprInt(_, attr::SignedInt(ast::TyI64)) => "i64",

                attr::ReprInt(_, attr::UnsignedInt(ast::TyUs)) => "usize",
                attr::ReprInt(_, attr::UnsignedInt(ast::TyU8)) => "u8",
                attr::ReprInt(_, attr::UnsignedInt(ast::TyU16)) => "u16",
                attr::ReprInt(_, attr::UnsignedInt(ast::TyU32)) => "u32",
                attr::ReprInt(_, attr::UnsignedInt(ast::TyU64)) => "u64",
            }
        }
    }
    repr_type_name
}

impl<'a> MethodDef<'a> {
    fn call_substructure_method(&self,
                                cx: &mut ExtCtxt,
                                trait_: &TraitDef,
                                type_ident: Ident,
                                self_args: &[P<Expr>],
                                nonself_args: &[P<Expr>],
                                fields: &SubstructureFields)
        -> P<Expr> {
        let substructure = Substructure {
            type_ident: type_ident,
            method_ident: cx.ident_of(self.name),
            self_args: self_args,
            nonself_args: nonself_args,
            fields: fields
        };
        let mut f = self.combine_substructure.borrow_mut();
        let f: &mut CombineSubstructureFunc = &mut *f;
        f(cx, trait_.span, &substructure)
    }

    fn get_ret_ty(&self,
                  cx: &mut ExtCtxt,
                  trait_: &TraitDef,
                  generics: &Generics,
                  type_ident: Ident)
        -> P<ast::Ty> {
        self.ret_ty.to_ty(cx, trait_.span, type_ident, generics)
    }

    fn is_static(&self) -> bool {
        self.explicit_self.is_none()
    }

    fn split_self_nonself_args(&self,
                               cx: &mut ExtCtxt,
                               trait_: &TraitDef,
                               type_ident: Ident,
                               generics: &Generics)
        -> (ast::ExplicitSelf, Vec<P<Expr>>, Vec<P<Expr>>, Vec<(Ident, P<ast::Ty>)>) {

        let mut self_args = Vec::new();
        let mut nonself_args = Vec::new();
        let mut arg_tys = Vec::new();
        let mut nonstatic = false;

        let ast_explicit_self = match self.explicit_self {
            Some(ref self_ptr) => {
                let (self_expr, explicit_self) =
                    ty::get_explicit_self(cx, trait_.span, self_ptr);

                self_args.push(self_expr);
                nonstatic = true;

                explicit_self
            }
            None => codemap::respan(trait_.span, ast::SelfStatic),
        };

        for (i, ty) in self.args.iter().enumerate() {
            let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics);
            let ident = cx.ident_of(&format!("__arg_{}", i));
            arg_tys.push((ident, ast_ty));

            let arg_expr = cx.expr_ident(trait_.span, ident);

            match *ty {
                // for static methods, just treat any Self
                // arguments as a normal arg
                Self_ if nonstatic => {
                    self_args.push(arg_expr);
                }
                Ptr(ref ty, _) if **ty == Self_ && nonstatic => {
                    self_args.push(cx.expr_deref(trait_.span, arg_expr))
                }
                _ => {
                    nonself_args.push(arg_expr);
                }
            }
        }

        (ast_explicit_self, self_args, nonself_args, arg_tys)
    }

    fn create_method(&self,
                     cx: &mut ExtCtxt,
                     trait_: &TraitDef,
                     type_ident: Ident,
                     generics: &Generics,
                     abi: Abi,
                     explicit_self: ast::ExplicitSelf,
                     arg_types: Vec<(Ident, P<ast::Ty>)> ,
                     body: P<Expr>) -> P<ast::ImplItem> {
        // create the generics that aren't for Self
        let fn_generics = self.generics.to_generics(cx, trait_.span, type_ident, generics);

        let self_arg = match explicit_self.node {
            ast::SelfStatic => None,
            // creating fresh self id
            _ => Some(ast::Arg::new_self(trait_.span, ast::MutImmutable, special_idents::self_))
        };
        let args = {
            let args = arg_types.into_iter().map(|(name, ty)| {
                    cx.arg(trait_.span, name, ty)
                });
            self_arg.into_iter().chain(args).collect()
        };

        let ret_type = self.get_ret_ty(cx, trait_, generics, type_ident);

        let method_ident = cx.ident_of(self.name);
        let fn_decl = cx.fn_decl(args, ret_type);
        let body_block = cx.block_expr(body);

        let unsafety = if self.is_unsafe {
            ast::Unsafety::Unsafe
        } else {
            ast::Unsafety::Normal
        };

        // Create the method.
        P(ast::ImplItem {
            id: ast::DUMMY_NODE_ID,
            attrs: self.attributes.clone(),
            span: trait_.span,
            vis: ast::Inherited,
            ident: method_ident,
            node: ast::MethodImplItem(ast::MethodSig {
                generics: fn_generics,
                abi: abi,
                explicit_self: explicit_self,
                unsafety: unsafety,
                constness: ast::Constness::NotConst,
                decl: fn_decl
            }, body_block)
        })
    }

    /// ```
    /// #[derive(PartialEq)]
    /// struct A { x: i32, y: i32 }
    ///
    /// // equivalent to:
    /// impl PartialEq for A {
    ///     fn eq(&self, __arg_1: &A) -> bool {
    ///         match *self {
    ///             A {x: ref __self_0_0, y: ref __self_0_1} => {
    ///                 match *__arg_1 {
    ///                     A {x: ref __self_1_0, y: ref __self_1_1} => {
    ///                         __self_0_0.eq(__self_1_0) && __self_0_1.eq(__self_1_1)
    ///                     }
    ///                 }
    ///             }
    ///         }
    ///     }
    /// }
    /// ```
    fn expand_struct_method_body<'b>(&self,
                                     cx: &mut ExtCtxt,
                                     trait_: &TraitDef<'b>,
                                     struct_def: &'b StructDef,
                                     type_ident: Ident,
                                     self_args: &[P<Expr>],
                                     nonself_args: &[P<Expr>])
        -> P<Expr> {

        let mut raw_fields = Vec::new(); // Vec<[fields of self],
                                         // [fields of next Self arg], [etc]>
        let mut patterns = Vec::new();
        for i in 0..self_args.len() {
            let struct_path= cx.path(DUMMY_SP, vec!( type_ident ));
            let (pat, ident_expr) =
                trait_.create_struct_pattern(cx,
                                             struct_path,
                                             struct_def,
                                             &format!("__self_{}", i),
                                             ast::MutImmutable);
            patterns.push(pat);
            raw_fields.push(ident_expr);
        }

        // transpose raw_fields
        let fields = if !raw_fields.is_empty() {
            let mut raw_fields = raw_fields.into_iter().map(|v| v.into_iter());
            let first_field = raw_fields.next().unwrap();
            let mut other_fields: Vec<vec::IntoIter<_>>
                = raw_fields.collect();
            first_field.map(|(span, opt_id, field, attrs)| {
                FieldInfo {
                    span: span,
                    name: opt_id,
                    self_: field,
                    other: other_fields.iter_mut().map(|l| {
                        match l.next().unwrap() {
                            (_, _, ex, _) => ex
                        }
                    }).collect(),
                    attrs: attrs,
                }
            }).collect()
        } else {
            cx.span_bug(trait_.span,
                        "no self arguments to non-static method in generic \
                         `derive`")
        };

        // body of the inner most destructuring match
        let mut body = self.call_substructure_method(
            cx,
            trait_,
            type_ident,
            self_args,
            nonself_args,
            &Struct(fields));

        // make a series of nested matches, to destructure the
        // structs. This is actually right-to-left, but it shouldn't
        // matter.
        for (arg_expr, pat) in self_args.iter().zip(patterns) {
            body = cx.expr_match(trait_.span, arg_expr.clone(),
                                 vec!( cx.arm(trait_.span, vec!(pat.clone()), body) ))
        }

        body
    }

    fn expand_static_struct_method_body(&self,
                                        cx: &mut ExtCtxt,
                                        trait_: &TraitDef,
                                        struct_def: &StructDef,
                                        type_ident: Ident,
                                        self_args: &[P<Expr>],
                                        nonself_args: &[P<Expr>])
        -> P<Expr> {
        let summary = trait_.summarise_struct(cx, struct_def);

        self.call_substructure_method(cx,
                                      trait_,
                                      type_ident,
                                      self_args, nonself_args,
                                      &StaticStruct(struct_def, summary))
    }

    /// ```
    /// #[derive(PartialEq)]
    /// enum A {
    ///     A1,
    ///     A2(i32)
    /// }
    ///
    /// // is equivalent to
    ///
    /// impl PartialEq for A {
    ///     fn eq(&self, __arg_1: &A) -> ::bool {
    ///         match (&*self, &*__arg_1) {
    ///             (&A1, &A1) => true,
    ///             (&A2(ref __self_0),
    ///              &A2(ref __arg_1_0)) => (*__self_0).eq(&(*__arg_1_0)),
    ///             _ => {
    ///                 let __self_vi = match *self { A1(..) => 0, A2(..) => 1 };
    ///                 let __arg_1_vi = match *__arg_1 { A1(..) => 0, A2(..) => 1 };
    ///                 false
    ///             }
    ///         }
    ///     }
    /// }
    /// ```
    ///
    /// (Of course `__self_vi` and `__arg_1_vi` are unused for
    /// `PartialEq`, and those subcomputations will hopefully be removed
    /// as their results are unused.  The point of `__self_vi` and
    /// `__arg_1_vi` is for `PartialOrd`; see #15503.)
    fn expand_enum_method_body<'b>(&self,
                                   cx: &mut ExtCtxt,
                                   trait_: &TraitDef<'b>,
                                   enum_def: &'b EnumDef,
                                   type_attrs: &[ast::Attribute],
                                   type_ident: Ident,
                                   self_args: Vec<P<Expr>>,
                                   nonself_args: &[P<Expr>])
                                   -> P<Expr> {
        self.build_enum_match_tuple(
            cx, trait_, enum_def, type_attrs, type_ident, self_args, nonself_args)
    }

    /// Creates a match for a tuple of all `self_args`, where either all
    /// variants match, or it falls into a catch-all for when one variant
    /// does not match.

    /// There are N + 1 cases because is a case for each of the N
    /// variants where all of the variants match, and one catch-all for
    /// when one does not match.

    /// The catch-all handler is provided access the variant index values
    /// for each of the self-args, carried in precomputed variables. (Nota
    /// bene: the variant index values are not necessarily the
    /// discriminant values. See issue #15523.)

    /// ```{.text}
    /// match (this, that, ...) {
    ///   (Variant1, Variant1, Variant1) => ... // delegate Matching on Variant1
    ///   (Variant2, Variant2, Variant2) => ... // delegate Matching on Variant2
    ///   ...
    ///   _ => {
    ///     let __this_vi = match this { Variant1 => 0, Variant2 => 1, ... };
    ///     let __that_vi = match that { Variant1 => 0, Variant2 => 1, ... };
    ///     ... // catch-all remainder can inspect above variant index values.
    ///   }
    /// }
    /// ```
    fn build_enum_match_tuple<'b>(
        &self,
        cx: &mut ExtCtxt,
        trait_: &TraitDef<'b>,
        enum_def: &'b EnumDef,
        type_attrs: &[ast::Attribute],
        type_ident: Ident,
        self_args: Vec<P<Expr>>,
        nonself_args: &[P<Expr>]) -> P<Expr> {

        let sp = trait_.span;
        let variants = &enum_def.variants;

        let self_arg_names = self_args.iter().enumerate()
            .map(|(arg_count, _self_arg)| {
                if arg_count == 0 {
                    "__self".to_string()
                } else {
                    format!("__arg_{}", arg_count)
                }
            })
            .collect::<Vec<String>>();

        let self_arg_idents = self_arg_names.iter()
            .map(|name|cx.ident_of(&name[..]))
            .collect::<Vec<ast::Ident>>();

        // The `vi_idents` will be bound, solely in the catch-all, to
        // a series of let statements mapping each self_arg to an int
        // value corresponding to its discriminant.
        let vi_idents: Vec<ast::Ident> = self_arg_names.iter()
            .map(|name| { let vi_suffix = format!("{}_vi", &name[..]);
                          cx.ident_of(&vi_suffix[..]) })
            .collect::<Vec<ast::Ident>>();

        // Builds, via callback to call_substructure_method, the
        // delegated expression that handles the catch-all case,
        // using `__variants_tuple` to drive logic if necessary.
        let catch_all_substructure = EnumNonMatchingCollapsed(
            self_arg_idents, &variants[..], &vi_idents[..]);

        // These arms are of the form:
        // (Variant1, Variant1, ...) => Body1
        // (Variant2, Variant2, ...) => Body2
        // ...
        // where each tuple has length = self_args.len()
        let mut match_arms: Vec<ast::Arm> = variants.iter().enumerate()
            .map(|(index, variant)| {
                let mk_self_pat = |cx: &mut ExtCtxt, self_arg_name: &str| {
                    let (p, idents) = trait_.create_enum_variant_pattern(cx, type_ident,
                                                                         &**variant,
                                                                         self_arg_name,
                                                                         ast::MutImmutable);
                    (cx.pat(sp, ast::PatRegion(p, ast::MutImmutable)), idents)
                };

                // A single arm has form (&VariantK, &VariantK, ...) => BodyK
                // (see "Final wrinkle" note below for why.)
                let mut subpats = Vec::with_capacity(self_arg_names.len());
                let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1);
                let first_self_pat_idents = {
                    let (p, idents) = mk_self_pat(cx, &self_arg_names[0]);
                    subpats.push(p);
                    idents
                };
                for self_arg_name in &self_arg_names[1..] {
                    let (p, idents) = mk_self_pat(cx, &self_arg_name[..]);
                    subpats.push(p);
                    self_pats_idents.push(idents);
                }

                // Here is the pat = `(&VariantK, &VariantK, ...)`
                let single_pat = cx.pat_tuple(sp, subpats);

                // For the BodyK, we need to delegate to our caller,
                // passing it an EnumMatching to indicate which case
                // we are in.

                // All of the Self args have the same variant in these
                // cases.  So we transpose the info in self_pats_idents
                // to gather the getter expressions together, in the
                // form that EnumMatching expects.

                // The transposition is driven by walking across the
                // arg fields of the variant for the first self pat.
                let field_tuples = first_self_pat_idents.into_iter().enumerate()
                    // For each arg field of self, pull out its getter expr ...
                    .map(|(field_index, (sp, opt_ident, self_getter_expr, attrs))| {
                        // ... but FieldInfo also wants getter expr
                        // for matching other arguments of Self type;
                        // so walk across the *other* self_pats_idents
                        // and pull out getter for same field in each
                        // of them (using `field_index` tracked above).
                        // That is the heart of the transposition.
                        let others = self_pats_idents.iter().map(|fields| {
                            let (_, _opt_ident, ref other_getter_expr, _) =
                                fields[field_index];

                            // All Self args have same variant, so
                            // opt_idents are the same.  (Assert
                            // here to make it self-evident that
                            // it is okay to ignore `_opt_ident`.)
                            assert!(opt_ident == _opt_ident);

                            other_getter_expr.clone()
                        }).collect::<Vec<P<Expr>>>();

                        FieldInfo { span: sp,
                                    name: opt_ident,
                                    self_: self_getter_expr,
                                    other: others,
                                    attrs: attrs,
                        }
                    }).collect::<Vec<FieldInfo>>();

                // Now, for some given VariantK, we have built up
                // expressions for referencing every field of every
                // Self arg, assuming all are instances of VariantK.
                // Build up code associated with such a case.
                let substructure = EnumMatching(index,
                                                &**variant,
                                                field_tuples);
                let arm_expr = self.call_substructure_method(
                    cx, trait_, type_ident, &self_args[..], nonself_args,
                    &substructure);

                cx.arm(sp, vec![single_pat], arm_expr)
            }).collect();

        // We will usually need the catch-all after matching the
        // tuples `(VariantK, VariantK, ...)` for each VariantK of the
        // enum.  But:
        //
        // * when there is only one Self arg, the arms above suffice
        // (and the deriving we call back into may not be prepared to
        // handle EnumNonMatchCollapsed), and,
        //
        // * when the enum has only one variant, the single arm that
        // is already present always suffices.
        //
        // * In either of the two cases above, if we *did* add a
        //   catch-all `_` match, it would trigger the
        //   unreachable-pattern error.
        //
        if variants.len() > 1 && self_args.len() > 1 {
            // Build a series of let statements mapping each self_arg
            // to its discriminant value. If this is a C-style enum
            // with a specific repr type, then casts the values to
            // that type.  Otherwise casts to `i32` (the default repr
            // type).
            //
            // i.e. for `enum E<T> { A, B(1), C(T, T) }`, and a deriving
            // with three Self args, builds three statements:
            //
            // ```
            // let __self0_vi = unsafe {
            //     std::intrinsics::discriminant_value(&self) } as i32;
            // let __self1_vi = unsafe {
            //     std::intrinsics::discriminant_value(&__arg1) } as i32;
            // let __self2_vi = unsafe {
            //     std::intrinsics::discriminant_value(&__arg2) } as i32;
            // ```
            let mut index_let_stmts: Vec<P<ast::Stmt>> = Vec::new();

            let target_type_name =
                find_repr_type_name(&cx.parse_sess.span_diagnostic, type_attrs);

            for (&ident, self_arg) in vi_idents.iter().zip(&self_args) {
                let path = vec![cx.ident_of_std("core"),
                                cx.ident_of("intrinsics"),
                                cx.ident_of("discriminant_value")];
                let call = cx.expr_call_global(
                    sp, path, vec![cx.expr_addr_of(sp, self_arg.clone())]);
                let variant_value = cx.expr_block(P(ast::Block {
                    stmts: vec![],
                    expr: Some(call),
                    id: ast::DUMMY_NODE_ID,
                    rules: ast::UnsafeBlock(ast::CompilerGenerated),
                    span: sp }));

                let target_ty = cx.ty_ident(sp, cx.ident_of(target_type_name));
                let variant_disr = cx.expr_cast(sp, variant_value, target_ty);
                let let_stmt = cx.stmt_let(sp, false, ident, variant_disr);
                index_let_stmts.push(let_stmt);
            }

            let arm_expr = self.call_substructure_method(
                cx, trait_, type_ident, &self_args[..], nonself_args,
                &catch_all_substructure);

            // Builds the expression:
            // {
            //   let __self0_vi = ...;
            //   let __self1_vi = ...;
            //   ...
            //   <delegated expression referring to __self0_vi, et al.>
            // }
            let arm_expr = cx.expr_block(
                cx.block_all(sp, index_let_stmts, Some(arm_expr)));

            // Builds arm:
            // _ => { let __self0_vi = ...;
            //        let __self1_vi = ...;
            //        ...
            //        <delegated expression as above> }
            let catch_all_match_arm =
                cx.arm(sp, vec![cx.pat_wild(sp)], arm_expr);

            match_arms.push(catch_all_match_arm);

        } else if variants.is_empty() {
            // As an additional wrinkle, For a zero-variant enum A,
            // currently the compiler
            // will accept `fn (a: &Self) { match   *a   { } }`
            // but rejects `fn (a: &Self) { match (&*a,) { } }`
            // as well as  `fn (a: &Self) { match ( *a,) { } }`
            //
            // This means that the strategy of building up a tuple of
            // all Self arguments fails when Self is a zero variant
            // enum: rustc rejects the expanded program, even though
            // the actual code tends to be impossible to execute (at
            // least safely), according to the type system.
            //
            // The most expedient fix for this is to just let the
            // code fall through to the catch-all.  But even this is
            // error-prone, since the catch-all as defined above would
            // generate code like this:
            //
            //     _ => { let __self0 = match *self { };
            //            let __self1 = match *__arg_0 { };
            //            <catch-all-expr> }
            //
            // Which is yields bindings for variables which type
            // inference cannot resolve to unique types.
            //
            // One option to the above might be to add explicit type
            // annotations.  But the *only* reason to go down that path
            // would be to try to make the expanded output consistent
            // with the case when the number of enum variants >= 1.
            //
            // That just isn't worth it.  In fact, trying to generate
            // sensible code for *any* deriving on a zero-variant enum
            // does not make sense.  But at the same time, for now, we
            // do not want to cause a compile failure just because the
            // user happened to attach a deriving to their
            // zero-variant enum.
            //
            // Instead, just generate a failing expression for the
            // zero variant case, skipping matches and also skipping
            // delegating back to the end user code entirely.
            //
            // (See also #4499 and #12609; note that some of the
            // discussions there influence what choice we make here;
            // e.g. if we feature-gate `match x { ... }` when x refers
            // to an uninhabited type (e.g. a zero-variant enum or a
            // type holding such an enum), but do not feature-gate
            // zero-variant enums themselves, then attempting to
            // derive Debug on such a type could here generate code
            // that needs the feature gate enabled.)

            return cx.expr_unreachable(sp);
        }

        // Final wrinkle: the self_args are expressions that deref
        // down to desired l-values, but we cannot actually deref
        // them when they are fed as r-values into a tuple
        // expression; here add a layer of borrowing, turning
        // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`.
        let borrowed_self_args = self_args.move_map(|self_arg| cx.expr_addr_of(sp, self_arg));
        let match_arg = cx.expr(sp, ast::ExprTup(borrowed_self_args));
        cx.expr_match(sp, match_arg, match_arms)
    }

    fn expand_static_enum_method_body(&self,
                                      cx: &mut ExtCtxt,
                                      trait_: &TraitDef,
                                      enum_def: &EnumDef,
                                      type_ident: Ident,
                                      self_args: &[P<Expr>],
                                      nonself_args: &[P<Expr>])
        -> P<Expr> {
        let summary = enum_def.variants.iter().map(|v| {
            let ident = v.node.name;
            let summary = match v.node.kind {
                ast::TupleVariantKind(ref args) => {
                    Unnamed(args.iter().map(|va| trait_.set_expn_info(cx, va.ty.span)).collect())
                }
                ast::StructVariantKind(ref struct_def) => {
                    trait_.summarise_struct(cx, &**struct_def)
                }
            };
            (ident, v.span, summary)
        }).collect();
        self.call_substructure_method(cx,
                                      trait_,
                                      type_ident,
                                      self_args, nonself_args,
                                      &StaticEnum(enum_def, summary))
    }
}

#[derive(PartialEq)] // dogfooding!
enum StructType {
    Unknown, Record, Tuple
}

// general helper methods.
impl<'a> TraitDef<'a> {
    fn set_expn_info(&self,
                     cx: &mut ExtCtxt,
                     mut to_set: Span) -> Span {
        let trait_name = match self.path.path.last() {
            None => cx.span_bug(self.span, "trait with empty path in generic `derive`"),
            Some(name) => *name
        };
        to_set.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo {
            call_site: to_set,
            callee: codemap::NameAndSpan {
                name: format!("derive({})", trait_name),
                format: codemap::MacroAttribute,
                span: Some(self.span),
                allow_internal_unstable: false,
            }
        });
        to_set
    }

    fn summarise_struct(&self,
                        cx: &mut ExtCtxt,
                        struct_def: &StructDef) -> StaticFields {
        let mut named_idents = Vec::new();
        let mut just_spans = Vec::new();
        for field in struct_def.fields.iter(){
            let sp = self.set_expn_info(cx, field.span);
            match field.node.kind {
                ast::NamedField(ident, _) => named_idents.push((ident, sp)),
                ast::UnnamedField(..) => just_spans.push(sp),
            }
        }

        match (just_spans.is_empty(), named_idents.is_empty()) {
            (false, false) => cx.span_bug(self.span,
                                          "a struct with named and unnamed \
                                           fields in generic `derive`"),
            // named fields
            (_, false) => Named(named_idents),
            // tuple structs (includes empty structs)
            (_, _)     => Unnamed(just_spans)
        }
    }

    fn create_subpatterns(&self,
                          cx: &mut ExtCtxt,
                          field_paths: Vec<ast::SpannedIdent> ,
                          mutbl: ast::Mutability)
                          -> Vec<P<ast::Pat>> {
        field_paths.iter().map(|path| {
            cx.pat(path.span,
                   ast::PatIdent(ast::BindByRef(mutbl), (*path).clone(), None))
        }).collect()
    }

    fn create_struct_pattern(&self,
                             cx: &mut ExtCtxt,
                             struct_path: ast::Path,
                             struct_def: &'a StructDef,
                             prefix: &str,
                             mutbl: ast::Mutability)
                             -> (P<ast::Pat>, Vec<(Span, Option<Ident>,
                                                   P<Expr>,
                                                   &'a [ast::Attribute])>) {
        if struct_def.fields.is_empty() {
            return (cx.pat_enum(self.span, struct_path, vec![]), vec![]);
        }

        let mut paths = Vec::new();
        let mut ident_expr = Vec::new();
        let mut struct_type = Unknown;

        for (i, struct_field) in struct_def.fields.iter().enumerate() {
            let sp = self.set_expn_info(cx, struct_field.span);
            let opt_id = match struct_field.node.kind {
                ast::NamedField(ident, _) if (struct_type == Unknown ||
                                              struct_type == Record) => {
                    struct_type = Record;
                    Some(ident)
                }
                ast::UnnamedField(..) if (struct_type == Unknown ||
                                          struct_type == Tuple) => {
                    struct_type = Tuple;
                    None
                }
                _ => {
                    cx.span_bug(sp, "a struct with named and unnamed fields in `derive`");
                }
            };
            let ident = cx.ident_of(&format!("{}_{}", prefix, i));
            paths.push(codemap::Spanned{span: sp, node: ident});
            let val = cx.expr(
                sp, ast::ExprParen(cx.expr_deref(sp, cx.expr_path(cx.path_ident(sp,ident)))));
            ident_expr.push((sp, opt_id, val, &struct_field.node.attrs[..]));
        }

        let subpats = self.create_subpatterns(cx, paths, mutbl);

        // struct_type is definitely not Unknown, since struct_def.fields
        // must be nonempty to reach here
        let pattern = if struct_type == Record {
            let field_pats = subpats.into_iter().zip(&ident_expr)
                                    .map(|(pat, &(_, id, _, _))| {
                // id is guaranteed to be Some
                codemap::Spanned {
                    span: pat.span,
                    node: ast::FieldPat {
                        ident: id.unwrap(),
                        pat: pat,
                        is_shorthand: false,
                    },
                }
            }).collect();
            cx.pat_struct(self.span, struct_path, field_pats)
        } else {
            cx.pat_enum(self.span, struct_path, subpats)
        };

        (pattern, ident_expr)
    }

    fn create_enum_variant_pattern(&self,
                                   cx: &mut ExtCtxt,
                                   enum_ident: ast::Ident,
                                   variant: &'a ast::Variant,
                                   prefix: &str,
                                   mutbl: ast::Mutability)
        -> (P<ast::Pat>, Vec<(Span, Option<Ident>, P<Expr>, &'a [ast::Attribute])>) {
        let variant_ident = variant.node.name;
        let variant_path = cx.path(variant.span, vec![enum_ident, variant_ident]);

        match variant.node.kind {
            ast::TupleVariantKind(ref variant_args) => {
                if variant_args.is_empty() {
                    return (cx.pat_enum(variant.span, variant_path, vec![]), vec![]);
                }

                let mut paths = Vec::new();
                let mut ident_expr: Vec<(_, _, _, &'a [ast::Attribute])> = Vec::new();
                for (i, va) in variant_args.iter().enumerate() {
                    let sp = self.set_expn_info(cx, va.ty.span);
                    let ident = cx.ident_of(&format!("{}_{}", prefix, i));
                    let path1 = codemap::Spanned{span: sp, node: ident};
                    paths.push(path1);
                    let expr_path = cx.expr_path(cx.path_ident(sp, ident));
                    let val = cx.expr(sp, ast::ExprParen(cx.expr_deref(sp, expr_path)));
                    ident_expr.push((sp, None, val, &[]));
                }

                let subpats = self.create_subpatterns(cx, paths, mutbl);

                (cx.pat_enum(variant.span, variant_path, subpats),
                 ident_expr)
            }
            ast::StructVariantKind(ref struct_def) => {
                self.create_struct_pattern(cx, variant_path, &**struct_def,
                                           prefix, mutbl)
            }
        }
    }
}

/* helpful
premade recipes */ /// Fold the fields. `use_foldl` controls whether this is done /// left-to-right (`true`) or right-to-left (`false`). pub fn cs_fold<F>(use_foldl: bool, mut f: F, base: P<Expr>, mut enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, trait_span: Span, substructure: &Substructure) -> P<Expr> where F: FnMut(&mut ExtCtxt, Span, P<Expr>, P<Expr>, &[P<Expr>]) -> P<Expr>, { match *substructure.fields { EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => { if use_foldl { all_fields.iter().fold(base, |old, field| { f(cx, field.span, old, field.self_.clone(), &field.other) }) } else { all_fields.iter().rev().fold(base, |old, field| { f(cx, field.span, old, field.self_.clone(), &field.other) }) } }, EnumNonMatchingCollapsed(ref all_args, _, tuple) => enum_nonmatch_f(cx, trait_span, (&all_args[..], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, "static function in `derive`") } } } /// Call the method that is being derived on all the fields, and then /// process the collected results. i.e. /// /// ``` /// f(cx, span, vec![self_1.method(__arg_1_1, __arg_2_1), /// self_2.method(__arg_1_2, __arg_2_2)]) /// ``` #[inline] pub fn cs_same_method<F>(f: F, mut enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, trait_span: Span, substructure: &Substructure) -> P<Expr> where F: FnOnce(&mut ExtCtxt, Span, Vec<P<Expr>>) -> P<Expr>, { match *substructure.fields { EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => { // call self_n.method(other_1_n, other_2_n, ...) let called = all_fields.iter().map(|field| { cx.expr_method_call(field.span, field.self_.clone(), substructure.method_ident, field.other.iter() .map(|e| cx.expr_addr_of(field.span, e.clone())) .collect()) }).collect(); f(cx, trait_span, called) }, EnumNonMatchingCollapsed(ref all_self_args, _, tuple) => enum_nonmatch_f(cx, trait_span, (&all_self_args[..], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, "static function in `derive`") } } } /// Fold together the results of calling the derived method on all the /// fields. `use_foldl` controls whether this is done left-to-right /// (`true`) or right-to-left (`false`). #[inline] pub fn cs_same_method_fold<F>(use_foldl: bool, mut f: F, base: P<Expr>, enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, trait_span: Span, substructure: &Substructure) -> P<Expr> where F: FnMut(&mut ExtCtxt, Span, P<Expr>, P<Expr>) -> P<Expr>, { cs_same_method( |cx, span, vals| { if use_foldl { vals.into_iter().fold(base.clone(), |old, new| { f(cx, span, old, new) }) } else { vals.into_iter().rev().fold(base.clone(), |old, new| { f(cx, span, old, new) }) } }, enum_nonmatch_f, cx, trait_span, substructure) } /// Use a given binop to combine the result of calling the derived method /// on all the fields. 
#[inline] pub fn cs_binop(binop: ast::BinOp_, base: P<Expr>, enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, trait_span: Span, substructure: &Substructure) -> P<Expr> { cs_same_method_fold( true, // foldl is good enough |cx, span, old, new| { cx.expr_binary(span, binop, old, new) }, base, enum_nonmatch_f, cx, trait_span, substructure) } /// cs_binop with binop == or #[inline] pub fn cs_or(enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, span: Span, substructure: &Substructure) -> P<Expr> { cs_binop(ast::BiOr, cx.expr_bool(span, false), enum_nonmatch_f, cx, span, substructure) } /// cs_binop with binop == and #[inline] pub fn cs_and(enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, span: Span, substructure: &Substructure) -> P<Expr> { cs_binop(ast::BiAnd, cx.expr_bool(span, true), enum_nonmatch_f, cx, span, substructure) }<|fim▁end|>
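The "transposition" that the deriving code above performs on per-argument field getters is easier to see in miniature. The sketch below is illustrative only (values invented, Python rather than the original AST-building Rust):

```python
# Each Self argument contributes one getter per field; EnumMatching wants,
# per field, the first argument's getter plus the same field's getter from
# every other argument -- a transpose of the nested list.
first_self_pat_idents = ["self.f0", "self.f1"]
self_pats_idents = [["arg1.f0", "arg1.f1"], ["arg2.f0", "arg2.f1"]]

field_tuples = [
    (self_getter, [fields[i] for fields in self_pats_idents])
    for i, self_getter in enumerate(first_self_pat_idents)
]
assert field_tuples == [
    ("self.f0", ["arg1.f0", "arg2.f0"]),
    ("self.f1", ["arg1.f1", "arg2.f1"]),
]
```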
<|file_name|>br.js<|end_file_name|><|fim▁begin|>/* * /MathJax/localization/br/br.js * * Copyright (c) 2009-2015 The MathJax Consortium * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ MathJax.Localization.addTranslation( "br", null, { menuTitle: "brezhoneg", version: "2.5.0", isLoaded: true, domains: { _: { version: "2.5.0", isLoaded: true, strings: { MathProcessingError: "Fazi o treta\u00F1 ar formulenn", MathError: "Fazi er formulenn", LoadFile: "O karga\u00F1 %1", Loading: "O karga\u00F1", LoadFailed: "N'eus ket bet gallet karga\u00F1 %1", ProcessMath: "Treta\u00F1 ar formulenno\u00F9 : %1%%", Processing: "O treta\u00F1", TypesetMath: "Aoza\u00F1 formulenno\u00F9 : %1%%", Typesetting: "Aoza\u00F1", MathJaxNotSupported: "Ne c'hall ket ho merdeer ober gant MathJax"<|fim▁hole|> } }, FontWarnings: {}, "HTML-CSS": {}, HelpDialog: {}, MathML: {}, MathMenu: {}, TeX: {} }, plural: function( a ) { if (a % 10 === 1 && !(a % 100 === 11 || a % 100 === 71 || a % 100 === 91)) { return 1 } if (a % 10 === 2 && !(a % 100 === 12 || a % 100 === 72 || a % 100 === 92)) { return 2 } if ((a % 10 === 3 || a % 10 === 4 || a % 10 === 9) && !(10 <= a % 100 && a % 100 <= 19 || 70 <= a % 100 && a % 100 <= 79 || 90 <= a % 100 && a % 100 <= 99)) { return 3 } if (a !== 0 && a % 1000000 === 0) { return 4 } return 5 }, number: function( a ) { return a } } ); MathJax.Ajax.loadComplete( "[MathJax]/localization/br/br.js" );<|fim▁end|>
<|file_name|>contingency.py<|end_file_name|><|fim▁begin|>import math import numpy as np from Orange import data def _get_variable(variable, dat, attr_name, expected_type=None, expected_name=""): failed = False if isinstance(variable, data.Variable): datvar = getattr(dat, "variable", None) if datvar is not None and datvar is not variable: raise ValueError("variable does not match the variable" "in the data") elif hasattr(dat, "domain"): variable = dat.domain[variable] elif hasattr(dat, attr_name): variable = dat.variable else: failed = True if failed or (expected_type is not None and not isinstance(variable, expected_type)): if not expected_type or isinstance(variable, data.Variable): raise ValueError( "expected %s variable not %s" % (expected_name, variable)) else: raise ValueError("expected %s, not '%s'" % (expected_type.__name__, type(variable).__name__)) return variable class Discrete(np.ndarray): def __new__(cls, dat=None, col_variable=None, row_variable=None, unknowns=None): if isinstance(dat, data.Storage): if unknowns is not None: raise TypeError( "incompatible arguments (data storage and 'unknowns'") return cls.from_data(dat, col_variable, row_variable) if row_variable is not None: row_variable = _get_variable(row_variable, dat, "row_variable") rows = len(row_variable.values) else: rows = dat.shape[0] if col_variable is not None: col_variable = _get_variable(col_variable, dat, "col_variable") cols = len(col_variable.values) else: cols = dat.shape[1] self = super().__new__(cls, (rows, cols)) self.row_variable = row_variable self.col_variable = col_variable if dat is None: self[:] = 0 self.unknowns = unknowns or 0 else: self[...] = dat self.unknowns = (unknowns if unknowns is not None else getattr(dat, "unknowns", 0)) return self @classmethod def from_data(cls, data, col_variable, row_variable=None): if row_variable is None: row_variable = data.domain.class_var if row_variable is None: raise ValueError("row_variable needs to be specified (data " "has no class)") row_variable = _get_variable(row_variable, data, "row_variable") col_variable = _get_variable(col_variable, data, "col_variable") try: dist, unknowns = data._compute_contingency( [col_variable], row_variable)[0] self = super().__new__(cls, dist.shape) self[...] 
= dist self.unknowns = unknowns except NotImplementedError: self = np.zeros( (len(row_variable.values), len(col_variable.values))) self.unknowns = 0 rind = data.domain.index(row_variable) cind = data.domain.index(col_variable) for row in data: rval, cval = row[rind], row[cind] if math.isnan(rval): continue w = row.weight if math.isnan(cval): self.unknowns[cval] += w else: self[rval, cval] += w self.row_variable = row_variable self.col_variable = col_variable return self def __eq__(self, other): return np.array_equal(self, other) and ( not hasattr(other, "unknowns") or np.array_equal(self.unknowns, other.unknowns)) def __getitem__(self, index): if isinstance(index, str): if len(self.shape) == 2: # contingency index = self.row_variable.to_val(index) contingency_row = super().__getitem__(index) contingency_row.col_variable = self.col_variable return contingency_row else: # Contingency row column = self.strides == self.base.strides[:1] if column: index = self.row_variable.to_val(index) else: index = self.col_variable.to_val(index) elif isinstance(index, tuple): if isinstance(index[0], str): index = (self.row_variable.to_val(index[0]), index[1]) if isinstance(index[1], str): index = (index[0], self.col_variable.to_val(index[1])) result = super().__getitem__(index) if result.strides: result.col_variable = self.col_variable result.row_variable = self.row_variable return result <|fim▁hole|> if isinstance(index, str): index = self.row_variable.to_val(index) elif isinstance(index, tuple): if isinstance(index[0], str): index = (self.row_variable.to_val(index[0]), index[1]) if isinstance(index[1], str): index = (index[0], self.col_variable.to_val(index[1])) super().__setitem__(index, value) def normalize(self, axis=None): t = np.sum(self, axis=axis) if t > 1e-6: self[:] /= t if axis is None or axis == 1: self.unknowns /= t class Continuous: def __init__(self, dat=None, col_variable=None, row_variable=None, unknowns=None): if isinstance(dat, data.Storage): if unknowns is not None: raise TypeError( "incompatible arguments (data storage and 'unknowns'") return self.from_data(dat, col_variable, row_variable) if row_variable is not None: row_variable = _get_variable(row_variable, dat, "row_variable") if col_variable is not None: col_variable = _get_variable(col_variable, dat, "col_variable") self.values, self.counts = dat self.row_variable = row_variable self.col_variable = col_variable if unknowns is not None: self.unknowns = unknowns elif row_variable: self.unknowns = np.zeros(len(row_variable.values)) else: self.unknowns = None def from_data(self, data, col_variable, row_variable=None): if row_variable is None: row_variable = data.domain.class_var if row_variable is None: raise ValueError("row_variable needs to be specified (data" "has no class)") self.row_variable = _get_variable(row_variable, data, "row_variable") self.col_variable = _get_variable(col_variable, data, "col_variable") try: (self.values, self.counts), self.unknowns = data._compute_contingency( [col_variable], row_variable)[0] except NotImplementedError: raise NotImplementedError("Fallback method for computation of " "contingencies is not implemented yet") def __eq__(self, other): return (np.array_equal(self.values, other.values) and np.array_equal(self.counts, other.counts) and (not hasattr(other, "unknowns") or np.array_equal(self.unknowns, other.unknowns))) def __getitem__(self, index): """ Return contingencies for a given class value. 
""" if isinstance(index, (str, float)): index = self.row_variable.to_val(index) C = self.counts[index] ind = C > 0 return np.vstack((self.values[ind], C[ind])) def __len__(self): return self.counts.shape[0] def __setitem__(self, index, value): raise NotImplementedError("Setting individual class contingencies is " "not implemented yet. Set .values and .counts.") def normalize(self, axis=None): if axis is None: t = sum(np.sum(x[:, 1]) for x in self) if t > 1e-6: for x in self: x[:, 1] /= t elif axis != 1: raise ValueError("contingencies can be normalized only with axis=1" " or without axis") else: for i, x in enumerate(self): t = np.sum(x[:, 1]) if t > 1e-6: x[:, 1] /= t self.unknowns[i] /= t else: if self.unknowns[i] > 1e-6: self.unknowns[i] = 1 def get_contingency(dat, col_variable, row_variable=None, unknowns=None): variable = _get_variable(col_variable, dat, "col_variable") if isinstance(variable, data.DiscreteVariable): return Discrete(dat, col_variable, row_variable, unknowns) elif isinstance(variable, data.ContinuousVariable): return Continuous(dat, col_variable, row_variable, unknowns) else: raise TypeError("cannot compute distribution of '%s'" % type(variable).__name__) def get_contingencies(dat, skipDiscrete=False, skipContinuous=False): vars = dat.domain.attributes row_var = dat.domain.class_var if row_var is None: raise ValueError("data has no target variable") if skipDiscrete: if skipContinuous: return [] columns = [i for i, var in enumerate(vars) if isinstance(var, data.ContinuousVariable)] elif skipContinuous: columns = [i for i, var in enumerate(vars) if isinstance(var, data.DiscreteVariable)] else: columns = None try: dist_unks = dat._compute_contingency(columns) if columns is None: columns = np.arange(len(vars)) contigs = [] for col, (cont, unks) in zip(columns, dist_unks): contigs.append(get_contingency(cont, vars[col], row_var, unks)) except NotImplementedError: if columns is None: columns = range(len(vars)) contigs = [get_contingency(dat, i) for i in columns] return contigs<|fim▁end|>
def __setitem__(self, index, value):
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Ethcore (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. //! General IO module. //! //! Example usage for creating a network service and adding an IO handler: //! //! ```rust //! extern crate ethcore_io; //! use ethcore_io::*; //! use std::sync::Arc; //! //! struct MyHandler; //! //! #[derive(Clone)] //! struct MyMessage { //! data: u32 //! } //! //! impl IoHandler<MyMessage> for MyHandler { //! fn initialize(&self, io: &IoContext<MyMessage>) { //! io.register_timer(0, 1000).unwrap(); //! } //! //! fn timeout(&self, _io: &IoContext<MyMessage>, timer: TimerToken) { //! println!("Timeout {}", timer); //! } //! //! fn message(&self, _io: &IoContext<MyMessage>, message: &MyMessage) { //! println!("Message {}", message.data); //! } //! } //! //! fn main () { //! let mut service = IoService::<MyMessage>::start().expect("Error creating network service"); //! service.register_handler(Arc::new(MyHandler)).unwrap(); //! //! // Wait for quit condition //! // ... //! // Drop the service //! } //! ``` extern crate mio; #[macro_use] extern crate log as rlog; extern crate slab; extern crate crossbeam; extern crate parking_lot; mod service; mod worker; mod panics; use mio::{Token}; use mio::deprecated::{EventLoop, NotifyError}; use std::fmt; pub use worker::LOCAL_STACK_SIZE; #[derive(Debug)] /// IO Error pub enum IoError { /// Low level error from mio crate Mio(::std::io::Error), /// Error concerning the Rust standard library's IO subsystem. StdIo(::std::io::Error), } impl fmt::Display for IoError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // just defer to the std implementation for now. // we can refine the formatting when more variants are added. match *self { IoError::Mio(ref std_err) => std_err.fmt(f), IoError::StdIo(ref std_err) => std_err.fmt(f), } } } impl From<::std::io::Error> for IoError { fn from(err: ::std::io::Error) -> IoError { IoError::StdIo(err) } } impl<Message> From<NotifyError<service::IoMessage<Message>>> for IoError where Message: Send + Clone { fn from(_err: NotifyError<service::IoMessage<Message>>) -> IoError { IoError::Mio(::std::io::Error::new(::std::io::ErrorKind::ConnectionAborted, "Network IO notification error")) } } /// Generic IO handler. /// All the handler function are called from within IO event loop. /// `Message` type is used as notification data pub trait IoHandler<Message>: Send + Sync where Message: Send + Sync + Clone + 'static { /// Initialize the handler fn initialize(&self, _io: &IoContext<Message>) {} /// Timer function called after a timeout created with `HandlerIo::timeout`. fn timeout(&self, _io: &IoContext<Message>, _timer: TimerToken) {} /// Called when a broadcasted message is received. The message can only be sent from a different IO handler. 
fn message(&self, _io: &IoContext<Message>, _message: &Message) {} /// Called when an IO stream gets closed fn stream_hup(&self, _io: &IoContext<Message>, _stream: StreamToken) {} /// Called when an IO stream can be read from fn stream_readable(&self, _io: &IoContext<Message>, _stream: StreamToken) {} /// Called when an IO stream can be written to fn stream_writable(&self, _io: &IoContext<Message>, _stream: StreamToken) {} /// Register a new stream with the event loop fn register_stream(&self, _stream: StreamToken, _reg: Token, _event_loop: &mut EventLoop<IoManager<Message>>) {} /// Re-register a stream with the event loop fn update_stream(&self, _stream: StreamToken, _reg: Token, _event_loop: &mut EventLoop<IoManager<Message>>) {} /// Deregister a stream. Called whenstream is removed from event loop fn deregister_stream(&self, _stream: StreamToken, _event_loop: &mut EventLoop<IoManager<Message>>) {}<|fim▁hole|>pub use service::TimerToken; pub use service::StreamToken; pub use service::IoContext; pub use service::IoService; pub use service::IoChannel; pub use service::IoManager; pub use service::TOKENS_PER_HANDLER; pub use panics::{PanicHandler, MayPanic, OnPanicListener, ForwardPanic}; #[cfg(test)] mod tests { use std::sync::Arc; use super::*; struct MyHandler; #[derive(Clone)] struct MyMessage { data: u32 } impl IoHandler<MyMessage> for MyHandler { fn initialize(&self, io: &IoContext<MyMessage>) { io.register_timer(0, 1000).unwrap(); } fn timeout(&self, _io: &IoContext<MyMessage>, timer: TimerToken) { println!("Timeout {}", timer); } fn message(&self, _io: &IoContext<MyMessage>, message: &MyMessage) { println!("Message {}", message.data); } } #[test] fn test_service_register_handler () { let service = IoService::<MyMessage>::start().expect("Error creating network service"); service.register_handler(Arc::new(MyHandler)).unwrap(); } }<|fim▁end|>
}
<|file_name|>wikipedia.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2 # -*- coding: utf-8 -*- # https://en.wikipedia.org/w/index.php?title=List_of_computing_and_IT_abbreviations&action=edit import re, urllib2 from collections import defaultdict from BeautifulSoup import BeautifulSoup pull = lambda url: urllib2.urlopen(urllib2.Request(url)) wikip = lambda article: pull('https://en.wikipedia.org/w/index.php?title=%s&action=edit' % article) # todo: List_of_file_formats def stock(): ad = defaultdict(list) for line in open('acronyms'): if '\t' not in line: continue line = line.strip() a,d = line.split('\t') ad[a].append(d) for line in open('acronyms.comp'): if '\t' not in line: continue line = line.strip() a,d = line.split('\t') ad[a].append(d) return ad def exists(key, value, lut): key = key.upper() if key not in lut: return False value = value.upper() return any(v.upper()==value for v in lut[key]) def computing_abbrev(): "This parser is very brittle, but the input is very well formed" wikip = open # uncomment for local debug html = wikip('List_of_computing_and_IT_abbreviations').read() soup = BeautifulSoup(html) text = soup.textarea.contents[0] ad = defaultdict(list) for pair in re.findall('\* \[\[.*—.*', str(text)): try: a,_,d = pair.partition('—') a = a[4:].rpartition('|')[-1].replace(']]', '') d = d.replace('[[', '').replace(']]', '').replace('—', ' - ') ad[a].append(d.strip()) except: #print 'failed on', pair continue return ad def main(): "build all the new lists"<|fim▁hole|> tech = open('acronyms.computing', 'w') tech.write('$ArchLinux: wikipedia computer abbrevs 2018-05-31\n\n') for a,ds in sorted(ad.items()): for d in ds: if exists(a, d, stk): continue tech.write('%s\t%s\n'% (a.upper(), d)) tech.close() if __name__ == '__main__': main()<|fim▁end|>
    # okay, there is just the one for now
    ad = computing_abbrev()
    stk = stock()
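The wiki-markup splitting done inside computing_abbrev() above, traced on one hypothetical input line (the real code pulls the page source and strips the [[...]] link markup the same way):

```python
import re

line = "* [[AES]]—[[Advanced Encryption Standard]]"
pair = re.findall(r"\* \[\[.*—.*", line)[0]
a, _, d = pair.partition("—")              # split acronym from definition
a = a[4:].rpartition("|")[-1].replace("]]", "")
d = d.replace("[[", "").replace("]]", "").replace("—", " - ")
assert (a, d.strip()) == ("AES", "Advanced Encryption Standard")
```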
<|file_name|>serverLeavRoomTest.js<|end_file_name|><|fim▁begin|>//TODO : socket.ioコネクション処理を1.0推奨の非同期方式にする describe('serverクラスのテスト', function() { var SERVER_PORT = process.env.PORT || 3000; var SERVER_URL = 'http://localhost:'+SERVER_PORT; var assert = require('chai').assert; var io = require('socket.io-client'); var server = require('../../../server/server.js'); var http = require('http'); var dbMock = require('./../testData/dbMock.js')(); var app; var testServer; var option = { 'forceNew' : true }; beforeEach(function() { app = http.createServer().listen(SERVER_PORT); testServer = server({ httpServer : app, dao : dbMock<|fim▁hole|> afterEach(function() { app.close(); }); describe('退室系テスト',function(){ it('入室後に退室する',function(done){ var client = io(SERVER_URL, option); client.on('connect',doAuth); function doAuth() { client.emit('auth',{ userId : '[email protected]' }); client.once('successAuth',enterRoom); } function enterRoom() { client.emit('enterRoom',{ roomId : 0 }); client.on('succesEnterRoom',leaveRoom); } function leaveRoom() { client.emit('leaveRoom'); client.on('successLeaveRoom',done); } }); }); });<|fim▁end|>
        });
    });
<|file_name|>euler-169-sum-of-powers-of-2.py<|end_file_name|><|fim▁begin|>"""
sum(2 * 2**j for j in range(i)) == 2 * (2**i - 1) == n

i == log_2(n // 2 + 1)
"""
from math import ceil, log
import time


def count_ways(n, current_power=None, memo=None):
    if memo is None:
        memo = {}
    if current_power is None:
        current_power = ceil(log(n // 2 + 1, 2))
    key = (n, current_power)
    if key in memo:<|fim▁hole|>
        return memo[key]

    current_term = 2 ** current_power
    max_available = 2 * (2 ** (current_power + 1) - 1)
    assert n <= max_available

    next_max_available = 2 * (2 ** current_power - 1)

    ans = 0
    if n >= 2 * current_term:
        if n == 2 * current_term:
            ans += 1
        else:
            ans += count_ways(n - 2 * current_term, current_power - 1, memo)
    if n >= current_term:
        if n == current_term:
            ans += 1
        elif n - current_term <= next_max_available:
            ans += count_ways(n - current_term, current_power - 1, memo)
    if n <= next_max_available:
        ans += count_ways(n, current_power - 1, memo)

    memo[key] = ans
    return ans


t0 = time.time()
print(count_ways(10 ** 25))
t1 = time.time()
print('Total time:', (t1 - t0) * 1000, 'ms')<|fim▁end|>
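A sanity check on the recurrence above (illustrative only, not part of the original file): a tiny brute force over "each power of two used at most twice" reproduces the small values, including the known f(10) = 5 from Project Euler 169.

```python
def brute(n, p=0):
    # Choose how many copies (0, 1 or 2) of 2**p to use, then recurse
    # on the next power; counts representations of n.
    if n == 0:
        return 1
    if 2 ** p > n:
        return 0
    return sum(brute(n - k * 2 ** p, p + 1) for k in (0, 1, 2) if k * 2 ** p <= n)

assert [brute(n) for n in range(1, 11)] == [1, 2, 1, 3, 2, 3, 1, 4, 3, 5]
```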
<|file_name|>carousel_impl.js<|end_file_name|><|fim▁begin|>window.addEvent("domready", function() { var carousel = new iCarousel("carousel_content", { idPrevious: "carousel_prev", idNext: "carousel_next", idToggle: "undefined", item: { klass: "carouselitem_right", size: 265 }, animation: { type: "scroll", duration: 700, amount: 1 } }); $$('.carousel_header a').each(function (el,index){ el.addEvent("click", function(event){ new Event(event).stop(); carousel.goTo(index); $$('.carousel_header a').removeClass('active');<|fim▁hole|><|fim▁end|>
            $('carousel_link'+index).addClass('active');
        });
    });
});
<|file_name|>put.rs<|end_file_name|><|fim▁begin|>extern crate curl; extern crate rustc_serialize; use self::curl::http; pub fn new(server: &str, port: &str, key: &str, data: &str, verbose: bool) { // build url from input values let url = format!("http://{}:{}/v1/kv/{}", server, port, key); // verbose: print out the connection url string if verbose { println!("Attempting connection to {} with {}", &url, &data); } // make connection<|fim▁hole|> // expect a 200 code or error with return code if resp.get_code() != 200 { println!("Unable to handle HTTP response code {}", resp.get_code()); }; }<|fim▁end|>
    let resp = match http::handle().put(url, data).exec() {
        Ok(resp) => resp,
        Err(err) => panic!("error putting k/v. {}", err),
    };
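A rough standard-library Python analogue of the Rust helper above, for comparison; the endpoint shape is taken from the sample's /v1/kv/<key> URL and the function name is invented:

```python
import urllib.request

def put_kv(server, port, key, data, verbose=False):
    url = "http://%s:%s/v1/kv/%s" % (server, port, key)
    if verbose:
        print("Attempting connection to %s with %s" % (url, data))
    req = urllib.request.Request(url, data=data.encode(), method="PUT")
    with urllib.request.urlopen(req) as resp:
        # Expect a 200 code, mirroring the Rust sample's check.
        if resp.getcode() != 200:
            print("Unable to handle HTTP response code %d" % resp.getcode())
```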
<|file_name|>decorated_type_id.hpp<|end_file_name|><|fim▁begin|>// Copyright David Abrahams 2002. Permission to copy, use, // modify, sell and distribute this software is granted provided this // copyright notice appears in all copies. This software is provided // "as is" without express or implied warranty, and with no claim as // to its suitability for any purpose. #ifndef DECORATED_TYPE_ID_DWA2002517_HPP # define DECORATED_TYPE_ID_DWA2002517_HPP # include <boost/python/type_id.hpp> # include <boost/python/detail/indirect_traits.hpp> # include <boost/type_traits/cv_traits.hpp> namespace boost { namespace python { namespace detail { struct decorated_type_info : totally_ordered<decorated_type_info> { enum decoration { const_ = 0x1, volatile_ = 0x2, reference = 0x4 }; decorated_type_info(type_info, decoration = decoration()); inline bool operator<(decorated_type_info const& rhs) const; inline bool operator==(decorated_type_info const& rhs) const; friend BOOST_PYTHON_DECL std::ostream& operator<<(std::ostream&, decorated_type_info const&); operator type_info const&() const; private: // type typedef type_info base_id_t; private: // data members decoration m_decoration; base_id_t m_base_type; }; template <class T> inline decorated_type_info decorated_type_id(boost::type<T>* = 0) { return decorated_type_info( type_id<T>() , decorated_type_info::decoration( (is_const<T>::value || python::detail::is_reference_to_const<T>::value ? decorated_type_info::const_ : 0) | (is_volatile<T>::value || python::detail::is_reference_to_volatile<T>::value ? decorated_type_info::volatile_ : 0) | (is_reference<T>::value ? decorated_type_info::reference : 0) ) ); } inline decorated_type_info::decorated_type_info(type_info base_t, decoration decoration) : m_decoration(decoration) , m_base_type(base_t) { } <|fim▁hole|> && m_base_type < rhs.m_base_type; } inline bool decorated_type_info::operator==(decorated_type_info const& rhs) const { return m_decoration == rhs.m_decoration && m_base_type == rhs.m_base_type; } inline decorated_type_info::operator type_info const&() const { return m_base_type; } BOOST_PYTHON_DECL std::ostream& operator<<(std::ostream&, decorated_type_info const&); }}} // namespace boost::python::detail #endif // DECORATED_TYPE_ID_DWA2002517_HPP<|fim▁end|>
inline bool decorated_type_info::operator<(decorated_type_info const& rhs) const
{
  return m_decoration < rhs.m_decoration
      || m_decoration == rhs.m_decoration
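The operator< / operator== pair in this header is just a lexicographic compare on (decoration, base type). In Python the same ordering falls out of tuple comparison (values illustrative):

```python
a = (1, "int")   # (m_decoration, m_base_type)
b = (1, "long")
c = (2, "int")
assert a < b < c
# Tuple comparison is exactly the hand-written rule in the header:
assert (a < b) == (a[0] < b[0] or (a[0] == b[0] and a[1] < b[1]))
```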
<|file_name|>WebODF.java<|end_file_name|><|fim▁begin|>/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.webodf; import android.os.Bundle; import org.apache.cordova.*; public class WebODF extends CordovaActivity { @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); super.init(); // Set by <content src="index.html" /> in config.xml super.loadUrl(Config.getStartUrl());<|fim▁hole|> //super.loadUrl("file:///android_asset/www/index.html"); } }<|fim▁end|>
<|file_name|>GGraph.cpp<|end_file_name|><|fim▁begin|>// ---------------------------------------------------------------------- // CMP experimental environment // // (c) Nikita Nikitin, UPC LSI // [email protected] // // File GGraph.cpp // Created May 03, 2011 // ---------------------------------------------------------------------- #include "GGraph.hpp" #include "GVertex.hpp" #include "GDEdge.hpp" #include <iostream> using std::cout; using std::endl; namespace cmpex { //======================================================================= /* * Constructors and destructor */ GGraph::GGraph() : vCnt_ (0), eCnt_ (0) {} GGraph::~GGraph() { for (VCIter it = vertices_.begin(); it != vertices_.end(); ++it) { delete *it; } for (ECIter it = edges_.begin(); it != edges_.end(); ++it) { delete *it; } } //=======================================================================<|fim▁hole|>GVertex * GGraph::CreateVertex ( int idx ) { cout << "Vertex created" << endl; return new GVertex(idx, idx%ColNum(), idx/ColNum()); } //======================================================================= /* * Factory method for edge: creates GDEdge object. */ GDEdge * GGraph::CreateEdge ( int idx, GVertex * src, GVertex * dst, bool horizontal ) { cout << "Edge created" << endl; return new GDEdge(idx, src, dst, horizontal); } //======================================================================= /* * Initializer method, creates required number of cols and rows. */ void GGraph::Init ( int colNum, int rowNum ) { colNum_ = colNum; rowNum_ = rowNum; int vNum = colNum * rowNum; for (int i = 0; i < vNum; ++i) { AddVertex(CreateVertex(i)); } ConnectVertices(); } //======================================================================= /* * Create edges and connect vertices. */ void GGraph::ConnectVertices () { for (VCIter it = vertices_.begin(); it != vertices_.end(); ++it) { GVertex * v = *it; // create horizontal edges if ( v->Idx()%ColNum() != ColNum()-1 ) { GVertex * cv = vertices_[v->Idx() + 1]; GDEdge * eout = CreateEdge(edges_.size(), v, cv, true); GDEdge * ein = CreateEdge(edges_.size()+1, cv, v, true); v->EastOut(eout); v->EastIn(ein); cv->WestOut(ein); cv->WestIn(eout); AddEdge(eout); AddEdge(ein); } // create vertical edges if ( v->Idx()/ColNum() != RowNum()-1 ) { GVertex * cv = vertices_[v->Idx() + ColNum()]; GDEdge * eout = CreateEdge(edges_.size(), v, cv, true); GDEdge * ein = CreateEdge(edges_.size()+1, cv, v, true); v->SouthOut(eout); v->SouthIn(ein); cv->NorthOut(ein); cv->NorthIn(eout); AddEdge(eout); AddEdge(ein); } } } //======================================================================= /* * Prints graph. */ void GGraph::Print () const { for (VCIter it = vertices_.begin(); it != vertices_.end(); ++it) { GVertex * v = *it; std::cout << "Vertex " << v->Idx() << "; neighbours:"; if (v->NorthOut()) { std::cout << " North " << v->NorthOut()->Dst()->Idx() << ";"; } if (v->WestOut()) { std::cout << " West " << v->WestOut()->Dst()->Idx() << ";"; } if (v->SouthOut()) { std::cout << " South " << v->SouthOut()->Dst()->Idx() << ";"; } if (v->EastOut()) { std::cout << " East " << v->EastOut()->Dst()->Idx() << ";"; } std::cout << std::endl; } } //======================================================================= }; // namespace cmpex<|fim▁end|>
/*
 * Factory method for vertex: creates GVertex object.
 */
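ConnectVertices above wires every neighbouring cell pair with a matched pair of directed edges. The resulting edge count is easy to verify with a throwaway sketch (grid size illustrative):

```python
cols, rows = 6, 5
edges = []
for idx in range(cols * rows):
    if idx % cols != cols - 1:          # east neighbour exists
        edges += [(idx, idx + 1), (idx + 1, idx)]
    if idx // cols != rows - 1:         # south neighbour exists
        edges += [(idx, idx + cols), (idx + cols, idx)]
# Two directed edges per horizontal and per vertical neighbour pair.
assert len(edges) == 2 * (rows * (cols - 1) + cols * (rows - 1))
```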
<|file_name|>format.rs<|end_file_name|><|fim▁begin|>use std::fmt; use std::cell::RefCell; /// Format all iterator elements lazily, separated by `sep`. /// /// The format value can only be formatted once, after that the iterator is /// exhausted. /// /// See [`.format_with()`](../trait.Itertools.html#method.format_with) for more information. #[derive(Clone)] pub struct FormatWith<'a, I, F> { sep: &'a str, /// FormatWith uses interior mutability because Display::fmt takes &self. inner: RefCell<Option<(I, F)>>, } /// Format all iterator elements lazily, separated by `sep`. /// /// The format value can only be formatted once, after that the iterator is /// exhausted. /// /// See [`.format()`](../trait.Itertools.html#method.format) /// for more information. #[derive(Clone)] pub struct Format<'a, I> { sep: &'a str, /// Format uses interior mutability because Display::fmt takes &self. inner: RefCell<Option<I>>, } pub fn new_format<'a, I, F>(iter: I, separator: &'a str, f: F) -> FormatWith<'a, I, F> where I: Iterator, F: FnMut(I::Item, &mut dyn FnMut(&dyn fmt::Display) -> fmt::Result) -> fmt::Result { FormatWith { sep: separator, inner: RefCell::new(Some((iter, f))), } } <|fim▁hole|> sep: separator, inner: RefCell::new(Some(iter)), } } impl<'a, I, F> fmt::Display for FormatWith<'a, I, F> where I: Iterator, F: FnMut(I::Item, &mut dyn FnMut(&dyn fmt::Display) -> fmt::Result) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let (mut iter, mut format) = match self.inner.borrow_mut().take() { Some(t) => t, None => panic!("FormatWith: was already formatted once"), }; if let Some(fst) = iter.next() { format(fst, &mut |disp: &dyn fmt::Display| disp.fmt(f))?; for elt in iter { if self.sep.len() > 0 { f.write_str(self.sep)?; } format(elt, &mut |disp: &dyn fmt::Display| disp.fmt(f))?; } } Ok(()) } } impl<'a, I> Format<'a, I> where I: Iterator, { fn format<F>(&self, f: &mut fmt::Formatter, mut cb: F) -> fmt::Result where F: FnMut(&I::Item, &mut fmt::Formatter) -> fmt::Result, { let mut iter = match self.inner.borrow_mut().take() { Some(t) => t, None => panic!("Format: was already formatted once"), }; if let Some(fst) = iter.next() { cb(&fst, f)?; for elt in iter { if self.sep.len() > 0 { f.write_str(self.sep)?; } cb(&elt, f)?; } } Ok(()) } } macro_rules! impl_format { ($($fmt_trait:ident)*) => { $( impl<'a, I> fmt::$fmt_trait for Format<'a, I> where I: Iterator, I::Item: fmt::$fmt_trait, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.format(f, fmt::$fmt_trait::fmt) } } )* } } impl_format!{Display Debug UpperExp LowerExp UpperHex LowerHex Octal Binary Pointer}<|fim▁end|>
pub fn new_format_default<'a, I>(iter: I, separator: &'a str) -> Format<'a, I>
    where I: Iterator,
{
    Format {
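Format and FormatWith above are deliberately single-use: fmt() takes the iterator out of the RefCell, so a second Display call panics. A Python analogue of that one-shot contract (illustrative, not an API):

```python
class FormatOnce:
    def __init__(self, items, sep):
        self._state = (iter(items), sep)

    def __str__(self):
        if self._state is None:
            raise RuntimeError("Format: was already formatted once")
        it, sep = self._state
        self._state = None          # consume, like RefCell::take()
        return sep.join(str(x) for x in it)

f = FormatOnce([1, 2, 3], ", ")
assert str(f) == "1, 2, 3"
# A second str(f) raises, mirroring the panic! in fmt().
```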
<|file_name|>AlertTypesListDTO.java<|end_file_name|><|fim▁begin|>package org.wso2.carbon.apimgt.rest.api.publisher.v1.dto; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; import java.util.ArrayList; import java.util.List; import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.AlertTypeDTO; import javax.validation.constraints.*; import io.swagger.annotations.*; import java.util.Objects; import javax.xml.bind.annotation.*; import org.wso2.carbon.apimgt.rest.api.util.annotations.Scope; public class AlertTypesListDTO { private Integer count = null; private List<AlertTypeDTO> alerts = new ArrayList<>(); /** * The number of alerts **/ public AlertTypesListDTO count(Integer count) { this.count = count; return this; } <|fim▁hole|> @JsonProperty("count") public Integer getCount() { return count; } public void setCount(Integer count) { this.count = count; } /** **/ public AlertTypesListDTO alerts(List<AlertTypeDTO> alerts) { this.alerts = alerts; return this; } @ApiModelProperty(value = "") @JsonProperty("alerts") public List<AlertTypeDTO> getAlerts() { return alerts; } public void setAlerts(List<AlertTypeDTO> alerts) { this.alerts = alerts; } @Override public boolean equals(java.lang.Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AlertTypesListDTO alertTypesList = (AlertTypesListDTO) o; return Objects.equals(count, alertTypesList.count) && Objects.equals(alerts, alertTypesList.alerts); } @Override public int hashCode() { return Objects.hash(count, alerts); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class AlertTypesListDTO {\n"); sb.append(" count: ").append(toIndentedString(count)).append("\n"); sb.append(" alerts: ").append(toIndentedString(alerts)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces * (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }<|fim▁end|>
@ApiModelProperty(example = "3", value = "The number of alerts")
<|file_name|>simulation.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015 SensorLab, Jozef Stefan Institute http://sensorlab.ijs.si # # Written by Tomaz Solc, [email protected] # # This work has been partially funded by the European Community through the # 7th Framework Programme project CREW (FP7-ICT-2009-258301). # # This program is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the Free # Software Foundation, either version 3 of the License, or (at your option) # any later version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # more details. # # You should have received a copy of the GNU General Public License along with # this program. If not, see http://www.gnu.org/licenses/ import logging import Queue import random import time from spectrumwars.testbed import TestbedBase, RadioBase, RadioTimeout, RadioError, TestbedError, RadioPacket log = logging.getLogger(__name__) class Radio(RadioBase): RECEIVE_TIMEOUT = 2. def __init__(self, addr, dispatcher, send_delay): super(Radio, self).__init__() self.addr = addr self.neighbor = None self.dispatcher = dispatcher self.q = Queue.Queue() self.frequency = 0 self.bandwidth = 0 self.send_delay = send_delay def _recv(self, addr, bindata, frequency, bandwidth): if self.frequency == frequency and self.bandwidth == bandwidth and self.addr == addr: self.q.put(bindata) def set_configuration(self, frequency, bandwidth, power):<|fim▁hole|> self.bandwidth = bandwidth def binsend(self, bindata): self.dispatcher(self.neighbor, bindata, self.frequency, self.bandwidth) time.sleep(self.send_delay) def binrecv(self, timeout=None): if timeout is None: timeout = self.RECEIVE_TIMEOUT try: bindata = self.q.get(True, timeout) except Queue.Empty: raise RadioTimeout else: return bindata class Testbed(TestbedBase): RADIO_CLASS = Radio def __init__(self, send_delay=.1, frequency_range=64, bandwidth_range=10, power_range=10, packet_size=1024): self.send_delay = float(send_delay) self.frequency_range = int(frequency_range) self.bandwidth_range = int(bandwidth_range) self.power_range = int(power_range) self.RADIO_CLASS.PACKET_SIZE = int(packet_size) + 1 self.radios = [] # for each channel, we keep the timestamp of the last # transmission. we use this for simulated spectrum sensing and # for detecting collisions. self.channels = [0] * self.frequency_range self.i = 0 def _get_radio(self): r = Radio(self.i, self._dispatcher, self.send_delay) self.radios.append(r) self.i += 1 return r def _dispatcher(self, addr, bindata, frequency, bandwidth): now = self.time() has_collision = (now - self.channels[frequency]) > self.send_delay self.channels[frequency] = now if has_collision: # note that when packets collide, the first one goes # through while the later ones fail. this is different # than in reality: all should fail. But this would # be complicated to implement in practice. 
for radio in self.radios: radio._recv(addr, bindata, frequency, bandwidth) else: log.debug("packet collision detected on channel %d" % (frequency,)) def get_radio_pair(self): dst = self._get_radio() src = self._get_radio() dst.neighbor = src.addr src.neighbor = dst.addr return dst, src def get_spectrum(self): spectrum = [] now = self.time() for time in self.channels: if now - time < .5: p = random.randint(-40, -20) else: p = random.randint(-90, -80) spectrum.append(p) return tuple(spectrum) def get_frequency_range(self): return self.frequency_range def get_bandwidth_range(self): return self.bandwidth_range def get_power_range(self): return self.power_range<|fim▁end|>
self.frequency = frequency
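The _dispatcher above delivers a packet only when the gap since the channel's last transmission exceeds send_delay (note the flag is named has_collision but effectively means "channel clear"). A compressed sketch of that window logic with a fake clock, hypothetical names:

```python
send_delay = 0.1
last_tx = {7: 0.0}                       # channel -> last transmission time

def deliver(channel, now):
    ok = (now - last_tx.get(channel, 0.0)) > send_delay
    last_tx[channel] = now               # timestamp updates either way
    return ok

assert deliver(7, 1.00) is True          # idle channel: delivered
assert deliver(7, 1.05) is False         # inside the window: collision
```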
<|file_name|>test_mda.py<|end_file_name|><|fim▁begin|>import unittest from pyxt.mda import * from pyxt.chargen import CharacterGeneratorMock class MDATests(unittest.TestCase): def setUp(self): self.cg = CharacterGeneratorMock(width = 9, height = 14) self.mda = MonochromeDisplayAdapter(self.cg) # Hijack reset so it doesn't call into Pygame during the tests. self.reset_count = 0 self.mda.reset = self.reset_testable def reset_testable(self): self.reset_count += 1 def test_ports_list(self): self.assertEqual(self.mda.get_ports_list(), [0x03B0, 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5, 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA, 0x03BB]) def test_get_memory_size(self): self.assertEqual(self.mda.get_memory_size(), 4096) def test_initial_state(self): self.assertEqual(self.mda.control_reg, 0x00) self.assertEqual(self.mda.control_reg, 0x00) self.assertEqual(self.mda.screen, None) self.assertEqual(self.mda.char_generator, self.cg) self.assertEqual(len(self.mda.video_ram), 4096) def test_mem_write_byte_updates_video_ram(self): self.mda.mem_write_byte(0x0000, 0x41) self.assertEqual(self.mda.video_ram[0x0000], 0x41) def test_mem_write_byte_calls_char_generator_top_left(self): self.mda.mem_write_byte(0x0000, 0x41) self.assertEqual(self.cg.last_blit, (None, (0, 0), 0x41, MDA_GREEN, MDA_BLACK)) def test_mem_write_byte_calls_char_generator_bottom_right(self): self.mda.mem_write_byte(3998, 0xFF) self.assertEqual(self.cg.last_blit, (None, (711, 336), 0xFF, MDA_GREEN, MDA_BLACK)) def test_mem_write_byte_char_before_attribute(self): self.mda.mem_write_byte(3998, 0xFF) self.assertEqual(self.cg.last_blit, (None, (711, 336), 0xFF, MDA_GREEN, MDA_BLACK)) self.mda.mem_write_byte(3999, MDA_ATTR_INTENSITY) self.assertEqual(self.cg.last_blit, (None, (711, 336), 0xFF, MDA_BRIGHT_GREEN, MDA_BLACK)) def test_mem_write_byte_attribute_before_char(self): self.mda.mem_write_byte(3999, MDA_ATTR_INTENSITY) self.assertEqual(self.cg.last_blit, (None, (711, 336), 0x00, MDA_BRIGHT_GREEN, MDA_BLACK)) self.mda.mem_write_byte(3998, 0xFF) self.assertEqual(self.cg.last_blit, (None, (711, 336), 0xFF, MDA_BRIGHT_GREEN, MDA_BLACK)) def test_mem_write_byte_write_off_screen(self): self.mda.mem_write_byte(4000, 0xFF) self.assertEqual(self.cg.last_blit, None) def test_mem_read_byte(self): self.mda.video_ram[77] = 0xA5 self.assertEqual(self.mda.mem_read_byte(77), 0xA5) def test_mem_read_byte_off_screen(self): self.assertEqual(self.mda.mem_read_byte(4000), 0x00) @unittest.skip("We need to initialize Pygame exactly once at startup.") def test_reset_on_high_resolution_enable(self): self.assertEqual(self.reset_count, 0) self.mda.io_write_byte(0x3B8, 0x01) self.assertEqual(self.reset_count, 1) # Second write shouldn't call reset again. self.mda.io_write_byte(0x3B8, 0x01) self.assertEqual(self.reset_count, 1) def test_mem_write_word_at_top_left(self): self.mda.mem_write_word(0x0000, 0x0841) # 'A' with intensity. self.assertEqual(self.mda.video_ram[0x0000], 0x41) self.assertEqual(self.mda.video_ram[0x0001], 0x08) self.assertEqual(self.cg.last_blit, (None, (0, 0), 0x41, MDA_BRIGHT_GREEN, MDA_BLACK)) def test_mem_write_word_at_bottom_right(self): self.mda.mem_write_word(3998, 0x085A) # 'Z' with intensity. self.assertEqual(self.mda.video_ram[3998], 0x5A) self.assertEqual(self.mda.video_ram[3999], 0x08) self.assertEqual(self.cg.last_blit, (None, (711, 336), 0x5A, MDA_BRIGHT_GREEN, MDA_BLACK)) <|fim▁hole|> def test_mem_write_word_at_bottom_right_just_past(self): self.mda.mem_write_word(3999, 0xFF08) # 'Z' with intensity. 
self.assertEqual(self.mda.video_ram[3998], 0x00) # Should be unmodified. self.assertEqual(self.mda.video_ram[3999], 0x08) self.assertEqual(self.cg.last_blit, (None, (711, 336), 0x00, MDA_BRIGHT_GREEN, MDA_BLACK)) def test_mem_read_word(self): self.mda.video_ram[0x0000] = 0x41 self.mda.video_ram[0x0001] = 0x08 self.assertEqual(self.mda.mem_read_word(0x0000), 0x0841) def test_mem_read_word_just_past_the_end(self): self.mda.video_ram[3998] = 0x12 self.mda.video_ram[3999] = 0x34 self.assertEqual(self.mda.mem_read_word(3999), 0x0034) def test_horizontal_retrace_toggles(self): self.assertEqual(self.mda.io_read_byte(0x3BA), 0xF0) self.assertEqual(self.mda.io_read_byte(0x3BA), 0xF1) self.assertEqual(self.mda.io_read_byte(0x3BA), 0xF0) def test_current_pixel_updates_on_status_read(self): self.assertEqual(self.mda.current_pixel, [0, 0]) self.mda.io_read_byte(0x3BA) self.assertEqual(self.mda.current_pixel, [1, 0]) def test_current_pixel_wraps_right(self): self.mda.current_pixel = [719, 0] self.mda.io_read_byte(0x3BA) self.assertEqual(self.mda.current_pixel, [0, 1]) def test_current_pixel_wraps_bottom(self): self.mda.current_pixel = [719, 349] self.mda.io_read_byte(0x3BA) self.assertEqual(self.mda.current_pixel, [0, 0])<|fim▁end|>
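The pixel coordinates asserted in these tests follow from the adapter's layout. Assuming the standard 80x25 MDA text grid with two bytes (char, attribute) per cell and the mock's 9x14 glyphs, byte offset 3998 lands on the bottom-right cell:

```python
offset = 3998
cell = offset // 2                  # 1999, the last of 80 * 25 cells
col, row = cell % 80, cell // 80
assert (col * 9, row * 14) == (711, 336)
```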
<|file_name|>get-debug.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1<|fim▁hole|><|fim▁end|>
oid sha256:8a773d7f07d6a2ce3c6379427f1a5aa12f7545b4da9579eae9b6a31ec13a11b7 size 43336
<|file_name|>constants.py<|end_file_name|><|fim▁begin|>DEBUG = 0 # cardinal diretions directions = ("left","up","right","down") # logic maxExamined = 75000 # maximum number of tries when solving maxMoves = 19 # maximum number of moves cullFrequency = 75000 # number of tries per cull update cullCutoff = 1.2 # fraction of average to cull # grid size gridRows = 5 gridColumns = 6 # text strings<|fim▁hole|>textChoosePaint = "Choose a color to paint:" textSolve = "Solve" textTitle = "Puzzle and Dragons Helper" # orbs orbDefault = "light" orbDefaultConfig = ("heal","light","wood","wood","fire","light","dark","heal","wood","water","heal","dark","fire","light","light","fire","fire","wood","heal","wood","dark","wood","water","light","light","dark","heal","heal","fire","dark") orbDefaultStrength = 100 orbList = ("heal","fire","water","wood","light","dark") # orb image URLs orbImageURL = dict(light="img/light.png", dark="img/dark.png", fire="img/fire.png", water="img/water.png", wood="img/wood.png", heal="img/heal.png", bg="img/bgOrb.png" ); # TKinter styles tkButtonInactive = "flat" tkButtonActive = "groove" tkButtonBorder = 3 tkOrbStrengthEntryWidth = 7<|fim▁end|>
textCalculateCurrentCombos = "Calculate Damage"
textClose = "Close"
textDamageDisplayAmount = "Total: "
<|file_name|>intention_test.go<|end_file_name|><|fim▁begin|>package state import ( "testing" "time" "github.com/hashicorp/consul/agent/structs" "github.com/hashicorp/go-memdb" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestStore_IntentionGet_none(t *testing.T) { assert := assert.New(t) s := testStateStore(t) // Querying with no results returns nil. ws := memdb.NewWatchSet() idx, res, err := s.IntentionGet(ws, testUUID()) assert.Equal(uint64(1), idx) assert.Nil(res) assert.Nil(err) } func TestStore_IntentionSetGet_basic(t *testing.T) { assert := assert.New(t) s := testStateStore(t) // Call Get to populate the watch set ws := memdb.NewWatchSet() _, _, err := s.IntentionGet(ws, testUUID()) assert.Nil(err) // Build a valid intention ixn := &structs.Intention{ ID: testUUID(), SourceNS: "default", SourceName: "*", DestinationNS: "default", DestinationName: "web", Meta: map[string]string{}, } // Inserting a with empty ID is disallowed. assert.NoError(s.IntentionSet(1, ixn)) // Make sure the index got updated. assert.Equal(uint64(1), s.maxIndex(intentionsTableName)) assert.True(watchFired(ws), "watch fired") // Read it back out and verify it. expected := &structs.Intention{ ID: ixn.ID, SourceNS: "default", SourceName: "*", DestinationNS: "default", DestinationName: "web", Meta: map[string]string{}, RaftIndex: structs.RaftIndex{ CreateIndex: 1, ModifyIndex: 1, }, } expected.UpdatePrecedence() ws = memdb.NewWatchSet() idx, actual, err := s.IntentionGet(ws, ixn.ID) assert.NoError(err) assert.Equal(expected.CreateIndex, idx) assert.Equal(expected, actual) // Change a value and test updating ixn.SourceNS = "foo" assert.NoError(s.IntentionSet(2, ixn)) // Change a value that isn't in the unique 4 tuple and check we don't // incorrectly consider this a duplicate when updating. ixn.Action = structs.IntentionActionDeny assert.NoError(s.IntentionSet(2, ixn)) // Make sure the index got updated. assert.Equal(uint64(2), s.maxIndex(intentionsTableName)) assert.True(watchFired(ws), "watch fired") // Read it back and verify the data was updated expected.SourceNS = ixn.SourceNS expected.Action = structs.IntentionActionDeny expected.ModifyIndex = 2 ws = memdb.NewWatchSet() idx, actual, err = s.IntentionGet(ws, ixn.ID) assert.NoError(err) assert.Equal(expected.ModifyIndex, idx) assert.Equal(expected, actual) // Attempt to insert another intention with duplicate 4-tuple ixn = &structs.Intention{ ID: testUUID(), SourceNS: "default", SourceName: "*", DestinationNS: "default", DestinationName: "web", Meta: map[string]string{}, } // Duplicate 4-tuple should cause an error ws = memdb.NewWatchSet() assert.Error(s.IntentionSet(3, ixn)) // Make sure the index did NOT get updated. assert.Equal(uint64(2), s.maxIndex(intentionsTableName)) assert.False(watchFired(ws), "watch not fired") } func TestStore_IntentionSet_emptyId(t *testing.T) { assert := assert.New(t) s := testStateStore(t) ws := memdb.NewWatchSet() _, _, err := s.IntentionGet(ws, testUUID()) assert.NoError(err) // Inserting a with empty ID is disallowed. err = s.IntentionSet(1, &structs.Intention{}) assert.Error(err) assert.Contains(err.Error(), ErrMissingIntentionID.Error()) // Index is not updated if nothing is saved. 
assert.Equal(s.maxIndex(intentionsTableName), uint64(0)) assert.False(watchFired(ws), "watch fired") } func TestStore_IntentionSet_updateCreatedAt(t *testing.T) { assert := assert.New(t) s := testStateStore(t) // Build a valid intention now := time.Now() ixn := structs.Intention{ ID: testUUID(), CreatedAt: now, } // Insert assert.NoError(s.IntentionSet(1, &ixn)) // Change a value and test updating ixnUpdate := ixn ixnUpdate.CreatedAt = now.Add(10 * time.Second) assert.NoError(s.IntentionSet(2, &ixnUpdate)) // Read it back and verify _, actual, err := s.IntentionGet(nil, ixn.ID) assert.NoError(err) assert.Equal(now, actual.CreatedAt) } func TestStore_IntentionSet_metaNil(t *testing.T) { assert := assert.New(t) s := testStateStore(t) // Build a valid intention ixn := structs.Intention{ ID: testUUID(), } // Insert assert.NoError(s.IntentionSet(1, &ixn)) // Read it back and verify _, actual, err := s.IntentionGet(nil, ixn.ID) assert.NoError(err) assert.NotNil(actual.Meta) } func TestStore_IntentionSet_metaSet(t *testing.T) { assert := assert.New(t) s := testStateStore(t) // Build a valid intention ixn := structs.Intention{ ID: testUUID(), Meta: map[string]string{"foo": "bar"}, } // Insert assert.NoError(s.IntentionSet(1, &ixn)) // Read it back and verify _, actual, err := s.IntentionGet(nil, ixn.ID) assert.NoError(err) assert.Equal(ixn.Meta, actual.Meta) } func TestStore_IntentionDelete(t *testing.T) { assert := assert.New(t) s := testStateStore(t) // Call Get to populate the watch set ws := memdb.NewWatchSet() _, _, err := s.IntentionGet(ws, testUUID()) assert.NoError(err) // Create ixn := &structs.Intention{ID: testUUID()} assert.NoError(s.IntentionSet(1, ixn)) // Make sure the index got updated. assert.Equal(s.maxIndex(intentionsTableName), uint64(1)) assert.True(watchFired(ws), "watch fired") // Delete assert.NoError(s.IntentionDelete(2, ixn.ID)) // Make sure the index got updated. assert.Equal(s.maxIndex(intentionsTableName), uint64(2)) assert.True(watchFired(ws), "watch fired") // Sanity check to make sure it's not there. idx, actual, err := s.IntentionGet(nil, ixn.ID) assert.NoError(err) assert.Equal(idx, uint64(2)) assert.Nil(actual) } func TestStore_IntentionsList(t *testing.T) { assert := assert.New(t) s := testStateStore(t) // Querying with no results returns nil. ws := memdb.NewWatchSet() idx, res, err := s.Intentions(ws) assert.NoError(err) assert.Nil(res) assert.Equal(uint64(1), idx) // Create some intentions ixns := structs.Intentions{ &structs.Intention{ ID: testUUID(), Meta: map[string]string{}, }, &structs.Intention{ ID: testUUID(), Meta: map[string]string{}, }, } // Force deterministic sort order ixns[0].ID = "a" + ixns[0].ID[1:] ixns[1].ID = "b" + ixns[1].ID[1:] // Create for i, ixn := range ixns { assert.NoError(s.IntentionSet(uint64(1+i), ixn)) } assert.True(watchFired(ws), "watch fired") // Read it back and verify. expected := structs.Intentions{ &structs.Intention{ ID: ixns[0].ID, Meta: map[string]string{}, RaftIndex: structs.RaftIndex{ CreateIndex: 1, ModifyIndex: 1, }, }, &structs.Intention{ ID: ixns[1].ID, Meta: map[string]string{}, RaftIndex: structs.RaftIndex{ CreateIndex: 2, ModifyIndex: 2, }, }, } for i := range expected { expected[i].UpdatePrecedence() // to match what is returned... } idx, actual, err := s.Intentions(nil) assert.NoError(err) assert.Equal(idx, uint64(2)) assert.Equal(expected, actual) } // Test the matrix of match logic. 
// // Note that this doesn't need to test the intention sort logic exhaustively // since this is tested in their sort implementation in the structs. func TestStore_IntentionMatch_table(t *testing.T) { type testCase struct { Name string Insert [][]string // List of intentions to insert Query [][]string // List of intentions to match Expected [][][]string // List of matches, where each match is a list of intentions } cases := []testCase{ { "single exact namespace/name", [][]string{ {"foo", "*"}, {"foo", "bar"}, {"foo", "baz"}, // shouldn't match {"bar", "bar"}, // shouldn't match {"bar", "*"}, // shouldn't match {"*", "*"}, }, [][]string{ {"foo", "bar"}, }, [][][]string{ { {"foo", "bar"}, {"foo", "*"}, {"*", "*"}, }, }, }, { "multiple exact namespace/name", [][]string{ {"foo", "*"}, {"foo", "bar"}, {"foo", "baz"}, // shouldn't match {"bar", "bar"}, {"bar", "*"}, }, [][]string{ {"foo", "bar"}, {"bar", "bar"}, }, [][][]string{ { {"foo", "bar"}, {"foo", "*"}, }, { {"bar", "bar"}, {"bar", "*"},<|fim▁hole|> { "single exact namespace/name with duplicate destinations", [][]string{ // 4-tuple specifies src and destination to test duplicate destinations // with different sources. We flip them around to test in both // directions. The first pair are the ones searched on in both cases so // the duplicates need to be there. {"foo", "bar", "foo", "*"}, {"foo", "bar", "bar", "*"}, {"*", "*", "*", "*"}, }, [][]string{ {"foo", "bar"}, }, [][][]string{ { // Note the first two have the same precedence so we rely on arbitrary // lexicographical tie-break behavior. {"foo", "bar", "bar", "*"}, {"foo", "bar", "foo", "*"}, {"*", "*", "*", "*"}, }, }, }, } // testRunner implements the test for a single case, but can be // parameterized to run for both source and destination so we can // test both cases. 
testRunner := func(t *testing.T, tc testCase, typ structs.IntentionMatchType) { // Insert the set assert := assert.New(t) s := testStateStore(t) var idx uint64 = 1 for _, v := range tc.Insert { ixn := &structs.Intention{ID: testUUID()} switch typ { case structs.IntentionMatchDestination: ixn.DestinationNS = v[0] ixn.DestinationName = v[1] if len(v) == 4 { ixn.SourceNS = v[2] ixn.SourceName = v[3] } case structs.IntentionMatchSource: ixn.SourceNS = v[0] ixn.SourceName = v[1] if len(v) == 4 { ixn.DestinationNS = v[2] ixn.DestinationName = v[3] } } assert.NoError(s.IntentionSet(idx, ixn)) idx++ } // Build the arguments args := &structs.IntentionQueryMatch{Type: typ} for _, q := range tc.Query { args.Entries = append(args.Entries, structs.IntentionMatchEntry{ Namespace: q[0], Name: q[1], }) } // Match _, matches, err := s.IntentionMatch(nil, args) assert.NoError(err) // Should have equal lengths require.Len(t, matches, len(tc.Expected)) // Verify matches for i, expected := range tc.Expected { var actual [][]string for _, ixn := range matches[i] { switch typ { case structs.IntentionMatchDestination: if len(expected) > 1 && len(expected[0]) == 4 { actual = append(actual, []string{ ixn.DestinationNS, ixn.DestinationName, ixn.SourceNS, ixn.SourceName, }) } else { actual = append(actual, []string{ixn.DestinationNS, ixn.DestinationName}) } case structs.IntentionMatchSource: if len(expected) > 1 && len(expected[0]) == 4 { actual = append(actual, []string{ ixn.SourceNS, ixn.SourceName, ixn.DestinationNS, ixn.DestinationName, }) } else { actual = append(actual, []string{ixn.SourceNS, ixn.SourceName}) } } } assert.Equal(expected, actual) } } for _, tc := range cases { t.Run(tc.Name+" (destination)", func(t *testing.T) { testRunner(t, tc, structs.IntentionMatchDestination) }) t.Run(tc.Name+" (source)", func(t *testing.T) { testRunner(t, tc, structs.IntentionMatchSource) }) } } func TestStore_Intention_Snapshot_Restore(t *testing.T) { assert := assert.New(t) s := testStateStore(t) // Create some intentions. ixns := structs.Intentions{ &structs.Intention{ DestinationName: "foo", }, &structs.Intention{ DestinationName: "bar", }, &structs.Intention{ DestinationName: "baz", }, } // Force the sort order of the UUIDs before we create them so the // order is deterministic. id := testUUID() ixns[0].ID = "a" + id[1:] ixns[1].ID = "b" + id[1:] ixns[2].ID = "c" + id[1:] // Now create for i, ixn := range ixns { assert.NoError(s.IntentionSet(uint64(4+i), ixn)) } // Snapshot the queries. snap := s.Snapshot() defer snap.Close() // Alter the real state store. assert.NoError(s.IntentionDelete(7, ixns[0].ID)) // Verify the snapshot. assert.Equal(snap.LastIndex(), uint64(6)) // Expect them sorted in insertion order expected := structs.Intentions{ &structs.Intention{ ID: ixns[0].ID, DestinationName: "foo", Meta: map[string]string{}, RaftIndex: structs.RaftIndex{ CreateIndex: 4, ModifyIndex: 4, }, }, &structs.Intention{ ID: ixns[1].ID, DestinationName: "bar", Meta: map[string]string{}, RaftIndex: structs.RaftIndex{ CreateIndex: 5, ModifyIndex: 5, }, }, &structs.Intention{ ID: ixns[2].ID, DestinationName: "baz", Meta: map[string]string{}, RaftIndex: structs.RaftIndex{ CreateIndex: 6, ModifyIndex: 6, }, }, } for i := range expected { expected[i].UpdatePrecedence() // to match what is returned... } dump, err := snap.Intentions() assert.NoError(err) assert.Equal(expected, dump) // Restore the values into a new state store. 
func() { s := testStateStore(t) restore := s.Restore() for _, ixn := range dump { assert.NoError(restore.Intention(ixn)) } restore.Commit() // Read the restored values back out and verify that they match. Note that // Intentions are returned precedence sorted unlike the snapshot so we need // to rearrange the expected slice some. expected[0], expected[1], expected[2] = expected[1], expected[2], expected[0] idx, actual, err := s.Intentions(nil) assert.NoError(err) assert.Equal(idx, uint64(6)) assert.Equal(expected, actual) }() }<|fim▁end|>
}, }, },
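The expected outputs in the match table above encode the intention precedence rule: an exact namespace/name pair outranks a namespace wildcard, which outranks the global wildcard. For reference, that ordering can be sketched standalone in Python — the helper below is invented for illustration and mirrors only the expectations listed in the table, not Consul's actual implementation:

def match_intentions(entries, ns, name):
    """Return the entries matching (ns, name), most specific first."""
    def applies(e):
        return e[0] in (ns, "*") and e[1] in (name, "*")

    def precedence(e):
        # Fewer wildcards sorts earlier; exact entries come first.
        return (e[0] == "*", e[1] == "*", e[0], e[1])

    return sorted(filter(applies, entries), key=precedence)

entries = [("foo", "*"), ("foo", "bar"), ("foo", "baz"),
           ("bar", "bar"), ("bar", "*"), ("*", "*")]
# Reproduces the "single exact namespace/name" expectation above.
assert match_intentions(entries, "foo", "bar") == \
    [("foo", "bar"), ("foo", "*"), ("*", "*")]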
<|file_name|>ad_src_tf_phase_misfit.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- """ An implementation of the time frequency phase misfit and adjoint source after Fichtner et al. (2008). :copyright: Lion Krischer ([email protected]), 2013 :license: GNU General Public License, Version 3 (http://www.gnu.org/copyleft/gpl.html) """ import warnings import numexpr as ne import numpy as np import obspy from obspy.signal.interpolation import lanczos_interpolation from lasif import LASIFAdjointSourceCalculationError from lasif.adjoint_sources import time_frequency, utils eps = np.spacing(1) def adsrc_tf_phase_misfit(t, data, synthetic, min_period, max_period, plot=False, max_criterion=7.0): """ :rtype: dictionary :returns: Return a dictionary with three keys: * adjoint_source: The calculated adjoint source as a numpy array * misfit: The misfit value * messages: A list of strings giving additional hints to what happened in the calculation. """ # Assumes that t starts at 0. Pad your data if that is not the case - # Parts with zeros are essentially skipped making it fairly efficient. assert t[0] == 0 messages = [] # Internal sampling interval. Some explanations for this "magic" number. # LASIF's preprocessing allows no frequency content with smaller periods # than min_period / 2.2 (see function_templates/preprocesssing_function.py # for details). Assuming most users don't change this, this is equal to # the Nyquist frequency and the largest possible sampling interval to # catch everything is min_period / 4.4. # # The current choice is historic as changing does (very slightly) chance # the calculated misfit and we don't want to disturb inversions in # progress. The difference is likely minimal in any case. We might have # same aliasing into the lower frequencies but the filters coupled with # the TF-domain weighting will get rid of them in essentially all # realistically occurring cases. dt_new = max(float(int(min_period / 3.0)), t[1] - t[0]) # New time axis ti = utils.matlab_range(t[0], t[-1], dt_new) # Make sure its odd - that avoid having to deal with some issues # regarding frequency bin interpolation. Now positive and negative # frequencies will always be all symmetric. Data is assumed to be # tapered in any case so no problem are to be expected. if not len(ti) % 2: ti = ti[:-1] # Interpolate both signals to the new time axis - this massively speeds # up the whole procedure as most signals are highly oversampled. The # adjoint source at the end is re-interpolated to the original sampling # points. original_data = data original_synthetic = synthetic data = lanczos_interpolation( data=data, old_start=t[0], old_dt=t[1] - t[0], new_start=t[0], new_dt=dt_new, new_npts=len(ti), a=8, window="blackmann") synthetic = lanczos_interpolation( data=synthetic, old_start=t[0], old_dt=t[1] - t[0], new_start=t[0], new_dt=dt_new, new_npts=len(ti), a=8, window="blackmann") original_time = t t = ti # ------------------------------------------------------------------------- # Compute time-frequency representations # Window width is twice the minimal period. 
width = 2.0 * min_period # Compute time-frequency representation of the cross-correlation _, _, tf_cc = time_frequency.time_frequency_cc_difference( t, data, synthetic, width) # Compute the time-frequency representation of the synthetic tau, nu, tf_synth = time_frequency.time_frequency_transform(t, synthetic, width) # ------------------------------------------------------------------------- # compute tf window and weighting function # noise taper: down-weight tf amplitudes that are very low tf_cc_abs = np.abs(tf_cc) m = tf_cc_abs.max() / 10.0 # NOQA weight = ne.evaluate("1.0 - exp(-(tf_cc_abs ** 2) / (m ** 2))") nu_t = nu.T # highpass filter (periods longer than max_period are suppressed # exponentially) weight *= (1.0 - np.exp(-(nu_t * max_period) ** 2)) # lowpass filter (periods shorter than min_period are suppressed # exponentially) nu_t_large = np.zeros(nu_t.shape) nu_t_small = np.zeros(nu_t.shape) thres = (nu_t <= 1.0 / min_period) nu_t_large[np.invert(thres)] = 1.0 nu_t_small[thres] = 1.0 weight *= (np.exp(-10.0 * np.abs(nu_t * min_period - 1.0)) * nu_t_large + nu_t_small) # normalisation weight /= weight.max() # computation of phase difference, make quality checks and misfit --------- # Compute the phase difference. # DP = np.imag(np.log(m + tf_cc / (2 * m + np.abs(tf_cc)))) DP = np.angle(tf_cc) # Attempt to detect phase jumps by taking the derivatives in time and # frequency direction. 0.7 is an emperical value. abs_weighted_DP = np.abs(weight * DP) _x = abs_weighted_DP.max() # NOQA test_field = ne.evaluate("weight * DP / _x") criterion_1 = np.sum([np.abs(np.diff(test_field, axis=0)) > 0.7]) criterion_2 = np.sum([np.abs(np.diff(test_field, axis=1)) > 0.7]) criterion = np.sum([criterion_1, criterion_2]) # Compute the phase misfit dnu = nu[1] - nu[0] i = ne.evaluate("sum(weight ** 2 * DP ** 2)") # inserted by Nienke Blom, 22-11-2016 weighted_DP = ne.evaluate("weight * DP") phasediff_integral = float(ne.evaluate("sum(weighted_DP * dnu * dt_new)")) mean_delay = np.mean(weighted_DP) wDP = weighted_DP.flatten() wDP_thresh = wDP[abs(wDP) > 0.1 * max(wDP, key=lambda x: abs(x))] median_delay = np.median(wDP_thresh) max_delay = max(wDP, key=lambda x: abs(x)) phase_misfit = np.sqrt(i * dt_new * dnu) # Sanity check. Should not occur. if np.isnan(phase_misfit): msg = "The phase misfit is NaN." raise LASIFAdjointSourceCalculationError(msg) # The misfit can still be computed, even if not adjoint source is # available. if criterion > max_criterion: warning = ("Possible phase jump detected. Misfit included. No " "adjoint source computed. Criterion: %.1f - Max allowed " "criterion: %.1f" % (criterion, max_criterion)) warnings.warn(warning) messages.append(warning) ret_dict = { "adjoint_source": None, "misfit_value": phase_misfit, "details": {"messages": messages, #"weighted_DP": weighted_DP, #"weight": weight, #"DP": DP, "mean_delay": mean_delay, # added NAB 30-8-2017 "phasediff_integral": phasediff_integral, # added NAB 22-11-2016, edited 30-8-2017 "median_delay": median_delay, # added NAB 22-11-2016, edited 30-8-2017 "max_delay": max_delay} # added NAB 31-8-2017 } return ret_dict # Make kernel for the inverse tf transform idp = ne.evaluate( "weight ** 2 * DP * tf_synth / (m + abs(tf_synth) ** 2)") # Invert tf transform and make adjoint source ad_src, it, I = time_frequency.itfa(tau, idp, width) # Interpolate both signals to the new time axis ad_src = lanczos_interpolation( # Pad with a couple of zeros in case some where lost in all # these resampling operations. 
The first sample should not # change the time. data=np.concatenate([ad_src.imag, np.zeros(100)]), old_start=tau[0], old_dt=tau[1] - tau[0], new_start=original_time[0], new_dt=original_time[1] - original_time[0], new_npts=len(original_time), a=8, window="blackmann") # Divide by the misfit and change sign. ad_src /= (phase_misfit + eps) ad_src = -1.0 * np.diff(ad_src) / (t[1] - t[0]) # Taper at both ends. Exploit ObsPy to not have to deal with all the # nasty things. ad_src = \ obspy.Trace(ad_src).taper(max_percentage=0.05, type="hann").data # Reverse time and add a leading zero so the adjoint source has the # same length as the input time series. ad_src = ad_src[::-1] ad_src = np.concatenate([[0.0], ad_src]) # Plot if requested. ------------------------------------------------------ if plot: import matplotlib as mpl import matplotlib.pyplot as plt plt.style.use("seaborn-whitegrid") from lasif.colors import get_colormap if isinstance(plot, mpl.figure.Figure): fig = plot else: fig = plt.gcf() # Manually set-up the axes for full control. l, b, w, h = 0.1, 0.05, 0.80, 0.22 rect = l, b + 3 * h, w, h waveforms_axis = fig.add_axes(rect) rect = l, b + h, w, 2 * h tf_axis = fig.add_axes(rect) rect = l, b, w, h adj_src_axis = fig.add_axes(rect) rect = l + w + 0.02, b, 1.0 - (l + w + 0.02) - 0.05, 4 * h cm_axis = fig.add_axes(rect) # Plot the weighted phase difference. weighted_phase_difference = (DP * weight).transpose() mappable = tf_axis.pcolormesh( tau, nu, weighted_phase_difference, vmin=-1.0, vmax=1.0, cmap=get_colormap("tomo_full_scale_linear_lightness_r"), shading="gouraud", zorder=-10) tf_axis.grid(True) tf_axis.grid(True, which='minor', axis='both', linestyle='-', color='k') cm = fig.colorbar(mappable, cax=cm_axis) cm.set_label("Phase difference in radian", fontsize="large") # Various texts on the time frequency domain plot.<|fim▁hole|> fontsize="large", color="#C25734", fontweight=900, verticalalignment="bottom", horizontalalignment="right") txt = "Weighted Phase Difference - red is a phase advance of the " \ "synthetics" tf_axis.text(x=0.99, y=0.95, s=txt, fontsize="large", color="0.1", transform=tf_axis.transAxes, verticalalignment="top", horizontalalignment="right") if messages: message = "\n".join(messages) tf_axis.text(x=0.99, y=0.98, s=message, transform=tf_axis.transAxes, bbox=dict(facecolor='red', alpha=0.8), verticalalignment="top", horizontalalignment="right") # Adjoint source. adj_src_axis.plot(original_time, ad_src[::-1], color="0.1", lw=2, label="Adjoint source (non-time-reversed)") adj_src_axis.legend() # Waveforms. waveforms_axis.plot(original_time, original_data, color="0.1", lw=2, label="Observed") waveforms_axis.plot(original_time, original_synthetic, color="#C11E11", lw=2, label="Synthetic") waveforms_axis.legend() # Set limits for all axes. tf_axis.set_ylim(0, 2.0 / min_period) tf_axis.set_xlim(0, tau[-1]) adj_src_axis.set_xlim(0, tau[-1]) waveforms_axis.set_xlim(0, tau[-1]) waveforms_axis.set_ylabel("Velocity [m/s]", fontsize="large") tf_axis.set_ylabel("Period [s]", fontsize="large") adj_src_axis.set_xlabel("Seconds since event", fontsize="large") # Hack to keep ticklines but remove the ticks - there is probably a # better way to do this. 
waveforms_axis.set_xticklabels([ "" for _i in waveforms_axis.get_xticks()]) tf_axis.set_xticklabels(["" for _i in tf_axis.get_xticks()]) _l = tf_axis.get_ylim() _r = _l[1] - _l[0] _t = tf_axis.get_yticks() _t = _t[(_l[0] + 0.1 * _r < _t) & (_t < _l[1] - 0.1 * _r)] tf_axis.set_yticks(_t) tf_axis.set_yticklabels(["%.1fs" % (1.0 / _i) for _i in _t]) waveforms_axis.get_yaxis().set_label_coords(-0.08, 0.5) tf_axis.get_yaxis().set_label_coords(-0.08, 0.5) fig.suptitle("Time Frequency Phase Misfit and Adjoint Source", fontsize="xx-large") ret_dict = { "adjoint_source": ad_src, "misfit_value": phase_misfit, "details": {"messages": messages, #"weighted_DP": weighted_DP, #"weight": weight, #"DP": DP, "mean_delay": mean_delay, # added NAB 30-8-2017 "phasediff_integral": phasediff_integral, # added NAB 22-11-2016, edited 30-8-2017 "median_delay": median_delay, # added NAB 22-11-2016, edited 30-8-2017 "max_delay": max_delay} # added NAB 31-8-2017 } # print "the phasedifference integral is "+str(ret_dict['details']['phasediff_integral']) return ret_dict<|fim▁end|>
text = "Misfit: %.4f" % phase_misfit tf_axis.text(x=0.99, y=0.02, s=text, transform=tf_axis.transAxes,
<|file_name|>classes_75.js<|end_file_name|><|fim▁begin|>var searchData=<|fim▁hole|> ['utils',['utils',['../classsrc_1_1tests_1_1utils_1_1utils_1_1utils.html',1,'src::tests::utils::utils']]] ];<|fim▁end|>
[
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>error_chain! {<|fim▁hole|><|fim▁end|>
}
<|file_name|>bibexport_method_fieldexporter_webinterface.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ## $Id: webmessage_webinterface.py,v 1.13 2008/03/12 16:48:08 tibor Exp $ ## ## This file is part of Invenio. ## Copyright (C) 2009, 2010, 2011 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """FieldExporter web interface""" __revision__ = "$Id: webmessage_webinterface.py,v 1.13 2008/03/12 16:48:08 tibor Exp $" __lastupdated__ = """$Date: 2008/03/12 16:48:08 $""" import re from invenio.webpage import page from invenio.webinterface_handler import WebInterfaceDirectory, \ wash_urlargd from invenio.urlutils import redirect_to_url from invenio.config import CFG_SITE_URL, \ CFG_SITE_SECURE_URL from invenio.dateutils import convert_datestruct_to_datetext, \ convert_datetext_to_datestruct from invenio.messages import gettext_set_language from invenio.bibexport_method_fieldexporter import get_css, \ get_navigation_menu, \ perform_request_edit_query, \ perform_request_edit_job, \ perform_request_jobs, \ perform_request_new_job, \ perform_request_save_job, \ perform_request_delete_jobs, \ perform_request_run_jobs, \ perform_request_job_queries, \ perform_request_new_query, \ perform_request_save_query, \ perform_request_delete_queries, \ perform_request_run_queries, \ perform_request_job_history, \ perform_request_job_results, \ perform_request_display_job_result, \ perform_request_download_job_result, \ AccessDeniedError from invenio.bibexport_method_fieldexporter_dblayer import Job, \ Query, \ JobResult from invenio.webuser import collect_user_info, \ page_not_authorized from invenio.access_control_engine import acc_authorize_action class WebInterfaceFieldExporterPages(WebInterfaceDirectory): """Defines the set of /fieldexporter pages.""" _exports = ["", "jobs", "edit_job", "job_queries", "edit_query", "job_results", "display_job_result", "download_job_result", "history", "not_authorized"] # constats containig URL to the pages _EXPORT_URL = "%s/exporter" % (CFG_SITE_URL, ) _JOBS_URL = "%s/exporter/jobs" % (CFG_SITE_URL, ) _EDIT_JOB_URL = "%s/exporter/edit_job" % (CFG_SITE_URL, ) _EDIT_QUERY_URL = "%s/exporter/edit_query" % (CFG_SITE_URL, ) _JOB_QUERIES_URL = "%s/exporter/job_queries" % (CFG_SITE_URL, ) _JOB_HISTORY_URL = "%s/exporter/history" % (CFG_SITE_URL, ) _NOT_AUTHORIZED_URL = "%s/exporter/not_authorized" % (CFG_SITE_URL, ) _LOGIN_URL = "%s/youraccount/login" % (CFG_SITE_SECURE_URL,) _NAVTRAIL_EXPORT = """<a href="/exporter" class="navtrail">Export</a>""" def index(self, req, form): """ The function called by default""" redirect_to_url(req, self._JOB_HISTORY_URL) __call__ = index def jobs(self, req, form): """Displays all the jobs of a given user and allows creation, deletion and execution of jobs""" argd = wash_urlargd(form, { "new_button": (str, ""), "run_button": (str, ""), "delete_button": (str, ""), "selected_jobs": (list, "") }) 
<|fim▁hole|> self._check_user_credentials(req, language) user_id = self._get_user_id(req) try: # if the form is submitted through some of the buttons # we should perform the appropriate action if argd["new_button"]: self._redirect_to_page(req, self._EDIT_JOB_URL, language) elif argd["delete_button"]: job_ids = argd["selected_jobs"] perform_request_delete_jobs(job_ids = job_ids, user_id = user_id, language = language) elif argd["run_button"]: job_ids = argd["selected_jobs"] perform_request_run_jobs(job_ids = job_ids, user_id = user_id, language = language) self._redirect_to_page(req, self._JOB_HISTORY_URL, language) user_id = self._get_user_id(req) body = perform_request_jobs(user_id = user_id, language = language) except AccessDeniedError: self._redirect_to_not_authorised_page(req, language) return page(title = _("Export Job Overview"), metaheaderadd = get_css(), body = body, req = req, navmenuid = "fieldexporter", titleprologue = get_navigation_menu(language), navtrail = self._NAVTRAIL_EXPORT, language = language) def edit_job(self, req, form): """Edits an existing job or creates a new one""" # Create an empty job and use its default values # to init missing parameters job = Job() argd = wash_urlargd(form, {"job_name": (str, job.get_name()), "output_directory": (str, job.get_output_directory()), "job_frequency": (int, job.get_frequency()), "output_format": (int, job.get_output_format()), "last_run": (str, convert_datestruct_to_datetext(job.get_last_run())), "id": (int, job.get_id()), "save_button": (str, ""), "cancel_button": (str, ""), "edit_queries_button": (str, "") }) language = argd["ln"] # load the right message language _ = gettext_set_language(language) self._check_user_credentials(req, language) user_id = self._get_user_id(req) job_id = argd["id"] job = Job(job_id = job_id, name = argd["job_name"], frequency = argd["job_frequency"], output_format = argd["output_format"], last_run = convert_datetext_to_datestruct(argd["last_run"]), output_directory = argd["output_directory"]) try: if argd["cancel_button"]: self._redirect_to_page(req, self._JOBS_URL, language) elif argd["save_button"]: perform_request_save_job(job = job, user_id = user_id, language = language) self._redirect_to_page(req, self._JOBS_URL, language) elif argd["edit_queries_button"]: result_job_id = perform_request_save_job(job = job, user_id = user_id, language = language) edit_queries_url = "%s?job_id=%s" % (self._JOB_QUERIES_URL, result_job_id) self._redirect_to_page(req, edit_queries_url, language) elif Job.ID_MISSING == job_id: title = _("New Export Job") body = perform_request_new_job(language = language) else: title = _("Edit Export Job") body = perform_request_edit_job(job_id = job_id, user_id = user_id, language = language) except AccessDeniedError: self._redirect_to_not_authorised_page(req, language) return page(title = title, metaheaderadd=get_css(), body = body, req = req, navmenuid = "fieldexporter", titleprologue = get_navigation_menu(language), navtrail = self._NAVTRAIL_EXPORT, language = language) def job_queries(self, req, form): """Allows edition and manipulations of the queries of a job""" argd = wash_urlargd(form, { "new_button": (str, ""), "run_button": (str, ""), "delete_button": (str, ""), "selected_queries": (list, ""), "job_id": (int, -1) }) # load the right message language language = argd["ln"] _ = gettext_set_language(language) self._check_user_credentials(req, language) user_id = self._get_user_id(req) job_id = argd["job_id"] try: # if the form is submitted through some of the buttons # we 
should perform the appropriate action if argd["new_button"]: new_query_url = "%s?job_id=%s" % (self._EDIT_QUERY_URL, job_id) self._redirect_to_page(req, new_query_url, language) if argd["delete_button"]: query_ids = argd["selected_queries"] perform_request_delete_queries(query_ids = query_ids, user_id = user_id, language = language) if argd["run_button"]: title = _("Query Results") query_ids = argd["selected_queries"] body = perform_request_run_queries(query_ids = query_ids, user_id = user_id, job_id = job_id, language = language) else: title = _("Export Job Queries") body = perform_request_job_queries(job_id = job_id, user_id = user_id, language = language) except AccessDeniedError: self._redirect_to_not_authorised_page(req, language) return page(title = title, metaheaderadd=get_css(), body = body, req = req, navmenuid = "fieldexporter", titleprologue = get_navigation_menu(language), navtrail = self._NAVTRAIL_EXPORT, language = language) def edit_query(self, req, form): """Edits an existing query or creates a new one""" # Create an empty job and use its default values # to init missing parameters query = Query() name = query.get_name() output_fields = ", ".join(query.get_output_fields()) search_criteria = query.get_search_criteria() comment = query.get_comment() query_id = query.get_id() argd = wash_urlargd(form, {"name": (str, name), "search_criteria": (str, search_criteria), "output_fields": (str, output_fields), "comment": (str, comment), "id": (int, query_id), "job_id": (int, Job.ID_MISSING), "save_button": (str, ""), "cancel_button": (str, "") }) # load the right message language language = argd["ln"] _ = gettext_set_language(language) self._check_user_credentials(req, language) user_id = self._get_user_id(req) query_id = argd["id"] job_id = argd["job_id"] current_job_queries_url = "%s?job_id=%s" %(self._JOB_QUERIES_URL, job_id) try: if argd["cancel_button"]: self._redirect_to_page(req, current_job_queries_url, language) elif argd["save_button"]: name = argd["name"] search_criteria = argd["search_criteria"] comment = argd["comment"] # split the text entered by user to different fields outoutput_fields_text = argd["output_fields"] re_split_pattern = re.compile(r',\s*') output_fields = re_split_pattern.split(outoutput_fields_text) query = Query(query_id, name, search_criteria, comment, output_fields) perform_request_save_query(query = query, job_id = job_id, user_id = user_id, language = language) self._redirect_to_page(req, current_job_queries_url, language) elif Query.ID_MISSING == query_id: title = _("New Query") body = perform_request_new_query(job_id = job_id, user_id = user_id, language = language) else: title = _("Edit Query") body = perform_request_edit_query(query_id = query_id, job_id = job_id, user_id = user_id, language = language) except AccessDeniedError: self._redirect_to_not_authorised_page(req, language) return page(title = title, metaheaderadd=get_css(), body = body, req = req, navmenuid = "fieldexporter", titleprologue = get_navigation_menu(language), navtrail = self._NAVTRAIL_EXPORT, language = language) def job_results(self, req, form): """Displays information about the results of a job""" argd = wash_urlargd(form, { "result_id": (int, -1) }) # load the right message language language = argd["ln"] _ = gettext_set_language(language) self._check_user_credentials(req, language) user_id = self._get_user_id(req) job_result_id = argd["result_id"] title = _("Export Job Results") try: body = perform_request_job_results(job_result_id = job_result_id, user_id = user_id, 
language = language) except AccessDeniedError: self._redirect_to_not_authorised_page(req, language) return page(title = title, metaheaderadd = get_css(), body = body, req = req, navmenuid = "fieldexporter", titleprologue = get_navigation_menu(language), navtrail = self._NAVTRAIL_EXPORT, language = language) def display_job_result(self, req, form): """Displays the results of a job""" argd = wash_urlargd(form, { "result_id": (int, JobResult.ID_MISSING), "output_format" : (int, Job.OUTPUT_FORMAT_MISSING) }) # load the right message language language = argd["ln"] _ = gettext_set_language(language) self._check_user_credentials(req, language) user_id = self._get_user_id(req) job_result_id = argd["result_id"] output_format = argd["output_format"] title = _("Export Job Result") try: body = perform_request_display_job_result(job_result_id = job_result_id, output_format = output_format, user_id = user_id, language = language) except AccessDeniedError: self._redirect_to_not_authorised_page(req, language) return page(title = title, metaheaderadd = get_css(), body = body, req = req, navmenuid = "fieldexporter", titleprologue = get_navigation_menu(language), navtrail = self._NAVTRAIL_EXPORT, language = language) def download_job_result(self, req, form): """Returns to the browser zip file containing the job result""" argd = wash_urlargd(form, { "result_id" : (int, JobResult.ID_MISSING), "output_format" : (int, Job.OUTPUT_FORMAT_MISSING) }) # load the right message language language = argd["ln"] job_result_id = argd["result_id"] output_format = argd["output_format"] user_id = self._get_user_id(req) _ = gettext_set_language(language) self._check_user_credentials(req, language) title = _("Export Job Result") try: perform_request_download_job_result(req = req, job_result_id = job_result_id, output_format = output_format, user_id = user_id, language = language) except AccessDeniedError: self._redirect_to_not_authorised_page(req, language) def history(self, req, form): """Displays history of the jobs""" argd = wash_urlargd(form, {}) # load the right message language language = argd["ln"] _ = gettext_set_language(language) self._check_user_credentials(req, language) title = _("Export Job History") user_id = self._get_user_id(req) body = perform_request_job_history(user_id, language) return page(title = title, metaheaderadd = get_css(), body = body, req = req, navmenuid = "fieldexporter", titleprologue = get_navigation_menu(language), navtrail = self._NAVTRAIL_EXPORT, language = language) def not_authorized(self, req, form): """Displays page telling the user that he is not authorised to access the resource""" argd = wash_urlargd(form, {}) # load the right message language language = argd["ln"] _ = gettext_set_language(language) text = _("You are not authorised to access this resource.") return page_not_authorized(req = req, ln = language, text = text) def _check_user_credentials(self, req, language): """Check if the user is allowed to use field exporter @param req: request as received from apache @param language: the language of the page """ user_info = collect_user_info(req) #redirect guests to login page if "1" == user_info["guest"]: referer_url = "%s?ln=%s" % (self._EXPORT_URL, language) redirect_url = "%s?ln=%s&referer=%s" % (self._LOGIN_URL, language, referer_url) redirect_to_url(req, redirect_url) #redirect unauthorized user to not_authorized page (auth_code, auth_msg) = acc_authorize_action(user_info, 'cfgbibexport') if 0 != auth_code: self._redirect_to_not_authorised_page(req, language) def 
_redirect_to_not_authorised_page(self, req, language): """Redirects user to page telling him that he is not authorised to do something @param req: request as received from apache @param language: the language of the page """ self._redirect_to_page(req, self._NOT_AUTHORIZED_URL, language) def _redirect_to_page(self, req, url, language): """Redirects user to a page with the given URL and language. @param req: request as received from apache @param language: the language of the page @param url: url to redirect to """ # check which symbol to use for appending the parameters # if this is the only parameter use ? if -1 == url.find("?"): append_symbol = "?" # if there are other parameters already appended, use & else: append_symbol = "&" redirect_url = "%s%sln=%s" % (url, append_symbol, language) redirect_to_url(req, redirect_url) def _get_user_id(self, req): """Return the identifier of the currently loged user. @param req: request as received from apache @return: identifier of currently logged user """ user_info = collect_user_info(req) user_id = user_info['uid'] return user_id<|fim▁end|>
# load the right message language language = argd["ln"] _ = gettext_set_language(language)
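The `?`-versus-`&` decision made in `_redirect_to_page` near the bottom of this file is self-contained enough to state on its own; a sketch with an invented helper name:

def append_language_param(url, language):
    # First query parameter gets "?", any later one gets "&",
    # matching the url.find("?") check in _redirect_to_page.
    append_symbol = "?" if "?" not in url else "&"
    return "%s%sln=%s" % (url, append_symbol, language)

assert append_language_param("/exporter/jobs", "en") == "/exporter/jobs?ln=en"
assert (append_language_param("/exporter/job_queries?job_id=7", "en")
        == "/exporter/job_queries?job_id=7&ln=en")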
<|file_name|>removed-syntax-with-1.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at<|fim▁hole|>// // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-flags: -Z parse-only fn removed_with() { struct S { foo: (), bar: (), } let a = S { foo: (), bar: () }; let b = S { foo: () with a }; //~^ ERROR expected one of `,`, `.`, `?`, `}`, or an operator, found `with` }<|fim▁end|>
// http://rust-lang.org/COPYRIGHT.
<|file_name|>contextlib.py<|end_file_name|><|fim▁begin|># Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: contextlib import sys from functools import wraps from warnings import warn __all__ = ['contextmanager', 'nested', 'closing'] class GeneratorContextManager(object): def __init__(self, gen): self.gen = gen def __enter__(self): try: return self.gen.next() except StopIteration: raise RuntimeError("generator didn't yield") def __exit__(self, type, value, traceback): if type is None: try: self.gen.next() except StopIteration: return raise RuntimeError("generator didn't stop") else: if value is None: value = type() try: self.gen.throw(type, value, traceback) raise RuntimeError("generator didn't stop after throw()") except StopIteration as exc: return exc is not value except: if sys.exc_info()[1] is not value: raise return def contextmanager(func): @wraps(func) def helper(*args, **kwds): return GeneratorContextManager(func(*args, **kwds)) <|fim▁hole|> @contextmanager def nested(*managers): warn('With-statements now directly support multiple context managers', DeprecationWarning, 3) exits = [] vars = [] exc = (None, None, None) try: for mgr in managers: exit = mgr.__exit__ enter = mgr.__enter__ vars.append(enter()) exits.append(exit) yield vars except: exc = sys.exc_info() finally: while exits: exit = exits.pop() try: if exit(*exc): exc = (None, None, None) except: exc = sys.exc_info() if exc != (None, None, None): raise exc[0], exc[1], exc[2] return class closing(object): def __init__(self, thing): self.thing = thing def __enter__(self): return self.thing def __exit__(self, *exc_info): self.thing.close()<|fim▁end|>
return helper
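For reference, the decorator defined above turns a generator into a context manager via GeneratorContextManager; a small usage sketch in the classic PEP 343 style, assuming the contextmanager from this module is in scope:

@contextmanager
def opened(path):
    f = open(path)
    try:
        yield f      # the value bound by "with ... as f"
    finally:
        f.close()    # runs on normal exit and when the body raises

# with opened("data.txt") as f:
#     for line in f:
#         handle(line)   # handle() is a placeholder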
<|file_name|>htmlobjectelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::attr::Attr; use dom::bindings::cell::DOMRefCell; use dom::bindings::codegen::Bindings::HTMLObjectElementBinding; use dom::bindings::codegen::Bindings::HTMLObjectElementBinding::HTMLObjectElementMethods; use dom::bindings::codegen::InheritTypes::HTMLObjectElementDerived; use dom::bindings::codegen::InheritTypes::{ElementCast, HTMLElementCast}; use dom::bindings::js::Root; use dom::document::Document; use dom::element::{AttributeMutation, ElementTypeId}; use dom::eventtarget::{EventTarget, EventTargetTypeId}; use dom::htmlelement::{HTMLElement, HTMLElementTypeId}; use dom::htmlformelement::{FormControl, HTMLFormElement}; use dom::node::{Node, NodeTypeId, window_from_node}; use dom::validitystate::ValidityState; use dom::virtualmethods::VirtualMethods; use net_traits::image::base::Image; use std::sync::Arc; use util::str::DOMString; #[dom_struct] pub struct HTMLObjectElement { htmlelement: HTMLElement, image: DOMRefCell<Option<Arc<Image>>>, } impl HTMLObjectElementDerived for EventTarget { fn is_htmlobjectelement(&self) -> bool { *self.type_id() == EventTargetTypeId::Node( NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLObjectElement))) } } impl HTMLObjectElement { fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document) -> HTMLObjectElement { HTMLObjectElement { htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLObjectElement, localName, prefix, document), image: DOMRefCell::new(None), } } #[allow(unrooted_must_root)] pub fn new(localName: DOMString, prefix: Option<DOMString>, document: &Document) -> Root<HTMLObjectElement> {<|fim▁hole|> let element = HTMLObjectElement::new_inherited(localName, prefix, document); Node::reflect_node(box element, document, HTMLObjectElementBinding::Wrap) } } trait ProcessDataURL { fn process_data_url(&self); } impl<'a> ProcessDataURL for &'a HTMLObjectElement { // Makes the local `data` member match the status of the `data` attribute and starts /// prefetching the image. This method must be called after `data` is changed. fn process_data_url(&self) { let elem = ElementCast::from_ref(*self); // TODO: support other values match (elem.get_attribute(&ns!(""), &atom!("type")), elem.get_attribute(&ns!(""), &atom!("data"))) { (None, Some(_uri)) => { // TODO(gw): Prefetch the image here. 
} _ => { } } } } pub fn is_image_data(uri: &str) -> bool { static TYPES: &'static [&'static str] = &["data:image/png", "data:image/gif", "data:image/jpeg"]; TYPES.iter().any(|&type_| uri.starts_with(type_)) } impl HTMLObjectElementMethods for HTMLObjectElement { // https://html.spec.whatwg.org/multipage/#dom-cva-validity fn Validity(&self) -> Root<ValidityState> { let window = window_from_node(self); ValidityState::new(window.r()) } // https://html.spec.whatwg.org/multipage/#dom-object-type make_getter!(Type); // https://html.spec.whatwg.org/multipage/#dom-object-type make_setter!(SetType, "type"); // https://html.spec.whatwg.org/multipage#dom-fae-form fn GetForm(&self) -> Option<Root<HTMLFormElement>> { self.form_owner() } } impl VirtualMethods for HTMLObjectElement { fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> { let htmlelement: &HTMLElement = HTMLElementCast::from_ref(self); Some(htmlelement as &VirtualMethods) } fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) { self.super_type().unwrap().attribute_mutated(attr, mutation); match attr.local_name() { &atom!(data) => { if let AttributeMutation::Set(_) = mutation { self.process_data_url(); } }, _ => {}, } } } impl FormControl for HTMLObjectElement {}<|fim▁end|>
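`is_image_data` above is a plain prefix test against a short whitelist of data-URI types; the same check written out in Python for comparison (a sketch, not Servo code):

IMAGE_DATA_PREFIXES = ("data:image/png", "data:image/gif", "data:image/jpeg")

def is_image_data(uri):
    # str.startswith with a tuple mirrors TYPES.iter().any(|&t| uri.starts_with(t))
    return uri.startswith(IMAGE_DATA_PREFIXES)

assert is_image_data("data:image/png;base64,iVBORw0KGgo=")
assert not is_image_data("https://example.org/logo.png")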
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import os import sys def readable_dir(prospective_dir):<|fim▁hole|> returns true if all these three are the case :param prospective_dir: path to the directory""" if prospective_dir is not None: if not os.path.isdir(prospective_dir): raise Exception("readable_dir:{0} is not a valid path".format(prospective_dir)) if os.access(prospective_dir, os.R_OK): return prospective_dir else: raise Exception("readable_dir:{0} is not a readable dir".format(prospective_dir)) def find_plugins(plugin_dir): """Finds all python files in the specified path and imports them. This is needed, if we want to detect automatically, which datastore and parser we can apply :param plugin_dir: path to the plugin directory""" plugin_files = [x[:-3] for x in os.listdir(plugin_dir) if x.endswith(".py")] sys.path.insert(0, plugin_dir) for plugin in plugin_files: __import__(plugin) def get_immediate_subdirectories(a_dir): """ Helper method, which gets the **immediate** subdirectories of a path. Is helpful, if one want to create a parser, which looks if certain folders are there. :param a_dir: directory from which **immediate** subdirectories should be listed """ return [name for name in os.listdir(a_dir) if os.path.isdir(os.path.join(a_dir, name))]<|fim▁end|>
""" Function that checks if a path is a directory, if it exists and if it is accessible and only
<|file_name|>FuzzingDialog.py<|end_file_name|><|fim▁begin|>import math from PyQt5.QtCore import Qt, pyqtSlot from PyQt5.QtGui import QCloseEvent from PyQt5.QtWidgets import QDialog, QInputDialog from urh import settings from urh.models.FuzzingTableModel import FuzzingTableModel from urh.signalprocessing.ProtocoLabel import ProtocolLabel from urh.signalprocessing.ProtocolAnalyzerContainer import ProtocolAnalyzerContainer from urh.ui.ui_fuzzing import Ui_FuzzingDialog class FuzzingDialog(QDialog): def __init__(self, protocol: ProtocolAnalyzerContainer, label_index: int, msg_index: int, proto_view: int, parent=None): super().__init__(parent) self.ui = Ui_FuzzingDialog() self.ui.setupUi(self) self.setAttribute(Qt.WA_DeleteOnClose) self.setWindowFlags(Qt.Window) self.protocol = protocol msg_index = msg_index if msg_index != -1 else 0 self.ui.spinBoxFuzzMessage.setValue(msg_index + 1) self.ui.spinBoxFuzzMessage.setMinimum(1) self.ui.spinBoxFuzzMessage.setMaximum(self.protocol.num_messages) self.ui.comboBoxFuzzingLabel.addItems([l.name for l in self.message.message_type]) self.ui.comboBoxFuzzingLabel.setCurrentIndex(label_index) self.proto_view = proto_view self.fuzz_table_model = FuzzingTableModel(self.current_label, proto_view) self.fuzz_table_model.remove_duplicates = self.ui.chkBRemoveDuplicates.isChecked() self.ui.tblFuzzingValues.setModel(self.fuzz_table_model) self.fuzz_table_model.update() self.ui.spinBoxFuzzingStart.setValue(self.current_label_start + 1) self.ui.spinBoxFuzzingEnd.setValue(self.current_label_end) self.ui.spinBoxFuzzingStart.setMaximum(len(self.message_data)) self.ui.spinBoxFuzzingEnd.setMaximum(len(self.message_data)) self.update_message_data_string() self.ui.tblFuzzingValues.resize_me() self.create_connects() self.restoreGeometry(settings.read("{}/geometry".format(self.__class__.__name__), type=bytes)) @property def message(self): return self.protocol.messages[int(self.ui.spinBoxFuzzMessage.value() - 1)] @property def current_label_index(self): return self.ui.comboBoxFuzzingLabel.currentIndex() @property def current_label(self) -> ProtocolLabel: if len(self.message.message_type) == 0: return None cur_label = self.message.message_type[self.current_label_index].get_copy() self.message.message_type[self.current_label_index] = cur_label cur_label.fuzz_values = [fv for fv in cur_label.fuzz_values if fv] # Remove empty strings if len(cur_label.fuzz_values) == 0: cur_label.fuzz_values.append(self.message.plain_bits_str[cur_label.start:cur_label.end]) return cur_label @property def current_label_start(self): if self.current_label and self.message: return self.message.get_label_range(self.current_label, self.proto_view, False)[0] else: return -1 @property def current_label_end(self): if self.current_label and self.message: return self.message.get_label_range(self.current_label, self.proto_view, False)[1] else: return -1 @property def message_data(self): if self.proto_view == 0: return self.message.plain_bits_str elif self.proto_view == 1: return self.message.plain_hex_str elif self.proto_view == 2: return self.message.plain_ascii_str else: return None def create_connects(self): self.ui.spinBoxFuzzingStart.valueChanged.connect(self.on_fuzzing_start_changed) self.ui.spinBoxFuzzingEnd.valueChanged.connect(self.on_fuzzing_end_changed) self.ui.comboBoxFuzzingLabel.currentIndexChanged.connect(self.on_combo_box_fuzzing_label_current_index_changed) self.ui.btnRepeatValues.clicked.connect(self.on_btn_repeat_values_clicked) self.ui.btnAddRow.clicked.connect(self.on_btn_add_row_clicked) 
self.ui.btnDelRow.clicked.connect(self.on_btn_del_row_clicked) self.ui.tblFuzzingValues.deletion_wanted.connect(self.delete_lines) self.ui.chkBRemoveDuplicates.stateChanged.connect(self.on_remove_duplicates_state_changed) self.ui.sBAddRangeStart.valueChanged.connect(self.on_fuzzing_range_start_changed) self.ui.sBAddRangeEnd.valueChanged.connect(self.on_fuzzing_range_end_changed) self.ui.checkBoxLowerBound.stateChanged.connect(self.on_lower_bound_checked_changed) self.ui.checkBoxUpperBound.stateChanged.connect(self.on_upper_bound_checked_changed) self.ui.spinBoxLowerBound.valueChanged.connect(self.on_lower_bound_changed) self.ui.spinBoxUpperBound.valueChanged.connect(self.on_upper_bound_changed) self.ui.spinBoxRandomMinimum.valueChanged.connect(self.on_random_range_min_changed) self.ui.spinBoxRandomMaximum.valueChanged.connect(self.on_random_range_max_changed) self.ui.spinBoxFuzzMessage.valueChanged.connect(self.on_fuzz_msg_changed) self.ui.btnAddFuzzingValues.clicked.connect(self.on_btn_add_fuzzing_values_clicked) self.ui.comboBoxFuzzingLabel.editTextChanged.connect(self.set_current_label_name) def update_message_data_string(self): fuz_start = self.current_label_start fuz_end = self.current_label_end num_proto_bits = 10 num_fuz_bits = 16 proto_start = fuz_start - num_proto_bits preambel = "... " if proto_start <= 0: proto_start = 0 preambel = "" proto_end = fuz_end + num_proto_bits postambel = " ..." if proto_end >= len(self.message_data) - 1: proto_end = len(self.message_data) - 1 postambel = "" fuzamble = "" if fuz_end - fuz_start > num_fuz_bits: fuz_end = fuz_start + num_fuz_bits fuzamble = "..." self.ui.lPreBits.setText(preambel + self.message_data[proto_start:self.current_label_start]) self.ui.lFuzzedBits.setText(self.message_data[fuz_start:fuz_end] + fuzamble) self.ui.lPostBits.setText(self.message_data[self.current_label_end:proto_end] + postambel) self.set_add_spinboxes_maximum_on_label_change() def closeEvent(self, event: QCloseEvent): settings.write("{}/geometry".format(self.__class__.__name__), self.saveGeometry()) super().closeEvent(event) @pyqtSlot(int) def on_fuzzing_start_changed(self, value: int): self.ui.spinBoxFuzzingEnd.setMinimum(self.ui.spinBoxFuzzingStart.value()) new_start = self.message.convert_index(value - 1, self.proto_view, 0, False)[0] self.current_label.start = new_start self.current_label.fuzz_values[:] = [] self.update_message_data_string() self.fuzz_table_model.update() self.ui.tblFuzzingValues.resize_me() @pyqtSlot(int) def on_fuzzing_end_changed(self, value: int): self.ui.spinBoxFuzzingStart.setMaximum(self.ui.spinBoxFuzzingEnd.value()) new_end = self.message.convert_index(value - 1, self.proto_view, 0, False)[1] + 1 self.current_label.end = new_end self.current_label.fuzz_values[:] = [] self.update_message_data_string() self.fuzz_table_model.update() self.ui.tblFuzzingValues.resize_me() @pyqtSlot(int) def on_combo_box_fuzzing_label_current_index_changed(self, index: int): self.fuzz_table_model.fuzzing_label = self.current_label self.fuzz_table_model.update() self.update_message_data_string() self.ui.tblFuzzingValues.resize_me() self.ui.spinBoxFuzzingStart.blockSignals(True) self.ui.spinBoxFuzzingStart.setValue(self.current_label_start + 1) self.ui.spinBoxFuzzingStart.blockSignals(False) self.ui.spinBoxFuzzingEnd.blockSignals(True) self.ui.spinBoxFuzzingEnd.setValue(self.current_label_end) self.ui.spinBoxFuzzingEnd.blockSignals(False) @pyqtSlot() def on_btn_add_row_clicked(self): self.current_label.add_fuzz_value() self.fuzz_table_model.update() 
@pyqtSlot() def on_btn_del_row_clicked(self): min_row, max_row, _, _ = self.ui.tblFuzzingValues.selection_range() self.delete_lines(min_row, max_row) @pyqtSlot(int, int) def delete_lines(self, min_row, max_row): if min_row == -1: self.current_label.fuzz_values = self.current_label.fuzz_values[:-1] else: self.current_label.fuzz_values = self.current_label.fuzz_values[:min_row] + self.current_label.fuzz_values[ max_row + 1:] _ = self.current_label # if user deleted all, this will restore a fuzz value self.fuzz_table_model.update() @pyqtSlot() def on_remove_duplicates_state_changed(self): self.fuzz_table_model.remove_duplicates = self.ui.chkBRemoveDuplicates.isChecked() self.fuzz_table_model.update() self.remove_duplicates() @pyqtSlot() def set_add_spinboxes_maximum_on_label_change(self): nbits = self.current_label.end - self.current_label.start # Use Bit Start/End for maximum calc. if nbits >= 32: nbits = 31 max_val = 2 ** nbits - 1 self.ui.sBAddRangeStart.setMaximum(max_val - 1) self.ui.sBAddRangeEnd.setMaximum(max_val) self.ui.sBAddRangeEnd.setValue(max_val) self.ui.sBAddRangeStep.setMaximum(max_val) self.ui.spinBoxLowerBound.setMaximum(max_val - 1) self.ui.spinBoxUpperBound.setMaximum(max_val) self.ui.spinBoxUpperBound.setValue(max_val) self.ui.spinBoxBoundaryNumber.setMaximum(int(max_val / 2) + 1) self.ui.spinBoxRandomMinimum.setMaximum(max_val - 1) self.ui.spinBoxRandomMaximum.setMaximum(max_val) self.ui.spinBoxRandomMaximum.setValue(max_val) @pyqtSlot(int) def on_fuzzing_range_start_changed(self, value: int): self.ui.sBAddRangeEnd.setMinimum(value) self.ui.sBAddRangeStep.setMaximum(self.ui.sBAddRangeEnd.value() - value) @pyqtSlot(int) def on_fuzzing_range_end_changed(self, value: int): self.ui.sBAddRangeStart.setMaximum(value - 1) self.ui.sBAddRangeStep.setMaximum(value - self.ui.sBAddRangeStart.value()) @pyqtSlot() def on_lower_bound_checked_changed(self): if self.ui.checkBoxLowerBound.isChecked(): self.ui.spinBoxLowerBound.setEnabled(True) self.ui.spinBoxBoundaryNumber.setEnabled(True) elif not self.ui.checkBoxUpperBound.isChecked(): self.ui.spinBoxLowerBound.setEnabled(False) self.ui.spinBoxBoundaryNumber.setEnabled(False) else: self.ui.spinBoxLowerBound.setEnabled(False) @pyqtSlot() def on_upper_bound_checked_changed(self): if self.ui.checkBoxUpperBound.isChecked(): self.ui.spinBoxUpperBound.setEnabled(True) self.ui.spinBoxBoundaryNumber.setEnabled(True) elif not self.ui.checkBoxLowerBound.isChecked(): self.ui.spinBoxUpperBound.setEnabled(False) self.ui.spinBoxBoundaryNumber.setEnabled(False) else: self.ui.spinBoxUpperBound.setEnabled(False) @pyqtSlot() def on_lower_bound_changed(self): self.ui.spinBoxUpperBound.setMinimum(self.ui.spinBoxLowerBound.value()) self.ui.spinBoxBoundaryNumber.setMaximum(math.ceil((self.ui.spinBoxUpperBound.value() - self.ui.spinBoxLowerBound.value()) / 2)) @pyqtSlot() def on_upper_bound_changed(self): self.ui.spinBoxLowerBound.setMaximum(self.ui.spinBoxUpperBound.value() - 1) self.ui.spinBoxBoundaryNumber.setMaximum(math.ceil((self.ui.spinBoxUpperBound.value() - self.ui.spinBoxLowerBound.value()) / 2)) @pyqtSlot() def on_random_range_min_changed(self): self.ui.spinBoxRandomMaximum.setMinimum(self.ui.spinBoxRandomMinimum.value()) @pyqtSlot() def on_random_range_max_changed(self): self.ui.spinBoxRandomMinimum.setMaximum(self.ui.spinBoxRandomMaximum.value() - 1) @pyqtSlot() def on_btn_add_fuzzing_values_clicked(self): if self.ui.comboBoxStrategy.currentIndex() == 0: self.__add_fuzzing_range() elif self.ui.comboBoxStrategy.currentIndex() == 1: 
self.__add_fuzzing_boundaries() elif self.ui.comboBoxStrategy.currentIndex() == 2: self.__add_random_fuzzing_values() def __add_fuzzing_range(self): start = self.ui.sBAddRangeStart.value() end = self.ui.sBAddRangeEnd.value() step = self.ui.sBAddRangeStep.value() self.fuzz_table_model.add_range(start, end + 1, step) def __add_fuzzing_boundaries(self): lower_bound = -1 if self.ui.spinBoxLowerBound.isEnabled(): lower_bound = self.ui.spinBoxLowerBound.value() upper_bound = -1 if self.ui.spinBoxUpperBound.isEnabled(): upper_bound = self.ui.spinBoxUpperBound.value() num_vals = self.ui.spinBoxBoundaryNumber.value() self.fuzz_table_model.add_boundaries(lower_bound, upper_bound, num_vals) def __add_random_fuzzing_values(self):<|fim▁hole|> maximum = self.ui.spinBoxRandomMaximum.value() self.fuzz_table_model.add_random(n, minimum, maximum) def remove_duplicates(self): if self.ui.chkBRemoveDuplicates.isChecked(): for lbl in self.message.message_type: seq = lbl.fuzz_values[:] seen = set() add_seen = seen.add lbl.fuzz_values = [l for l in seq if not (l in seen or add_seen(l))] @pyqtSlot() def set_current_label_name(self): self.current_label.name = self.ui.comboBoxFuzzingLabel.currentText() self.ui.comboBoxFuzzingLabel.setItemText(self.ui.comboBoxFuzzingLabel.currentIndex(), self.current_label.name) @pyqtSlot(int) def on_fuzz_msg_changed(self, index: int): self.ui.comboBoxFuzzingLabel.setDisabled(False) sel_label_ind = self.ui.comboBoxFuzzingLabel.currentIndex() self.ui.comboBoxFuzzingLabel.blockSignals(True) self.ui.comboBoxFuzzingLabel.clear() if len(self.message.message_type) == 0: self.ui.comboBoxFuzzingLabel.setDisabled(True) return self.ui.comboBoxFuzzingLabel.addItems([lbl.name for lbl in self.message.message_type]) self.ui.comboBoxFuzzingLabel.blockSignals(False) if sel_label_ind < self.ui.comboBoxFuzzingLabel.count(): self.ui.comboBoxFuzzingLabel.setCurrentIndex(sel_label_ind) else: self.ui.comboBoxFuzzingLabel.setCurrentIndex(0) self.fuzz_table_model.fuzzing_label = self.current_label self.fuzz_table_model.update() self.update_message_data_string() @pyqtSlot() def on_btn_repeat_values_clicked(self): num_repeats, ok = QInputDialog.getInt(self, self.tr("How many times shall values be repeated?"), self.tr("Number of repeats:"), 1, 1) if ok: self.ui.chkBRemoveDuplicates.setChecked(False) min_row, max_row, _, _ = self.ui.tblFuzzingValues.selection_range() if min_row == -1: start, end = 0, len(self.current_label.fuzz_values) else: start, end = min_row, max_row + 1 self.fuzz_table_model.repeat_fuzzing_values(start, end, num_repeats)<|fim▁end|>
n = self.ui.spinBoxNumberRandom.value() minimum = self.ui.spinBoxRandomMinimum.value()
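__add_fuzzing_range above hands start, end + 1 and step to the table model, so the end spinbox is inclusive; the resulting value sequence can be sketched independently of Qt (the helper and its bit-string formatting are invented for illustration):

def fuzz_range_values(start, end, step, num_bits):
    # end is inclusive, matching add_range(start, end + 1, step).
    fmt = "{:0%db}" % num_bits
    return [fmt.format(v) for v in range(start, end + 1, step)]

# A 4-bit label fuzzed from 0 to 15 in steps of 5:
assert fuzz_range_values(0, 15, 5, 4) == ["0000", "0101", "1010", "1111"]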
<|file_name|>core_test.go<|end_file_name|><|fim▁begin|><|fim▁hole|>import "testing" //totally trivial, I know func TestOperationList(t *testing.T) { var operations []Operation = operationList() if operationLength := len(operations); operationLength != 3 { t.Error("Expected 2 operations in list but got ", operationLength) } }<|fim▁end|>
package main
import "testing"

// totally trivial, I know
func TestOperationList(t *testing.T) {
	operations := operationList()
	if operationLength := len(operations); operationLength != 3 {
		t.Error("Expected 3 operations in list but got ", operationLength)
	}
}<|fim▁end|>
<|file_name|>GlassWidget.py<|end_file_name|><|fim▁begin|># Copyright 2006 James Tauber and contributors # Copyright (C) 2009 Luke Kenneth Casson Leighton <[email protected]> # Copyright (C) 2010 Serge Tarkovski <[email protected]> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import pyjd from pyjamas import DOM from pyjamas import Window from pyjamas import Factory from __pyjamas__ import JS, doc from pyjamas.ui.SimplePanel import SimplePanel from pyjamas.ui.Widget import Widget from pyjamas.ui.MouseListener import MouseHandler from pyjamas.ui.RootPanel import RootPanel mousecapturer = None def getMouseCapturer(**kwargs): global mousecapturer if mousecapturer is None: mousecapturer = GlassWidget(**kwargs) # if mousecapturer has been overloaded with something # other than a GlassWidget (as in IE override) # just return None elif not isinstance(mousecapturer, GlassWidget): return None return mousecapturer def show(mousetarget, **kwargs): global mousecapturer mc = getMouseCapturer(**kwargs) mc.mousetarget = mousetarget if isinstance(mousetarget, MouseHandler): mc.mousehandler = True mc.show() def hide(): global mousecapturer mousecapturer.hide() class GlassWidget(Widget, MouseHandler): def __init__(self, **kwargs): self.glassListeners = [] self.showing = False self.mousehandler = False if not 'StyleName' in kwargs: kwargs['StyleName'] = "gwt-GlassWidget" if 'Element' in kwargs: element = kwargs.pop('Element') else: element = DOM.createDiv() self.setElement(element) Widget.__init__(self, **kwargs) MouseHandler.__init__(self) self.setzIndex(1000000) self.addMouseListener(self) def addGlassListener(self, listener): self.glassListeners.append(listener) def hide(self, autoClosed=False): self.showing = False self.hideGlass() DOM.removeEventPreview(self) RootPanel().remove(self) self.onHideImpl(self.getElement()) DOM.releaseCapture(self.getElement()) for listener in self.glassListeners: if hasattr(listener, 'onGlassHide'): listener.onGlassHide(self, autoClosed) else: listener(self, autoClosed) def _event_targets_popup(self, event): target = DOM.eventGetTarget(event) return target and DOM.isOrHasChild(self.getElement(), target) def onEventPreview(self, event): etype = DOM.eventGetType(event) if etype == "mousedown" or etype == "blur": if DOM.getCaptureElement() is not None: return True elif etype == "mouseup" or etype == "click" or \ etype == "mousemove" or etype == "dblclick": if DOM.getCaptureElement() is not None: return True return self._event_targets_popup(event) def onHideImpl(self, popup): pass def onShowImpl(self, popup): pass def removeGlassListener(self, listener): self.glassListeners.remove(listener) def setGlassPosition(self): top = Window.getScrollTop() left = Window.getScrollLeft() height = Window.getClientHeight() width = Window.getClientWidth() el = self.getElement() DOM.setStyleAttribute(el, "position", "absolute") DOM.setStyleAttribute(el, "left", "%s" % left if left == 0 else "%spx" % left) DOM.setStyleAttribute(el, "top", "%s" % top if top == 0 else "%spx" % top) DOM.setStyleAttribute(el, 
"height", "%spx" % (top + height)) DOM.setStyleAttribute(el, "width", "%spx" % (left + width)) # under pyjd glasswidget cannot be transparent, # otherwise it drops the mousecapture, so we have # to give it a 1% opaque background color if pyjd.is_desktop: # pyjd uses IE style opacity DOM.setStyleAttribute(el, "filter", "alpha(opacity=1)") # this is the Moz form of transparency DOM.setStyleAttribute(el, "background", "rgba(255,255,255,0.1)") def showGlass(self):<|fim▁hole|> def hideGlass(self): Window.removeWindowResizeListener(self) doc().body.removeChild(self.getElement()) def onWindowResized(self, width, height): self.setGlassPosition() def show(self): if self.showing: return self.showing = True self.showGlass() DOM.addEventPreview(self) RootPanel().add(self) self.onShowImpl(self.getElement()) DOM.setCapture(self.getElement()) def adjustMousePos(self, x, y): x += self.getAbsoluteLeft() - self.mousetarget.getAbsoluteLeft() y += self.getAbsoluteTop() - self.mousetarget.getAbsoluteTop() return x, y def onMouseDown(self, sender, x, y): x, y = self.adjustMousePos(x, y) if self.mousehandler: self.mousetarget.onBrowserEvent(DOM.eventGetCurrentEvent()) else: self.mousetarget.onMouseDown(sender, x, y) def onMouseEnter(self, sender): self.mousetarget.onMouseGlassEnter(sender) def onMouseLeave(self, sender): self.mousetarget.onMouseGlassLeave(sender) def onMouseMove(self, sender, x, y): x, y = self.adjustMousePos(x, y) if self.mousehandler: self.mousetarget.onBrowserEvent(DOM.eventGetCurrentEvent()) else: self.mousetarget.onMouseMove(sender, x, y) def onMouseUp(self, sender, x, y): x, y = self.adjustMousePos(x, y) if self.mousehandler: self.mousetarget.onBrowserEvent(DOM.eventGetCurrentEvent()) else: self.mousetarget.onMouseUp(sender, x, y) Factory.registerClass('pyjamas.ui.GlassWidget', 'GlassWidget', GlassWidget)<|fim▁end|>
self.setGlassPosition()
        doc().body.appendChild(self.getElement())
        Window.addWindowResizeListener(self)
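Every row in this dump shares one fill-in-the-middle layout: a <|file_name|>...<|end_file_name|> header, then <|fim▁begin|>prefix<|fim▁hole|>suffix<|fim▁end|>, with the completion that fills the hole on the following line. A minimal sketch of splitting a row apart and rebuilding the original file; the function name and demo strings are illustrative, and the marker glyph is U+2581:

FIM_BEGIN = "<|fim\u2581begin|>"
FIM_HOLE = "<|fim\u2581hole|>"
FIM_END = "<|fim\u2581end|>"

def reassemble(prompt: str, completion: str) -> str:
    """Rebuild the original source file from one prompt/completion row."""
    body = prompt.split(FIM_BEGIN, 1)[1]      # drop the <|file_name|> header
    body = body.rsplit(FIM_END, 1)[0]         # drop the closing marker
    prefix, suffix = body.split(FIM_HOLE, 1)  # split at the hole
    return prefix + completion + suffix

demo = "<|file_name|>t.py<|end_file_name|><|fim\u2581begin|>a = 1\n<|fim\u2581hole|>\nc = 3<|fim\u2581end|>"
print(reassemble(demo, "b = 2"))  # prints a = 1, b = 2, c = 3, one per line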
<|file_name|>fps.py<|end_file_name|><|fim▁begin|># Copyright 2016 ARM Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # class FpsProcessor(object): """ Provides common object for processing surfaceFlinger output for frame statistics. This processor returns the four frame statistics below: :FPS: Frames Per Second. This is the frame rate of the workload. :frames: The total number of frames rendered during the execution of<|fim▁hole|> workload. Janks are sudden shifts in frame rate. They result in a "stuttery" UI. See http://jankfree.org/jank-busters-io :not_at_vsync: The number of frames that did not render in a single vsync cycle. """ def __init__(self, data, action=None): """ data - a pandas.DataFrame object with frame data (e.g. frames.csv) action - output metrics names with additional action specifier """ self.data = data self.action = action def process(self, refresh_period, drop_threshold): # pylint: disable=too-many-locals """ Generate frame per second (fps) and associated metrics for workload. refresh_period - the vsync interval drop_threshold - data points below this fps will be dropped """ fps = float('nan') frame_count, janks, not_at_vsync = 0, 0, 0 vsync_interval = refresh_period # fiter out bogus frames. bogus_frames_filter = self.data.actual_present_time != 0x7fffffffffffffff actual_present_times = self.data.actual_present_time[bogus_frames_filter] actual_present_time_deltas = actual_present_times - actual_present_times.shift() actual_present_time_deltas = actual_present_time_deltas.drop(0) vsyncs_to_compose = actual_present_time_deltas / vsync_interval vsyncs_to_compose.apply(lambda x: int(round(x, 0))) # drop values lower than drop_threshold FPS as real in-game frame # rate is unlikely to drop below that (except on loading screens # etc, which should not be factored in frame rate calculation). per_frame_fps = (1.0 / (vsyncs_to_compose * (vsync_interval / 1e9))) keep_filter = per_frame_fps > drop_threshold filtered_vsyncs_to_compose = vsyncs_to_compose[keep_filter] per_frame_fps.name = 'fps' if not filtered_vsyncs_to_compose.empty: total_vsyncs = filtered_vsyncs_to_compose.sum() frame_count = filtered_vsyncs_to_compose.size if total_vsyncs: fps = 1e9 * frame_count / (vsync_interval * total_vsyncs) janks = self._calc_janks(filtered_vsyncs_to_compose) not_at_vsync = self._calc_not_at_vsync(vsyncs_to_compose) metrics = (fps, frame_count, janks, not_at_vsync) return per_frame_fps, metrics @staticmethod def _calc_janks(filtered_vsyncs_to_compose): """ Internal method for calculating jank frames. """ pause_latency = 20 vtc_deltas = filtered_vsyncs_to_compose - filtered_vsyncs_to_compose.shift() vtc_deltas.index = range(0, vtc_deltas.size) vtc_deltas = vtc_deltas.drop(0).abs() janks = vtc_deltas.apply(lambda x: (pause_latency > x > 1.5) and 1 or 0).sum() return janks @staticmethod def _calc_not_at_vsync(vsyncs_to_compose): """ Internal method for calculating the number of frames that did not render in a single vsync cycle. 
""" epsilon = 0.0001 func = lambda x: (abs(x - 1.0) > epsilon) and 1 or 0 not_at_vsync = vsyncs_to_compose.apply(func).sum() return not_at_vsync<|fim▁end|>
the workload. :janks: The number of "janks" that occurred during execution of the
<|file_name|>client.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2017 Chef Software Inc. and/or applicable contributors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashMap; use std::io; use std::path::Path; use core::os::process::Pid; use ipc_channel::ipc::{IpcOneShotServer, IpcReceiver, IpcSender}; use protobuf; use protocol; use error::{Error, Result}; type Env = HashMap<String, String>; type IpcServer = IpcOneShotServer<Vec<u8>>; pub struct LauncherCli { tx: IpcSender<Vec<u8>>, rx: IpcReceiver<Vec<u8>>, } impl LauncherCli { pub fn connect(pipe: String) -> Result<Self> { let tx = IpcSender::connect(pipe).map_err(Error::Connect)?; let (ipc_srv, pipe) = IpcServer::new().map_err(Error::BadPipe)?; let mut cmd = protocol::Register::new(); cmd.set_pipe(pipe); Self::send(&tx, &cmd)?; let (rx, raw) = ipc_srv.accept().map_err(|_| Error::AcceptConn)?; Self::read::<protocol::NetOk>(&raw)?; Ok(LauncherCli { tx: tx, rx: rx }) } /// Read a launcher protocol message from a byte array fn read<T>(bytes: &[u8]) -> Result<T> where T: protobuf::MessageStatic, { let txn = protocol::NetTxn::from_bytes(bytes).map_err(Error::Deserialize)?; if txn.message_id() == "NetErr" { let err = txn.decode::<protocol::NetErr>() .map_err(Error::Deserialize)?; return Err(Error::Protocol(err)); } let msg = txn.decode::<T>().map_err(Error::Deserialize)?; Ok(msg) } /// Receive and read protocol message from an IpcReceiver fn recv<T>(rx: &IpcReceiver<Vec<u8>>) -> Result<T> where T: protobuf::MessageStatic, { match rx.recv() { Ok(bytes) => Self::read(&bytes), Err(err) => Err(Error::from(*err)), } } /// Send a command to a Launcher fn send<T>(tx: &IpcSender<Vec<u8>>, message: &T) -> Result<()> where T: protobuf::MessageStatic, { let txn = protocol::NetTxn::build(message).map_err(Error::Serialize)?; let bytes = txn.to_bytes().map_err(Error::Serialize)?; tx.send(bytes).map_err(Error::Send)?; Ok(()) } /// Receive and read protocol message from an IpcReceiver fn try_recv<T>(rx: &IpcReceiver<Vec<u8>>) -> Result<Option<T>> where T: protobuf::MessageStatic, { match rx.try_recv().map_err(|err| Error::from(*err)) { Ok(bytes) => { let msg = Self::read::<T>(&bytes)?; Ok(Some(msg)) } Err(Error::IPCIO(io::ErrorKind::WouldBlock)) => Ok(None), Err(err) => Err(err), } }<|fim▁hole|> Ok(Some(_)) | Err(Error::IPCIO(_)) => true, Ok(None) => false, Err(err) => panic!("Unexpected error checking for shutdown request, {}", err), } } /// Restart a running process with the same arguments pub fn restart(&self, pid: Pid) -> Result<Pid> { let mut msg = protocol::Restart::new(); msg.set_pid(pid.into()); Self::send(&self.tx, &msg)?; let reply = Self::recv::<protocol::SpawnOk>(&self.rx)?; Ok(reply.get_pid() as Pid) } /// Send a process spawn command to the connected Launcher /// /// `user` and `group` are string names, while `user_id` and /// `group_id` are numeric IDs. Newer versions of the Launcher can /// accept either, but prefer numeric IDs. 
pub fn spawn<I, B, U, G, P>( &self, id: I, bin: B, user: Option<U>, group: Option<G>, user_id: Option<u32>, group_id: Option<u32>, password: Option<P>, env: Env, ) -> Result<Pid> where I: ToString, B: AsRef<Path>, U: ToString, G: ToString, P: ToString, { let mut msg = protocol::Spawn::new(); msg.set_binary(bin.as_ref().to_path_buf().to_string_lossy().into_owned()); // On Windows, we only expect user to be Some. // // On Linux, we expect user_id and group_id to be Some, while // user and group may be either Some or None. Only the IDs are // used; names are only for backward compatibility with older // Launchers. if let Some(name) = user { msg.set_svc_user(name.to_string()); } if let Some(name) = group { msg.set_svc_group(name.to_string()); } if let Some(uid) = user_id { msg.set_svc_user_id(uid); } if let Some(gid) = group_id { msg.set_svc_group_id(gid); } // This is only for Windows if let Some(password) = password { msg.set_svc_password(password.to_string()); } msg.set_env(env); msg.set_id(id.to_string()); Self::send(&self.tx, &msg)?; let reply = Self::recv::<protocol::SpawnOk>(&self.rx)?; Ok(reply.get_pid() as Pid) } pub fn terminate(&self, pid: Pid) -> Result<i32> { let mut msg = protocol::Terminate::new(); msg.set_pid(pid.into()); Self::send(&self.tx, &msg)?; let reply = Self::recv::<protocol::TerminateOk>(&self.rx)?; Ok(reply.get_exit_code()) } }<|fim▁end|>
pub fn is_stopping(&self) -> bool {
        match Self::try_recv::<protocol::Shutdown>(&self.rx) {
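The client.rs row above wraps every exchange in one shape: serialize, send, receive, decode, and surface a NetErr payload as an error. A sketch of that read branch in Python, with json standing in for the row's protobuf NetTxn codec and ProtocolError as a hypothetical error type:

import json

class ProtocolError(Exception):
    """Raised when the peer answers with a NetErr payload."""

def read(raw: bytes) -> dict:
    txn = json.loads(raw)  # stand-in codec; the row decodes protobuf here
    if txn["message_id"] == "NetErr":
        raise ProtocolError(txn["payload"])
    return txn["payload"]

ok = read(b'{"message_id": "SpawnOk", "payload": {"pid": 42}}')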
<|file_name|>block_holder.py<|end_file_name|><|fim▁begin|>from biicode.common.model.cells import SimpleCell from biicode.common.model.dependency_set import DependencySet from biicode.common.model.brl.block_name import BlockName from biicode.common.model.symbolic.block_version_table import BlockVersionTable from biicode.common.model.blob import Blob from biicode.common.model.resource import Resource from biicode.common.model.content import Content from biicode.common.model.bii_type import TEXT from biicode.common.edition.bii_config import BiiConfig from biicode.common.model.symbolic.block_version import BlockVersion from biicode.common.exception import ConfigurationFileError, BiiException BIICODE_FILE = "biicode.conf" class BlockHolder(object): def __init__(self, block_name, resources): """ resources is iterable of resources or dict {CellName: Resource(Cell, Content)} """ assert isinstance(block_name, BlockName) self.block_name = block_name if isinstance(resources, dict): # we build the dict again to ensure the keys are CellName not BlockCellName self._resources = {r.cell_name: r for r in resources.itervalues()} else: self._resources = {r.cell_name: r for r in resources} self._simple_resources = None # iterable (list) of simple resources # configuration files self._config = None @property def config(self): if self._config is None: try: res = self._resources[BIICODE_FILE] content = res.content.load.bytes except KeyError: content = None try: self._config = BiiConfig(content) except ConfigurationFileError as e: raise ConfigurationFileError('%s/biicode.conf: Line %s' % (self.block_name, str(e))) return self._config @property<|fim▁hole|> def mains(self): return self.config.mains @property def tests(self): return self.config.tests @property def data(self): return self.config.data @property def paths(self): return self.config.paths @property def dependencies(self): return self.config.dependencies @property def requirements(self): return self.config.requirements @property def parent(self): if self.config.parent: if self.config.parent.block_name != self.block_name: raise BiiException("A block should have same BlockName as it's parent.\n" "%s's parent is %s" % (self.block_name, self.config.parent.block_name)) return self.config.parent return self.block_name.init_version() @property def includes(self): return self.config.includes @requirements.setter def requirements(self, block_version_table): assert isinstance(block_version_table, BlockVersionTable) self.config.requirements = block_version_table @parent.setter def parent(self, parent): """ Should be called only after publish and open """ assert isinstance(parent, BlockVersion) self.config.parent = parent def commit_config(self): ''' Returns: None if the config file didnt change. The config file Resource in case it was created or modified ''' new_content = self.config.dumps() if new_content: name = self.block_name + BIICODE_FILE new_res = Resource(SimpleCell(name, TEXT), Content(name, load=Blob(new_content), created=True)) self.add_resource(new_res) return new_res return None @property def cell_names(self): """ return CellNames """ return set(self._resources.keys()) @property def block_cell_names(self): """ return BlockCellNames """ return {self.block_name + name for name in self._resources} def __getitem__(self, key): return self._resources[key] @property def simple_resources(self): ''' Useful as most iterations are done over simple resources. 
If a block_name is given, the method returns only it's simple resources ''' if self._simple_resources is None: self._simple_resources = [x for x in self._resources.itervalues() if isinstance(x.cell, SimpleCell)] return self._simple_resources @property def resources(self): return self._resources def add_resource(self, resource): self._resources[resource.cell_name] = resource self._simple_resources = None def delete_resource(self, name): del self._resources[name] self._simple_resources = None def external_targets(self): '''return: a set(BlockCellNames) with cells not included''' return self._filter(lambda x, y: x != y) def internal_targets(self): '''return the internal targets as set(BlockCellNames) of dependencies of resources with "names" (NOT EXTERNAL) ''' return self._filter(lambda x, y: x == y) def _filter(self, compare): result = set() for resource in self.simple_resources: cell = resource.cell for target in cell.dependencies.targets: if compare(target.block_name, self.block_name): result.add(target) return result def unresolved(self): result = set() for cell, _ in self.simple_resources: result.update(cell.dependencies.unresolved) return result def translate_virtuals(self, block_cell_names): '''Handles pointing to virtual targets instead contained ones''' result = set() for block_cell_name in block_cell_names: assert block_cell_name.block_name == self.block_name, "%s != %s" % (block_cell_name, self.block_name) cell = self._resources[block_cell_name.cell_name].cell try: target = cell.container or cell.name except AttributeError: target = cell.name result.add(target) return result def deps(self, files=None): deps = DependencySet() for name, (cell, _) in self._resources.iteritems(): if files is None or self.block_name + name in files: if isinstance(cell, SimpleCell): deps.update(cell.dependencies) return deps<|fim▁end|>
<|file_name|>launcher.py<|end_file_name|><|fim▁begin|># # Copyright (C) 2014 # Sean Poyser ([email protected]) # # This Program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # # This Program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License<|fim▁hole|># along with XBMC; see the file COPYING. If not, write to # the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. # http://www.gnu.org/copyleft/gpl.html # import xbmc xbmc.executebuiltin('RunAddon(plugin.program.super.favourites)') xbmc.executebuiltin('Dialog.Close(all, true)')<|fim▁end|>
<|file_name|>rpath.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use driver::session; use metadata::cstore; use metadata::filesearch; use std::hashmap::HashSet; use std::os; use std::uint; use std::util; use std::vec; fn not_win32(os: session::os) -> bool { os != session::os_win32 } pub fn get_rpath_flags(sess: session::Session, out_filename: &Path) -> ~[~str] { let os = sess.targ_cfg.os; // No rpath on windows if os == session::os_win32 { return ~[]; } debug!("preparing the RPATH!"); let sysroot = sess.filesearch.sysroot(); let output = out_filename; let libs = cstore::get_used_crate_files(sess.cstore); // We don't currently rpath extern libraries, but we know // where rustrt is and we know every rust program needs it let libs = vec::append_one(libs, get_sysroot_absolute_rt_lib(sess)); let rpaths = get_rpaths(os, sysroot, output, libs, sess.opts.target_triple); rpaths_to_flags(rpaths) } fn get_sysroot_absolute_rt_lib(sess: session::Session) -> Path { let r = filesearch::relative_target_lib_path(sess.opts.target_triple); sess.filesearch.sysroot().push_rel(&r).push(os::dll_filename("rustrt")) } pub fn rpaths_to_flags(rpaths: &[Path]) -> ~[~str] { rpaths.iter().transform(|rpath| fmt!("-Wl,-rpath,%s",rpath.to_str())).collect() } fn get_rpaths(os: session::os, sysroot: &Path, output: &Path, libs: &[Path], target_triple: &str) -> ~[Path] { debug!("sysroot: %s", sysroot.to_str()); debug!("output: %s", output.to_str()); debug!("libs:"); for libs.iter().advance |libpath| { debug!(" %s", libpath.to_str()); } debug!("target_triple: %s", target_triple); // Use relative paths to the libraries. Binaries can be moved // as long as they maintain the relative relationship to the // crates they depend on. let rel_rpaths = get_rpaths_relative_to_output(os, output, libs); // Make backup absolute paths to the libraries. Binaries can // be moved as long as the crates they link against don't move. let abs_rpaths = get_absolute_rpaths(libs); // And a final backup rpath to the global library location. 
let fallback_rpaths = ~[get_install_prefix_rpath(target_triple)]; fn log_rpaths(desc: &str, rpaths: &[Path]) { debug!("%s rpaths:", desc); for rpaths.iter().advance |rpath| { debug!(" %s", rpath.to_str()); } } log_rpaths("relative", rel_rpaths); log_rpaths("absolute", abs_rpaths); log_rpaths("fallback", fallback_rpaths); let mut rpaths = rel_rpaths; rpaths.push_all(abs_rpaths); rpaths.push_all(fallback_rpaths); // Remove duplicates let rpaths = minimize_rpaths(rpaths); return rpaths; } fn get_rpaths_relative_to_output(os: session::os, output: &Path, libs: &[Path]) -> ~[Path] { libs.iter().transform(|a| get_rpath_relative_to_output(os, output, a)).collect() } pub fn get_rpath_relative_to_output(os: session::os, output: &Path, lib: &Path) -> Path { use std::os; assert!(not_win32(os)); // Mac doesn't appear to support $ORIGIN let prefix = match os { session::os_android | session::os_linux | session::os_freebsd => "$ORIGIN", session::os_macos => "@executable_path", session::os_win32 => util::unreachable() }; Path(prefix).push_rel(&get_relative_to(&os::make_absolute(output), &os::make_absolute(lib))) } // Find the relative path from one file to another pub fn get_relative_to(abs1: &Path, abs2: &Path) -> Path { assert!(abs1.is_absolute); assert!(abs2.is_absolute); let abs1 = abs1.normalize(); let abs2 = abs2.normalize(); debug!("finding relative path from %s to %s", abs1.to_str(), abs2.to_str()); let split1: &[~str] = abs1.components; let split2: &[~str] = abs2.components; let len1 = split1.len(); let len2 = split2.len(); assert!(len1 > 0); assert!(len2 > 0); let max_common_path = uint::min(len1, len2) - 1; let mut start_idx = 0; while start_idx < max_common_path && split1[start_idx] == split2[start_idx] { start_idx += 1; } let mut path = ~[]; for uint::range(start_idx, len1 - 1) |_i| { path.push(~".."); }; path.push_all(split2.slice(start_idx, len2 - 1)); return if !path.is_empty() { Path("").push_many(path) } else { Path(".") } } fn get_absolute_rpaths(libs: &[Path]) -> ~[Path] { libs.iter().transform(|a| get_absolute_rpath(a)).collect() } pub fn get_absolute_rpath(lib: &Path) -> Path { os::make_absolute(lib).dir_path() } pub fn get_install_prefix_rpath(target_triple: &str) -> Path { let install_prefix = env!("CFG_PREFIX"); if install_prefix == "" { fail!("rustc compiled without CFG_PREFIX environment variable"); } let tlib = filesearch::relative_target_lib_path(target_triple); os::make_absolute(&Path(install_prefix).push_rel(&tlib)) } pub fn minimize_rpaths(rpaths: &[Path]) -> ~[Path] { let mut set = HashSet::new(); let mut minimized = ~[]; for rpaths.iter().advance |rpath| { if set.insert(rpath.to_str()) { minimized.push(copy *rpath); } } minimized } #[cfg(unix, test)] mod test { use std::os; // FIXME(#2119): the outer attribute should be #[cfg(unix, test)], then // these redundant #[cfg(test)] blocks can be removed #[cfg(test)] #[cfg(test)] use back::rpath::{get_absolute_rpath, get_install_prefix_rpath}; use back::rpath::{get_relative_to, get_rpath_relative_to_output}; use back::rpath::{minimize_rpaths, rpaths_to_flags}; use driver::session; #[test] fn test_rpaths_to_flags() { let flags = rpaths_to_flags([Path("path1"), Path("path2")]); assert_eq!(flags, ~[~"-Wl,-rpath,path1", ~"-Wl,-rpath,path2"]); } #[test] fn test_prefix_rpath() { let res = get_install_prefix_rpath("triple"); let d = Path(env!("CFG_PREFIX")) .push_rel(&Path("lib/rustc/triple/lib")); debug!("test_prefix_path: %s vs. 
%s", res.to_str(), d.to_str()); assert!(res.to_str().ends_with(d.to_str())); } #[test] fn test_prefix_rpath_abs() { let res = get_install_prefix_rpath("triple"); assert!(res.is_absolute); } #[test] fn test_minimize1() { let res = minimize_rpaths([Path("rpath1"), Path("rpath2"), Path("rpath1")]); assert_eq!(res, ~[Path("rpath1"), Path("rpath2")]); } #[test] fn test_minimize2() { let res = minimize_rpaths([Path("1a"), Path("2"), Path("2"), Path("1a"), Path("4a"),Path("1a"), Path("2"), Path("3"), Path("4a"), Path("3")]); assert_eq!(res, ~[Path("1a"), Path("2"), Path("4a"), Path("3")]); } #[test] fn test_relative_to1() { let p1 = Path("/usr/bin/rustc"); let p2 = Path("/usr/lib/mylib"); let res = get_relative_to(&p1, &p2); assert_eq!(res, Path("../lib")); } #[test] fn test_relative_to2() { let p1 = Path("/usr/bin/rustc"); let p2 = Path("/usr/bin/../lib/mylib"); let res = get_relative_to(&p1, &p2); assert_eq!(res, Path("../lib")); } #[test] fn test_relative_to3() { let p1 = Path("/usr/bin/whatever/rustc"); let p2 = Path("/usr/lib/whatever/mylib"); let res = get_relative_to(&p1, &p2); assert_eq!(res, Path("../../lib/whatever")); } #[test] fn test_relative_to4() { let p1 = Path("/usr/bin/whatever/../rustc"); let p2 = Path("/usr/lib/whatever/mylib"); let res = get_relative_to(&p1, &p2); assert_eq!(res, Path("../lib/whatever")); } #[test] fn test_relative_to5() { let p1 = Path("/usr/bin/whatever/../rustc"); let p2 = Path("/usr/lib/whatever/../mylib"); let res = get_relative_to(&p1, &p2); assert_eq!(res, Path("../lib")); } #[test] fn test_relative_to6() { let p1 = Path("/1"); let p2 = Path("/2/3"); let res = get_relative_to(&p1, &p2); assert_eq!(res, Path("2")); } #[test] fn test_relative_to7() { let p1 = Path("/1/2"); let p2 = Path("/3"); let res = get_relative_to(&p1, &p2); assert_eq!(res, Path("..")); } #[test] fn test_relative_to8() { let p1 = Path("/home/brian/Dev/rust/build/").push_rel( &Path("stage2/lib/rustc/i686-unknown-linux-gnu/lib/librustc.so")); let p2 = Path("/home/brian/Dev/rust/build/stage2/bin/..").push_rel( &Path("lib/rustc/i686-unknown-linux-gnu/lib/libstd.so")); let res = get_relative_to(&p1, &p2); debug!("test_relative_tu8: %s vs. %s", res.to_str(), Path(".").to_str()); assert_eq!(res, Path(".")); } #[test] #[cfg(target_os = "linux")] #[cfg(target_os = "andorid")] fn test_rpath_relative() { let o = session::os_linux; let res = get_rpath_relative_to_output(o, &Path("bin/rustc"), &Path("lib/libstd.so")); assert_eq!(res.to_str(), ~"$ORIGIN/../lib"); } #[test] #[cfg(target_os = "freebsd")] fn test_rpath_relative() { let o = session::os_freebsd;<|fim▁hole|> assert_eq!(res.to_str(), ~"$ORIGIN/../lib"); } #[test] #[cfg(target_os = "macos")] fn test_rpath_relative() { // this is why refinements would be nice let o = session::os_macos; let res = get_rpath_relative_to_output(o, &Path("bin/rustc"), &Path("lib/libstd.so")); assert_eq!(res.to_str(), ~"@executable_path/../lib"); } #[test] fn test_get_absolute_rpath() { let res = get_absolute_rpath(&Path("lib/libstd.so")); debug!("test_get_absolute_rpath: %s vs. %s", res.to_str(), os::make_absolute(&Path("lib")).to_str()); assert_eq!(res, os::make_absolute(&Path("lib"))); } }<|fim▁end|>
let res = get_rpath_relative_to_output(o, &Path("bin/rustc"), &Path("lib/libstd.so"));
<|file_name|>runtests.py<|end_file_name|><|fim▁begin|>""" A standalone test runner script, configuring the minimum settings required for tests to execute. Re-use at your own risk: many Django applications will require different settings and/or templates to run their tests. """ import os import sys # Make sure the app is (at least temporarily) on the import path. APP_DIR = os.path.abspath(os.path.dirname(__file__)) sys.path.insert(0, APP_DIR) <|fim▁hole|> 'BASE_DIR': APP_DIR, 'INSTALLED_APPS': ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'registration', ), 'ROOT_URLCONF': 'registration.backends.default.urls', 'DATABASES': { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(APP_DIR, 'db.sqlite3'), }, }, 'MIDDLEWARE_CLASSES': ( 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', ), 'SITE_ID': 1, 'TEMPLATE_DIRS': ( os.path.join(APP_DIR, 'tests/templates'), ), } def run_tests(): # Making Django run this way is a two-step process. First, call # settings.configure() to give Django settings to work with: from django.conf import settings settings.configure(**SETTINGS_DICT) # Then, call django.setup() to initialize the application cache # and other bits: import django if hasattr(django, 'setup'): django.setup() # Now we instantiate a test runner... from django.test.utils import get_runner TestRunner = get_runner(settings) # And then we run tests and return the results. test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['registration.tests']) sys.exit(bool(failures))<|fim▁end|>
# Minimum settings required for the app's tests.
SETTINGS_DICT = {
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from nose.tools import (<|fim▁hole|> raises, ) from py3oauth2.utils import ( normalize_netloc, normalize_path, normalize_query, normalize_url, ) def test_normalize_url(): eq_(normalize_url('http://a/b/c/%7Bfoo%7D'), normalize_url('hTTP://a/./b/../b/%63/%7bfoo%7d')) @raises(ValueError) def test_normalize_url_unknown_scheme(): normalize_url('example://example.com/') @raises(ValueError) def test_normalize_url_fragment(): normalize_url('http://example.com/#foo') @raises(ValueError) def test_normalize_url_invalid_port(): normalize_url('https://example.com:1bb/#foo') def test_normalize_netloc(): eq_(normalize_netloc('eXamPLe.com', 80), 'example.com') eq_(normalize_netloc('user:[email protected]', 80), 'user:[email protected]') eq_(normalize_netloc('user:@example.com', 80), '[email protected]') eq_(normalize_netloc(':[email protected]', 80), ':[email protected]') eq_(normalize_netloc('example.com:443', 80), 'example.com:443') eq_(normalize_netloc('example.com:80', 80), 'example.com') eq_(normalize_netloc('example.com:', 80), 'example.com') def test_normalize_query(): eq_(normalize_query(''), '') eq_(normalize_query('b=c&a=b'), 'a=b&b=c') eq_(normalize_query('b&a=b'), 'a=b') eq_(normalize_query('b=&a=b'), 'a=b') eq_(normalize_query('b=%e3%81%84&a=%e3%81%82'), 'a=%E3%81%82&b=%E3%81%84') def test_normalize_path(): eq_(normalize_path(''), '/') eq_(normalize_path('//'), '/') eq_(normalize_path('/a//b'), '/a/b/') eq_(normalize_path('/a/./b'), '/a/b/') eq_(normalize_path('/a/foo/../b'), '/a/b/') eq_(normalize_path('/%e3%81%82%a%e3%81%84'), '/%E3%81%82%a%E3%81%84/') eq_(normalize_path('/%e3%81%82a%e3%81%84'), '/%E3%81%82a%E3%81%84/')<|fim▁end|>
eq_,
<|file_name|>myapp.py<|end_file_name|><|fim▁begin|># imports ## core <|fim▁hole|>import pprint import sys import StringIO ## 3rd party import cherrypy import requests ## local def full_path(*extra): return os.path.join(os.path.dirname(__file__), *extra) sys.path.insert(0, full_path()) import db logging.basicConfig() sorry = 'This is only for US Citizens. Sorry and thank you for your time.' class Root(object): @cherrypy.expose def index(self, tag): redirect_url = db.urls[tag] ip = cherrypy.request.headers['Remote-Addr'] request_url = 'http://ipinfo.io/{0}/country'.format(ip) r = requests.get(request_url) country = r.text.strip() if country == 'US': raise cherrypy.HTTPRedirect(redirect_url) else: return sorry<|fim▁end|>
import importlib
import logging
import os
<|file_name|>tests.py<|end_file_name|><|fim▁begin|># Copyright 2012, Julius Seporaitis # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import unittest import os import os.path import urllib2 import time sys.path.append('.') import s3iam from urlparse import urlparse class S3GrabberTest(unittest.TestCase): def test_example_sign(self): """Test with example data""" req = urllib2.Request("https://johnsmith.s3.amazonaws.com/photos/puppy.jpg") grabber = s3iam.S3Grabber("http://johnsmith.s3.amazonaws.com/", iamrole="s3access") grabber.access_key = "AKIAIOSFODNN7EXAMPLE" grabber.secret_key = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY" grabber.token = None request = grabber._request("photos/puppy.jpg") signature = grabber.sign(request, timeval=time.mktime(time.struct_time( tm_year=2007, tm_mon=3, tm_mday=27, tm_hour=19, tm_min=36, tm_sec=42))) self.assertEqual(signature, "bWq2s1WEIj+Ydj0vQ697zp+IXMU=")<|fim▁hole|><|fim▁end|>
if __name__ == '__main__':
    unittest.main()
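The s3iam test row above pins its expected signature to the request-signing example from Amazon's S3 REST authentication documentation (same bucket, object, date, and published example credentials). Assuming that documented string-to-sign layout, the asserted value can be reproduced directly:

import base64
import hmac
from hashlib import sha1

string_to_sign = ("GET\n\n\n"
                  "Tue, 27 Mar 2007 19:36:42 +0000\n"
                  "/johnsmith/photos/puppy.jpg")
secret = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"  # AWS's example key, not a real secret
digest = hmac.new(secret.encode(), string_to_sign.encode(), sha1).digest()
print(base64.b64encode(digest).decode())  # bWq2s1WEIj+Ydj0vQ697zp+IXMU=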
<|file_name|>generic.py<|end_file_name|><|fim▁begin|>from aioredis.util import wait_convert, wait_ok, _NOTSET, _ScanIter class GenericCommandsMixin: """Generic commands mixin. For commands details see: http://redis.io/commands/#generic """ def delete(self, key, *keys): """Delete a key.""" fut = self.execute(b'DEL', key, *keys) return wait_convert(fut, int) def dump(self, key): """Dump a key.""" return self.execute(b'DUMP', key) def exists(self, key, *keys): """Check if key(s) exists. .. versionchanged:: v0.2.9 Accept multiple keys; **return** type **changed** from bool to int. """ return self.execute(b'EXISTS', key, *keys) def expire(self, key, timeout): """Set a timeout on key. if timeout is float it will be multiplied by 1000 coerced to int and passed to `pexpire` method. Otherwise raises TypeError if timeout argument is not int. """ if isinstance(timeout, float): return self.pexpire(key, int(timeout * 1000)) if not isinstance(timeout, int): raise TypeError( "timeout argument must be int, not {!r}".format(timeout)) fut = self.execute(b'EXPIRE', key, timeout) return wait_convert(fut, bool) def expireat(self, key, timestamp): """Set expire timestamp on a key. if timeout is float it will be multiplied by 1000 coerced to int and passed to `pexpireat` method. Otherwise raises TypeError if timestamp argument is not int. """ if isinstance(timestamp, float): return self.pexpireat(key, int(timestamp * 1000)) if not isinstance(timestamp, int): raise TypeError("timestamp argument must be int, not {!r}" .format(timestamp)) fut = self.execute(b'EXPIREAT', key, timestamp) return wait_convert(fut, bool) def keys(self, pattern, *, encoding=_NOTSET): """Returns all keys matching pattern.""" return self.execute(b'KEYS', pattern, encoding=encoding) def migrate(self, host, port, key, dest_db, timeout, *, copy=False, replace=False): """Atomically transfer a key from a Redis instance to another one.""" if not isinstance(host, str): raise TypeError("host argument must be str") if not isinstance(timeout, int): raise TypeError("timeout argument must be int") if not isinstance(dest_db, int): raise TypeError("dest_db argument must be int") if not host: raise ValueError("Got empty host") if dest_db < 0: raise ValueError("dest_db must be greater equal 0") if timeout < 0: raise ValueError("timeout must be greater equal 0") flags = [] if copy: flags.append(b'COPY') if replace: flags.append(b'REPLACE') fut = self.execute(b'MIGRATE', host, port, key, dest_db, timeout, *flags) return wait_ok(fut) def migrate_keys(self, host, port, keys, dest_db, timeout, *, copy=False, replace=False): """Atomically transfer keys from one Redis instance to another one. Keys argument must be list/tuple of keys to migrate. 
""" if not isinstance(host, str): raise TypeError("host argument must be str") if not isinstance(timeout, int): raise TypeError("timeout argument must be int") if not isinstance(dest_db, int): raise TypeError("dest_db argument must be int") if not isinstance(keys, (list, tuple)): raise TypeError("keys argument must be list or tuple") if not host: raise ValueError("Got empty host") if dest_db < 0: raise ValueError("dest_db must be greater equal 0") if timeout < 0: raise ValueError("timeout must be greater equal 0") if not keys: raise ValueError("keys must not be empty") flags = [] if copy: flags.append(b'COPY') if replace: flags.append(b'REPLACE') flags.append(b'KEYS') flags.extend(keys) fut = self.execute(b'MIGRATE', host, port,<|fim▁hole|> return wait_ok(fut) def move(self, key, db): """Move key from currently selected database to specified destination. :raises TypeError: if db is not int :raises ValueError: if db is less than 0 """ if not isinstance(db, int): raise TypeError("db argument must be int, not {!r}".format(db)) if db < 0: raise ValueError("db argument must be not less than 0, {!r}" .format(db)) fut = self.execute(b'MOVE', key, db) return wait_convert(fut, bool) def object_refcount(self, key): """Returns the number of references of the value associated with the specified key (OBJECT REFCOUNT). """ return self.execute(b'OBJECT', b'REFCOUNT', key) def object_encoding(self, key): """Returns the kind of internal representation used in order to store the value associated with a key (OBJECT ENCODING). """ # TODO: set default encoding to 'utf-8' return self.execute(b'OBJECT', b'ENCODING', key) def object_idletime(self, key): """Returns the number of seconds since the object is not requested by read or write operations (OBJECT IDLETIME). """ return self.execute(b'OBJECT', b'IDLETIME', key) def persist(self, key): """Remove the existing timeout on key.""" fut = self.execute(b'PERSIST', key) return wait_convert(fut, bool) def pexpire(self, key, timeout): """Set a milliseconds timeout on key. :raises TypeError: if timeout is not int """ if not isinstance(timeout, int): raise TypeError("timeout argument must be int, not {!r}" .format(timeout)) fut = self.execute(b'PEXPIRE', key, timeout) return wait_convert(fut, bool) def pexpireat(self, key, timestamp): """Set expire timestamp on key, timestamp in milliseconds. :raises TypeError: if timeout is not int """ if not isinstance(timestamp, int): raise TypeError("timestamp argument must be int, not {!r}" .format(timestamp)) fut = self.execute(b'PEXPIREAT', key, timestamp) return wait_convert(fut, bool) def pttl(self, key): """Returns time-to-live for a key, in milliseconds. Special return values (starting with Redis 2.8): * command returns -2 if the key does not exist. * command returns -1 if the key exists but has no associated expire. """ # TODO: maybe convert negative values to: # -2 to None - no key # -1 to False - no expire return self.execute(b'PTTL', key) def randomkey(self, *, encoding=_NOTSET): """Return a random key from the currently selected database.""" return self.execute(b'RANDOMKEY', encoding=encoding) def rename(self, key, newkey): """Renames key to newkey. :raises ValueError: if key == newkey """ if key == newkey: raise ValueError("key and newkey are the same") fut = self.execute(b'RENAME', key, newkey) return wait_ok(fut) def renamenx(self, key, newkey): """Renames key to newkey only if newkey does not exist. 
:raises ValueError: if key == newkey """ if key == newkey: raise ValueError("key and newkey are the same") fut = self.execute(b'RENAMENX', key, newkey) return wait_convert(fut, bool) def restore(self, key, ttl, value): """Creates a key associated with a value that is obtained via DUMP.""" return self.execute(b'RESTORE', key, ttl, value) def scan(self, cursor=0, match=None, count=None): """Incrementally iterate the keys space. Usage example: >>> match = 'something*' >>> cur = b'0' >>> while cur: ... cur, keys = await redis.scan(cur, match=match) ... for key in keys: ... print('Matched:', key) """ args = [] if match is not None: args += [b'MATCH', match] if count is not None: args += [b'COUNT', count] fut = self.execute(b'SCAN', cursor, *args) return wait_convert(fut, lambda o: (int(o[0]), o[1])) def iscan(self, *, match=None, count=None): """Incrementally iterate the keys space using async for. Usage example: >>> async for key in redis.iscan(match='something*'): ... print('Matched:', key) """ return _ScanIter(lambda cur: self.scan(cur, match=match, count=count)) def sort(self, key, *get_patterns, by=None, offset=None, count=None, asc=None, alpha=False, store=None): """Sort the elements in a list, set or sorted set.""" args = [] if by is not None: args += [b'BY', by] if offset is not None and count is not None: args += [b'LIMIT', offset, count] if get_patterns: args += sum(([b'GET', pattern] for pattern in get_patterns), []) if asc is not None: args += [asc is True and b'ASC' or b'DESC'] if alpha: args += [b'ALPHA'] if store is not None: args += [b'STORE', store] return self.execute(b'SORT', key, *args) def touch(self, key, *keys): """Alters the last access time of a key(s). Returns the number of keys that were touched. """ return self.execute(b'TOUCH', key, *keys) def ttl(self, key): """Returns time-to-live for a key, in seconds. Special return values (starting with Redis 2.8): * command returns -2 if the key does not exist. * command returns -1 if the key exists but has no associated expire. """ # TODO: maybe convert negative values to: # -2 to None - no key # -1 to False - no expire return self.execute(b'TTL', key) def type(self, key): """Returns the string representation of the value's type stored at key. """ # NOTE: for non-existent keys TYPE returns b'none' return self.execute(b'TYPE', key) def unlink(self, key, *keys): """Delete a key asynchronously in another thread.""" return wait_convert(self.execute(b'UNLINK', key, *keys), int) def wait(self, numslaves, timeout): """Wait for the synchronous replication of all the write commands sent in the context of the current connection. """ return self.execute(b'WAIT', numslaves, timeout)<|fim▁end|>
"", dest_db, timeout, *flags)
<|file_name|>run.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Wrapper for `python -m` to make running tools simpler. A tool is defined as a python module with a __main__.py file. This latter file is run by the present script. In particular, allows gclient to change directories when running hooks for infra. """ assert __name__ == '__main__' <|fim▁hole|>import os import sys RUNPY_PATH = os.path.abspath(__file__) ROOT_PATH = os.path.dirname(RUNPY_PATH) ENV_PATH = os.path.join(ROOT_PATH, 'ENV') # Do not want to mess with sys.path, load the module directly. run_helper = imp.load_source( 'run_helper', os.path.join(ROOT_PATH, 'bootstrap', 'run_helper.py')) sys.exit(run_helper.run_py_main(sys.argv[1:], RUNPY_PATH, ENV_PATH, 'infra'))<|fim▁end|>
import imp
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>mock_sdb = base_decorator(sdb_backends)<|fim▁end|>
"""sdb module initialization; sets value for base decorator.""" from .models import sdb_backends from ..core.models import base_decorator
<|file_name|>fail-macro-fmt.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // error-pattern:failed at 'test-fail-fmt 42 rust' fn main() {<|fim▁hole|><|fim▁end|>
fail!("test-fail-fmt {} {}", 42i, "rust"); }
<|file_name|>StatefulComponent.stories.tsx<|end_file_name|><|fim▁begin|>import { BlueRain, BlueRainConsumer, withBlueRain } from '../../../src'; import React from 'react'; import storiesOf from '@blueeast/bluerain-storybook-addon'; storiesOf('StatefulComponent', module) .add('StatefulComponent children function prop', () => { const Story = withBlueRain((props: { bluerain: BlueRain }) => { const BR = props.bluerain; return ( <BR.Components.StatefulComponent data="foo"> {() => { return <BR.Components.Text>Render prop pattern</BR.Components.Text>; }} </BR.Components.StatefulComponent> ); }); return <Story />; }) .add('StatefulComponent children prop', () => { const Story = withBlueRain((props: { bluerain: BlueRain }) => { const BR = props.bluerain; return ( <BR.Components.StatefulComponent data="foo"> <BR.Components.Text>This this a child!</BR.Components.Text> </BR.Components.StatefulComponent> ); }); return <Story />; }) .add('StatefulComponent throw error', () => { const Story = withBlueRain((props: { bluerain: BlueRain }) => { const BR = props.bluerain; return ( <BR.Components.StatefulComponent data="foo"> {() => { throw new Error('Boom!'); }} </BR.Components.StatefulComponent> ); }); return <Story />; }) .add('StatefulComponent Component prop', () => { const Story = withBlueRain((props: { bluerain: BlueRain }) => { const BR = props.bluerain; const Comp = () => <BR.Components.Text>Hello</BR.Components.Text>; return ( <BR.Components.StatefulComponent data="foo" component={Comp} /> ); }); return <Story />; }) .add('Only loading Component', () => { const Story = withBlueRain((props: { bluerain: BlueRain }) => { const BR = props.bluerain; const LoadingState = BR.Components.get('LoadingState'); return <LoadingState />; }); return <Story />; }) .add('null data', () => ( <BlueRainConsumer> {(BR: BlueRain) => ( <BR.Components.StatefulComponent data={null}> <BR.Components.Text>This should never be displayed</BR.Components.Text> </BR.Components.StatefulComponent><|fim▁hole|> </BlueRainConsumer> )) .add('undefined data', () => ( <BlueRainConsumer> {(BR: BlueRain) => ( <BR.Components.StatefulComponent data={undefined}> <BR.Components.Text>This should never be displayed</BR.Components.Text> </BR.Components.StatefulComponent> )} </BlueRainConsumer> )) .add('empty array data', () => ( <BlueRainConsumer> {(BR: BlueRain) => ( <BR.Components.StatefulComponent data={[]}> <BR.Components.Text>This should never be displayed</BR.Components.Text> </BR.Components.StatefulComponent> )} </BlueRainConsumer> )) .add('Only EmptyState Component', () => { const Story = withBlueRain((props: { bluerain: BlueRain }) => { const BR = props.bluerain; const EmptyState = BR.Components.get('EmptyState'); return <EmptyState />; }); return <Story />; }) .add('Only ErrorState Component', () => { const Story = withBlueRain((props: { bluerain: BlueRain }) => { const BR = props.bluerain; const ErrorState = BR.Components.get('ErrorState'); return <ErrorState />; }); return <Story />; }); // storiesOf('ComponentState', module).add('Simple', () => <div>hello </div>);<|fim▁end|>
)}
<|file_name|>showdown.test.js<|end_file_name|><|fim▁begin|>/*global Showdown*/ describe('$showdown', function () { 'use strict'; beforeEach(module('pl.itcrowd.services')); it('should be possible to inject initialized $showdown converter', inject(function ($showdown)<|fim▁hole|> { expect($showdown).not.toBeUndefined(); })); it('should be instance of $showdown.converter', inject(function ($showdown) { expect($showdown instanceof Showdown.converter).toBeTruthy(); })); });<|fim▁end|>
<|file_name|>addressbook_send.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors // Licensed under the MIT License: // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. extern crate capnp; extern crate core; pub mod addressbook_capnp { include!(concat!(env!("OUT_DIR"), "/addressbook_capnp.rs")); } use capnp::message::{Builder, HeapAllocator, TypedReader}; use std::sync::mpsc; use std::thread; pub mod addressbook { use addressbook_capnp::{address_book, person}; use capnp::message::{Builder, HeapAllocator, TypedReader}; pub fn build_address_book() -> TypedReader<Builder<HeapAllocator>, address_book::Owned> { let mut message = Builder::new_default(); { let address_book = message.init_root::<address_book::Builder>(); let mut people = address_book.init_people(2); { let mut alice = people.reborrow().get(0); alice.set_id(123); alice.set_name("Alice"); alice.set_email("[email protected]"); { let mut alice_phones = alice.reborrow().init_phones(1); alice_phones.reborrow().get(0).set_number("555-1212"); alice_phones.reborrow().get(0).set_type(person::phone_number::Type::Mobile); } alice.get_employment().set_school("MIT"); } { let mut bob = people.get(1); bob.set_id(456); bob.set_name("Bob"); bob.set_email("[email protected]"); { let mut bob_phones = bob.reborrow().init_phones(2); bob_phones.reborrow().get(0).set_number("555-4567"); bob_phones.reborrow().get(0).set_type(person::phone_number::Type::Home); bob_phones.reborrow().get(1).set_number("555-7654"); bob_phones.reborrow().get(1).set_type(person::phone_number::Type::Work); } bob.get_employment().set_unemployed(()); } } // There are two ways to get a TypedReader from our `message`: // // Option 1: Go through the full process manually // message.into_reader().into_typed() // // Option 2: Use the "Into" trait defined on the builder // message.into() // // Option 3: Use the "From" trait defined on the builder TypedReader::from(message)<|fim▁hole|>pub fn main() { let book = addressbook::build_address_book(); let (tx_book, rx_book) = mpsc::channel::<TypedReader<Builder<HeapAllocator>, addressbook_capnp::address_book::Owned>>(); let (tx_id, rx_id) = mpsc::channel::<u32>(); thread::spawn(move || { let addressbook_reader = rx_book.recv().unwrap(); let addressbook = addressbook_reader.get().unwrap(); let first_person = addressbook.get_people().unwrap().get(0); let first_id = first_person.get_id(); tx_id.send(first_id) }); tx_book.send(book).unwrap(); let first_id = rx_id.recv().unwrap(); 
assert_eq!(first_id, 123); }<|fim▁end|>
} }
<|file_name|>InstanceCountNode.java<|end_file_name|><|fim▁begin|>/* * * Autopsy Forensic Browser * * Copyright 2018 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.commonfilesearch; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.DisplayableItemNode; import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor; import org.sleuthkit.autopsy.datamodel.NodeProperty; /** * Node used to indicate the number of matches found with the MD5 children of * this Node. */ final public class InstanceCountNode extends DisplayableItemNode { private static final Logger logger = Logger.getLogger(InstanceCountNode.class.getName()); final private int instanceCount; final private CommonAttributeValueList attributeValues; /** * Create a node with the given number of instances, and the given selection * of metadata. * * @param instanceCount * @param attributeValues */ @NbBundle.Messages({ "InstanceCountNode.displayName=Files with %s instances (%s)" }) public InstanceCountNode(int instanceCount, CommonAttributeValueList attributeValues) { super(Children.create(new CommonAttributeValueNodeFactory(attributeValues.getMetadataList()), true)); this.instanceCount = instanceCount; this.attributeValues = attributeValues; this.setDisplayName(String.format(Bundle.InstanceCountNode_displayName(), Integer.toString(instanceCount), attributeValues.getCommonAttributeListSize())); this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/fileset-icon-16.png"); //NON-NLS } /** * Number of matches found for each of the MD5 children. * * @return int match count */ int getInstanceCount() { return this.instanceCount; } /** * Refresh the node, by dynamically loading in the children when called, and * calling the CommonAttributeValueNodeFactory to generate nodes for the * children in attributeValues. */ public void refresh() { attributeValues.displayDelayedMetadata(); setChildren(Children.create(new CommonAttributeValueNodeFactory(attributeValues.getMetadataList()), true)); } /** * Get a list of metadata for the MD5s which are children of this object. 
* * @return List<Md5Metadata> */ CommonAttributeValueList getAttributeValues() { return this.attributeValues; } @Override<|fim▁hole|> } @Override public boolean isLeafTypeNode() { return false; } @Override public String getItemType() { return getClass().getName(); } @NbBundle.Messages({"InstanceCountNode.createSheet.noDescription= "}) @Override protected Sheet createSheet() { Sheet sheet = new Sheet(); Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES); if (sheetSet == null) { sheetSet = Sheet.createPropertiesSet(); sheet.put(sheetSet); } final String NO_DESCR = Bundle.InstanceCountNode_createSheet_noDescription(); sheetSet.put(new NodeProperty<>(Bundle.CommonFilesSearchResultsViewerTable_filesColLbl(), Bundle.CommonFilesSearchResultsViewerTable_filesColLbl(), NO_DESCR, "")); sheetSet.put(new NodeProperty<>(Bundle.CommonFilesSearchResultsViewerTable_instancesColLbl(), Bundle.CommonFilesSearchResultsViewerTable_instancesColLbl(), NO_DESCR, this.getInstanceCount())); sheetSet.put(new NodeProperty<>(Bundle.CommonFilesSearchResultsViewerTable_pathColLbl(), Bundle.CommonFilesSearchResultsViewerTable_pathColLbl(), NO_DESCR, "")); sheetSet.put(new NodeProperty<>(Bundle.CommonFilesSearchResultsViewerTable_caseColLbl1(), Bundle.CommonFilesSearchResultsViewerTable_caseColLbl1(), NO_DESCR, "")); sheetSet.put(new NodeProperty<>(Bundle.CommonFilesSearchResultsViewerTable_dataSourceColLbl(), Bundle.CommonFilesSearchResultsViewerTable_dataSourceColLbl(), NO_DESCR, "")); sheetSet.put(new NodeProperty<>(Bundle.CommonFilesSearchResultsViewerTable_hashsetHitsColLbl(), Bundle.CommonFilesSearchResultsViewerTable_hashsetHitsColLbl(), NO_DESCR, "")); sheetSet.put(new NodeProperty<>(Bundle.CommonFilesSearchResultsViewerTable_mimeTypeColLbl(), Bundle.CommonFilesSearchResultsViewerTable_mimeTypeColLbl(), NO_DESCR, "")); sheetSet.put(new NodeProperty<>(Bundle.CommonFilesSearchResultsViewerTable_tagsColLbl1(), Bundle.CommonFilesSearchResultsViewerTable_tagsColLbl1(), NO_DESCR, "")); return sheet; } /** * ChildFactory which builds CommonFileParentNodes from the * CommonAttributeValue metadata models. */ static class CommonAttributeValueNodeFactory extends ChildFactory<String> { /** * List of models, each of which is a parent node matching a single md5, * containing children FileNodes. */ // maps sting version of value to value Object (??) private final Map<String, CommonAttributeValue> metadata; CommonAttributeValueNodeFactory(List<CommonAttributeValue> attributeValues) { this.metadata = new HashMap<>(); Iterator<CommonAttributeValue> iterator = attributeValues.iterator(); while (iterator.hasNext()) { CommonAttributeValue attributeValue = iterator.next(); this.metadata.put(attributeValue.getValue(), attributeValue); } } @Override protected boolean createKeys(List<String> list) { // @@@ We should just use CommonAttributeValue as the key... list.addAll(this.metadata.keySet()); return true; } @Override protected Node createNodeForKey(String attributeValue) { CommonAttributeValue md5Metadata = this.metadata.get(attributeValue); return new CommonAttributeValueNode(md5Metadata); } } }<|fim▁end|>
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
        return visitor.visit(this);
<|file_name|>grade_school.py<|end_file_name|><|fim▁begin|>class School: def __init__(self): pass def add_student(self, name, grade): pass def roster(self): pass<|fim▁hole|> pass def added(self): pass<|fim▁end|>
def grade(self, grade_number):
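The grade_school.py row above is all stubs, so the target completion is open-ended. One plausible filling, assuming the usual semantics for this exercise (no duplicate students, rosters sorted by grade then name); only the method names come from the row itself:

class School:
    def __init__(self):
        self._grades = {}  # grade number -> sorted list of names
        self._added = []   # outcome of each add attempt

    def add_student(self, name, grade):
        ok = all(name not in names for names in self._grades.values())
        if ok:
            self._grades.setdefault(grade, []).append(name)
            self._grades[grade].sort()
        self._added.append(ok)

    def roster(self):
        return [n for g in sorted(self._grades) for n in self._grades[g]]

    def grade(self, grade_number):
        return list(self._grades.get(grade_number, []))

    def added(self):
        return list(self._added)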
<|file_name|>BaseCompilerTest.java<|end_file_name|><|fim▁begin|>/* * Copyright 2018 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.services.backend.compiler; import java.io.File; import java.io.Serializable; import org.junit.AfterClass; import org.junit.BeforeClass; import org.kie.workbench.common.services.backend.compiler.impl.WorkspaceCompilationInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.uberfire.java.nio.file.Files; import org.uberfire.java.nio.file.Path; import org.uberfire.java.nio.file.Paths; public class BaseCompilerTest implements Serializable { protected static Path tmpRoot; protected String mavenRepoPath; protected static Logger logger = LoggerFactory.getLogger(BaseCompilerTest.class); protected String alternateSettingsAbsPath; protected WorkspaceCompilationInfo info; protected AFCompiler compiler; @BeforeClass public static void setup() {<|fim▁hole|> System.setProperty("org.uberfire.nio.git.ssh.enabled", "false"); } public BaseCompilerTest(String prjName) { try { mavenRepoPath = TestUtilMaven.getMavenRepo(); tmpRoot = Files.createTempDirectory("repo"); alternateSettingsAbsPath = new File("src/test/settings.xml").getAbsolutePath(); Path tmp = Files.createDirectories(Paths.get(tmpRoot.toString(), "dummy")); TestUtil.copyTree(Paths.get(prjName), tmp); info = new WorkspaceCompilationInfo(Paths.get(tmp.toUri())); } catch (Exception e) { logger.error(e.getMessage()); } } @AfterClass public static void tearDown() { System.clearProperty("org.uberfire.nio.git.daemon.enabled"); System.clearProperty("org.uberfire.nio.git.ssh.enabled"); if (tmpRoot != null) { TestUtil.rm(tmpRoot.toFile()); } } }<|fim▁end|>
System.setProperty("org.uberfire.nio.git.daemon.enabled", "false");
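BaseCompilerTest.java shows a common integration-test scaffold: flip daemon flags in class setup, copy a fixture project into a fresh temp directory, and undo both in teardown. The same shape in Python's unittest, as a sketch; the flag name and fixture file are stand-ins, not part of the sample:

```python
import os, shutil, tempfile, unittest

class CompilerScaffoldTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        os.environ["GIT_DAEMON_ENABLED"] = "false"     # stand-in for System.setProperty
        cls.tmp_root = tempfile.mkdtemp(prefix="repo")
        dummy = os.path.join(cls.tmp_root, "dummy")    # per-run copy of the project
        os.makedirs(dummy)
        with open(os.path.join(dummy, "pom.xml"), "w") as fh:
            fh.write("<project/>")                     # minimal invented fixture

    @classmethod
    def tearDownClass(cls):
        os.environ.pop("GIT_DAEMON_ENABLED", None)     # mirrors clearProperty
        shutil.rmtree(cls.tmp_root, ignore_errors=True)

    def test_fixture_copied(self):
        self.assertTrue(os.path.exists(os.path.join(self.tmp_root, "dummy", "pom.xml")))

if __name__ == "__main__":
    unittest.main()
```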
<|file_name|>train.py<|end_file_name|><|fim▁begin|>import json, sys, glob, datetime, math, random, pickle, gzip import numpy as np import matplotlib.pyplot as plt import matplotlib.cm as cm import chainer from chainer import computational_graph as c from chainer import cuda import chainer.functions as F from chainer import optimizers class AutoEncoder: def __init__(self, n_units=64): self.n_units = n_units def load(self, train_x): self.N = len(train_x[0]) self.x_train = train_x # self.model = chainer.FunctionSet(encode=F.Linear(self.N, self.n_units), decode=F.Linear(self.n_units, self.N)) print("Network: encode({}-{}), decode({}-{})".format(self.N, self.n_units, self.n_units, self.N)) # self.optimizer = optimizers.Adam() self.optimizer.setup(self.model.collect_parameters()) def forward(self, x_data, train=True): x = chainer.Variable(x_data) t = chainer.Variable(x_data) h = F.relu(self.model.encode(x))<|fim▁hole|> def calc(self, n_epoch): for epoch in range(n_epoch): self.optimizer.zero_grads() loss, y = self.forward(self.x_train) loss.backward() self.optimizer.update() # print('epoch = {}, train mean loss={}'.format(epoch, loss.data)) def getY(self, test_x): self.test_x = test_x loss, y = self.forward(x_test, train=False) return y.data def getEncodeW(self): return self.model.encode.W def load_mnist(): with open('mnist.pkl', 'rb') as mnist_pickle: mnist = pickle.load(mnist_pickle) return mnist def save_mnist(s,l=28,prefix=""): n = len(s) print("exporting {} images.".format(n)) plt.clf() plt.figure(1) for i,bi in enumerate(s): plt.subplot(math.floor(n/6),6,i+1) bi = bi.reshape((l,l)) plt.imshow(bi, cmap=cm.Greys_r) #Needs to be in row,col order plt.axis('off') plt.savefig("output/{}.png".format(prefix)) if __name__=="__main__": rf = AutoEncoder(n_units=64) mnist = load_mnist() mnist['data'] = mnist['data'].astype(np.float32) mnist['data'] /= 255 x_train = mnist['data'][0:2000] x_test = mnist['data'][2000:2036] rf.load(x_train) save_mnist(x_test,prefix="test") for k in [1,9,90,400,1000,4000]: rf.calc(k) # epoch yy = rf.getY(x_test) ww = rf.getEncodeW() save_mnist(yy,prefix="ae-{}".format(k)) print("\ndone.")<|fim▁end|>
        y = F.relu(self.model.decode(h))
        return F.mean_squared_error(y, t), y

    def calc(self, n_epoch):
        for epoch in range(n_epoch):
            self.optimizer.zero_grads()
            loss, y = self.forward(self.x_train)
            loss.backward()
            self.optimizer.update()
            #
            print('epoch = {}, train mean loss={}'.format(epoch, loss.data))

    def getY(self, test_x):
        self.test_x = test_x
        loss, y = self.forward(self.test_x, train=False)  # evaluate the passed-in test set
        return y.data

    def getEncodeW(self):
        return self.model.encode.W

def load_mnist():
    with open('mnist.pkl', 'rb') as mnist_pickle:
        mnist = pickle.load(mnist_pickle)
    return mnist

def save_mnist(s,l=28,prefix=""):
    n = len(s)
    print("exporting {} images.".format(n))
    plt.clf()
    plt.figure(1)
    for i,bi in enumerate(s):
        plt.subplot(math.floor(n/6),6,i+1)
        bi = bi.reshape((l,l))
        plt.imshow(bi, cmap=cm.Greys_r) #Needs to be in row,col order
        plt.axis('off')
    plt.savefig("output/{}.png".format(prefix))

if __name__=="__main__":
    rf = AutoEncoder(n_units=64)
    mnist = load_mnist()
    mnist['data'] = mnist['data'].astype(np.float32)
    mnist['data'] /= 255
    x_train = mnist['data'][0:2000]
    x_test = mnist['data'][2000:2036]
    rf.load(x_train)
    save_mnist(x_test,prefix="test")
    for k in [1,9,90,400,1000,4000]:
        rf.calc(k) # epoch
        yy = rf.getY(x_test)
        ww = rf.getEncodeW()
        save_mnist(yy,prefix="ae-{}".format(k))
    print("\ndone.")<|fim▁end|>
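train.py is written against the long-deprecated chainer FunctionSet API, but the objective is plain: push each image through a 784→64 ReLU encoder and a 64→784 ReLU decoder, then minimize the mean squared reconstruction error. The same forward pass in bare NumPy, with random stand-in weights, just to show what `forward` computes:

```python
import numpy as np

rng = np.random.default_rng(0)
N, n_units = 784, 64                        # mirrors the 28x28 MNIST setup
W_enc = rng.normal(0.0, 0.05, (N, n_units))
W_dec = rng.normal(0.0, 0.05, (n_units, N))

def relu(a):
    return np.maximum(a, 0.0)

def forward(x):
    h = relu(x @ W_enc)                     # encode: N -> n_units
    y = relu(h @ W_dec)                     # decode: n_units -> N
    return np.mean((y - x) ** 2), y         # mean squared reconstruction error

x = rng.random((16, N))
loss, _ = forward(x)
print("reconstruction loss: %.4f" % loss)
```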
<|file_name|>routerrule_db.py<|end_file_name|><|fim▁begin|># Copyright 2013, Big Switch Networks # All Rights Reserved. #<|fim▁hole|># http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sqlalchemy as sa from sqlalchemy import orm from neutron.db import model_base class RouterRule(model_base.BASEV2): id = sa.Column(sa.Integer, primary_key=True) source = sa.Column(sa.String(64), nullable=False) destination = sa.Column(sa.String(64), nullable=False) nexthops = orm.relationship('NextHop', cascade='all,delete') action = sa.Column(sa.String(10), nullable=False) router_id = sa.Column(sa.String(36), sa.ForeignKey('routers.id', ondelete="CASCADE")) class NextHop(model_base.BASEV2): rule_id = sa.Column(sa.Integer, sa.ForeignKey('routerrules.id', ondelete="CASCADE"), primary_key=True) nexthop = sa.Column(sa.String(64), nullable=False, primary_key=True)<|fim▁end|>
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at #
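routerrule_db.py models a one-to-many (rule → next hops) where `cascade='all,delete'` on the ORM side and `ondelete="CASCADE"` on the foreign key both ensure hops are removed with their rule. A self-contained sketch of the same shape against in-memory SQLite, using plain declarative SQLAlchemy instead of Neutron's model_base:

```python
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

class RouterRule(Base):
    __tablename__ = "routerrules"
    id = Column(Integer, primary_key=True)
    action = Column(String(10), nullable=False)
    nexthops = relationship("NextHop", cascade="all,delete")

class NextHop(Base):
    __tablename__ = "nexthops"
    rule_id = Column(Integer,
                     ForeignKey("routerrules.id", ondelete="CASCADE"),
                     primary_key=True)
    nexthop = Column(String(64), primary_key=True)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    rule = RouterRule(action="permit", nexthops=[NextHop(nexthop="10.0.0.1")])
    session.add(rule)
    session.commit()
    session.delete(rule)                    # ORM cascade deletes the hop too
    session.commit()
    print(session.query(NextHop).count())   # 0
```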
<|file_name|>example_function_obj.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 <|fim▁hole|> def myFunc(): x = 1 y = 2 z = 'abc' # noqa return x + y print(myFunc.__name__) print(myFunc.__code__.co_varnames) print(myFunc.__code__.co_consts) print(myFunc.__code__.co_code) dis.disassemble(myFunc.__code__)<|fim▁end|>
import dis
<|file_name|>texture.rs<|end_file_name|><|fim▁begin|>// OpenAOE: An open source reimplementation of Age of Empires (1997) // Copyright (c) 2016 Kevin Fuller // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. use sdl2; pub struct Texture { pub width: u32, pub height: u32,<|fim▁hole|>} // TODO: Haven't quite figured out how to make a new method on Texture that is only exposed // to other members of the crate (but not outside of the crate) pub fn create_texture(sdl_texture: sdl2::render::Texture, width: u32, height: u32) -> Texture { Texture { width: width, height: height, texture: sdl_texture, } } // Separate so that it's not exported with the crate pub trait SdlTexture { fn sdl_texture<'a>(&'a self) -> &'a sdl2::render::Texture; } impl SdlTexture for Texture { fn sdl_texture<'a>(&'a self) -> &'a sdl2::render::Texture { &self.texture } }<|fim▁end|>
texture: sdl2::render::Texture,
<|file_name|>Test.java<|end_file_name|><|fim▁begin|>package com.yao.app.java.nio.pipe; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.Pipe; public class Test { <|fim▁hole|> Thread t2 = new Thread(new MessageInput(pipe)); t2.start(); Thread.sleep(1000); t1.start(); } catch (Exception e) { e.printStackTrace(); } } public static class MessageOutput implements Runnable { private Pipe pipe; public MessageOutput(Pipe pipe) { this.pipe = pipe; } @Override public void run() { try { String message = "hello world,libailugo"; ByteBuffer buf = ByteBuffer.wrap(message.getBytes()); Pipe.SinkChannel channel = pipe.sink(); int count = channel.write(buf); channel.close(); System.out.println("send message:" + message + ",length:" + count); } catch (IOException e) { e.printStackTrace(); } } } public static class MessageInput implements Runnable { private Pipe pipe; public MessageInput(Pipe pipe) { this.pipe = pipe; } @Override public void run() { try { Pipe.SourceChannel channel = pipe.source(); ByteBuffer buf = ByteBuffer.allocate(10); StringBuilder sb = new StringBuilder(); int count = channel.read(buf); while (count > 0) { // 此处会导致错误 // sb.append(new String(buf.array())); sb.append(new String(buf.array(), 0, count)); buf.clear(); count = channel.read(buf); } channel.close(); System.out.println("recieve message:" + sb.toString()); } catch (IOException e) { e.printStackTrace(); } } } }<|fim▁end|>
    public static void main(String[] args) {
        try {
            Pipe pipe = Pipe.open();
            Thread t1 = new Thread(new MessageOutput(pipe));
            Thread t2 = new Thread(new MessageInput(pipe));
            t2.start();
            Thread.sleep(1000);
            t1.start();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static class MessageOutput implements Runnable {
        private Pipe pipe;

        public MessageOutput(Pipe pipe) {
            this.pipe = pipe;
        }

        @Override
        public void run() {
            try {
                String message = "hello world,libailugo";
                ByteBuffer buf = ByteBuffer.wrap(message.getBytes());
                Pipe.SinkChannel channel = pipe.sink();
                int count = channel.write(buf);
                channel.close();
                System.out.println("send message:" + message + ",length:" + count);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    public static class MessageInput implements Runnable {
        private Pipe pipe;

        public MessageInput(Pipe pipe) {
            this.pipe = pipe;
        }

        @Override
        public void run() {
            try {
                Pipe.SourceChannel channel = pipe.source();
                ByteBuffer buf = ByteBuffer.allocate(10);
                StringBuilder sb = new StringBuilder();
                int count = channel.read(buf);
                while (count > 0) {
                    // this would cause an error here (it appends the whole backing array):
                    // sb.append(new String(buf.array()));
                    sb.append(new String(buf.array(), 0, count));
                    buf.clear();
                    count = channel.read(buf);
                }
                channel.close();
                System.out.println("receive message:" + sb.toString());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}<|fim▁end|>
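Test.java demonstrates the standard NIO pipe read loop: fill a small buffer, append only the `count` bytes actually read, and stop once the channel reports end-of-stream. The same pattern over an OS-level pipe in Python, for comparison; the buffer size and message are arbitrary:

```python
import os

r, w = os.pipe()
os.write(w, b"hello world,libailugo")
os.close(w)                       # closing the writer lets the reader see EOF

chunks = []
while True:
    buf = os.read(r, 10)          # small buffer: the message arrives in pieces
    if not buf:                   # b"" is end-of-stream, like read() returning -1
        break
    chunks.append(buf)            # keep only the bytes actually read
os.close(r)
print(b"".join(chunks).decode())
```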
<|file_name|>main.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import<|fim▁hole|> from clay import app import clay.config from flask import make_response, request, redirect, render_template, url_for from epubber.fimfic_epubgen import FimFictionEPubGenerator site_epub_classes = [ FimFictionEPubGenerator ] accesslog = clay.config.get_logger('epubber_access') ##################################################################### # Main App Views Section ##################################################################### @app.route('/', methods=['GET', 'POST']) def main_view(): story = request.args.get("story") or None if story: data = None for epgenclass in site_epub_classes: epgen = epgenclass() if epgen.handle_url(story): epub_file, data = epgen.gen_epub() accesslog.info('%(title)s - %(url)s' % epgen.metas) del epgen response = make_response(data) response.headers["Content-Type"] = "application/epub+zip" response.headers["Content-Disposition"] = "attachment; filename=%s" % epub_file return response del epgen return ("Cannot generate epub for this URL.", 400) return render_template("main.html") ##################################################################### # Secondary Views Section ##################################################################### @app.route('/health', methods=['GET']) def health_view(): ''' Heartbeat view, because why not? ''' return ('OK', 200) ##################################################################### # URL Shortener Views Section ##################################################################### @app.route('/img/<path>', methods=['GET', 'POST']) def static_img_proxy_view(path): ''' Make shorter URLs for image files. ''' path = re.sub(r'[^A-Za-z0-9_.-]', r'_', path) return redirect(url_for('static', filename=os.path.join('img', path))) @app.route('/js/<path>', methods=['GET', 'POST']) def static_js_proxy_view(path): ''' Make shorter URLs for javascript files. ''' path = re.sub(r'[^A-Za-z0-9_+.-]', r'_', path) return redirect(url_for('static', filename=os.path.join('js', path))) @app.route('/css/<path>', methods=['GET', 'POST']) def static_css_proxy_view(path): ''' Make shorter URLs for CSS files. ''' path = re.sub(r'[^A-Za-z0-9_+.-]', r'_', path) return redirect(url_for('static', filename=os.path.join('css', path))) ##################################################################### # Main ##################################################################### def main(): # Make templates copacetic with UTF8 reload(sys) sys.setdefaultencoding('utf-8') # App Config app.secret_key = clay.config.get('flask.secret_key') main() # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 nowrap<|fim▁end|>
import re, os, sys
<|file_name|>densenet_distributed_test.py<|end_file_name|><|fim▁begin|><|fim▁hole|># You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Densely Connected Convolutional Networks. Reference [ Densely Connected Convolutional Networks](https://arxiv.org/abs/1608.06993) """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import time import tensorflow as tf from tensorflow_examples.models.densenet import distributed_train from tensorflow_examples.models.densenet import utils class DenseNetDistributedBenchmark(tf.test.Benchmark): def __init__(self, output_dir=None, **kwargs): self.output_dir = output_dir def benchmark_with_function_custom_loops(self): kwargs = utils.get_cifar10_kwargs() self._run_and_report_benchmark(**kwargs) def benchmark_with_function_custom_loops_300_epochs_2_gpus(self): kwargs = utils.get_cifar10_kwargs() kwargs.update({'epochs': 300, 'data_format': 'channels_first', 'bottleneck': False, 'compression': 1., 'num_gpu': 2, 'batch_size': 128}) self._run_and_report_benchmark(**kwargs) def benchmark_with_function_custom_loops_300_epochs_8_gpus(self): kwargs = utils.get_cifar10_kwargs() kwargs.update({'epochs': 300, 'data_format': 'channels_first', 'bottleneck': False, 'compression': 1., 'num_gpu': 8, 'batch_size': 512}) self._run_and_report_benchmark(**kwargs) def _run_and_report_benchmark(self, top_1_min=.944, top_1_max=.949, **kwargs): """Run the benchmark and report metrics.report. Args: top_1_min: Min value for top_1 accuracy. Default range is SOTA. top_1_max: Max value for top_1 accuracy. **kwargs: All args passed to the test. """ start_time_sec = time.time() train_loss, train_acc, _, test_acc = distributed_train.main(**kwargs) wall_time_sec = time.time() - start_time_sec metrics = [] metrics.append({'name': 'accuracy_top_1', 'value': test_acc, 'min_value': top_1_min, 'max_value': top_1_max}) metrics.append({'name': 'training_accuracy_top_1', 'value': train_acc}) metrics.append({'name': 'train_loss', 'value': train_loss}) self.report_benchmark(wall_time=wall_time_sec, metrics=metrics) if __name__ == '__main__': tf.test.main()<|fim▁end|>
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
<|file_name|>io.rs<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. use file::reader::ParquetReader; use std::{cmp, fs::File, io::*, sync::Mutex}; // ---------------------------------------------------------------------- // Read/Write wrappers for `File`. /// Position trait returns the current position in the stream. /// Should be viewed as a lighter version of `Seek` that does not allow seek operations, /// and does not require mutable reference for the current position. pub trait Position { /// Returns position in the stream. fn pos(&self) -> u64; } /// Struct that represents a slice of a file data with independent start position and /// length. Internally clones provided file handle, wraps with BufReader and resets /// position before any read. /// /// This is workaround and alternative for `file.try_clone()` method. It clones `File` /// while preserving independent position, which is not available with `try_clone()`. /// /// Designed after `arrow::io::RandomAccessFile`. pub struct FileSource<R: ParquetReader> { reader: Mutex<BufReader<R>>, start: u64, // start position in a file end: u64, // end position in a file } impl<R: ParquetReader> FileSource<R> { /// Creates new file reader with start and length from a file handle pub fn new(fd: &R, start: u64, length: usize) -> Self { Self { reader: Mutex::new(BufReader::new(fd.try_clone().unwrap())), start, end: start + length as u64, } } } impl<R: ParquetReader> Read for FileSource<R> { fn read(&mut self, buf: &mut [u8]) -> Result<usize> { let mut reader = self .reader .lock() .map_err(|err| Error::new(ErrorKind::Other, err.to_string()))?; let bytes_to_read = cmp::min(buf.len(), (self.end - self.start) as usize); let buf = &mut buf[0..bytes_to_read]; reader.seek(SeekFrom::Start(self.start as u64))?; let res = reader.read(buf); if let Ok(bytes_read) = res { self.start += bytes_read as u64; } res } } impl<R: ParquetReader> Position for FileSource<R> { fn pos(&self) -> u64 { self.start } } /// Struct that represents `File` output stream with position tracking. /// Used as a sink in file writer. pub struct FileSink { buf: BufWriter<File>, // This is not necessarily position in the underlying file, // but rather current position in the sink. pos: u64, } impl FileSink { /// Creates new file sink. /// Position is set to whatever position file has. 
pub fn new(file: &File) -> Self { let mut owned_file = file.try_clone().unwrap(); let pos = owned_file.seek(SeekFrom::Current(0)).unwrap(); Self { buf: BufWriter::new(owned_file), pos, } } } impl Write for FileSink { fn write(&mut self, buf: &[u8]) -> Result<usize> { let num_bytes = self.buf.write(buf)?; self.pos += num_bytes as u64; Ok(num_bytes) } fn flush(&mut self) -> Result<()> { self.buf.flush() } } impl Position for FileSink { fn pos(&self) -> u64 { self.pos } } // Position implementation for Cursor to use in various tests. impl<'a> Position for Cursor<&'a mut Vec<u8>> { fn pos(&self) -> u64 { self.position() } } #[cfg(test)] mod tests {<|fim▁hole|> use super::*; use util::test_common::{get_temp_file, get_test_file}; #[test] fn test_io_read_fully() { let mut buf = vec![0; 8]; let mut src = FileSource::new(&get_test_file("alltypes_plain.parquet"), 0, 4); let bytes_read = src.read(&mut buf[..]).unwrap(); assert_eq!(bytes_read, 4); assert_eq!(buf, vec![b'P', b'A', b'R', b'1', 0, 0, 0, 0]); } #[test] fn test_io_read_in_chunks() { let mut buf = vec![0; 4]; let mut src = FileSource::new(&get_test_file("alltypes_plain.parquet"), 0, 4); let bytes_read = src.read(&mut buf[0..2]).unwrap(); assert_eq!(bytes_read, 2); let bytes_read = src.read(&mut buf[2..]).unwrap(); assert_eq!(bytes_read, 2); assert_eq!(buf, vec![b'P', b'A', b'R', b'1']); } #[test] fn test_io_read_pos() { let mut src = FileSource::new(&get_test_file("alltypes_plain.parquet"), 0, 4); src.read(&mut vec![0; 1]).unwrap(); assert_eq!(src.pos(), 1); src.read(&mut vec![0; 4]).unwrap(); assert_eq!(src.pos(), 4); } #[test] fn test_io_read_over_limit() { let mut src = FileSource::new(&get_test_file("alltypes_plain.parquet"), 0, 4); // Read all bytes from source src.read(&mut vec![0; 128]).unwrap(); assert_eq!(src.pos(), 4); // Try reading again, should return 0 bytes. let bytes_read = src.read(&mut vec![0; 128]).unwrap(); assert_eq!(bytes_read, 0); assert_eq!(src.pos(), 4); } #[test] fn test_io_seek_switch() { let mut buf = vec![0; 4]; let mut file = get_test_file("alltypes_plain.parquet"); let mut src = FileSource::new(&file, 0, 4); file .seek(SeekFrom::Start(5 as u64)) .expect("File seek to a position"); let bytes_read = src.read(&mut buf[..]).unwrap(); assert_eq!(bytes_read, 4); assert_eq!(buf, vec![b'P', b'A', b'R', b'1']); } #[test] fn test_io_write_with_pos() { let mut file = get_temp_file("file_sink_test", &[b'a', b'b', b'c']); file.seek(SeekFrom::Current(3)).unwrap(); // Write into sink let mut sink = FileSink::new(&file); assert_eq!(sink.pos(), 3); sink.write(&[b'd', b'e', b'f', b'g']).unwrap(); assert_eq!(sink.pos(), 7); sink.flush().unwrap(); assert_eq!(sink.pos(), file.seek(SeekFrom::Current(0)).unwrap()); // Read data using file chunk let mut res = vec![0u8; 7]; let mut chunk = FileSource::new(&file, 0, file.metadata().unwrap().len() as usize); chunk.read(&mut res[..]).unwrap(); assert_eq!(res, vec![b'a', b'b', b'c', b'd', b'e', b'f', b'g']); } }<|fim▁end|>
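io.rs defines `FileSource` because a cloned file descriptor shares its seek position, so every reader keeps its own `start`/`end` window and re-seeks before each read. On POSIX systems the same idea can be sketched with `os.pread`, which reads at an explicit offset without touching any shared cursor (the file contents here are invented):

```python
import os, tempfile

class FileSlice:
    """An independent [start, start+length) read window over one file."""
    def __init__(self, fd, start, length):
        self.fd, self.pos, self.end = fd, start, start + length

    def read(self, n):
        n = min(n, self.end - self.pos)
        data = os.pread(self.fd, n, self.pos)   # offset read: no shared seek state
        self.pos += len(data)
        return data

fd, path = tempfile.mkstemp()
os.write(fd, b"PAR1....payload.PAR1")
a = FileSlice(fd, 0, 4)
b = FileSlice(fd, 16, 4)
print(a.read(4), b.read(4))       # b'PAR1' b'PAR1', two independent cursors
os.close(fd)
os.remove(path)
```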
<|file_name|>blink_ops.rs<|end_file_name|><|fim▁begin|>/* Copyright 2013 Leon Sixt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use algorithm; use node::{Node, Leaf, INode}; use blinktree::physical_node::{PhysicalNode, T_LEAF, T_INODE}; #[deriving(Clone)] pub enum Movement { Right, Down, } pub trait BLinkOps<K: TotalOrd + ToStr, V: ToStr, Ptr: Clone + ToStr, INODE: PhysicalNode<K,Ptr,Ptr>, LEAF: PhysicalNode<K,V,Ptr>> { fn move_right<'a>(&self, node: &'a Node<INODE,LEAF>, key: &K) -> Option<&'a Ptr> { let can_contain = match node { &Leaf(ref leaf) => self.can_contain_key(leaf, key), &INode(ref inode) => self.can_contain_key(inode, key) }; if ! can_contain { node.link_ptr() } else { None } } fn get_value<'a>(&self, leaf: &'a LEAF, key: &K) -> Option<&'a V> { if !self.can_contain_key(leaf,key) { return None; } let idx = algorithm::bsearch_idx(leaf.keys().slice_from(0), key); debug!("[get] ptr: {}, keys: {} values: {}, key: {}, idx: {}", leaf.my_ptr().to_str(), leaf.keys().to_str(), leaf.values().to_str(), key.to_str(), idx.to_str()); if leaf.keys()[idx].cmp(key) == Equal { Some(&leaf.values()[idx]) } else { None } } fn get_ptr<'a>(&self, inode: &'a INODE, key: &K) -> Option<&'a Ptr> { if !self.can_contain_key(inode,key) { return None; } let idx = algorithm::bsearch_idx(inode.keys().slice_from(0), key); debug!("[get_ptr] key: {}, ptr: {}, keys: {} values: {}, idx: {}, is_most_right_node: {}, is_root: {}", key.to_str(), inode.my_ptr().to_str(), inode.keys().to_str(), inode.values().to_str(), idx.to_str(), inode.is_most_right_node(), inode.is_root()); Some(&inode.values()[idx]) } fn scannode<'a>(&self, node: &'a Node<INODE,LEAF>, key: &K) -> Option<(&'a Ptr, Movement)> { let can_contain = match node { &Leaf(ref leaf) => self.can_contain_key(leaf, key), &INode(ref inode) => self.can_contain_key(inode, key) }; if(! can_contain) { return node.link_ptr().map(|r| (r, Right)); } match node { &Leaf(*) => None, &INode(ref inode) => self.get_ptr(inode, key).map(|r| (r, Down)) } } fn split_and_insert_leaf(&self, leaf: &mut LEAF, new_page: Ptr, key: K, value: V) -> LEAF { let new_size = leaf.keys().len()/2; self.insert_leaf(leaf, key, value); let (keys_new, values_new) = leaf.split_at(new_size); let link_ptr = leaf.set_link_ptr(new_page.clone()); PhysicalNode::new(T_LEAF, new_page, link_ptr, keys_new, values_new) } /// Default splitting strategy: /// /// example max_size = 4: /// split /// | /// |<= 3 <|<= 5 <|<= 10 <|<= 15 <|<= 30 /// . . . . . 
/// fn split_and_insert_inode(&self, inode: &mut INODE, new_page: Ptr, key: K, value: Ptr) -> INODE { let new_size = inode.keys().len()/2; self.insert_inode(inode, key, value); let (keys_new, values_new) = inode.split_at(new_size); debug!("[split_and_insert_inode] keys.len: {}, value.len: {}", keys_new.to_str(), values_new.to_str()); let link_ptr = inode.set_link_ptr(new_page.clone()); PhysicalNode::new(T_INODE, new_page, link_ptr, keys_new, values_new) } fn insert_leaf(&self, leaf: &mut LEAF, key: K, value: V) { let idx = algorithm::bsearch_idx(leaf.keys().slice_from(0), &key); leaf.mut_keys().insert(idx, key); leaf.mut_values().insert(idx, value); } fn insert_inode(&self, inode: &mut INODE, key: K, value: Ptr) { let mut idx = algorithm::bsearch_idx(inode.keys().slice_from(0), &key); inode.mut_keys().insert(idx, key); //if (inode.is_root() || inode.is_most_right_node()) { idx += 1;<|fim▁hole|> fn can_contain_key< K1: TotalOrd, V1, Ptr1, N : PhysicalNode<K1,V1,Ptr1>>(&self, node: &N, key: &K1) -> bool { node.is_root() || (node.is_most_right_node() && key.cmp(node.max_key()) == Greater) || (key.cmp(node.max_key()) == Less || key.cmp(node.max_key()) == Equal) } } pub struct DefaultBLinkOps<K,V,Ptr, INODE, LEAF>; impl <K: TotalOrd + ToStr, V: ToStr, Ptr: Clone + ToStr, INODE: PhysicalNode<K,Ptr,Ptr>, LEAF: PhysicalNode<K,V,Ptr> > BLinkOps<K,V,Ptr,INODE, LEAF> for DefaultBLinkOps<K,V,Ptr, INODE, LEAF> {} #[cfg(test)] mod test { use super::{BLinkOps, DefaultBLinkOps}; use blinktree::physical_node::{PhysicalNode, DefaultBLinkNode, T_ROOT, T_LEAF}; macro_rules! can_contains_range( ($node:ident, $from:expr, $to:expr) => ( for i in range($from, $to+1) { assert!(self.can_contain_key(&$node, &i), format!("cannot contain key {}, is_root: {}, is_leaf: {}, is_inode: {}", i, $node.is_root(), $node.is_leaf(), $node.is_inode())); } ) ) trait BLinkOpsTest<INODE: PhysicalNode<uint, uint, uint>, LEAF: PhysicalNode<uint, uint, uint>> : BLinkOps<uint,uint,uint,INODE,LEAF> { fn test(&self) { self.test_can_contain_key(); self.test_needs_split(); self.test_insert_into_inode_ptr_must_be_off_by_one(); } fn test_can_contain_key(&self) { let tpe = T_ROOT ^ T_LEAF; let root : DefaultBLinkNode<uint, uint, uint> = PhysicalNode::new(tpe, 0u, None, ~[2u],~[0u,1u]); can_contains_range!(root, 0u, 10); assert!(self.can_contain_key(&root, &10000)); let leaf : DefaultBLinkNode<uint, uint, uint> = PhysicalNode::new(T_LEAF, 0u, None, ~[2u,4],~[0,1]); can_contains_range!(leaf, 0u, 4); } fn test_needs_split(&self) { } // ionde otherwise // keys: . 4 . 1 | 2 | 3 // values: 1 3 10 1 4 fn test_insert_into_inode_ptr_must_be_off_by_one(&self) { let mut inode: INODE = PhysicalNode::new(T_ROOT & T_LEAF, 0u, None, ~[1],~[0,1]); self.insert_inode(&mut inode, 4, 4); self.insert_inode(&mut inode, 3, 3); let expected = ~[0,1,3,4]; assert!(inode.values() == &expected, format!("expected: {}, got {}", expected.to_str(), inode.values().to_str())) } } impl BLinkOpsTest<DefaultBLinkNode<uint, uint, uint>, DefaultBLinkNode<uint, uint, uint>> for DefaultBLinkOps<uint,uint,uint, DefaultBLinkNode<uint,uint,uint>, DefaultBLinkNode<uint, uint, uint>> {} #[test] fn test_default_blink_ops() { let ops = DefaultBLinkOps; ops.test(); } }<|fim▁end|>
//} inode.mut_values().insert(idx, value); }
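blink_ops.rs splits a full node at the midpoint: the upper half of the keys moves to a freshly allocated page, the old node's link pointer is redirected there, and the new node inherits the old link — exactly the `split` picture in the doc comment. A toy sketch of that pointer bookkeeping, assuming nothing about the real page layout or the key/value off-by-one handling:

```python
# Toy B-link split: lower half stays, upper half moves to a new page,
# and the sibling link chain is preserved through the new node.
class Node:
    def __init__(self, page, keys, link=None):
        self.page = page          # this node's page id
        self.keys = keys
        self.link = link          # right-sibling ("link") pointer

def split(node, new_page):
    mid = len(node.keys) // 2
    right = Node(new_page, node.keys[mid:], link=node.link)
    node.keys = node.keys[:mid]
    node.link = new_page          # old node now points at its new sibling
    return right

n = Node(page=1, keys=[3, 5, 10, 15, 30])
r = split(n, new_page=2)
print(n.keys, "->", n.link)       # [3, 5] -> 2
print(r.keys, "->", r.link)       # [10, 15, 30] -> None
```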
<|file_name|>context_apply_test.go<|end_file_name|><|fim▁begin|>package terraform import ( "bytes" "fmt" "os" "reflect" "sort" "strings" "sync" "sync/atomic" "testing" "time" "github.com/hashicorp/terraform/config/module" ) func TestContext2Apply(t *testing.T) { m := testModule(t, "apply-good") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } mod := state.RootModule() if len(mod.Resources) < 2 { t.Fatalf("bad: %#v", mod.Resources) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_providerAlias(t *testing.T) { m := testModule(t, "apply-provider-alias") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } mod := state.RootModule() if len(mod.Resources) < 2 { t.Fatalf("bad: %#v", mod.Resources) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyProviderAliasStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } // GH-2870 func TestContext2Apply_providerWarning(t *testing.T) { m := testModule(t, "apply-provider-warning") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn p.ValidateFn = func(c *ResourceConfig) (ws []string, es []error) { ws = append(ws, "Just a warning") return } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(` aws_instance.foo: ID = foo `) if actual != expected { t.Fatalf("got: \n%s\n\nexpected:\n%s", actual, expected) } if !p.ConfigureCalled { t.Fatalf("provider Configure() was never called!") } } func TestContext2Apply_emptyModule(t *testing.T) { m := testModule(t, "apply-empty-module") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) actual = strings.Replace(actual, " ", "", -1) expected := strings.TrimSpace(testTerraformApplyEmptyModuleStr) if actual != expected { t.Fatalf("bad: \n%s\nexpect:\n%s", actual, expected) } } func TestContext2Apply_createBeforeDestroy(t *testing.T) { m := testModule(t, "apply-good-create-before") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.bar": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "require_new": "abc", }, }, }, }, }, }, } ctx := testContext2(t, 
&ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: state, }) if p, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } else { t.Logf(p.String()) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } mod := state.RootModule() if len(mod.Resources) != 1 { t.Fatalf("bad: %s", state) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyCreateBeforeStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_createBeforeDestroyUpdate(t *testing.T) { m := testModule(t, "apply-good-create-before-update") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.bar": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "foo": "bar", }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: state, }) if p, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } else { t.Logf(p.String()) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } mod := state.RootModule() if len(mod.Resources) != 1 { t.Fatalf("bad: %s", state) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyCreateBeforeUpdateStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_destroyComputed(t *testing.T) { m := testModule(t, "apply-destroy-computed") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.foo": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "foo", Attributes: map[string]string{ "output": "value", }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: state, Destroy: true, }) if p, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } else { t.Logf(p.String()) } if _, err := ctx.Apply(); err != nil { t.Fatalf("err: %s", err) } } // https://github.com/hashicorp/terraform/pull/5096 func TestContext2Apply_destroySkipsCBD(t *testing.T) { // Config contains CBD resource depending on non-CBD resource, which triggers // a cycle if they are both replaced, but should _not_ trigger a cycle when // just doing a `terraform destroy`. 
m := testModule(t, "apply-destroy-cbd") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.foo": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "foo", }, }, "aws_instance.bar": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "foo", }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: state, Destroy: true, }) if p, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } else { t.Logf(p.String()) } if _, err := ctx.Apply(); err != nil { t.Fatalf("err: %s", err) } } // https://github.com/hashicorp/terraform/issues/2892 func TestContext2Apply_destroyCrossProviders(t *testing.T) { m := testModule(t, "apply-destroy-cross-providers") p_aws := testProvider("aws") p_aws.ApplyFn = testApplyFn p_aws.DiffFn = testDiffFn p_tf := testProvider("terraform") p_tf.ApplyFn = testApplyFn p_tf.DiffFn = testDiffFn providers := map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p_aws), "terraform": testProviderFuncFixed(p_tf), } // Bug only appears from time to time, // so we run this test multiple times // to check for the race-condition for i := 0; i <= 10; i++ { ctx := getContextForApply_destroyCrossProviders( t, m, providers) if p, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } else { t.Logf(p.String()) } if _, err := ctx.Apply(); err != nil { t.Fatalf("err: %s", err) } } } func getContextForApply_destroyCrossProviders( t *testing.T, m *module.Tree, providers map[string]ResourceProviderFactory) *Context { state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "terraform_remote_state.shared": &ResourceState{ Type: "terraform_remote_state", Primary: &InstanceState{ ID: "remote-2652591293", Attributes: map[string]string{ "output.env_name": "test", }, }, }, }, }, &ModuleState{ Path: []string{"root", "example"}, Resources: map[string]*ResourceState{ "aws_vpc.bar": &ResourceState{ Type: "aws_vpc", Primary: &InstanceState{ ID: "vpc-aaabbb12", Attributes: map[string]string{ "value": "test", }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: providers, State: state, Destroy: true, }) return ctx } func TestContext2Apply_minimal(t *testing.T) { m := testModule(t, "apply-minimal") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyMinimalStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } <|fim▁hole|> p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) { return &InstanceDiff{ Attributes: map[string]*ResourceAttrDiff{ "newp": nil, }, }, nil } if _, err := ctx.Apply(); err == nil { t.Fatal("should error") } } func 
TestContext2Apply_cancel(t *testing.T) { stopped := false m := testModule(t, "apply-cancel") p := testProvider("aws") ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) p.ApplyFn = func(*InstanceInfo, *InstanceState, *InstanceDiff) (*InstanceState, error) { if !stopped { stopped = true go ctx.Stop() for { if ctx.sh.Stopped() { break } } } return &InstanceState{ ID: "foo", Attributes: map[string]string{ "num": "2", }, }, nil } p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) { return &InstanceDiff{ Attributes: map[string]*ResourceAttrDiff{ "num": &ResourceAttrDiff{ New: "bar", }, }, }, nil } if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } // Start the Apply in a goroutine stateCh := make(chan *State) go func() { state, err := ctx.Apply() if err != nil { panic(err) } stateCh <- state }() state := <-stateCh mod := state.RootModule() if len(mod.Resources) != 1 { t.Fatalf("bad: %s", state.String()) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyCancelStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_compute(t *testing.T) { m := testModule(t, "apply-compute") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } ctx.variables = map[string]string{"value": "1"} state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyComputeStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_countDecrease(t *testing.T) { m := testModule(t, "apply-count-dec") p := testProvider("aws") p.DiffFn = testDiffFn s := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.foo.0": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "foo": "foo", "type": "aws_instance", }, }, }, "aws_instance.foo.1": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "foo": "foo", "type": "aws_instance", }, }, }, "aws_instance.foo.2": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "foo": "foo", "type": "aws_instance", }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: s, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyCountDecStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_countDecreaseToOne(t *testing.T) { m := testModule(t, "apply-count-dec-one") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn s := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.foo.0": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "foo": "foo", "type": "aws_instance", }, }, }, "aws_instance.foo.1": 
&ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", }, }, "aws_instance.foo.2": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: s, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyCountDecToOneStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } // https://github.com/PeoplePerHour/terraform/pull/11 // // This tests a case where both a "resource" and "resource.0" are in // the state file, which apparently is a reasonable backwards compatibility // concern found in the above 3rd party repo. func TestContext2Apply_countDecreaseToOneCorrupted(t *testing.T) { m := testModule(t, "apply-count-dec-one") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn s := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.foo": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "foo": "foo", "type": "aws_instance", }, }, }, "aws_instance.foo.0": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "baz", Attributes: map[string]string{ "type": "aws_instance", }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: s, }) if p, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } else { testStringMatch(t, p, testTerraformApplyCountDecToOneCorruptedPlanStr) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyCountDecToOneCorruptedStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_countTainted(t *testing.T) { m := testModule(t, "apply-count-tainted") p := testProvider("aws") p.DiffFn = testDiffFn s := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.foo.0": &ResourceState{ Type: "aws_instance", Tainted: []*InstanceState{ &InstanceState{ ID: "bar", Attributes: map[string]string{ "foo": "foo", "type": "aws_instance", }, }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: s, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyCountTaintedStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_countVariable(t *testing.T) { m := testModule(t, "apply-count-variable") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyCountVariableStr) if actual != expected { t.Fatalf("bad: \n%s", 
actual) } } func TestContext2Apply_mapVariableOverride(t *testing.T) { m := testModule(t, "apply-map-var-override") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Variables: map[string]string{ "images.us-west-2": "overridden", }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(` aws_instance.bar: ID = foo ami = overridden type = aws_instance aws_instance.foo: ID = foo ami = image-1234 type = aws_instance `) if actual != expected { t.Fatalf("got: \n%s\nexpected: \n%s", actual, expected) } } func TestContext2Apply_module(t *testing.T) { m := testModule(t, "apply-module") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyModuleStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_moduleDestroyOrder(t *testing.T) { m := testModule(t, "apply-module-destroy-order") p := testProvider("aws") p.DiffFn = testDiffFn // Create a custom apply function to track the order they were destroyed var order []string var orderLock sync.Mutex p.ApplyFn = func( info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) { orderLock.Lock() defer orderLock.Unlock() order = append(order, is.ID) return nil, nil } state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.b": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "b", }, }, }, }, &ModuleState{ Path: []string{"root", "child"}, Resources: map[string]*ResourceState{ "aws_instance.a": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "a", }, }, }, Outputs: map[string]string{ "a_output": "a", }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: state, Destroy: true, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } expected := []string{"b", "a"} if !reflect.DeepEqual(order, expected) { t.Fatalf("bad: %#v", order) } { actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyModuleDestroyOrderStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } } func TestContext2Apply_moduleOrphanProvider(t *testing.T) { m := testModule(t, "apply-module-orphan-provider-inherit") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn p.ConfigureFn = func(c *ResourceConfig) error { if _, ok := c.Get("value"); !ok { return fmt.Errorf("value is not found") } return nil } // Create a state with an orphan module state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: []string{"root", "child"}, Resources: map[string]*ResourceState{ "aws_instance.bar": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, State: 
state, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } if _, err := ctx.Apply(); err != nil { t.Fatalf("err: %s", err) } } // This tests an issue where all the providers in a module but not // in the root weren't being added to the root properly. In this test // case: aws is explicitly added to root, but "test" should be added to. // With the bug, it wasn't. func TestContext2Apply_moduleOnlyProvider(t *testing.T) { m := testModule(t, "apply-module-only-provider") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn pTest := testProvider("test") pTest.ApplyFn = testApplyFn pTest.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), "test": testProviderFuncFixed(pTest), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyModuleOnlyProviderStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_moduleProviderAlias(t *testing.T) { m := testModule(t, "apply-module-provider-alias") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyModuleProviderAliasStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_moduleProviderAliasTargets(t *testing.T) { m := testModule(t, "apply-module-provider-alias") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Targets: []string{"no.thing"}, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(` <no state> `) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_moduleProviderCloseNested(t *testing.T) { m := testModule(t, "apply-module-provider-close-nested") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: &State{ Modules: []*ModuleState{ &ModuleState{ Path: []string{"root", "child", "subchild"}, Resources: map[string]*ResourceState{ "aws_instance.foo": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", }, }, }, }, }, }, Destroy: true, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } if _, err := ctx.Apply(); err != nil { t.Fatalf("err: %s", err) } } func TestContext2Apply_moduleVarResourceCount(t *testing.T) { m := testModule(t, "apply-module-var-resource-count") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Variables: map[string]string{ "count": "2", }, 
Destroy: true, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } if _, err := ctx.Apply(); err != nil { t.Fatalf("err: %s", err) } ctx = testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Variables: map[string]string{ "count": "5", }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } if _, err := ctx.Apply(); err != nil { t.Fatalf("err: %s", err) } } // GH-819 func TestContext2Apply_moduleBool(t *testing.T) { m := testModule(t, "apply-module-bool") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyModuleBoolStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_multiProvider(t *testing.T) { m := testModule(t, "apply-multi-provider") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn pDO := testProvider("do") pDO.ApplyFn = testApplyFn pDO.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), "do": testProviderFuncFixed(pDO), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } mod := state.RootModule() if len(mod.Resources) < 2 { t.Fatalf("bad: %#v", mod.Resources) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyMultiProviderStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_multiVar(t *testing.T) { m := testModule(t, "apply-multi-var") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn // First, apply with a count of 3 ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Variables: map[string]string{ "count": "3", }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := state.RootModule().Outputs["output"] expected := "bar0,bar1,bar2" if actual != expected { t.Fatalf("bad: \n%s", actual) } // Apply again, reduce the count to 1 { ctx := testContext2(t, &ContextOpts{ Module: m, State: state, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Variables: map[string]string{ "count": "1", }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := state.RootModule().Outputs["output"] expected := "bar0" if actual != expected { t.Fatalf("bad: \n%s", actual) } } } func TestContext2Apply_nilDiff(t *testing.T) { m := testModule(t, "apply-good") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) { return nil, nil } if _, err := ctx.Apply(); err == nil { t.Fatal("should error") } } func 
TestContext2Apply_outputOrphan(t *testing.T) { m := testModule(t, "apply-output-orphan") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Outputs: map[string]string{ "foo": "bar", "bar": "baz", }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: state, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyOutputOrphanStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_providerComputedVar(t *testing.T) { m := testModule(t, "apply-provider-computed") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn pTest := testProvider("test") pTest.ApplyFn = testApplyFn pTest.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), "test": testProviderFuncFixed(pTest), }, }) p.ConfigureFn = func(c *ResourceConfig) error { if c.IsComputed("value") { return fmt.Errorf("value is computed") } v, ok := c.Get("value") if !ok { return fmt.Errorf("value is not found") } if v != "yes" { return fmt.Errorf("value is not 'yes': %v", v) } return nil } if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } if _, err := ctx.Apply(); err != nil { t.Fatalf("err: %s", err) } } func TestContext2Apply_Provisioner_compute(t *testing.T) { m := testModule(t, "apply-provisioner-compute") p := testProvider("aws") pr := testProvisioner() p.ApplyFn = testApplyFn p.DiffFn = testDiffFn pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error { val, ok := c.Config["foo"] if !ok || val != "computed_dynamical" { t.Fatalf("bad value for foo: %v %#v", val, c) } return nil } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Provisioners: map[string]ResourceProvisionerFactory{ "shell": testProvisionerFuncFixed(pr), }, Variables: map[string]string{ "value": "1", }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyProvisionerStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } // Verify apply was invoked if !pr.ApplyCalled { t.Fatalf("provisioner not invoked") } } func TestContext2Apply_provisionerCreateFail(t *testing.T) { m := testModule(t, "apply-provisioner-fail-create") p := testProvider("aws") pr := testProvisioner() p.DiffFn = testDiffFn p.ApplyFn = func( info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) { is.ID = "foo" return is, fmt.Errorf("error") } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Provisioners: map[string]ResourceProvisionerFactory{ "shell": testProvisionerFuncFixed(pr), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err == nil { t.Fatal("should error") } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyProvisionerFailCreateStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func 
TestContext2Apply_provisionerCreateFailNoId(t *testing.T) { m := testModule(t, "apply-provisioner-fail-create") p := testProvider("aws") pr := testProvisioner() p.DiffFn = testDiffFn p.ApplyFn = func( info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) { return nil, fmt.Errorf("error") } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Provisioners: map[string]ResourceProvisionerFactory{ "shell": testProvisionerFuncFixed(pr), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err == nil { t.Fatal("should error") } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyProvisionerFailCreateNoIdStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_provisionerFail(t *testing.T) { m := testModule(t, "apply-provisioner-fail") p := testProvider("aws") pr := testProvisioner() p.ApplyFn = testApplyFn p.DiffFn = testDiffFn pr.ApplyFn = func(*InstanceState, *ResourceConfig) error { return fmt.Errorf("EXPLOSION") } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Provisioners: map[string]ResourceProvisionerFactory{ "shell": testProvisionerFuncFixed(pr), }, Variables: map[string]string{ "value": "1", }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err == nil { t.Fatal("should error") } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyProvisionerFailStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_provisionerFail_createBeforeDestroy(t *testing.T) { m := testModule(t, "apply-provisioner-fail-create-before") p := testProvider("aws") pr := testProvisioner() p.ApplyFn = testApplyFn p.DiffFn = testDiffFn pr.ApplyFn = func(*InstanceState, *ResourceConfig) error { return fmt.Errorf("EXPLOSION") } state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.bar": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "require_new": "abc", }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Provisioners: map[string]ResourceProvisionerFactory{ "shell": testProvisionerFuncFixed(pr), }, State: state, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err == nil { t.Fatal("should error") } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyProvisionerFailCreateBeforeDestroyStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_error_createBeforeDestroy(t *testing.T) { m := testModule(t, "apply-error-create-before") p := testProvider("aws") state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.bar": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "require_new": "abc", }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: state, }) p.ApplyFn = func(info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, 
error) { return nil, fmt.Errorf("error") } p.DiffFn = testDiffFn if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err == nil { t.Fatal("should have error") } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyErrorCreateBeforeDestroyStr) if actual != expected { t.Fatalf("bad: \n%s\n\nExpected:\n\n%s", actual, expected) } } func TestContext2Apply_errorDestroy_createBeforeDestroy(t *testing.T) { m := testModule(t, "apply-error-create-before") p := testProvider("aws") state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.bar": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "require_new": "abc", }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: state, }) p.ApplyFn = func(info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) { // Fail the destroy! if id.Destroy { return is, fmt.Errorf("error") } // Create should work is = &InstanceState{ ID: "foo", } return is, nil } p.DiffFn = testDiffFn if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err == nil { t.Fatal("should have error") } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyErrorDestroyCreateBeforeDestroyStr) if actual != expected { t.Fatalf("bad: actual:\n%s\n\nexpected:\n%s", actual, expected) } } func TestContext2Apply_multiDepose_createBeforeDestroy(t *testing.T) { m := testModule(t, "apply-multi-depose-create-before-destroy") p := testProvider("aws") p.DiffFn = testDiffFn ps := map[string]ResourceProviderFactory{"aws": testProviderFuncFixed(p)} state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.web": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ID: "foo"}, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: ps, State: state, }) createdInstanceId := "bar" // Create works createFunc := func(is *InstanceState) (*InstanceState, error) { return &InstanceState{ID: createdInstanceId}, nil } // Destroy starts broken destroyFunc := func(is *InstanceState) (*InstanceState, error) { return is, fmt.Errorf("destroy failed") } p.ApplyFn = func(info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) { if id.Destroy { return destroyFunc(is) } else { return createFunc(is) } } if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } // Destroy is broken, so even though CBD successfully replaces the instance, // we'll have to save the Deposed instance to destroy later state, err := ctx.Apply() if err == nil { t.Fatal("should have error") } checkStateString(t, state, ` aws_instance.web: (1 deposed) ID = bar Deposed ID 1 = foo `) createdInstanceId = "baz" ctx = testContext2(t, &ContextOpts{ Module: m, Providers: ps, State: state, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } // We're replacing the primary instance once again. Destroy is _still_ // broken, so the Deposed list gets longer state, err = ctx.Apply() if err == nil { t.Fatal("should have error") } checkStateString(t, state, ` aws_instance.web: (2 deposed) ID = baz Deposed ID 1 = foo Deposed ID 2 = bar `) // Destroy partially fixed! 
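	// The reworked destroyFunc below succeeds only for the instances with
	// IDs "foo" and "baz"; destroying the deposed "bar" keeps failing, so
	// the state assertion after the next apply still lists it as deposed.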
destroyFunc = func(is *InstanceState) (*InstanceState, error) { if is.ID == "foo" || is.ID == "baz" { return nil, nil } else { return is, fmt.Errorf("destroy partially failed") } } createdInstanceId = "qux" if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err = ctx.Apply() // Expect error because 1/2 of Deposed destroys failed if err == nil { t.Fatal("should have error") } // foo and baz are now gone, bar sticks around checkStateString(t, state, ` aws_instance.web: (1 deposed) ID = qux Deposed ID 1 = bar `) // Destroy working fully! destroyFunc = func(is *InstanceState) (*InstanceState, error) { return nil, nil } createdInstanceId = "quux" if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err = ctx.Apply() if err != nil { t.Fatal("should not have error:", err) } // And finally the state is clean checkStateString(t, state, ` aws_instance.web: ID = quux `) } func TestContext2Apply_provisionerResourceRef(t *testing.T) { m := testModule(t, "apply-provisioner-resource-ref") p := testProvider("aws") pr := testProvisioner() p.ApplyFn = testApplyFn p.DiffFn = testDiffFn pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error { val, ok := c.Config["foo"] if !ok || val != "2" { t.Fatalf("bad value for foo: %v %#v", val, c) } return nil } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Provisioners: map[string]ResourceProvisionerFactory{ "shell": testProvisionerFuncFixed(pr), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyProvisionerResourceRefStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } // Verify apply was invoked if !pr.ApplyCalled { t.Fatalf("provisioner not invoked") } } func TestContext2Apply_provisionerSelfRef(t *testing.T) { m := testModule(t, "apply-provisioner-self-ref") p := testProvider("aws") pr := testProvisioner() p.ApplyFn = testApplyFn p.DiffFn = testDiffFn pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error { val, ok := c.Config["command"] if !ok || val != "bar" { t.Fatalf("bad value for command: %v %#v", val, c) } return nil } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Provisioners: map[string]ResourceProvisionerFactory{ "shell": testProvisionerFuncFixed(pr), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyProvisionerSelfRefStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } // Verify apply was invoked if !pr.ApplyCalled { t.Fatalf("provisioner not invoked") } } func TestContext2Apply_provisionerMultiSelfRef(t *testing.T) { var lock sync.Mutex commands := make([]string, 0, 5) m := testModule(t, "apply-provisioner-multi-self-ref") p := testProvider("aws") pr := testProvisioner() p.ApplyFn = testApplyFn p.DiffFn = testDiffFn pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error { lock.Lock() defer lock.Unlock() val, ok := c.Config["command"] if !ok { t.Fatalf("bad value for command: %v %#v", val, c) } commands = append(commands, val.(string)) return nil } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": 
testProviderFuncFixed(p), }, Provisioners: map[string]ResourceProvisionerFactory{ "shell": testProvisionerFuncFixed(pr), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyProvisionerMultiSelfRefStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } // Verify apply was invoked if !pr.ApplyCalled { t.Fatalf("provisioner not invoked") } // Verify our result sort.Strings(commands) expectedCommands := []string{"number 0", "number 1", "number 2"} if !reflect.DeepEqual(commands, expectedCommands) { t.Fatalf("bad: %#v", commands) } } // Provisioner should NOT run on a diff, only create func TestContext2Apply_Provisioner_Diff(t *testing.T) { m := testModule(t, "apply-provisioner-diff") p := testProvider("aws") pr := testProvisioner() p.ApplyFn = testApplyFn p.DiffFn = testDiffFn pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error { return nil } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Provisioners: map[string]ResourceProvisionerFactory{ "shell": testProvisionerFuncFixed(pr), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyProvisionerDiffStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } // Verify apply was invoked if !pr.ApplyCalled { t.Fatalf("provisioner not invoked") } pr.ApplyCalled = false // Change the state to force a diff mod := state.RootModule() mod.Resources["aws_instance.bar"].Primary.Attributes["foo"] = "baz" // Re-create context with state ctx = testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Provisioners: map[string]ResourceProvisionerFactory{ "shell": testProvisionerFuncFixed(pr), }, State: state, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state2, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual = strings.TrimSpace(state2.String()) if actual != expected { t.Fatalf("bad: \n%s", actual) } // Verify apply was NOT invoked if pr.ApplyCalled { t.Fatalf("provisioner invoked") } } func TestContext2Apply_outputDiffVars(t *testing.T) { m := testModule(t, "apply-good") p := testProvider("aws") s := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.baz": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: s, }) p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) { for k, ad := range d.Attributes { if ad.NewComputed { return nil, fmt.Errorf("%s: computed", k) } } result := s.MergeDiff(d) result.ID = "foo" return result, nil } p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) { return &InstanceDiff{ Attributes: map[string]*ResourceAttrDiff{ "foo": &ResourceAttrDiff{ NewComputed: true, Type: DiffAttrOutput, }, "bar": &ResourceAttrDiff{ New: "baz", }, }, }, nil } if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } if _, err := ctx.Apply(); err != nil { t.Fatalf("err: %s", 
err) } } func TestContext2Apply_Provisioner_ConnInfo(t *testing.T) { m := testModule(t, "apply-provisioner-conninfo") p := testProvider("aws") pr := testProvisioner() p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) { if s.Ephemeral.ConnInfo == nil { t.Fatalf("ConnInfo not initialized") } result, _ := testApplyFn(info, s, d) result.Ephemeral.ConnInfo = map[string]string{ "type": "ssh", "host": "127.0.0.1", "port": "22", } return result, nil } p.DiffFn = testDiffFn pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error { conn := rs.Ephemeral.ConnInfo if conn["type"] != "telnet" { t.Fatalf("Bad: %#v", conn) } if conn["host"] != "127.0.0.1" { t.Fatalf("Bad: %#v", conn) } if conn["port"] != "2222" { t.Fatalf("Bad: %#v", conn) } if conn["user"] != "superuser" { t.Fatalf("Bad: %#v", conn) } if conn["pass"] != "test" { t.Fatalf("Bad: %#v", conn) } return nil } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Provisioners: map[string]ResourceProvisionerFactory{ "shell": testProvisionerFuncFixed(pr), }, Variables: map[string]string{ "value": "1", "pass": "test", }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyProvisionerStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } // Verify apply was invoked if !pr.ApplyCalled { t.Fatalf("provisioner not invoked") } } func TestContext2Apply_destroy(t *testing.T) { m := testModule(t, "apply-destroy") h := new(HookRecordApplyOrder) p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Hooks: []Hook{h}, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) // First plan and apply a create operation if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } // Next, plan and apply a destroy operation h.Active = true ctx = testContext2(t, &ContextOpts{ Destroy: true, State: state, Module: m, Hooks: []Hook{h}, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err = ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } // Test that things were destroyed actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyDestroyStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } // Test that things were destroyed _in the right order_ expected2 := []string{"aws_instance.bar", "aws_instance.foo"} actual2 := h.IDs if !reflect.DeepEqual(actual2, expected2) { t.Fatalf("expected: %#v\n\ngot:%#v", expected2, actual2) } } func TestContext2Apply_destroyNestedModule(t *testing.T) { m := testModule(t, "apply-destroy-nested-module") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn s := &State{ Modules: []*ModuleState{ &ModuleState{ Path: []string{"root", "child", "subchild"}, Resources: map[string]*ResourceState{ "aws_instance.bar": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: s, }) // First plan and apply a create operation if _, err := 
ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } // Test that things were destroyed actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyDestroyNestedModuleStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_destroyDeeplyNestedModule(t *testing.T) { m := testModule(t, "apply-destroy-deeply-nested-module") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn s := &State{ Modules: []*ModuleState{ &ModuleState{ Path: []string{"root", "child", "subchild", "subsubchild"}, Resources: map[string]*ResourceState{ "aws_instance.bar": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: s, }) // First plan and apply a create operation if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } // Test that things were destroyed actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(` module.child.subchild.subsubchild: <no state> `) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_destroyOutputs(t *testing.T) { m := testModule(t, "apply-destroy-outputs") h := new(HookRecordApplyOrder) p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Hooks: []Hook{h}, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) // First plan and apply a create operation if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } // Next, plan and apply a destroy operation h.Active = true ctx = testContext2(t, &ContextOpts{ Destroy: true, State: state, Module: m, Hooks: []Hook{h}, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err = ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } mod := state.RootModule() if len(mod.Resources) > 0 { t.Fatalf("bad: %#v", mod) } } func TestContext2Apply_destroyOrphan(t *testing.T) { m := testModule(t, "apply-error") p := testProvider("aws") s := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.baz": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: s, }) p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) { if d.Destroy { return nil, nil } result := s.MergeDiff(d) result.ID = "foo" return result, nil } p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) { return &InstanceDiff{ Attributes: map[string]*ResourceAttrDiff{ "num": &ResourceAttrDiff{ New: "bar", }, }, }, nil } if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } mod := state.RootModule() if _, ok := mod.Resources["aws_instance.baz"]; ok { t.Fatalf("bad: %#v", mod.Resources) } } func TestContext2Apply_destroyTaintedProvisioner(t *testing.T) { m := testModule(t, "apply-destroy-provisioner") 
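	// Verifies that provisioners are NOT invoked while destroying a tainted
	// instance: pr.ApplyFn records any call in `called`, and the test fails
	// if that flag is set after the destroy apply completes.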
p := testProvider("aws") pr := testProvisioner() p.ApplyFn = testApplyFn p.DiffFn = testDiffFn called := false pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error { called = true return nil } s := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.foo": &ResourceState{ Type: "aws_instance", Tainted: []*InstanceState{ &InstanceState{ ID: "bar", Attributes: map[string]string{ "id": "bar", }, }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Provisioners: map[string]ResourceProvisionerFactory{ "shell": testProvisionerFuncFixed(pr), }, State: s, Destroy: true, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } if called { t.Fatal("provisioner should not be called") } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace("<no state>") if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_error(t *testing.T) { errored := false m := testModule(t, "apply-error") p := testProvider("aws") ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) p.ApplyFn = func(*InstanceInfo, *InstanceState, *InstanceDiff) (*InstanceState, error) { if errored { state := &InstanceState{ ID: "bar", } return state, fmt.Errorf("error") } errored = true return &InstanceState{ ID: "foo", Attributes: map[string]string{ "num": "2", }, }, nil } p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) { return &InstanceDiff{ Attributes: map[string]*ResourceAttrDiff{ "num": &ResourceAttrDiff{ New: "bar", }, }, }, nil } if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err == nil { t.Fatal("should have error") } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyErrorStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_errorPartial(t *testing.T) { errored := false m := testModule(t, "apply-error") p := testProvider("aws") s := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.bar": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: s, }) p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) { if errored { return s, fmt.Errorf("error") } errored = true return &InstanceState{ ID: "foo", Attributes: map[string]string{ "num": "2", }, }, nil } p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) { return &InstanceDiff{ Attributes: map[string]*ResourceAttrDiff{ "num": &ResourceAttrDiff{ New: "bar", }, }, }, nil } if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err == nil { t.Fatal("should have error") } mod := state.RootModule() if len(mod.Resources) != 2 { t.Fatalf("bad: %#v", mod.Resources) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyErrorPartialStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_hook(t *testing.T) { m := testModule(t, "apply-good") h := 
new(MockHook) p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Hooks: []Hook{h}, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } if _, err := ctx.Apply(); err != nil { t.Fatalf("err: %s", err) } if !h.PreApplyCalled { t.Fatal("should be called") } if !h.PostApplyCalled { t.Fatal("should be called") } if !h.PostStateUpdateCalled { t.Fatalf("should call post state update") } } func TestContext2Apply_hookOrphan(t *testing.T) { m := testModule(t, "apply-blank") h := new(MockHook) p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.bar": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, State: state, Hooks: []Hook{h}, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } if _, err := ctx.Apply(); err != nil { t.Fatalf("err: %s", err) } if !h.PreApplyCalled { t.Fatal("should be called") } if !h.PostApplyCalled { t.Fatal("should be called") } if !h.PostStateUpdateCalled { t.Fatalf("should call post state update") } } func TestContext2Apply_idAttr(t *testing.T) { m := testModule(t, "apply-idattr") p := testProvider("aws") ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) { result := s.MergeDiff(d) result.ID = "foo" result.Attributes = map[string]string{ "id": "bar", } return result, nil } p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) { return &InstanceDiff{ Attributes: map[string]*ResourceAttrDiff{ "num": &ResourceAttrDiff{ New: "bar", }, }, }, nil } if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } mod := state.RootModule() rs, ok := mod.Resources["aws_instance.foo"] if !ok { t.Fatal("not in state") } if rs.Primary.ID != "foo" { t.Fatalf("bad: %#v", rs.Primary.ID) } if rs.Primary.Attributes["id"] != "foo" { t.Fatalf("bad: %#v", rs.Primary.Attributes) } } func TestContext2Apply_output(t *testing.T) { m := testModule(t, "apply-output") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyOutputStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_outputInvalid(t *testing.T) { m := testModule(t, "apply-output-invalid") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) _, err := ctx.Plan() if err == nil { t.Fatalf("err: %s", err) } if !strings.Contains(err.Error(), "is not a string") { t.Fatalf("err: %s", err) } } func TestContext2Apply_outputAdd(t 
*testing.T) { m1 := testModule(t, "apply-output-add-before") p1 := testProvider("aws") p1.ApplyFn = testApplyFn p1.DiffFn = testDiffFn ctx1 := testContext2(t, &ContextOpts{ Module: m1, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p1), }, }) if _, err := ctx1.Plan(); err != nil { t.Fatalf("err: %s", err) } state1, err := ctx1.Apply() if err != nil { t.Fatalf("err: %s", err) } m2 := testModule(t, "apply-output-add-after") p2 := testProvider("aws") p2.ApplyFn = testApplyFn p2.DiffFn = testDiffFn ctx2 := testContext2(t, &ContextOpts{ Module: m2, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p2), }, State: state1, }) if _, err := ctx2.Plan(); err != nil { t.Fatalf("err: %s", err) } state2, err := ctx2.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state2.String()) expected := strings.TrimSpace(testTerraformApplyOutputAddStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_outputList(t *testing.T) { m := testModule(t, "apply-output-list") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyOutputListStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_outputMulti(t *testing.T) { m := testModule(t, "apply-output-multi") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyOutputMultiStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_outputMultiIndex(t *testing.T) { m := testModule(t, "apply-output-multi-index") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyOutputMultiIndexStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_taint(t *testing.T) { m := testModule(t, "apply-taint") p := testProvider("aws") // destroyCount tests against regression of // https://github.com/hashicorp/terraform/issues/1056 var destroyCount = int32(0) var once sync.Once simulateProviderDelay := func() { time.Sleep(10 * time.Millisecond) } p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) { once.Do(simulateProviderDelay) if d.Destroy { atomic.AddInt32(&destroyCount, 1) } return testApplyFn(info, s, d) } p.DiffFn = testDiffFn s := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.bar": &ResourceState{ Type: "aws_instance", Tainted: []*InstanceState{ &InstanceState{ 
ID: "baz", Attributes: map[string]string{ "num": "2", "type": "aws_instance", }, }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: s, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyTaintStr) if actual != expected { t.Fatalf("bad:\n%s", actual) } if destroyCount != 1 { t.Fatalf("Expected 1 destroy, got %d", destroyCount) } } func TestContext2Apply_taintDep(t *testing.T) { m := testModule(t, "apply-taint-dep") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn s := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.foo": &ResourceState{ Type: "aws_instance", Tainted: []*InstanceState{ &InstanceState{ ID: "baz", Attributes: map[string]string{ "num": "2", "type": "aws_instance", }, }, }, }, "aws_instance.bar": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "foo": "baz", "num": "2", "type": "aws_instance", }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: s, }) if p, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } else { t.Logf("plan: %s", p) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyTaintDepStr) if actual != expected { t.Fatalf("bad:\n%s", actual) } } func TestContext2Apply_taintDepRequiresNew(t *testing.T) { m := testModule(t, "apply-taint-dep-requires-new") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn s := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.foo": &ResourceState{ Type: "aws_instance", Tainted: []*InstanceState{ &InstanceState{ ID: "baz", Attributes: map[string]string{ "num": "2", "type": "aws_instance", }, }, }, }, "aws_instance.bar": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "foo": "baz", "num": "2", "type": "aws_instance", }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: s, }) if p, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } else { t.Logf("plan: %s", p) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyTaintDepRequireNewStr) if actual != expected { t.Fatalf("bad:\n%s", actual) } } func TestContext2Apply_targeted(t *testing.T) { m := testModule(t, "apply-targeted") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Targets: []string{"aws_instance.foo"}, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } mod := state.RootModule() if len(mod.Resources) != 1 { t.Fatalf("expected 1 resource, got: %#v", mod.Resources) } checkStateString(t, state, ` aws_instance.foo: ID = foo num = 2 type = 
aws_instance `) } func TestContext2Apply_targetedCount(t *testing.T) { m := testModule(t, "apply-targeted-count") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Targets: []string{"aws_instance.foo"}, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } checkStateString(t, state, ` aws_instance.foo.0: ID = foo aws_instance.foo.1: ID = foo aws_instance.foo.2: ID = foo `) } func TestContext2Apply_targetedCountIndex(t *testing.T) { m := testModule(t, "apply-targeted-count") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Targets: []string{"aws_instance.foo[1]"}, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } checkStateString(t, state, ` aws_instance.foo.1: ID = foo `) } func TestContext2Apply_targetedDestroy(t *testing.T) { m := testModule(t, "apply-targeted") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.foo": resourceState("aws_instance", "i-bcd345"), "aws_instance.bar": resourceState("aws_instance", "i-abc123"), }, }, }, }, Targets: []string{"aws_instance.foo"}, Destroy: true, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } mod := state.RootModule() if len(mod.Resources) != 1 { t.Fatalf("expected 1 resource, got: %#v", mod.Resources) } checkStateString(t, state, ` aws_instance.bar: ID = i-abc123 `) } // https://github.com/hashicorp/terraform/issues/4462 func TestContext2Apply_targetedDestroyModule(t *testing.T) { m := testModule(t, "apply-targeted-module") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.foo": resourceState("aws_instance", "i-bcd345"), "aws_instance.bar": resourceState("aws_instance", "i-abc123"), }, }, &ModuleState{ Path: []string{"root", "child"}, Resources: map[string]*ResourceState{ "aws_instance.foo": resourceState("aws_instance", "i-bcd345"), "aws_instance.bar": resourceState("aws_instance", "i-abc123"), }, }, }, }, Targets: []string{"module.child.aws_instance.foo"}, Destroy: true, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } checkStateString(t, state, ` aws_instance.bar: ID = i-abc123 aws_instance.foo: ID = i-bcd345 module.child: aws_instance.bar: ID = i-abc123 `) } func TestContext2Apply_targetedDestroyCountIndex(t *testing.T) { m := testModule(t, "apply-targeted-count") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": 
testProviderFuncFixed(p), }, State: &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.foo.0": resourceState("aws_instance", "i-bcd345"), "aws_instance.foo.1": resourceState("aws_instance", "i-bcd345"), "aws_instance.foo.2": resourceState("aws_instance", "i-bcd345"), "aws_instance.bar.0": resourceState("aws_instance", "i-abc123"), "aws_instance.bar.1": resourceState("aws_instance", "i-abc123"), "aws_instance.bar.2": resourceState("aws_instance", "i-abc123"), }, }, }, }, Targets: []string{ "aws_instance.foo[2]", "aws_instance.bar[1]", }, Destroy: true, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } checkStateString(t, state, ` aws_instance.bar.0: ID = i-abc123 aws_instance.bar.2: ID = i-abc123 aws_instance.foo.0: ID = i-bcd345 aws_instance.foo.1: ID = i-bcd345 `) } func TestContext2Apply_targetedModule(t *testing.T) { m := testModule(t, "apply-targeted-module") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Targets: []string{"module.child"}, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } mod := state.ModuleByPath([]string{"root", "child"}) if mod == nil { t.Fatalf("no child module found in the state!\n\n%#v", state) } if len(mod.Resources) != 2 { t.Fatalf("expected 2 resources, got: %#v", mod.Resources) } checkStateString(t, state, ` <no state> module.child: aws_instance.bar: ID = foo num = 2 type = aws_instance aws_instance.foo: ID = foo num = 2 type = aws_instance `) } // GH-1858 func TestContext2Apply_targetedModuleDep(t *testing.T) { m := testModule(t, "apply-targeted-module-dep") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Targets: []string{"aws_instance.foo"}, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } checkStateString(t, state, ` aws_instance.foo: ID = foo foo = foo type = aws_instance Dependencies: module.child module.child: aws_instance.mod: ID = foo Outputs: output = foo `) } func TestContext2Apply_targetedModuleResource(t *testing.T) { m := testModule(t, "apply-targeted-module-resource") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Targets: []string{"module.child.aws_instance.foo"}, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } mod := state.ModuleByPath([]string{"root", "child"}) if len(mod.Resources) != 1 { t.Fatalf("expected 1 resource, got: %#v", mod.Resources) } checkStateString(t, state, ` <no state> module.child: aws_instance.foo: ID = foo num = 2 type = aws_instance `) } func TestContext2Apply_unknownAttribute(t *testing.T) { m := testModule(t, "apply-unknown") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := 
ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err == nil { t.Fatal("should error") } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyUnknownAttrStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_unknownAttributeInterpolate(t *testing.T) { m := testModule(t, "apply-unknown-interpolate") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) if _, err := ctx.Plan(); err == nil { t.Fatal("should error") } } func TestContext2Apply_vars(t *testing.T) { m := testModule(t, "apply-vars") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, Variables: map[string]string{ "foo": "us-west-2", "amis.us-east-1": "override", }, }) w, e := ctx.Validate() if len(w) > 0 { t.Fatalf("bad: %#v", w) } if len(e) > 0 { t.Fatalf("bad: %s", e) } if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyVarsStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_varsEnv(t *testing.T) { // Set the env var old := tempEnv(t, "TF_VAR_ami", "baz") defer os.Setenv("TF_VAR_ami", old) m := testModule(t, "apply-vars-env") p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Module: m, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, }) w, e := ctx.Validate() if len(w) > 0 { t.Fatalf("bad: %#v", w) } if len(e) > 0 { t.Fatalf("bad: %s", e) } if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyVarsEnvStr) if actual != expected { t.Fatalf("bad: \n%s", actual) } } func TestContext2Apply_createBefore_depends(t *testing.T) { m := testModule(t, "apply-depends-create-before") h := new(HookRecordApplyOrder) p := testProvider("aws") p.ApplyFn = testApplyFn p.DiffFn = testDiffFn state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.web": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "require_new": "ami-old", }, }, }, "aws_instance.lb": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "baz", Attributes: map[string]string{ "instance": "bar", }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Hooks: []Hook{h}, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: state, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } h.Active = true state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } mod := state.RootModule() if len(mod.Resources) < 2 { t.Fatalf("bad: %#v", mod.Resources) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(testTerraformApplyDependsCreateBeforeStr) if actual != expected { t.Fatalf("bad: \n%s\n%s", actual, expected) } // Test that things were managed _in the right order_ order := h.States diffs := h.Diffs 
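	// Expected create-before-destroy ordering, matching the assertions below:
	//   1. create the replacement instance (empty ID, non-destroy diff),
	//   2. update the dependent resource ("baz"),
	//   3. destroy the old instance ("bar") last.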
if order[0].ID != "" || diffs[0].Destroy { t.Fatalf("should create new instance first: %#v", order) } if order[1].ID != "baz" { t.Fatalf("update must happen after create: %#v", order) } if order[2].ID != "bar" || !diffs[2].Destroy { t.Fatalf("destroy must happen after update: %#v", order) } } func TestContext2Apply_singleDestroy(t *testing.T) { m := testModule(t, "apply-depends-create-before") h := new(HookRecordApplyOrder) p := testProvider("aws") invokeCount := 0 p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) { invokeCount++ switch invokeCount { case 1: if d.Destroy { t.Fatalf("should not destroy") } if s.ID != "" { t.Fatalf("should not have ID") } case 2: if d.Destroy { t.Fatalf("should not destroy") } if s.ID != "baz" { t.Fatalf("should have id") } case 3: if !d.Destroy { t.Fatalf("should destroy") } if s.ID == "" { t.Fatalf("should have ID") } default: t.Fatalf("bad invoke count %d", invokeCount) } return testApplyFn(info, s, d) } p.DiffFn = testDiffFn state := &State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.web": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar", Attributes: map[string]string{ "require_new": "ami-old", }, }, }, "aws_instance.lb": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "baz", Attributes: map[string]string{ "instance": "bar", }, }, }, }, }, }, } ctx := testContext2(t, &ContextOpts{ Module: m, Hooks: []Hook{h}, Providers: map[string]ResourceProviderFactory{ "aws": testProviderFuncFixed(p), }, State: state, }) if _, err := ctx.Plan(); err != nil { t.Fatalf("err: %s", err) } h.Active = true state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } if invokeCount != 3 { t.Fatalf("bad: %d", invokeCount) } } // GH-5254 func TestContext2Apply_issue5254(t *testing.T) { // Create a provider. We use "template" here just to match the repro // we got from the issue itself. p := testProvider("template") p.ResourcesReturn = append(p.ResourcesReturn, ResourceType{ Name: "template_file", }) p.ApplyFn = testApplyFn p.DiffFn = testDiffFn // Apply cleanly step 0 ctx := testContext2(t, &ContextOpts{ Module: testModule(t, "issue-5254/step-0"), Providers: map[string]ResourceProviderFactory{ "template": testProviderFuncFixed(p), }, }) plan, err := ctx.Plan() if err != nil { t.Fatalf("err: %s", err) } state, err := ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } // Application success. 
Now make the modification and store a plan ctx = testContext2(t, &ContextOpts{ Module: testModule(t, "issue-5254/step-1"), State: state, Providers: map[string]ResourceProviderFactory{ "template": testProviderFuncFixed(p), }, }) plan, err = ctx.Plan() if err != nil { t.Fatalf("err: %s", err) } // Write / Read plan to simulate running it through a Plan file var buf bytes.Buffer if err := WritePlan(plan, &buf); err != nil { t.Fatalf("err: %s", err) } planFromFile, err := ReadPlan(&buf) if err != nil { t.Fatalf("err: %s", err) } ctx = planFromFile.Context(&ContextOpts{ Providers: map[string]ResourceProviderFactory{ "template": testProviderFuncFixed(p), }, }) state, err = ctx.Apply() if err != nil { t.Fatalf("err: %s", err) } actual := strings.TrimSpace(state.String()) expected := strings.TrimSpace(` template_file.child: ID = foo template = Hi type = template_file Dependencies: template_file.parent template_file.parent: ID = foo template = Hi type = template_file `) if actual != expected { t.Fatalf("expected state: \n%s\ngot: \n%s", expected, actual) } }<|fim▁end|>
func TestContext2Apply_badDiff(t *testing.T) {
	m := testModule(t, "apply-good")
<|file_name|>_ODataMetaModelUtils.js<|end_file_name|><|fim▁begin|>/*! * ${copyright} */ sap.ui.define([ "jquery.sap.global", "./_AnnotationHelperBasics" ], function (jQuery, _AnnotationHelperBasics) { "use strict"; /*global Promise */ var oBoolFalse = { "Bool" : "false" }, oBoolTrue = { "Bool" : "true" }, // maps V2 sap:semantics value for a date part to corresponding V4 term relative to // com.sap.vocabularies.Common.v1. mDatePartSemantics2CommonTerm = { "fiscalyear" : "IsFiscalYear", "fiscalyearperiod" : "IsFiscalYearPeriod", "year" : "IsCalendarYear", "yearmonth" : "IsCalendarYearMonth", "yearmonthday" : "IsCalendarDate", "yearquarter" : "IsCalendarYearQuarter", "yearweek" : "IsCalendarYearWeek" }, // maps V2 filter-restriction value to corresponding V4 FilterExpressionType enum value mFilterRestrictions = { "interval" : "SingleInterval", "multi-value" : "MultiValue", "single-value" : "SingleValue" }, sLoggingModule = "sap.ui.model.odata.ODataMetaModel", // maps V2 sap semantics annotations to a V4 annotations relative to // com.sap.vocabularies.Communication.v1. mSemanticsToV4AnnotationPath = { // contact annotations "bday" : "Contact", "city" : "Contact/adr", "country" : "Contact/adr", "email" : "Contact/email", "familyname" : "Contact/n", "givenname" : "Contact/n", "honorific" : "Contact/n", "middlename" : "Contact/n", "name" : "Contact", "nickname" : "Contact", "note" : "Contact", "org" : "Contact", "org-role" : "Contact", "org-unit" : "Contact", "photo" : "Contact", "pobox" : "Contact/adr", "region" : "Contact/adr", "street" : "Contact/adr", "suffix" : "Contact/n", "tel" : "Contact/tel", "title" : "Contact", "zip" : "Contact/adr", // event annotations "class" : "Event", "dtend" : "Event", "dtstart" : "Event", "duration" : "Event", "fbtype" : "Event", "location" : "Event", "status" : "Event", "transp" : "Event", "wholeday" : "Event", // message annotations "body" : "Message", "from" : "Message", "received" : "Message", "sender" : "Message", "subject" : "Message", // task annotations "completed" : "Task", "due" : "Task", "percent-complete" : "Task", "priority" : "Task" }, rSemanticsWithTypes = /(\w+)(?:;type=([\w,]+))?/, mV2SemanticsToV4TypeInfo = { "email" : { typeMapping : { "home" : "home", "pref" : "preferred", "work" : "work" }, v4EnumType : "com.sap.vocabularies.Communication.v1.ContactInformationType", v4PropertyAnnotation : "com.sap.vocabularies.Communication.v1.IsEmailAddress" }, "tel" : { typeMapping : { "cell" : "cell", "fax" : "fax", "home" : "home", "pref" : "preferred", "video" : "video", "voice" : "voice", "work" : "work" }, v4EnumType : "com.sap.vocabularies.Communication.v1.PhoneType", v4PropertyAnnotation : "com.sap.vocabularies.Communication.v1.IsPhoneNumber" } }, // map from V2 to V4 for NON-DEFAULT cases only mV2ToV4 = { creatable : { "Org.OData.Capabilities.V1.InsertRestrictions" : { "Insertable" : oBoolFalse } }, // deletable : { // "Org.OData.Capabilities.V1.DeleteRestrictions" : { "Deletable" : oBoolFalse } // }, // see handleXableAndXablePath() pageable : { "Org.OData.Capabilities.V1.SkipSupported" : oBoolFalse, "Org.OData.Capabilities.V1.TopSupported" : oBoolFalse }, "requires-filter" : { "Org.OData.Capabilities.V1.FilterRestrictions" : { "RequiresFilter" : oBoolTrue } }, topable : { "Org.OData.Capabilities.V1.TopSupported" : oBoolFalse } // updatable : { // "Org.OData.Capabilities.V1.UpdateRestrictions" : { "Updatable" : oBoolFalse } // } // see handleXableAndXablePath() }, // only if V4 name is different from V2 name mV2ToV4Attribute = { "city" : 
"locality", "email" : "address", "familyname" : "surname", "givenname" : "given", "honorific" : "prefix", "middlename" : "additional", "name" : "fn", "org-role" : "role", "org-unit" : "orgunit", "percent-complete" : "percentcomplete", "tel" : "uri", "zip" : "code" }, // map from V2 annotation to an array of an annotation term and a name in that annotation // that holds a collection of property references mV2ToV4PropertyCollection = { "sap:filterable" : [ "Org.OData.Capabilities.V1.FilterRestrictions", "NonFilterableProperties" ], "sap:required-in-filter" : [ "Org.OData.Capabilities.V1.FilterRestrictions", "RequiredProperties" ], "sap:sortable" : [ "Org.OData.Capabilities.V1.SortRestrictions", "NonSortableProperties" ] }, rValueList = /^com\.sap\.vocabularies\.Common\.v1\.ValueList(#.*)?$/, iWARNING = jQuery.sap.log.Level.WARNING, Utils; /** * This object contains helper functions for ODataMetaModel. * * @since 1.29.0 */ Utils = { /** * Adds EntitySet V4 annotation for current extension if extension value is equal to * the given non-default value. Depending on bDeepCopy the annotation will be merged * with deep copy. * @param {object} o * any object * @param {object} oExtension * the SAP Annotation (OData Version 2.0) for which a V4 annotation needs to be added * @param {string} sTypeClass * the type class of the given object; supported type classes are "Property" and * "EntitySet" * @param {string} sNonDefaultValue * if current extension value is equal to this sNonDefaultValue the annotation is * added * @param {boolean} bDeepCopy * if true the annotation is mixed in as deep copy of the entry in mV2ToV4 map */ addEntitySetAnnotation : function (o, oExtension, sTypeClass, sNonDefaultValue, bDeepCopy) { if (sTypeClass === "EntitySet" && oExtension.value === sNonDefaultValue) { // potentially nested structure so do deep copy if (bDeepCopy) { jQuery.extend(true, o, mV2ToV4[oExtension.name]); } else { // Warning: Passing false for the first argument is not supported! jQuery.extend(o, mV2ToV4[oExtension.name]); } } }, /** * Adds corresponding V4 annotation for V2 <code>sap:filter-restriction</code> to the given * entity set. * * @param {object} oProperty * the property of the entity * @param {object} oEntitySet * the entity set to which the corresponding V4 annotations need to be added */ addFilterRestriction : function (oProperty, oEntitySet) { var aFilterRestrictions, sFilterRestrictionValue = mFilterRestrictions[oProperty["sap:filter-restriction"]]; if (!sFilterRestrictionValue) { if (jQuery.sap.log.isLoggable(iWARNING, sLoggingModule)) { jQuery.sap.log.warning("Unsupported sap:filter-restriction: " + oProperty["sap:filter-restriction"], oEntitySet.entityType + "." + oProperty.name, sLoggingModule); } return; } aFilterRestrictions = oEntitySet["com.sap.vocabularies.Common.v1.FilterExpressionRestrictions"] || []; aFilterRestrictions.push({ "Property" : { "PropertyPath" : oProperty.name}, "AllowedExpressions" : { "EnumMember" : "com.sap.vocabularies.Common.v1.FilterExpressionType/" + sFilterRestrictionValue } }); oEntitySet["com.sap.vocabularies.Common.v1.FilterExpressionRestrictions"] = aFilterRestrictions; }, /** * Adds a V4 navigation restriction annotation with a filter restriction to the given entity * set for the given navigation property with the V2 annotation * <code>sap:filterable="false"</code>. 
* * @param {object} oNavigationProperty * the navigation property of the entity with the V2 annotation * <code>sap:filterable="false"</code> * @param {object} oEntitySet * the entity set to which the corresponding V4 annotation needs to be added */ addNavigationFilterRestriction : function (oNavigationProperty, oEntitySet) { var oNavigationRestrictions = oEntitySet["Org.OData.Capabilities.V1.NavigationRestrictions"] || {}; oNavigationRestrictions.RestrictedProperties = oNavigationRestrictions.RestrictedProperties || []; oNavigationRestrictions.RestrictedProperties.push({ "FilterRestrictions" : { "Filterable": oBoolFalse }, "NavigationProperty" : { "NavigationPropertyPath" : oNavigationProperty.name } }); oEntitySet["Org.OData.Capabilities.V1.NavigationRestrictions"] = oNavigationRestrictions; }, /** * Adds current property to the property collection for given V2 annotation. * * @param {string} sV2AnnotationName * V2 annotation name (key in map mV2ToV4PropertyCollection) * @param {object} oEntitySet * the entity set * @param {object} oProperty * the property of the entity */ addPropertyToAnnotation : function (sV2AnnotationName, oEntitySet, oProperty) { var aNames = mV2ToV4PropertyCollection[sV2AnnotationName], sTerm = aNames[0], sCollection = aNames[1], oAnnotation = oEntitySet[sTerm] || {}, aCollection = oAnnotation[sCollection] || []; aCollection.push({ "PropertyPath" : oProperty.name }); oAnnotation[sCollection] = aCollection; oEntitySet[sTerm] = oAnnotation; }, /** * Collects sap:semantics annotations of the given type's properties at the type. * * @param {object} oType * the entity type or the complex type for which sap:semantics needs to be added<|fim▁hole|> var aAnnotationParts, bIsCollection, aMatches, sSubStructure, vTmp, sV2Semantics = oProperty["sap:semantics"], sV4Annotation, sV4AnnotationPath, oV4Annotation, oV4TypeInfo, sV4TypeList; if (!sV2Semantics) { return; } if (sV2Semantics === "url") { oProperty["Org.OData.Core.V1.IsURL"] = oBoolTrue; return; } if (sV2Semantics in mDatePartSemantics2CommonTerm) { sV4Annotation = "com.sap.vocabularies.Common.v1." + mDatePartSemantics2CommonTerm[sV2Semantics]; oProperty[sV4Annotation] = oBoolTrue; return; } aMatches = rSemanticsWithTypes.exec(sV2Semantics); if (!aMatches) { if (jQuery.sap.log.isLoggable(iWARNING, sLoggingModule)) { jQuery.sap.log.warning("Unsupported sap:semantics: " + sV2Semantics, oType.name + "." + oProperty.name, sLoggingModule); } return; } if (aMatches[2]) { sV2Semantics = aMatches[1]; sV4TypeList = Utils.getV4TypesForV2Semantics(sV2Semantics, aMatches[2], oProperty, oType); } oV4TypeInfo = mV2SemanticsToV4TypeInfo[sV2Semantics]; bIsCollection = sV2Semantics === "tel" || sV2Semantics === "email"; sV4AnnotationPath = mSemanticsToV4AnnotationPath[sV2Semantics]; if (sV4AnnotationPath) { aAnnotationParts = sV4AnnotationPath.split("/"); sV4Annotation = "com.sap.vocabularies.Communication.v1." + aAnnotationParts[0]; oType[sV4Annotation] = oType[sV4Annotation] || {}; oV4Annotation = oType[sV4Annotation]; sSubStructure = aAnnotationParts[1]; if (sSubStructure) { oV4Annotation[sSubStructure] = oV4Annotation[sSubStructure] || (bIsCollection ? 
[] : {}); if (bIsCollection) { vTmp = {}; oV4Annotation[sSubStructure].push(vTmp); oV4Annotation = vTmp; } else { oV4Annotation = oV4Annotation[sSubStructure]; } } oV4Annotation[mV2ToV4Attribute[sV2Semantics] || sV2Semantics] = { "Path" : oProperty.name }; if (sV4TypeList) { // set also type attribute oV4Annotation.type = { "EnumMember" : sV4TypeList }; } } // Additional annotation at the property with sap:semantics "tel" or "email"; // ensure not to overwrite existing V4 annotations if (oV4TypeInfo) { oProperty[oV4TypeInfo.v4PropertyAnnotation] = oProperty[oV4TypeInfo.v4PropertyAnnotation] || oBoolTrue; } }); } }, /** * Adds unit annotations to properties that have a <code>sap:unit</code> OData V2 * annotation. * * Iterates over the given schemas and searches in their complex and entity types for * properties with a <code>sap:unit</code> OData V2 annotation. Creates a corresponding * OData V4 annotation <code>Org.OData.Measures.V1.Unit</code> or * <code>Org.OData.Measures.V1.ISOCurrency</code> based on the * <code>sap:semantics</code> V2 annotation of the referenced unit property, unless such an * annotation already exists. * * @param {object[]} aSchemas * the array of schemas * @param {sap.ui.model.odata.ODataMetaModel} oMetaModel * the OData meta model */ addUnitAnnotations : function (aSchemas, oMetaModel) { /** * Process all types in the given array. * @param {object[]} [aTypes] A list of complex types or entity types. */ function processTypes(aTypes) { (aTypes || []).forEach(function (oType) { (oType.property || []).forEach(function (oProperty) { var sAnnotationName, sSemantics, oTarget, oUnitPath, sUnitPath = oProperty["sap:unit"], oUnitProperty; if (sUnitPath) { oUnitPath = {"Path" : sUnitPath}; oTarget = _AnnotationHelperBasics.followPath({ getModel : function () { return oMetaModel; }, getPath : function () { return oType.$path; } }, oUnitPath); if (oTarget && oTarget.resolvedPath) { oUnitProperty = oMetaModel.getProperty(oTarget.resolvedPath); sSemantics = oUnitProperty["sap:semantics"]; if (sSemantics === "unit-of-measure") { sAnnotationName = "Org.OData.Measures.V1.Unit"; } else if (sSemantics === "currency-code") { sAnnotationName = "Org.OData.Measures.V1.ISOCurrency"; } else if (jQuery.sap.log.isLoggable(iWARNING, sLoggingModule)) { jQuery.sap.log.warning("Unsupported sap:semantics='" + sSemantics + "' at sap:unit='" + sUnitPath + "'; " + "expected 'currency-code' or 'unit-of-measure'", oType.namespace + "." + oType.name + "/" + oProperty.name, sLoggingModule); } // Do not overwrite an existing annotation if (sAnnotationName && !(sAnnotationName in oProperty)) { oProperty[sAnnotationName] = oUnitPath; } } else if (jQuery.sap.log.isLoggable(iWARNING, sLoggingModule)) { jQuery.sap.log.warning("Path '" + sUnitPath + "' for sap:unit cannot be resolved", oType.namespace + "." + oType.name + "/" + oProperty.name, sLoggingModule); } } }); }); } aSchemas.forEach(function (oSchema) { processTypes(oSchema.complexType); processTypes(oSchema.entityType); }); }, /** * Adds the corresponding V4 annotation to the given object based on the given SAP * extension. 
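		 * For example, sap:label="Customer" becomes
		 * "com.sap.vocabularies.Common.v1.Label" : { "String" : "Customer" }, and
		 * sap:text="CustomerName" becomes
		 * "com.sap.vocabularies.Common.v1.Text" : { "Path" : "CustomerName" };
		 * "Customer" and "CustomerName" are illustrative values only.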
* * @param {object} o * any object * @param {object} oExtension * the SAP Annotation (OData Version 2.0) for which a V4 annotation needs to be added * @param {string} sTypeClass * the type class of the given object; supported type classes are "Property" and * "EntitySet" */ addV4Annotation : function (o, oExtension, sTypeClass) { switch (oExtension.name) { case "aggregation-role": if (oExtension.value === "dimension") { o["com.sap.vocabularies.Analytics.v1.Dimension"] = oBoolTrue; } else if (oExtension.value === "measure") { o["com.sap.vocabularies.Analytics.v1.Measure"] = oBoolTrue; } break; case "display-format": if (oExtension.value === "NonNegative") { o["com.sap.vocabularies.Common.v1.IsDigitSequence"] = oBoolTrue; } else if (oExtension.value === "UpperCase") { o["com.sap.vocabularies.Common.v1.IsUpperCase"] = oBoolTrue; } break; case "pageable": case "topable": Utils.addEntitySetAnnotation(o, oExtension, sTypeClass, "false", false); break; case "creatable": Utils.addEntitySetAnnotation(o, oExtension, sTypeClass, "false", true); break; case "deletable": case "deletable-path": Utils.handleXableAndXablePath(o, oExtension, sTypeClass, "Org.OData.Capabilities.V1.DeleteRestrictions", "Deletable"); break; case "updatable": case "updatable-path": Utils.handleXableAndXablePath(o, oExtension, sTypeClass, "Org.OData.Capabilities.V1.UpdateRestrictions", "Updatable"); break; case "requires-filter": Utils.addEntitySetAnnotation(o, oExtension, sTypeClass, "true", true); break; case "field-control": o["com.sap.vocabularies.Common.v1.FieldControl"] = { "Path" : oExtension.value }; break; case "heading": o["com.sap.vocabularies.Common.v1.Heading"] = { "String" : oExtension.value }; break; case "label": o["com.sap.vocabularies.Common.v1.Label"] = { "String" : oExtension.value }; break; case "precision": o["Org.OData.Measures.V1.Scale"] = { "Path" : oExtension.value }; break; case "quickinfo": o["com.sap.vocabularies.Common.v1.QuickInfo"] = { "String" : oExtension.value }; break; case "text": o["com.sap.vocabularies.Common.v1.Text"] = { "Path" : oExtension.value }; break; case "visible": if (oExtension.value === "false") { o["com.sap.vocabularies.Common.v1.FieldControl"] = { "EnumMember" : "com.sap.vocabularies.Common.v1.FieldControlType/Hidden" }; o["com.sap.vocabularies.UI.v1.Hidden"] = oBoolTrue; } break; default: // no transformation for V2 annotation supported or necessary } }, /** * Iterate over all properties of the associated entity type for given entity * set and check whether the property needs to be added to an annotation at the * entity set. * For example all properties with "sap:sortable=false" are collected in * annotation Org.OData.Capabilities.V1.SortRestrictions/NonSortableProperties. 
* * @param {object} oEntitySet * the entity set * @param {object} oEntityType * the corresponding entity type */ calculateEntitySetAnnotations : function (oEntitySet, oEntityType) { if (oEntityType.property) { oEntityType.property.forEach(function (oProperty) { if (oProperty["sap:filterable"] === "false") { Utils.addPropertyToAnnotation("sap:filterable", oEntitySet, oProperty); } if (oProperty["sap:required-in-filter"] === "true") { Utils.addPropertyToAnnotation("sap:required-in-filter", oEntitySet, oProperty); } if (oProperty["sap:sortable"] === "false") { Utils.addPropertyToAnnotation("sap:sortable", oEntitySet, oProperty); } if (oProperty["sap:filter-restriction"]) { Utils.addFilterRestriction(oProperty, oEntitySet); } }); } if (oEntityType.navigationProperty) { oEntityType.navigationProperty.forEach(function (oNavigationProperty) { if (oNavigationProperty["sap:filterable"] === "false") { Utils.addNavigationFilterRestriction(oNavigationProperty, oEntitySet); // keep deprecated conversion for compatibility reasons Utils.addPropertyToAnnotation("sap:filterable", oEntitySet, oNavigationProperty); } Utils.handleCreatableNavigationProperty(oEntitySet, oNavigationProperty); }); } }, /** * Returns the index of the object inside the given array, where the property with the * given name has the given expected value. * * @param {object[]} aArray * some array * @param {any} vExpectedPropertyValue * expected value of the property with given name * @param {string} [sPropertyName="name"] * some property name * @returns {number} * the index of the object found or <code>-1</code> if no such object is found */ findIndex : function (aArray, vExpectedPropertyValue, sPropertyName) { var iIndex = -1; sPropertyName = sPropertyName || "name"; if (aArray) { aArray.forEach(function (oObject, i) { if (oObject[sPropertyName] === vExpectedPropertyValue) { iIndex = i; return false; // break } }); } return iIndex; }, /** * Returns the object inside the given array, where the property with the given name has * the given expected value. * * @param {object[]} aArray * some array * @param {any} vExpectedPropertyValue * expected value of the property with given name * @param {string} [sPropertyName="name"] * some property name * @returns {object} * the object found or <code>null</code> if no such object is found */ findObject : function (aArray, vExpectedPropertyValue, sPropertyName) { var iIndex = Utils.findIndex(aArray, vExpectedPropertyValue, sPropertyName); return iIndex < 0 ? null : aArray[iIndex]; }, /** * Gets the map from child name to annotations for a parent with the given qualified * name which lives inside the entity container as indicated. * * @param {sap.ui.model.odata.ODataAnnotations} oAnnotations * the OData annotations * @param {string} sQualifiedName * the parent's qualified name * @param {boolean} bInContainer * whether the parent lives inside the entity container (or beside it) * @returns {object} * the map from child name to annotations */ getChildAnnotations : function (oAnnotations, sQualifiedName, bInContainer) { var o = bInContainer ? oAnnotations.EntityContainer : oAnnotations.propertyAnnotations; return o && o[sQualifiedName] || {}; }, /** * Returns the thing with the given simple name from the given entity container. * * @param {object} oEntityContainer * the entity container * @param {string} sArrayName * name of array within entity container which will be searched * @param {string} sName * a simple name, e.g. 
"Foo" * @param {boolean} [bAsPath=false] * determines whether the thing itself is returned or just its path * @returns {object|string} * (the path to) the thing with the given qualified name; <code>undefined</code> (for a * path) or <code>null</code> (for an object) if no such thing is found */ getFromContainer : function (oEntityContainer, sArrayName, sName, bAsPath) { var k, vResult = bAsPath ? undefined : null; if (oEntityContainer) { k = Utils.findIndex(oEntityContainer[sArrayName], sName); if (k >= 0) { vResult = bAsPath ? oEntityContainer.$path + "/" + sArrayName + "/" + k : oEntityContainer[sArrayName][k]; } } return vResult; }, /** * Returns the thing with the given qualified name from the given model's array (within a * schema) of given name. * * @param {sap.ui.model.Model|object[]} vModel * either a model or an array of schemas * @param {string} sArrayName * name of array within schema which will be searched * @param {string} sQualifiedName * a qualified name, e.g. "ACME.Foo" * @param {boolean} [bAsPath=false] * determines whether the thing itself is returned or just its path * @returns {object|string} * (the path to) the thing with the given qualified name; <code>undefined</code> (for a * path) or <code>null</code> (for an object) if no such thing is found */ getObject : function (vModel, sArrayName, sQualifiedName, bAsPath) { var aArray, vResult = bAsPath ? undefined : null, oSchema, iSeparatorPos, sNamespace, sName; sQualifiedName = sQualifiedName || ""; iSeparatorPos = sQualifiedName.lastIndexOf("."); sNamespace = sQualifiedName.slice(0, iSeparatorPos); sName = sQualifiedName.slice(iSeparatorPos + 1); oSchema = Utils.getSchema(vModel, sNamespace); if (oSchema) { aArray = oSchema[sArrayName]; if (aArray) { aArray.forEach(function (oThing) { if (oThing.name === sName) { vResult = bAsPath ? oThing.$path : oThing; return false; // break } }); } } return vResult; }, /** * Returns the schema with the given namespace. * * @param {sap.ui.model.Model|object[]} vModel * either a model or an array of schemas * @param {string} sNamespace * a namespace, e.g. "ACME" * @returns {object} * the schema with the given namespace; <code>null</code> if no such schema is found */ getSchema : function (vModel, sNamespace) { var oSchema = null, aSchemas = Array.isArray(vModel) ? vModel : vModel.getObject("/dataServices/schema"); if (aSchemas) { aSchemas.forEach(function (o) { if (o.namespace === sNamespace) { oSchema = o; return false; // break } }); } return oSchema; }, /** * Compute a space-separated list of V4 annotation enumeration values for the given * sap:semantics "tel" and "email". * E.g. for <code>sap:semantics="tel;type=fax"</code> this function returns * "com.sap.vocabularies.Communication.v1.PhoneType/fax". 
 * * @param {string} sSemantics * the sap:semantics value ("tel" or "email") * @param {string} sTypesList * the comma-separated list of types for sap:semantics * @param {object} oProperty * the property * @param {object} oType * the type * @returns {string} * the corresponding space-separated list of V4 annotation enumeration values; * returns an empty string if the sap:semantics value is not supported; unsupported types * are logged and skipped; */ getV4TypesForV2Semantics : function (sSemantics, sTypesList, oProperty, oType) { var aResult = [], oV4TypeInfo = mV2SemanticsToV4TypeInfo[sSemantics]; if (oV4TypeInfo) { sTypesList.split(",").forEach(function (sType) { var sTargetType = oV4TypeInfo.typeMapping[sType]; if (sTargetType) { aResult.push(oV4TypeInfo.v4EnumType + "/" + sTargetType); } else if (jQuery.sap.log.isLoggable(iWARNING, sLoggingModule)) { jQuery.sap.log.warning("Unsupported type for sap:semantics: " + sType, oType.name + "." + oProperty.name, sLoggingModule); } }); } return aResult.join(" "); }, /** * Returns the map representing the <code>com.sap.vocabularies.Common.v1.ValueList</code> * annotations of the given property. * * @param {object} oProperty the property * @returns {object} map of ValueList annotations contained in oProperty */ getValueLists : function (oProperty) { var aMatches, sName, sQualifier, mValueLists = {}; for (sName in oProperty) { aMatches = rValueList.exec(sName); if (aMatches){ sQualifier = (aMatches[1] || "").slice(1); // remove leading # mValueLists[sQualifier] = oProperty[sName]; } } return mValueLists; }, /** * Convert sap:creatable and sap:creatable-path at navigation property to V4 annotation * 'Org.OData.Capabilities.V1.InsertRestrictions/NonInsertableNavigationProperties' at * the given entity set. * If both V2 annotations 'sap:creatable' and 'sap:creatable-path' are given the service is * broken and the navigation property is added as non-insertable navigation property. * If neither 'sap:creatable' nor 'sap:creatable-path' are given this function does * nothing. * * @param {object} oEntitySet * The entity set * @param {object} oNavigationProperty * The navigation property */ handleCreatableNavigationProperty : function (oEntitySet, oNavigationProperty) { var sCreatable = oNavigationProperty["sap:creatable"], sCreatablePath = oNavigationProperty["sap:creatable-path"], oInsertRestrictions, oNonInsertable = {"NavigationPropertyPath" : oNavigationProperty.name}, aNonInsertableNavigationProperties; if (sCreatable && sCreatablePath) { // inconsistent service if both v2 annotations are set jQuery.sap.log.warning("Inconsistent service", "Use either 'sap:creatable' or 'sap:creatable-path' at navigation property " + "'" + oEntitySet.entityType + "/" + oNavigationProperty.name + "'", sLoggingModule); sCreatable = "false"; sCreatablePath = undefined; } if (sCreatable === "false" || sCreatablePath) { oInsertRestrictions = oEntitySet["Org.OData.Capabilities.V1.InsertRestrictions"] = oEntitySet["Org.OData.Capabilities.V1.InsertRestrictions"] || {}; aNonInsertableNavigationProperties = oInsertRestrictions["NonInsertableNavigationProperties"] = oInsertRestrictions["NonInsertableNavigationProperties"] || []; if (sCreatablePath) { oNonInsertable = { "If" : [{ "Not" : { "Path" : sCreatablePath } }, oNonInsertable] }; } aNonInsertableNavigationProperties.push(oNonInsertable); } }, /** * Converts deletable/updatable and deletable-path/updatable-path into corresponding V4 * annotation. 
 * If both deletable/updatable and deletable-path/updatable-path are defined the service is * broken and the object is marked as non-deletable/non-updatable. * * @param {object} o * any object * @param {object} oExtension * the SAP Annotation (OData Version 2.0) for which a V4 annotation needs to be added * @param {string} sTypeClass * the type class of the given object; supported type is "EntitySet" * @param {string} sTerm * the V4 annotation term to use * @param {string} sProperty * the V4 annotation property to use */ handleXableAndXablePath : function (o, oExtension, sTypeClass, sTerm, sProperty) { var sV2Annotation = sProperty.toLowerCase(), oValue; if (sTypeClass !== "EntitySet") { return; // "Property" not supported here, see liftSAPData() } if (o["sap:" + sV2Annotation] && o["sap:" + sV2Annotation + "-path"]) { // the first extension (sap:xable or sap:xable-path) is processed as usual; // only if a second extension (sap:xable-path or sap:xable) is processed, // the warning is logged and the entity set is marked as non-deletable or // non-updatable jQuery.sap.log.warning("Inconsistent service", "Use either 'sap:" + sV2Annotation + "' or 'sap:" + sV2Annotation + "-path'" + " at entity set '" + o.name + "'", sLoggingModule); oValue = oBoolFalse; } else if (sV2Annotation !== oExtension.name) { // deletable-path/updatable-path oValue = { "Path" : oExtension.value }; } else if (oExtension.value === "false") { oValue = oBoolFalse; } if (oValue) { o[sTerm] = o[sTerm] || {}; o[sTerm][sProperty] = oValue; } }, /** * Lift all extensions from the <a href="http://www.sap.com/Protocols/SAPData"> SAP * Annotations for OData Version 2.0</a> namespace up as attributes with "sap:" prefix. * * @param {object} o * any object * @param {string} sTypeClass * the type class of the given object; supported type classes are "Property" and * "EntitySet" */ liftSAPData : function (o, sTypeClass) { if (!o.extensions) { return; } o.extensions.forEach(function (oExtension) { if (oExtension.namespace === "http://www.sap.com/Protocols/SAPData") { o["sap:" + oExtension.name] = oExtension.value; Utils.addV4Annotation(o, oExtension, sTypeClass); } }); // after all SAP V2 annotations are lifted up add V4 annotations that are calculated // by multiple V2 annotations or that have a different default value switch (sTypeClass) { case "Property": if (o["sap:updatable"] === "false") { if (o["sap:creatable"] === "false") { o["Org.OData.Core.V1.Computed"] = oBoolTrue; } else { o["Org.OData.Core.V1.Immutable"] = oBoolTrue; } } break; case "EntitySet": if (o["sap:searchable"] !== "true") { o["Org.OData.Capabilities.V1.SearchRestrictions"] = { "Searchable" : oBoolFalse }; } break; default: // nothing to do } }, /** * Merges the given annotation data into the given metadata and lifts SAPData extensions. 
* * @param {object} oAnnotations * annotations "JSON" * @param {object} oData * metadata "JSON" * @param {sap.ui.model.odata.ODataMetaModel} oMetaModel * the metamodel */ merge : function (oAnnotations, oData, oMetaModel) { var aSchemas = oData.dataServices.schema; if (!aSchemas) { return; } aSchemas.forEach(function (oSchema, i) { var sSchemaVersion; // remove datajs artefact for inline annotations in $metadata delete oSchema.annotations; Utils.liftSAPData(oSchema); oSchema.$path = "/dataServices/schema/" + i; sSchemaVersion = oSchema["sap:schema-version"]; if (sSchemaVersion) { oSchema["Org.Odata.Core.V1.SchemaVersion"] = { String : sSchemaVersion }; } jQuery.extend(oSchema, oAnnotations[oSchema.namespace]); Utils.visitParents(oSchema, oAnnotations, "association", function (oAssociation, mChildAnnotations) { Utils.visitChildren(oAssociation.end, mChildAnnotations); }); Utils.visitParents(oSchema, oAnnotations, "complexType", function (oComplexType, mChildAnnotations) { Utils.visitChildren(oComplexType.property, mChildAnnotations, "Property"); Utils.addSapSemantics(oComplexType); }); // visit all entity types before visiting the entity sets to ensure that V2 // annotations are already lifted up and can be used for calculating entity // set annotations which are based on V2 annotations on entity properties Utils.visitParents(oSchema, oAnnotations, "entityType", Utils.visitEntityType); Utils.visitParents(oSchema, oAnnotations, "entityContainer", function (oEntityContainer, mChildAnnotations) { Utils.visitChildren(oEntityContainer.associationSet, mChildAnnotations); Utils.visitChildren(oEntityContainer.entitySet, mChildAnnotations, "EntitySet", aSchemas); Utils.visitChildren(oEntityContainer.functionImport, mChildAnnotations, "", null, Utils.visitParameters.bind(this, oAnnotations, oSchema, oEntityContainer)); }); }); Utils.addUnitAnnotations(aSchemas, oMetaModel); }, /** * Visits all children inside the given array, lifts "SAPData" extensions and * inlines OData V4 annotations for each child. * * @param {object[]} aChildren * any array of children * @param {object} mChildAnnotations * map from child name (or role) to annotations * @param {string} [sTypeClass] * the type class of the given children; supported type classes are "Property" * and "EntitySet" * @param {object[]} [aSchemas] * Array of OData data service schemas (needed only for type class "EntitySet") * @param {function} [fnCallback] * optional callback for each child * @param {number} [iStartIndex=0] * optional start index in the given array */ visitChildren : function (aChildren, mChildAnnotations, sTypeClass, aSchemas, fnCallback, iStartIndex) { if (!aChildren) { return; } if (iStartIndex) { aChildren = aChildren.slice(iStartIndex); } aChildren.forEach(function (oChild) { // lift SAP data for easy access to SAP Annotations for OData V 2.0 Utils.liftSAPData(oChild, sTypeClass); }); aChildren.forEach(function (oChild) { var oEntityType; if (sTypeClass === "EntitySet") { // calculated entity set annotations need to be added before V4 // annotations are merged oEntityType = Utils.getObject(aSchemas, "entityType", oChild.entityType); Utils.calculateEntitySetAnnotations(oChild, oEntityType); } if (fnCallback) { fnCallback(oChild); } // merge V4 annotations after child annotations are processed jQuery.extend(oChild, mChildAnnotations[oChild.name || oChild.role]); }); }, /** * Visits the given entity type and its (structural or navigation) properties. 
* * @param {object} oEntityType * the entity type * @param {object} mChildAnnotations * map from child name (or role) to annotations */ visitEntityType : function (oEntityType, mChildAnnotations) { Utils.visitChildren(oEntityType.property, mChildAnnotations, "Property"); Utils.visitChildren(oEntityType.navigationProperty, mChildAnnotations); Utils.addSapSemantics(oEntityType); }, /** * Visits all parameters of the given function import. * * @param {object} oAnnotations * annotations "JSON" * @param {object} oSchema * OData data service schema * @param {object} oEntityContainer * the entity container * @param {object} oFunctionImport * a function import's V2 metadata object */ visitParameters : function (oAnnotations, oSchema, oEntityContainer, oFunctionImport) { var mAnnotations; if (!oFunctionImport.parameter) { return; } mAnnotations = Utils.getChildAnnotations(oAnnotations, oSchema.namespace + "." + oEntityContainer.name, true); oFunctionImport.parameter.forEach( function (oParam) { Utils.liftSAPData(oParam); jQuery.extend(oParam, mAnnotations[oFunctionImport.name + "/" + oParam.name]); } ); }, /** * Visits all parents (or a single parent) inside the current schema's array of given name, * lifts "SAPData" extensions, inlines OData V4 annotations, and adds <code>$path</code> * for each parent. * * @param {object} oSchema * OData data service schema * @param {object} oAnnotations * annotations "JSON" * @param {string} sArrayName * name of array of parents * @param {function} fnCallback * mandatory callback for each parent, child annotations are passed in * @param {number} [iIndex] * optional index of a single parent to visit; default is to visit all */ visitParents : function (oSchema, oAnnotations, sArrayName, fnCallback, iIndex) { var aParents = oSchema[sArrayName]; function visitParent(oParent, j) { var sQualifiedName = oSchema.namespace + "." + oParent.name, mChildAnnotations = Utils.getChildAnnotations(oAnnotations, sQualifiedName, sArrayName === "entityContainer"); Utils.liftSAPData(oParent); // @see sap.ui.model.odata.ODataMetadata#_getEntityTypeByName oParent.namespace = oSchema.namespace; oParent.$path = oSchema.$path + "/" + sArrayName + "/" + j; fnCallback(oParent, mChildAnnotations); // merge V4 annotations after child annotations are processed jQuery.extend(oParent, oAnnotations[sQualifiedName]); } if (!aParents) { return; } if (iIndex !== undefined) { visitParent(aParents[iIndex], iIndex); } else { aParents.forEach(visitParent); } } }; return Utils; }, /* bExport= */ false);<|fim▁end|>
*/ addSapSemantics : function (oType) { if (oType.property) { oType.property.forEach(function (oProperty) {
<|file_name|>package.py<|end_file_name|><|fim▁begin|>############################################################################## # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, [email protected], All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class A(AutotoolsPackage): """Simple package with one optional dependency""" homepage = "http://www.example.com" url = "http://www.example.com/a-1.0.tar.gz" version('1.0', '0123456789abcdef0123456789abcdef') version('2.0', '2.0_a_hash') variant( 'foo', values=('bar', 'baz', 'fee'), default='bar', description='', multi=True ) variant( 'foobar', values=('bar', 'baz', 'fee'), default='bar', description='', multi=False ) variant('bvv', default=True, description='The good old BV variant') depends_on('b', when='foobar=bar') def with_or_without_fee(self, activated): if not activated: return '--no-fee' return '--fee-all-the-time' def autoreconf(self, spec, prefix): pass<|fim▁hole|> def build(self, spec, prefix): pass def install(self, spec, prefix): pass<|fim▁end|>
def configure(self, spec, prefix): pass
<|file_name|>sync_sectors.py<|end_file_name|><|fim▁begin|>import logging logger = logging.getLogger(__name__) class SyncSectors: def __init__(self, sector_model, sectors, disable_on=None, simulate=False): self.sector_model = sector_model self.sectors = sectors self.disable_on = disable_on self.simulate = simulate def log(self, msg, level=logging.DEBUG): logger.log(level, msg) def __call__(self, *args, **kwargs): self.process() def process(self): self.add_new_sectors() self.update_existing_sectors() if self.disable_on: self.disable_sectors() def _get_sector(self, sector_id): try: return self.sector_model.objects.get(id=sector_id) except self.sector_model.DoesNotExist: return def _update_sector_name(self, sector, sector_name): if sector.name != sector_name: self.log(f'Updating Sector {sector.id}: [{sector.name} to {sector_name}]') if self.simulate: return sector.name = sector_name sector.save() def _create_sector(self, sector_id, sector_name): self.log(f'Creating Sector {sector_id}: [{sector_name}]') if self.simulate: return self.sector_model.objects.create(id=sector_id, name=sector_name) def _disable_sector(self, sector): self.log(f'Disabling Sector {sector.id}: [{sector.name}]') if self.simulate:<|fim▁hole|> def add_new_sectors(self): for sector_id, sector_name in self.sectors: sector = self._get_sector(sector_id) if not sector: self._create_sector(sector_id, sector_name) def update_existing_sectors(self): for sector_id, sector_name in self.sectors: sector = self._get_sector(sector_id) if not sector: self.log(f'Sector {sector_id}: DOES NOT EXIST [{sector_name}]') else: self._update_sector_name(sector, sector_name) def disable_sectors(self): sector_ids = list(dict(self.sectors).keys()) deprecated_sectors = self.sector_model.objects.exclude(id__in=sector_ids).filter( disabled_on__isnull=True ) for sector in deprecated_sectors: self._disable_sector(sector)<|fim▁end|>
return sector.disabled_on = self.disable_on sector.save()
<|file_name|>imagevectorsource.js<|end_file_name|><|fim▁begin|>goog.provide('ol.source.ImageVector'); goog.require('goog.asserts'); goog.require('goog.events'); goog.require('goog.events.EventType'); goog.require('goog.vec.Mat4'); goog.require('ol.dom'); goog.require('ol.extent'); goog.require('ol.render.canvas.ReplayGroup'); goog.require('ol.renderer.vector'); goog.require('ol.source.ImageCanvas'); goog.require('ol.source.Vector'); goog.require('ol.style.Style'); goog.require('ol.vec.Mat4'); /** * @classdesc * An image source whose images are canvas elements into which vector features * read from a vector source (`ol.source.Vector`) are drawn. An * `ol.source.ImageVector` object is to be used as the `source` of an image * layer (`ol.layer.Image`). Image layers are rotated, scaled, and translated, * as opposed to being re-rendered, during animations and interactions. So, like * any other image layer, an image layer configured with an * `ol.source.ImageVector` will exhibit this behaviour. This is in contrast to a * vector layer, where vector features are re-drawn during animations and * interactions. * * @constructor * @extends {ol.source.ImageCanvas} * @param {olx.source.ImageVectorOptions} options Options. * @api */ ol.source.ImageVector = function(options) { /** * @private * @type {ol.source.Vector} */ this.source_ = options.source; /** * @private * @type {!goog.vec.Mat4.Number} */ this.transform_ = goog.vec.Mat4.createNumber(); /** * @private * @type {CanvasRenderingContext2D} */ this.canvasContext_ = ol.dom.createCanvasContext2D(); /** * @private * @type {ol.Size} */ this.canvasSize_ = [0, 0]; /** * @private * @type {ol.render.canvas.ReplayGroup} */ this.replayGroup_ = null; goog.base(this, { attributions: options.attributions, canvasFunction: goog.bind(this.canvasFunctionInternal_, this), logo: options.logo, projection: options.projection, ratio: options.ratio, resolutions: options.resolutions, state: this.source_.getState() }); /** * User provided style. * @type {ol.style.Style|Array.<ol.style.Style>|ol.style.StyleFunction} * @private */ this.style_ = null; /** * Style function for use within the library. * @type {ol.style.StyleFunction|undefined} * @private */ this.styleFunction_ = undefined; this.setStyle(options.style); goog.events.listen(this.source_, goog.events.EventType.CHANGE, this.handleSourceChange_, undefined, this); }; goog.inherits(ol.source.ImageVector, ol.source.ImageCanvas); /** * @param {ol.Extent} extent Extent. * @param {number} resolution Resolution. * @param {number} pixelRatio Pixel ratio. * @param {ol.Size} size Size. * @param {ol.proj.Projection} projection Projection; * @return {HTMLCanvasElement} Canvas element. * @private */ ol.source.ImageVector.prototype.canvasFunctionInternal_ = function(extent, resolution, pixelRatio, size, projection) { var replayGroup = new ol.render.canvas.ReplayGroup( ol.renderer.vector.getTolerance(resolution, pixelRatio), extent, resolution); this.source_.loadFeatures(extent, resolution, projection); var loading = false; this.source_.forEachFeatureInExtentAtResolution(extent, resolution, /** * @param {ol.Feature} feature Feature. 
 */ function(feature) { loading = loading || this.renderFeature_(feature, resolution, pixelRatio, replayGroup); }, this); replayGroup.finish(); if (loading) { return null; } if (this.canvasSize_[0] != size[0] || this.canvasSize_[1] != size[1]) { this.canvasContext_.canvas.width = size[0]; this.canvasContext_.canvas.height = size[1]; this.canvasSize_[0] = size[0]; this.canvasSize_[1] = size[1]; } else { this.canvasContext_.clearRect(0, 0, size[0], size[1]); } var transform = this.getTransform_(ol.extent.getCenter(extent), resolution, pixelRatio, size); replayGroup.replay(this.canvasContext_, pixelRatio, transform, 0, {}); this.replayGroup_ = replayGroup; return this.canvasContext_.canvas; }; /** * @inheritDoc */ ol.source.ImageVector.prototype.forEachFeatureAtCoordinate = function( coordinate, resolution, rotation, skippedFeatureUids, callback) { if (goog.isNull(this.replayGroup_)) {<|fim▁hole|> var features = {}; return this.replayGroup_.forEachFeatureAtCoordinate( coordinate, resolution, 0, skippedFeatureUids, /** * @param {ol.Feature} feature Feature. * @return {?} Callback result. */ function(feature) { goog.asserts.assert(goog.isDef(feature)); var key = goog.getUid(feature).toString(); if (!(key in features)) { features[key] = true; return callback(feature); } }); } }; /** * Get a reference to the wrapped source. * @return {ol.source.Vector} Source. * @api */ ol.source.ImageVector.prototype.getSource = function() { return this.source_; }; /** * Get the style for features. This returns whatever was passed to the `style` * option at construction or to the `setStyle` method. * @return {ol.style.Style|Array.<ol.style.Style>|ol.style.StyleFunction} * Layer style. * @api stable */ ol.source.ImageVector.prototype.getStyle = function() { return this.style_; }; /** * Get the style function. * @return {ol.style.StyleFunction|undefined} Layer style function. * @api stable */ ol.source.ImageVector.prototype.getStyleFunction = function() { return this.styleFunction_; }; /** * @param {ol.Coordinate} center Center. * @param {number} resolution Resolution. * @param {number} pixelRatio Pixel ratio. * @param {ol.Size} size Size. * @return {!goog.vec.Mat4.Number} Transform. * @private */ ol.source.ImageVector.prototype.getTransform_ = function(center, resolution, pixelRatio, size) { return ol.vec.Mat4.makeTransform2D(this.transform_, size[0] / 2, size[1] / 2, pixelRatio / resolution, -pixelRatio / resolution, 0, -center[0], -center[1]); }; /** * Handle changes in image style state. * @param {goog.events.Event} event Image style change event. * @private */ ol.source.ImageVector.prototype.handleImageChange_ = function(event) { this.changed(); }; /** * @private */ ol.source.ImageVector.prototype.handleSourceChange_ = function() { // setState will trigger a CHANGE event, so we always relay // change events by calling setState. this.setState(this.source_.getState()); }; /** * @param {ol.Feature} feature Feature. * @param {number} resolution Resolution. * @param {number} pixelRatio Pixel ratio. * @param {ol.render.canvas.ReplayGroup} replayGroup Replay group. * @return {boolean} `true` if an image is loading. 
* @private */ ol.source.ImageVector.prototype.renderFeature_ = function(feature, resolution, pixelRatio, replayGroup) { var styles; if (goog.isDef(feature.getStyleFunction())) { styles = feature.getStyleFunction().call(feature, resolution); } else if (goog.isDef(this.styleFunction_)) { styles = this.styleFunction_(feature, resolution); } if (!goog.isDefAndNotNull(styles)) { return false; } var i, ii, loading = false; for (i = 0, ii = styles.length; i < ii; ++i) { loading = ol.renderer.vector.renderFeature( replayGroup, feature, styles[i], ol.renderer.vector.getSquaredTolerance(resolution, pixelRatio), this.handleImageChange_, this) || loading; } return loading; }; /** * Set the style for features. This can be a single style object, an array * of styles, or a function that takes a feature and resolution and returns * an array of styles. If it is `undefined` the default style is used. If * it is `null` the layer has no style (a `null` style), so only features * that have their own styles will be rendered in the layer. See * {@link ol.style} for information on the default style. * @param {ol.style.Style|Array.<ol.style.Style>|ol.style.StyleFunction|undefined} * style Layer style. * @api stable */ ol.source.ImageVector.prototype.setStyle = function(style) { this.style_ = goog.isDef(style) ? style : ol.style.defaultStyleFunction; this.styleFunction_ = goog.isNull(style) ? undefined : ol.style.createStyleFunction(this.style_); this.changed(); };<|fim▁end|>
return undefined; } else { /** @type {Object.<string, boolean>} */
<|file_name|>ast.py<|end_file_name|><|fim▁begin|>""" The classes `Token` and `Nonterm` can be subclassed and enriched with docstrings indicating the intended grammar, and will then be used in the parsing as part of the abstract syntax tree that is constructed in the process. """ from __future__ import annotations class Symbol: pass class Nonterm(Symbol): """ Non-terminal symbols have sets of productions associated with them. The productions induce a parse forest on an input token stream. There is one special non-terminal, which is denoted via the %start directive, whereas all other non-terminals are denoted via the %nonterm directive. In addition to productions (%reduce directives associated with class methods), the merge() method may be called during resolution of ambiguous parses. See the merge() documentation for further details. Following are examples of how to specify non-terminal classes and their associated productions: class E(Parsing.Nonterm): "%start E" def __init__(self): Parsing.Nonterm.__init__(self) # ... # Productions. def reduceA(self, E, plus, T): "%reduce E plus T [split]" print "%r ::= %r %r %r." % (self, E, plus, T) def reduceB(self, T): "%reduce T" class T(Parsing.Nonterm): "%nonterm" # Name implicitly same as class name. def reduceA(self, T, star, F): "%reduce T star F" def reduceB(self, F): "%reduce F [p1]" class F(Parsing.Nonterm): "%nonterm F [p2]" def reduceA(self, lparen, E, rparen): "%reduce lparen E rparen" def reduceB(self, id): "%reduce id" """ def merge(self, other: Nonterm) -> Nonterm: """ Merging happens when there is an ambiguity in the input that allows non-terminals to be part of multiple overlapping series of reductions. If no merge() method is specified, the parser will throw a syntax error upon encountering an ambiguity that confounds reduction processing. However, it may be useful to either discard one of the possible parses, or to explicitly record the ambiguity in the data structures being created during parsing. In both of these cases, the non-terminal-specific merge() is the place to do the work; merge() returns an object that is stored by the parser onto the parse stack. In the case where merge() discards one of the possible parses, it need only return the parse that is to be preserved (self or other). If multiple merges are necessary, they cause a series of merge() calls. The first alternative (self) may be the result of a previous merge() call, whereas other will not have been merged yet (unless as the result of merging further down in the parse forest). The alternative that is discarded is never touched by the parser again, so if any immediate cleanup is necessary, it should be done in merge(). """ raise SyntaxError( "No merge() for %r; merging %r <--> %r" % (type(self), self, other) ) class Token(Symbol): """ Tokens are terminal symbols. The parser is fed Token instances, which is what drives parsing. Typically, the user will define a class that subclasses Parsing.Token and implement parser-specific machinery there, then derive all actual token types from that class. class Token(Parsing.Token): def __init__(self, parser): Parsing.Token.__init__(self, parser) # ... <|fim▁hole|> "%token star [p2]" # Name implicitly same as class name. class lparen(Token): "%token [split]" class rparen(Token): "%token [none]" # [none] not necessary, since it's the default. class id(Token): "%token" """ class Precedence: """ Precedences can be associated with tokens, non-terminals, and productions. 
Precedence isn't as important for GLR parsers as for LR parsers, since GLR parsing allows for parse-time resolution of ambiguity. Still, precedence can be useful for reducing the volume of ambiguities that must be dealt with at run-time. There are five precedence types: %fail, %nonassoc, %left, %right, and %split. Each precedence can have relationships with other precedences: <, >, or =. These relationships specify a directed acyclic graph (DAG), which is used to compute the transitive closures of relationships among precedences. If no path exists between two precedences that are compared during conflict resolution, parser generation fails. < and > are reflexive; it does not matter which is used. Conceptually, the = relationship causes precedences to share a node in the DAG. During conflict resolution, an error results if no path exists in the DAG between the precedences under consideration. When such a path exists, the highest precedence non-terminal or production takes precedence. Associativity only comes into play for shift/reduce conflicts, where the terminal and the production have equivalent precedences (= relationship). In this case, the non-terminal's associativity determines how the conflict is resolved. The %fail and %split associativities are special because they can be mixed with other associativities. During conflict resolution, if another action has non-%fail associativity, then the %fail (lack of) associativity is overridden. Similarly, %split associativity overrides any other associativity. In contrast, any mixture of associativity between %nonassoc/%left/%right causes an unresolvable conflict. %fail : Any conflict is a parser-generation-time error. A pre-defined precedence, [none], is provided. It has %fail associativity, and has no pre-defined precedence relationships. %nonassoc : Resolve shift/reduce conflicts by removing both possibilities, thus making conflicts a parse-time error. %left : Resolve shift/reduce conflicts by reducing. %right : Resolve shift/reduce conflicts by shifting. %split : Do not resolve conflicts; the GLR algorithm will split the parse stack when necessary. A pre-defined precedence, [split], is provided. It has %split associativity, and has no pre-defined precedence relationships. By default, all symbols have [none] precedence. Each production inherits the precedence of its left-hand-side nonterminal's precedence unless a precedence is manually specified for the production. Following are some examples of how to specify precedence classes: class P1(Parsing.Precedence): "%split p1" class p2(Parsing.Precedence): "%left" # Name implicitly same as class name. class P3(Parsing.Precedence): "%left p3 >p2" # No whitespace is allowed between > and p2. class P4(Parsing.Precedence): "%left p4 =p3" # No whitespace is allowed between = and p3. """<|fim▁end|>
class Plus(Token): "%token plus [p1]" class star(Token):
<|file_name|>projecttags.py<|end_file_name|><|fim▁begin|>from django import template from django.conf import settings from django.template.defaultfilters import stringfilter import os register = template.Library() @register.filter(name='basename') @stringfilter def basename(value): return os.path.basename(value) @register.filter(name='replace_macros') @stringfilter def replace_macros(value, user_dict): return value.replace("#FIRSTNAME#", user_dict['first_name'].strip()) \ .replace("#LASTNAME#", user_dict['last_name'].strip()) @register.filter(name='state_label_css') def state_label_css(subm): green_label = "badge label label-success" red_label = "badge label label-important" grey_label = "badge label label-info" # We expect a submission as input<|fim▁hole|> return green_label else: return red_label if subm.state in [subm.SUBMITTED_TESTED, subm.SUBMITTED, subm.TEST_FULL_PENDING, subm.GRADED, subm.TEST_FULL_FAILED]: return green_label if subm.state == subm.TEST_VALIDITY_FAILED: return red_label return grey_label @register.assignment_tag def setting(name): return getattr(settings, name, "") @register.inclusion_tag('inclusion_tags/details_table.html') def details_table(submission): return {'submission': submission} @register.inclusion_tag('inclusion_tags/deadline.html') def deadline_timeout(assignment): return {'assignment': assignment, 'show_timeout': True} @register.inclusion_tag('inclusion_tags/deadline.html') def deadline(assignment): return {'assignment': assignment, 'show_timeout': False} @register.inclusion_tag('inclusion_tags/grading.html') def grading(submission): return {'submission': submission}<|fim▁end|>
if subm.is_closed() and subm.grading: if subm.grading.means_passed:
<|file_name|>hostList.js<|end_file_name|><|fim▁begin|>Template.HostList.events({ }); Template.HostList.helpers({ // Get list of Hosts sorted by the sort field. hosts: function () { return Hosts.find({}, {sort: {sort: 1}}); } }); Template.HostList.rendered = function () { // Make rows sortable/draggable using jQuery UI.<|fim▁hole|> target = ui.item.get(0); before = ui.item.prev().get(0); after = ui.item.next().get(0); // Change the sort value depending on target location. // If target is now first, subtract 1 from sort value. if (!before) { newSort = Blaze.getData(after).sort - 1; // If target is now last, add 1 to sort value. } else if (!after) { newSort = Blaze.getData(before).sort + 1; // Get value of prev and next elements // to determine new target sort value. } else { newSort = (Blaze.getData(after).sort + Blaze.getData(before).sort) / 2; } // Update the database with new sort value. Hosts.update({_id: Blaze.getData(target)._id}, { $set: { sort: newSort } }); } }); };<|fim▁end|>
this.$('#sortable').sortable({ stop: function (event, ui) { // Define target row items.
<|file_name|>types.py<|end_file_name|><|fim▁begin|>def _types_gen(T): yield T if hasattr(T, 't'): for l in T.t: yield l if hasattr(l, 't'): for ll in _types_gen(l): yield ll class Type(type):<|fim▁hole|> assert type(u'') == String assert type('') == String assert type('') == Any assert Any.kind('') == String assert Any.decode('str') == String assert Any.kind({}) == Object """ def __init__(self, *args, **kwargs): type.__init__(self, *args, **kwargs) def __eq__(self, other): for T in _types_gen(self): if isinstance(other, Type): if T in other.t: return True if type.__eq__(T, other): return True return False def __str__(self): return getattr(self, '_name', 'unknown') def N(self, n): self._name = n return self def I(self, *args): self.t = list(args) return self def kind(self, t): if type(t) is Type: return t ty = lambda t: type(t) if type(t) is type: ty = lambda t: t return reduce( lambda L, R: R if (hasattr(R, 't') and ty(t) == R) else L, filter(lambda T: T is not Any, _types_gen(self))) def decode(self, n): return reduce( lambda L, R: R if (str(R) == n) else L, _types_gen(self)) # JSON primatives and data types Object = Type('Object', (object,), {}).I(dict).N('obj') Number = Type('Number', (object,), {}).I(int, long).N('num') Boolean = Type('Boolean', (object,), {}).I(bool).N('bit') String = Type('String', (object,), {}).I(str, unicode).N('str') Array = Type('Array', (object,), {}).I(list, set, tuple).N('arr') Nil = Type('Nil', (object,), {}).I(type(None)).N('nil') Any = Type('Any', (object,), {}).I( Object, Number, Boolean, String, Array, Nil).N('any')<|fim▁end|>
""" A rudimentary extension to `type` that provides polymorphic types for run-time type checking of JSON data types. IE:
<|file_name|>main_test.go<|end_file_name|><|fim▁begin|>package cryptostore import ( "fmt" "log" "os" "path/filepath" "testing" ) func init() { log.SetFlags(0) // buf := &bytes.Buffer{} // log.SetOutput(buf) } func TestEncryptAndDecrypt(t *testing.T) { secret := []byte("cei6je9aig2ahzi8eiyau2oP8feeKie7") crypter := newCrypter(secret) text := "this is secret" encrypted, e := crypter.Encrypt([]byte(text)) if e != nil { t.Errorf("error encrypting %v", e) } if string(encrypted) == text { t.Error("encrypted should not equal text") } var v, ex interface{} v = fmt.Sprintf("%T", encrypted) ex = "[]uint8" if ex != v { t.Errorf("expected encrypted to be %#v, was %#v", ex, v) } decrypted, e := crypter.Decrypt(encrypted) if e != nil { t.Errorf("error decrypting: %v", e) } v = string(decrypted) ex = text if ex != v { t.Errorf("expected decrypted to be %#v, was %#v", ex, v) } } func TestEncryptKeyNotLongEnough(t *testing.T) { crypter := newCrypter([]byte("test")) _, e := crypter.Cipher() if e == nil { t.Errorf("error should not be nil") } var v, ex interface{} v = e.Error() ex = "crypto/aes: invalid key size 4" if ex != v { t.Errorf("expected error to be %#v, was %#v", ex, v) } } const ( TestStorePath = "./tmp/store" userSecret = "sososecret123456" ) var blob = []byte("this is a test") func setup() (*Store, error) { storePath, e := filepath.Abs(TestStorePath) if e != nil { return nil, e } os.RemoveAll(storePath)<|fim▁hole|> func createUser(store *Store) error { _, e := store.createUserWithBits("user1", userSecret, 1024) return e } func TestStoreCreateUser(t *testing.T) { store, err := setup() if err != nil { t.Fatal("error setting up", err) } users, e := store.Users() if e != nil { t.Errorf("error iterating users: %v", e) } if len(users) != 0 { t.Errorf("expected to get 0 users, got %v", len(users)) } e = createUser(store) if e != nil { t.Errorf("error creating user with bits %v", e) } se := []string{ "./tmp/store/users/user1", "./tmp/store/users/user1/id_rsa.pub", "./tmp/store/users/user1/id_rsa", } for _, s := range se { _, err := os.Stat(s) if err != nil { t.Errorf("expected %v to exist but got error %v", s, err) } } users, e = store.Users() if e != nil { t.Errorf("error iterating users %v", err) } if len(users) != 1 { t.Errorf("expected to find 1 user, found %d", len(users)) } if users[0].Login != "user1" { t.Errorf("expected first login to be %v. was %v", "user1", users[0].Login) } } func TestPutAndGetBlob(t *testing.T) { store, err := setup() if err != nil { t.Fatal("error setting up", err) } err = createUser(store) if err != nil { t.Fatal("error creating user", err) } err = store.Put("first", blob, "user1") if err != nil { t.Fatal("error putting blob:", err) } b, err := store.Get("first", "user1", userSecret) if err != nil { t.Errorf("error getting from store: %v", err) } var v, ex interface{} v = string(b) ex = "this is a test" if ex != v { t.Errorf("expected value of blob to be %#v, was %#v", ex, v) } }<|fim▁end|>
return NewStore(storePath), nil }
<|file_name|>status_test.go<|end_file_name|><|fim▁begin|>// Copyright 2014 Canonical Ltd. // Licensed under the AGPLv3, see LICENCE file for details. package v4_test import ( "encoding/json" "net/http" "time" jc "github.com/juju/testing/checkers" "github.com/juju/testing/httptesting" "github.com/juju/utils/debugstatus" gc "gopkg.in/check.v1" "gopkg.in/juju/charm.v5" "gopkg.in/juju/charmstore.v4/internal/mongodoc" "gopkg.in/juju/charmstore.v4/internal/router" "gopkg.in/juju/charmstore.v4/params" ) var zeroTimeStr = time.Time{}.Format(time.RFC3339) func (s *APISuite) TestStatus(c *gc.C) { for _, id := range []*router.ResolvedURL{ newResolvedURL("cs:~charmers/precise/wordpress-2", 2), newResolvedURL("cs:~charmers/precise/wordpress-3", 3), newResolvedURL("cs:~foo/precise/arble-9", -1), newResolvedURL("cs:~bar/utopic/arble-10", -1), newResolvedURL("cs:~charmers/bundle/oflaughs-3", 3), newResolvedURL("cs:~bar/bundle/oflaughs-4", -1), } { if id.URL.Series == "bundle" { s.addPublicBundle(c, "wordpress-simple", id) } else { s.addPublicCharm(c, "wordpress", id) } } now := time.Now() s.PatchValue(&debugstatus.StartTime, now) start := now.Add(-2 * time.Hour) s.addLog(c, &mongodoc.Log{ Data: []byte(`"ingestion started"`), Level: mongodoc.InfoLevel, Type: mongodoc.IngestionType, Time: start, }) end := now.Add(-1 * time.Hour) s.addLog(c, &mongodoc.Log{ Data: []byte(`"ingestion completed"`), Level: mongodoc.InfoLevel, Type: mongodoc.IngestionType, Time: end, }) statisticsStart := now.Add(-1*time.Hour - 30*time.Minute) s.addLog(c, &mongodoc.Log{ Data: []byte(`"legacy statistics import started"`), Level: mongodoc.InfoLevel, Type: mongodoc.LegacyStatisticsType, Time: statisticsStart, }) statisticsEnd := now.Add(-30 * time.Minute) s.addLog(c, &mongodoc.Log{ Data: []byte(`"legacy statistics import completed"`), Level: mongodoc.InfoLevel, Type: mongodoc.LegacyStatisticsType, Time: statisticsEnd, }) s.AssertDebugStatus(c, true, map[string]params.DebugStatus{ "mongo_connected": { Name: "MongoDB is connected", Value: "Connected", Passed: true, }, "mongo_collections": { Name: "MongoDB collections", Value: "All required collections exist", Passed: true, }, "elasticsearch": { Name: "Elastic search is running", Value: "Elastic search is not configured", Passed: true, }, "entities": { Name: "Entities in charm store", Value: "4 charms; 2 bundles; 3 promulgated", Passed: true, }, "base_entities": { Name: "Base entities in charm store", Value: "count: 5", Passed: true, }, "server_started": { Name: "Server started", Value: now.String(), Passed: true, }, "ingestion": { Name: "Ingestion", Value: "started: " + start.Format(time.RFC3339) + ", completed: " + end.Format(time.RFC3339), Passed: true, }, "legacy_statistics": { Name: "Legacy Statistics Load", Value: "started: " + statisticsStart.Format(time.RFC3339) + ", completed: " + statisticsEnd.Format(time.RFC3339), Passed: true, }, }) } func (s *APISuite) TestStatusWithoutCorrectCollections(c *gc.C) { s.store.DB.Entities().DropCollection() s.AssertDebugStatus(c, false, map[string]params.DebugStatus{ "mongo_collections": { Name: "MongoDB collections", Value: "Missing collections: [" + s.store.DB.Entities().Name + "]", Passed: false, }, }) } func (s *APISuite) TestStatusWithoutIngestion(c *gc.C) { s.AssertDebugStatus(c, false, map[string]params.DebugStatus{ "ingestion": { Name: "Ingestion", Value: "started: " + zeroTimeStr + ", completed: " + zeroTimeStr, Passed: false, }, }) } func (s *APISuite) TestStatusIngestionStarted(c *gc.C) { now := time.Now() start := now.Add(-1 * 
time.Hour) s.addLog(c, &mongodoc.Log{ Data: []byte(`"ingestion started"`), Level: mongodoc.InfoLevel, Type: mongodoc.IngestionType, Time: start, }) s.AssertDebugStatus(c, false, map[string]params.DebugStatus{ "ingestion": { Name: "Ingestion", Value: "started: " + start.Format(time.RFC3339) + ", completed: " + zeroTimeStr, Passed: false, }, }) } func (s *APISuite) TestStatusWithoutLegacyStatistics(c *gc.C) { s.AssertDebugStatus(c, false, map[string]params.DebugStatus{ "legacy_statistics": { Name: "Legacy Statistics Load", Value: "started: " + zeroTimeStr + ", completed: " + zeroTimeStr, Passed: false, }, }) } func (s *APISuite) TestStatusLegacyStatisticsStarted(c *gc.C) { now := time.Now() statisticsStart := now.Add(-1*time.Hour - 30*time.Minute) s.addLog(c, &mongodoc.Log{ Data: []byte(`"legacy statistics import started"`), Level: mongodoc.InfoLevel, Type: mongodoc.LegacyStatisticsType, Time: statisticsStart, }) s.AssertDebugStatus(c, false, map[string]params.DebugStatus{ "legacy_statistics": { Name: "Legacy Statistics Load", Value: "started: " + statisticsStart.Format(time.RFC3339) + ", completed: " + zeroTimeStr, Passed: false, }, }) } func (s *APISuite) TestStatusLegacyStatisticsMultipleLogs(c *gc.C) { now := time.Now() statisticsStart := now.Add(-1*time.Hour - 30*time.Minute) s.addLog(c, &mongodoc.Log{ Data: []byte(`"legacy statistics import started"`), Level: mongodoc.InfoLevel, Type: mongodoc.LegacyStatisticsType, Time: statisticsStart.Add(-1 * time.Hour), }) s.addLog(c, &mongodoc.Log{ Data: []byte(`"legacy statistics import started"`), Level: mongodoc.InfoLevel, Type: mongodoc.LegacyStatisticsType, Time: statisticsStart, }) statisticsEnd := now.Add(-30 * time.Minute) s.addLog(c, &mongodoc.Log{ Data: []byte(`"legacy statistics import completed"`), Level: mongodoc.InfoLevel, Type: mongodoc.LegacyStatisticsType, Time: statisticsEnd.Add(-1 * time.Hour), }) s.addLog(c, &mongodoc.Log{ Data: []byte(`"legacy statistics import completed"`), Level: mongodoc.InfoLevel, Type: mongodoc.LegacyStatisticsType, Time: statisticsEnd, }) s.AssertDebugStatus(c, false, map[string]params.DebugStatus{ "legacy_statistics": { Name: "Legacy Statistics Load", Value: "started: " + statisticsStart.Format(time.RFC3339) + ", completed: " + statisticsEnd.Format(time.RFC3339), Passed: true, }, }) } func (s *APISuite) TestStatusBaseEntitiesError(c *gc.C) { // Add a base entity without any corresponding entities. entity := &mongodoc.BaseEntity{ URL: charm.MustParseReference("django"), Name: "django", } err := s.store.DB.BaseEntities().Insert(entity) c.Assert(err, gc.IsNil) s.AssertDebugStatus(c, false, map[string]params.DebugStatus{ "base_entities": { Name: "Base entities in charm store",<|fim▁hole|>} // AssertDebugStatus asserts that the current /debug/status endpoint // matches the given status, ignoring status duration. // If complete is true, it fails if the results contain // keys not mentioned in status. 
func (s *APISuite) AssertDebugStatus(c *gc.C, complete bool, status map[string]params.DebugStatus) { rec := httptesting.DoRequest(c, httptesting.DoRequestParams{ Handler: s.srv, URL: storeURL("debug/status"), }) c.Assert(rec.Code, gc.Equals, http.StatusOK, gc.Commentf("body: %s", rec.Body.Bytes())) c.Assert(rec.Header().Get("Content-Type"), gc.Equals, "application/json") var gotStatus map[string]params.DebugStatus err := json.Unmarshal(rec.Body.Bytes(), &gotStatus) c.Assert(err, gc.IsNil) for key, r := range gotStatus { if _, found := status[key]; !complete && !found { delete(gotStatus, key) continue } r.Duration = 0 gotStatus[key] = r } c.Assert(gotStatus, jc.DeepEquals, status) } type statusWithElasticSearchSuite struct { commonSuite } var _ = gc.Suite(&statusWithElasticSearchSuite{}) func (s *statusWithElasticSearchSuite) SetUpSuite(c *gc.C) { s.enableES = true s.commonSuite.SetUpSuite(c) } func (s *statusWithElasticSearchSuite) TestStatusWithElasticSearch(c *gc.C) { rec := httptesting.DoRequest(c, httptesting.DoRequestParams{ Handler: s.srv, URL: storeURL("debug/status"), }) var results map[string]params.DebugStatus err := json.Unmarshal(rec.Body.Bytes(), &results) c.Assert(err, gc.IsNil) c.Assert(results["elasticsearch"].Name, gc.Equals, "Elastic search is running") c.Assert(results["elasticsearch"].Value, jc.Contains, "cluster_name:") }<|fim▁end|>
Value: "count: 1", Passed: false, }, })
<|file_name|>test_cfn.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import json import tempfile import fixtures from lxml import etree from oslo_config import cfg import requests import testtools from testtools import content as test_content from testtools import matchers import urllib.parse as urlparse from os_collect_config import cfn from os_collect_config import collect from os_collect_config import exc META_DATA = {u'int1': 1, u'strfoo': u'foo', u'map_ab': { u'a': 'apple', u'b': 'banana', }} SOFTWARE_CONFIG_DATA = { u'old-style': u'value', u'deployments': [ { u'inputs': [ { u'type': u'String', u'name': u'input1', u'value': u'value1' } ], u'group': 'Heat::Ungrouped', u'name': 'dep-name1', u'outputs': None, u'options': None, u'config': { u'config1': 'value1' } }, { u'inputs': [ { u'type': u'String', u'name': u'input1', u'value': u'value1' } ], u'group': 'os-apply-config', u'name': 'dep-name2', u'outputs': None, u'options': None, u'config': { u'config2': 'value2' } }, { u'inputs': [ { u'type': u'String', u'name': u'input1', u'value': u'value1' } ], u'name': 'dep-name3', u'outputs': None, u'options': None, u'config': { u'config3': 'value3' } }, { u'inputs': [], u'group': 'ignore_me', u'name': 'ignore_me_name', u'outputs': None, u'options': None, u'config': 'ignore_me_config' } ] } SOFTWARE_CONFIG_IMPOSTER_DATA = { u'old-style': u'value', u'deployments': { u"not": u"a list" } } class FakeResponse(dict): def __init__(self, text): self.text = text def raise_for_status(self): pass class FakeReqSession(object): SESSION_META_DATA = META_DATA def __init__(self, testcase, expected_netloc): self._test = testcase self._expected_netloc = expected_netloc self.verify = False def get(self, url, params, headers, verify=None, timeout=None): self._test.addDetail('url', test_content.text_content(url)) url = urlparse.urlparse(url) self._test.assertEqual(self._expected_netloc, url.netloc) self._test.assertEqual('/v1/', url.path) self._test.assertEqual('application/json', headers['Content-Type']) self._test.assertIn('SignatureVersion', params) self._test.assertEqual('2', params['SignatureVersion']) self._test.assertIn('Signature', params) self._test.assertIn('Action', params) self._test.assertEqual('DescribeStackResource', params['Action']) self._test.assertIn('LogicalResourceId', params) self._test.assertEqual('foo', params['LogicalResourceId']) self._test.assertEqual(10, timeout) root = etree.Element('DescribeStackResourceResponse') result = etree.SubElement(root, 'DescribeStackResourceResult') detail = etree.SubElement(result, 'StackResourceDetail') metadata = etree.SubElement(detail, 'Metadata') metadata.text = json.dumps(self.SESSION_META_DATA) if verify is not None: self.verify = True return FakeResponse(etree.tostring(root)) class FakeRequests(object): exceptions = requests.exceptions def __init__(self, testcase, expected_netloc='192.0.2.1:8000'): self._test = testcase self._expected_netloc = expected_netloc def 
Session(self): return FakeReqSession(self._test, self._expected_netloc) class FakeReqSessionSoftwareConfig(FakeReqSession): SESSION_META_DATA = SOFTWARE_CONFIG_DATA class FakeRequestsSoftwareConfig(FakeRequests): FAKE_SESSION = FakeReqSessionSoftwareConfig def Session(self): return self.FAKE_SESSION(self._test, self._expected_netloc) class FakeReqSessionConfigImposter(FakeReqSession): SESSION_META_DATA = SOFTWARE_CONFIG_IMPOSTER_DATA class FakeRequestsConfigImposter(FakeRequestsSoftwareConfig): FAKE_SESSION = FakeReqSessionConfigImposter class FakeFailRequests(object): exceptions = requests.exceptions class Session(object): def get(self, url, params, headers, verify=None, timeout=None): raise requests.exceptions.HTTPError(403, 'Forbidden') class TestCfnBase(testtools.TestCase): def setUp(self): super(TestCfnBase, self).setUp() self.log = self.useFixture(fixtures.FakeLogger()) self.useFixture(fixtures.NestedTempfile()) self.hint_file = tempfile.NamedTemporaryFile() self.hint_file.write(u'http://192.0.2.1:8000'.encode('utf-8')) self.hint_file.flush() self.addCleanup(self.hint_file.close) collect.setup_conf() cfg.CONF.cfn.heat_metadata_hint = self.hint_file.name cfg.CONF.cfn.metadata_url = None cfg.CONF.cfn.path = ['foo.Metadata'] cfg.CONF.cfn.access_key_id = '0123456789ABCDEF' cfg.CONF.cfn.secret_access_key = 'FEDCBA9876543210' class TestCfn(TestCfnBase): def test_collect_cfn(self): cfn_md = cfn.Collector(requests_impl=FakeRequests(self)).collect() self.assertThat(cfn_md, matchers.IsInstance(list)) self.assertEqual('cfn', cfn_md[0][0]) cfn_md = cfn_md[0][1] for k in ('int1', 'strfoo', 'map_ab'): self.assertIn(k, cfn_md) self.assertEqual(cfn_md[k], META_DATA[k]) self.assertEqual('', self.log.output) def test_collect_with_ca_cert(self): cfn.CONF.cfn.ca_certificate = "foo" collector = cfn.Collector(requests_impl=FakeRequests(self)) collector.collect() self.assertTrue(collector._session.verify) def test_collect_cfn_fail(self): cfn_collect = cfn.Collector(requests_impl=FakeFailRequests) self.assertRaises(exc.CfnMetadataNotAvailable, cfn_collect.collect) self.assertIn('Forbidden', self.log.output) def test_collect_cfn_no_path(self): cfg.CONF.cfn.path = None<|fim▁hole|> def test_collect_cfn_bad_path(self): cfg.CONF.cfn.path = ['foo'] cfn_collect = cfn.Collector(requests_impl=FakeRequests(self)) self.assertRaises(exc.CfnMetadataNotConfigured, cfn_collect.collect) self.assertIn('Path not in format', self.log.output) def test_collect_cfn_no_metadata_url(self): cfg.CONF.cfn.heat_metadata_hint = None cfn_collect = cfn.Collector(requests_impl=FakeRequests(self)) self.assertRaises(exc.CfnMetadataNotConfigured, cfn_collect.collect) self.assertIn('No metadata_url configured', self.log.output) def test_collect_cfn_missing_sub_path(self): cfg.CONF.cfn.path = ['foo.Metadata.not_there'] cfn_collect = cfn.Collector(requests_impl=FakeRequests(self)) self.assertRaises(exc.CfnMetadataNotAvailable, cfn_collect.collect) self.assertIn('Sub-key not_there does not exist', self.log.output) def test_collect_cfn_sub_path(self): cfg.CONF.cfn.path = ['foo.Metadata.map_ab'] cfn_collect = cfn.Collector(requests_impl=FakeRequests(self)) content = cfn_collect.collect() self.assertThat(content, matchers.IsInstance(list)) self.assertEqual('cfn', content[0][0]) content = content[0][1] self.assertIn(u'b', content) self.assertEqual(u'banana', content[u'b']) def test_collect_cfn_metadata_url_overrides_hint(self): cfg.CONF.cfn.metadata_url = 'http://127.0.1.1:8000/v1/' cfn_collect = cfn.Collector( requests_impl=FakeRequests(self, 
expected_netloc='127.0.1.1:8000')) cfn_collect.collect() class TestCfnSoftwareConfig(TestCfnBase): def test_collect_cfn_software_config(self): cfn_md = cfn.Collector( requests_impl=FakeRequestsSoftwareConfig(self)).collect() self.assertThat(cfn_md, matchers.IsInstance(list)) self.assertEqual('cfn', cfn_md[0][0]) cfn_config = cfn_md[0][1] self.assertThat(cfn_config, matchers.IsInstance(dict)) self.assertEqual(set(['old-style', 'deployments']), set(cfn_config.keys())) self.assertIn('deployments', cfn_config) self.assertThat(cfn_config['deployments'], matchers.IsInstance(list)) self.assertEqual(4, len(cfn_config['deployments'])) deployment = cfn_config['deployments'][0] self.assertIn('inputs', deployment) self.assertThat(deployment['inputs'], matchers.IsInstance(list)) self.assertEqual(1, len(deployment['inputs'])) self.assertEqual('dep-name1', cfn_md[1][0]) self.assertEqual('value1', cfn_md[1][1]['config1']) self.assertEqual('dep-name2', cfn_md[2][0]) self.assertEqual('value2', cfn_md[2][1]['config2']) def test_collect_cfn_deployments_not_list(self): cfn_md = cfn.Collector( requests_impl=FakeRequestsConfigImposter(self)).collect() self.assertEqual(1, len(cfn_md)) self.assertEqual('cfn', cfn_md[0][0]) self.assertIn('not', cfn_md[0][1]['deployments']) self.assertEqual('a list', cfn_md[0][1]['deployments']['not'])<|fim▁end|>
cfn_collect = cfn.Collector(requests_impl=FakeRequests(self)) self.assertRaises(exc.CfnMetadataNotConfigured, cfn_collect.collect) self.assertIn('No path configured', self.log.output)
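[editor's note] The row above exercises os-collect-config's CFN collector against hand-rolled requests stand-ins rather than a mocking library. A minimal sketch of that fake-session pattern in plain Python follows; the class and function names here are illustrative, not part of the os-collect-config API.

# Minimal sketch of the fake-session test double used above.
# FakeResponse/FakeSession are hypothetical stand-ins, not library code.
import json


class FakeResponse(object):
    def __init__(self, text):
        self.text = text

    def raise_for_status(self):
        pass  # pretend every request succeeded


class FakeSession(object):
    """Mimics the small slice of requests.Session the code under test uses."""

    def get(self, url, params=None, headers=None, timeout=None):
        # Assert on the outgoing request here, then return canned data.
        assert params is not None and "Action" in params
        return FakeResponse(json.dumps({"int1": 1, "strfoo": "foo"}))


def collect(session):
    response = session.get("http://192.0.2.1:8000/v1/",
                           params={"Action": "DescribeStackResource"})
    response.raise_for_status()
    return json.loads(response.text)


assert collect(FakeSession())["strfoo"] == "foo"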
<|file_name|>constants.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
 *
 */

export const LOGIN_REQUEST = 'app/LoginContainer/LOGIN_REQUEST';
export const LOGIN_SUCCESS = 'app/LoginContainer/LOGIN_SUCCESS';
export const LOGIN_FAILURE = 'app/LoginContainer/LOGIN_FAILURE';<|fim▁end|>
/*
 *
 * LoginContainer constants
<|file_name|>details.py<|end_file_name|><|fim▁begin|>import web import sam.common import sam.models.links class Details: def __init__(self, db, subscription, ds, address, timestamp_range=None, port=None, page_size=50): self.db = db self.sub = subscription self.table_nodes = "s{acct}_Nodes".format(acct=self.sub) self.table_links = "s{acct}_ds{id}_Links".format(acct=self.sub, id=ds) self.table_links_in = "s{acct}_ds{id}_LinksIn".format(acct=self.sub, id=ds) self.table_links_out = "s{acct}_ds{id}_LinksOut".format(acct=self.sub, id=ds) self.ds = ds self.ip_start, self.ip_end = sam.common.determine_range_string(address) self.page_size = page_size self.port = port if timestamp_range: self.time_range = timestamp_range else: linksModel = sam.models.links.Links(db, self.sub, self.ds) tr = linksModel.get_timerange() self.time_range = (tr['min'], tr['max']) if self.db.dbname == 'mysql': self.elapsed = '(UNIX_TIMESTAMP(MAX(timestamp)) - UNIX_TIMESTAMP(MIN(timestamp)))' self.divop = 'DIV' else: self.elapsed = '(MAX(timestamp) - MIN(timestamp))' self.divop = '/' sam.common.sqlite_udf(self.db) def get_metadata(self): qvars = {"start": self.ip_start, "end": self.ip_end} # TODO: seconds has a magic number 300 added to account for DB time quantization. query = """ SELECT {address_q} AS 'address' , COALESCE(n.hostname, '') AS 'hostname' , COALESCE(l_out.unique_out_ip, 0) AS 'unique_out_ip' , COALESCE(l_out.unique_out_conn, 0) AS 'unique_out_conn' , COALESCE(l_out.total_out, 0) AS 'total_out' , COALESCE(l_out.b_s, 0) AS 'out_bytes_sent' , COALESCE(l_out.b_r, 0) AS 'out_bytes_received' , COALESCE(l_out.max_bps, 0) AS 'out_max_bps' , COALESCE(l_out.sum_b * 1.0 / l_out.sum_duration, 0) AS 'out_avg_bps' , COALESCE(l_out.p_s, 0) AS 'out_packets_sent' , COALESCE(l_out.p_r, 0) AS 'out_packets_received' , COALESCE(l_out.sum_duration * 1.0 / l_out.total_out, 0) AS 'out_duration' , COALESCE(l_in.unique_in_ip, 0) AS 'unique_in_ip' , COALESCE(l_in.unique_in_conn, 0) AS 'unique_in_conn' , COALESCE(l_in.total_in, 0) AS 'total_in' , COALESCE(l_in.b_s, 0) AS 'in_bytes_sent' , COALESCE(l_in.b_r, 0) AS 'in_bytes_received' , COALESCE(l_in.max_bps, 0) AS 'in_max_bps' , COALESCE(l_in.sum_b * 1.0 / l_in.sum_duration, 0) AS 'in_avg_bps' , COALESCE(l_in.p_s, 0) AS 'in_packets_sent' , COALESCE(l_in.p_r, 0) AS 'in_packets_received' , COALESCE(l_in.sum_duration * 1.0 / l_in.total_in, 0) AS 'in_duration' , COALESCE(l_in.ports_used, 0) AS 'ports_used' , children.endpoints AS 'endpoints' , COALESCE(t.seconds, 0) + 300 AS 'seconds' , (COALESCE(l_in.sum_b, 0) + COALESCE(l_out.sum_b, 0)) / (COALESCE(t.seconds, 0) + 300) AS 'overall_bps' , COALESCE(l_in.protocol, "") AS 'in_protocols' , COALESCE(l_out.protocol, "") AS 'out_protocols' FROM ( SELECT ipstart, subnet, alias AS 'hostname' FROM {nodes_table} WHERE ipstart = $start AND ipend = $end ) AS n LEFT JOIN ( SELECT $start AS 's1' , COUNT(DISTINCT dst) AS 'unique_out_ip' , (SELECT COUNT(1) FROM (SELECT DISTINCT src, dst, port FROM {links_table} WHERE src BETWEEN $start AND $end) AS `temp1`) AS 'unique_out_conn' , SUM(links) AS 'total_out' , SUM(bytes_sent) AS 'b_s' , SUM(bytes_received) AS 'b_r' , MAX((bytes_sent + bytes_received) * 1.0 / duration) AS 'max_bps' , SUM(bytes_sent + bytes_received) AS 'sum_b' , SUM(packets_sent) AS 'p_s' , SUM(packets_received) AS 'p_r' , SUM(duration * links) AS 'sum_duration' , GROUP_CONCAT(DISTINCT protocol) AS 'protocol' FROM {links_table} WHERE src BETWEEN $start AND $end GROUP BY 's1' ) AS l_out ON n.ipstart = l_out.s1 LEFT JOIN ( SELECT $start AS 's1' , 
COUNT(DISTINCT src) AS 'unique_in_ip' , (SELECT COUNT(1) FROM (SELECT DISTINCT src, dst, port FROM {links_table} WHERE dst BETWEEN $start AND $end) AS `temp2`) AS 'unique_in_conn' , SUM(links) AS 'total_in' , SUM(bytes_sent) AS 'b_s' , SUM(bytes_received) AS 'b_r' , MAX((bytes_sent + bytes_received) * 1.0 / duration) AS 'max_bps' , SUM(bytes_sent + bytes_received) AS 'sum_b' , SUM(packets_sent) AS 'p_s' , SUM(packets_received) AS 'p_r' , SUM(duration * links) AS 'sum_duration' , COUNT(DISTINCT port) AS 'ports_used' , GROUP_CONCAT(DISTINCT protocol) AS 'protocol' FROM {links_table} WHERE dst BETWEEN $start AND $end GROUP BY 's1' ) AS l_in ON n.ipstart = l_in.s1 LEFT JOIN ( SELECT $start AS 's1' , COUNT(ipstart) AS 'endpoints' FROM {nodes_table} WHERE ipstart = ipend AND ipstart BETWEEN $start AND $end ) AS children ON n.ipstart = children.s1 LEFT JOIN ( SELECT $start AS 's1' , {elapsed} AS 'seconds' FROM {links_table} GROUP BY 's1' ) AS t ON n.ipstart = t.s1 LIMIT 1; """.format( address_q=sam.common.db_concat(self.db, 'decodeIP(n.ipstart)', "'/'", 'n.subnet'), elapsed=self.elapsed, nodes_table=self.table_nodes, links_table=self.table_links) results = self.db.query(query, vars=qvars) first = results.first() if first: return first else: return {} def build_where_clause(self, timestamp_range=None, port=None, protocol=None, rounding=True): """ Build a WHERE SQL clause that covers basic timerange, port, and protocol filtering. :param timestamp_range: start and end times as unix timestamps (integers). Default is all time. :type timestamp_range: tuple[int, int] :param port: exclusively report traffic destined for this port, if specified. :type port: int or str :param protocol: exclusively report traffic using this protocol :type protocol: str :param rounding: round each time stamp to the nearest quantization mark. 
(db records are quantized for conciseness) :type rounding: bool :return: String SQL clause :rtype: str """ clauses = [] t_start = 0 t_end = 0 if timestamp_range: t_start = timestamp_range[0] t_end = timestamp_range[1] if rounding: # rounding to 5 minutes, for use with the Syslog table if t_start > 150: t_start -= 150 if t_end <= 2 ** 31 - 150: t_end += 149 if self.db.dbname == 'sqlite': clauses.append("timestamp BETWEEN $tstart AND $tend") else: clauses.append("timestamp BETWEEN FROM_UNIXTIME($tstart) AND FROM_UNIXTIME($tend)") if port: clauses.append("port = $port") if protocol: clauses.append("protocols LIKE $protocol") protocol = "%{0}%".format(protocol) qvars = {'tstart': t_start, 'tend': t_end, 'port': port, 'protocol': protocol} where = str(web.db.reparam("\n AND ".join(clauses), qvars)) if where: where = " AND " + where return where def get_details_connections(self, inbound, page=1, order="-links", simple=False): sort_options = ['links', 'src', 'dst', 'port', 'sum_bytes', 'sum_packets', 'protocols', 'avg_duration'] sort_options_simple = ['links', 'src', 'dst', 'port'] qvars = { 'table_links': self.table_links, 'start': self.ip_start, 'end': self.ip_end, 'page': self.page_size * (page - 1), 'page_size': self.page_size, 'WHERE': self.build_where_clause(self.time_range, self.port) } if inbound: qvars['collected'] = "src" qvars['filtered'] = "dst" else: qvars['filtered'] = "src" qvars['collected'] = "dst" # determine the sort direction if order and order[0] == '-': sort_dir = "DESC" else: sort_dir = "ASC" # determine the sort column if simple: if order and order[1:] in sort_options_simple: sort_by = order[1:] else: sort_by = sort_options_simple[0] else: if order and order[1:] in sort_options: sort_by = order[1:] else: sort_by = sort_options[0] # add table prefix for some columns if sort_by in ['port', 'src', 'dst']: sort_by = "`links`." 
+ sort_by qvars['order'] = "{0} {1}".format(sort_by, sort_dir) if simple: query = """ SELECT decodeIP({collected}) AS '{collected}' , port AS 'port' , sum(links) AS 'links' FROM {table_links} AS `links` WHERE {filtered} BETWEEN $start AND $end {WHERE} GROUP BY `links`.{collected}, `links`.port ORDER BY {order} LIMIT {page}, {page_size} """.format(**qvars) else: query = """ SELECT src, dst, port, links, protocols , sum_bytes , (sum_bytes / links) AS 'avg_bytes' , sum_packets , (sum_packets / links) AS 'avg_packets' , avg_duration FROM( SELECT decodeIP(src) AS 'src' , decodeIP(dst) AS 'dst' , port AS 'port' , SUM(links) AS 'links' , GROUP_CONCAT(DISTINCT protocol) AS 'protocols' , SUM(bytes_sent + COALESCE(bytes_received, 0)) AS 'sum_bytes' , SUM(packets_sent + COALESCE(packets_received, 0)) AS 'sum_packets' , SUM(duration*links) / SUM(links) AS 'avg_duration' FROM {table_links} AS `links` WHERE {filtered} BETWEEN $start AND $end {WHERE} GROUP BY `links`.src, `links`.dst, `links`.port ORDER BY {order} LIMIT {page}, {page_size} ) AS precalc; """.format(**qvars) return list(self.db.query(query, vars=qvars)) def get_details_ports(self, page=1, order="-links"): sort_options = ['links', 'port'] first_result = (page - 1) * self.page_size qvars = { 'links_table': self.table_links, 'start': self.ip_start, 'end': self.ip_end, 'first': first_result, 'size': self.page_size, 'WHERE': self.build_where_clause(self.time_range, self.port), } if order and order[0] == '-': sort_dir = "DESC" else: sort_dir = "ASC" if order and order[1:] in sort_options: sort_by = order[1:] else: sort_by = sort_options[0] qvars['order'] = "{0} {1}".format(sort_by, sort_dir) query = """ SELECT port AS 'port', sum(links) AS 'links' FROM {links_table} WHERE dst BETWEEN $start AND $end {WHERE} GROUP BY port ORDER BY {order} LIMIT $first, $size; """.format(**qvars) return list(sam.common.db.query(query, vars=qvars)) def get_details_children(self, order='+ipstart'): sort_options = ['ipstart', 'hostname', 'endpoints', 'ratio'] ip_diff = self.ip_end - self.ip_start if ip_diff == 0: return [] elif ip_diff == 255: quotient = 1 child_subnet_start = 25 child_subnet_end = 32 elif ip_diff == 65535: quotient = 256 child_subnet_start = 17 child_subnet_end = 24 elif ip_diff == 16777215: quotient = 65536 child_subnet_start = 9 child_subnet_end = 16 else: quotient = 16777216 child_subnet_start = 1 child_subnet_end = 8 qvars = {'ip_start': self.ip_start, 'ip_end': self.ip_end, 's_start': child_subnet_start, 's_end': child_subnet_end, 'quot': quotient, 'quot_1': quotient - 1} if order and order[0] == '-': sort_dir = "DESC" else: sort_dir = "ASC" if order and order[1:] in sort_options: sort_by = order[1:] else: sort_by = sort_options[0] qvars['order'] = "{0} {1}".format(sort_by, sort_dir) query = """ SELECT decodeIP(`n`.ipstart) AS 'address' , COALESCE(`n`.alias, '') AS 'hostname' , `n`.subnet AS 'subnet' , `sn`.kids AS 'endpoints' , COALESCE(COALESCE(`l_in`.links,0) / (COALESCE(`l_in`.links,0) + COALESCE(`l_out`.links,0)), 0) AS 'ratio' FROM {nodes_table} AS `n`<|fim▁hole|> LEFT JOIN ( SELECT dst_start {div} $quot * $quot AS 'low' , dst_end {div} $quot * $quot + $quot_1 AS 'high' , sum(links) AS 'links' FROM {links_in_table} GROUP BY low, high ) AS `l_in` ON `l_in`.low = `n`.ipstart AND `l_in`.high = `n`.ipend LEFT JOIN ( SELECT src_start {div} $quot * $quot AS 'low' , src_end {div} $quot * $quot + $quot_1 AS 'high' , sum(links) AS 'links' FROM {links_out_table} GROUP BY low, high ) AS `l_out` ON `l_out`.low = `n`.ipstart AND `l_out`.high = 
`n`.ipend LEFT JOIN ( SELECT ipstart {div} $quot * $quot AS 'low' , ipend {div} $quot * $quot + $quot_1 AS 'high' , COUNT(ipstart) AS 'kids' FROM {nodes_table} WHERE ipstart = ipend GROUP BY low, high ) AS `sn` ON `sn`.low = `n`.ipstart AND `sn`.high = `n`.ipend WHERE `n`.ipstart BETWEEN $ip_start AND $ip_end AND `n`.subnet BETWEEN $s_start AND $s_end ORDER BY {order}; """.format(div=self.divop, order=qvars['order'], nodes_table=self.table_nodes, links_in_table=self.table_links_in, links_out_table=self.table_links_out) return list(sam.common.db.query(query, vars=qvars)) def get_details_summary(self): where = self.build_where_clause(timestamp_range=self.time_range, port=self.port) # TODO: seconds has a magic number 300 added to account for DB time quantization. query = """ SELECT `inputs`.ips AS 'unique_in' , `outputs`.ips AS 'unique_out' , `inputs`.ports AS 'unique_ports' FROM (SELECT COUNT(DISTINCT src) AS 'ips', COUNT(DISTINCT port) AS 'ports' FROM {links_table} WHERE dst BETWEEN $start AND $end {where} ) AS `inputs` JOIN (SELECT COUNT(DISTINCT dst) AS 'ips' FROM {links_table} WHERE src BETWEEN $start AND $end {where} ) AS `outputs`;""".format(where=where, links_table=self.table_links) qvars = {'start': self.ip_start, 'end': self.ip_end} rows = sam.common.db.query(query, vars=qvars) return rows.first()<|fim▁end|>
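[editor's note] details.py above assembles its SQL from individually parameterized clauses and widens the requested time window to the tables' 5-minute quantization so edge records are not dropped. A rough sketch of that idea; the helper name and colon-style placeholders are assumptions, not SAM's actual API.

# Illustrative clause assembly with time-bucket widening (300 s buckets,
# matching the quantization noted in the code above).
BUCKET = 300


def build_where(t_start, t_end, port=None):
    clauses = []
    qvars = {}
    # widen to the nearest bucket edges so boundary records survive
    clauses.append("timestamp BETWEEN :tstart AND :tend")
    qvars["tstart"] = t_start - (BUCKET // 2)
    qvars["tend"] = t_end + (BUCKET // 2 - 1)
    if port is not None:
        clauses.append("port = :port")
        qvars["port"] = port
    return " AND ".join(clauses), qvars


where, qvars = build_where(1000, 2000, port=443)
assert "port = :port" in where and qvars["tstart"] == 850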
<|file_name|>visitor.py<|end_file_name|><|fim▁begin|>import copy from mongoengine.errors import InvalidQueryError from mongoengine.queryset import transform __all__ = ('Q',) class QNodeVisitor(object): """Base visitor class for visiting Q-object nodes in a query tree. """ def visit_combination(self, combination): """Called by QCombination objects. """ return combination def visit_query(self, query): """Called by (New)Q objects. """ return query class DuplicateQueryConditionsError(InvalidQueryError): pass class SimplificationVisitor(QNodeVisitor): """Simplifies query trees by combining unnecessary 'and' connection nodes into a single Q-object. """ def visit_combination(self, combination): if combination.operation == combination.AND: # The simplification only applies to 'simple' queries if all(isinstance(node, Q) for node in combination.children): queries = [n.query for n in combination.children] try: return Q(**self._query_conjunction(queries)) except DuplicateQueryConditionsError: # Cannot be simplified pass return combination def _query_conjunction(self, queries): """Merges query dicts - effectively &ing them together. """ query_ops = set() combined_query = {} for query in queries: ops = set(query.keys()) # Make sure that the same operation isn't applied more than once # to a single field intersection = ops.intersection(query_ops) if intersection: raise DuplicateQueryConditionsError() query_ops.update(ops) combined_query.update(copy.deepcopy(query)) return combined_query class QueryCompilerVisitor(QNodeVisitor): """Compiles the nodes in a query tree to a PyMongo-compatible query dictionary. """ def __init__(self, document): self.document = document def visit_combination(self, combination): operator = "$and" if combination.operation == combination.OR: operator = "$or" return {operator: combination.children} def visit_query(self, query): return transform.query(self.document, **query.query) class QNode(object): """Base class for nodes in query trees. """ AND = 0 OR = 1 def to_query(self, document): query = self.accept(SimplificationVisitor()) query = query.accept(QueryCompilerVisitor(document)) return query<|fim▁hole|> def accept(self, visitor): raise NotImplementedError def _combine(self, other, operation): """Combine this node with another node into a QCombination object. """ if getattr(other, 'empty', True): return self if self.empty: return other return QCombination(operation, [self, other]) @property def empty(self): return False def __or__(self, other): return self._combine(other, self.OR) def __and__(self, other): return self._combine(other, self.AND) class QCombination(QNode): """Represents the combination of several conditions by a given logical operator. """ def __init__(self, operation, children): self.operation = operation self.children = [] for node in children: # If the child is a combination of the same type, we can merge its # children directly into this combinations children if isinstance(node, QCombination) and node.operation == operation: self.children += node.children else: self.children.append(node) def accept(self, visitor): for i in range(len(self.children)): if isinstance(self.children[i], QNode): self.children[i] = self.children[i].accept(visitor) return visitor.visit_combination(self) @property def empty(self): return not bool(self.children) class Q(QNode): """A simple query object, used in a query tree to build up more complex query structures. 
""" def __init__(self, **query): self.query = query def accept(self, visitor): return visitor.visit_query(self) @property def empty(self): return not bool(self.query)<|fim▁end|>
<|file_name|>AllTests.java<|end_file_name|><|fim▁begin|>package de.devisnik.mine.robot.test;<|fim▁hole|>
import de.devisnik.mine.robot.AutoPlayerTest;
import de.devisnik.mine.robot.ConfigurationTest;
import junit.framework.Test;
import junit.framework.TestSuite;

public class AllTests {

	public static Test suite() {
		TestSuite suite = new TestSuite("Tests for de.devisnik.mine.robot");
		//$JUnit-BEGIN$
		suite.addTestSuite(AutoPlayerTest.class);
		suite.addTestSuite(ConfigurationTest.class);
		//$JUnit-END$
		return suite;
	}

}<|fim▁end|>
***************************************************************************/ /*************************************************************************** * *
<|file_name|>fbcode_builder_config.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals 'fbcode_builder steps to build Facebook Thrift' import specs.fbthrift as fbthrift def fbcode_builder_spec(builder): return { 'depends_on': [fbthrift], } config = {<|fim▁hole|> 'github_project': 'facebook/fbthrift', 'fbcode_builder_spec': fbcode_builder_spec, }<|fim▁end|>
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate time; use std::sync::{Arc, mpsc}; use std::thread;<|fim▁hole|> type Float = f64; type Integer = u32; type Callback = Box<Fn(Float) -> Float + Send + Sync + 'static>; type FnThread = Fn() -> Callback + Send + Sync; #[derive(Clone)] struct Integrator { f: Arc<FnThread>, a: Float, b: Float, n: Integer, } impl Integrator { fn new(func: Arc<FnThread>, a: Float, b: Float, iteration: Integer) -> Integrator { Integrator { f: func, a: a, b: b, n: iteration, } } fn call(&self, x: Float) -> Float { (self.f)()(x) } // parallel Monte-Carlo method // share the desired reporting interval and are distributing to threads fn monte_carlo(&self, threads: Integer) -> Float { let mut thread_list = Vec::new(); let h_step = (self.b - self.a) / self.n as Float; let t_step = self.n / threads; let (tx, rx) = mpsc::channel::<Float>(); for i in 0..threads { let local_tx = tx.clone(); let local_self = self.clone(); thread_list.push(thread::spawn(move || { let u_i = |i: Integer| -> Float { local_self.a + h_step * i as Float }; let (x0, x1) = (t_step * i, t_step * (i+1)); // main part of method let sum = (x0..x1).fold(0.0, |acc, i| acc + local_self.call(u_i(i))); local_tx.send(sum).expect("Data not sended!"); })); } let mut result = 0.0; for thread in thread_list { thread.join().expect("Thread can't joined!"); result += rx.recv().expect("Data not recieved!"); } result * h_step } } // linear Monte-Carlo method fn monte_carlo_linear(f: Arc<FnThread>, a: Float, b: Float, n: Integer) -> Float { let h = (b - a) / n as Float; let u_i = |i: Integer| -> Float { a + h * i as Float }; (0..n).fold(0.0, |acc, x| acc + (f())(u_i(x))) * h } // calculated function fn f() -> Callback { Box::new(|x: Float| -> Float { (x.powf(2.0) + 1.0).recip() }) } fn main() { // [a, b] -- interval // n -- iteration count let (a, b, n) = (0.0, 1.0, 10_000_000); let f_a = Integrator::new(Arc::new(f), a, b, n); println!("# Iteration count: {:E}", n as Float); let start = time::get_time(); let pi = monte_carlo_linear(Arc::new(f), a, b, n) * 4.0; let duration = time::get_time() - start; println!("# Linear code"); println!("result = {:+.16}", pi); println!(" err = {:+.16}", std::f64::consts::PI - pi); println!(" time = {} ms\n", duration.num_milliseconds()); for threads in (1..9).filter(|&x| x % 2 == 0) { println!("# Thread count: {}", threads); let start = time::get_time(); let pi = f_a.monte_carlo(threads) * 4.0; let duration = time::get_time() - start; println!("result = {:+.16}", pi); println!(" err = {:+.16}", std::f64::consts::PI - pi); println!(" time = {} ms\n", duration.num_milliseconds()); } }<|fim▁end|>
<|file_name|>forms.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from wtforms import StringField, BooleanField, PasswordField, SelectField, DateTimeField, TextAreaField<|fim▁end|>
from flask.ext.wtf import Form
<|file_name|>config.cpp<|end_file_name|><|fim▁begin|>#include "script_component.hpp" class CfgPatches { class ADDON { units[] = {}; weapons[] = {}; requiredVersion = REQUIRED_VERSION; requiredAddons[] = {"ace_interaction"};<|fim▁hole|> authors[] = {"KoffeinFlummi", "BaerMitUmlaut"}; url = ECSTRING(main,URL); VERSION_CONFIG; }; }; #include "CfgEventHandlers.hpp" #include "CfgMoves.hpp" #include "CfgSounds.hpp" #include "CfgVehicles.hpp" #include "CfgWaypoints.hpp"<|fim▁end|>
author = ECSTRING(common,ACETeam);
<|file_name|>AssignableNode.java<|end_file_name|><|fim▁begin|>package org.apache.helix.controller.rebalancer.waged.model; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.apache.helix.HelixException; import org.apache.helix.controller.rebalancer.util.WagedValidationUtil; import org.apache.helix.model.ClusterConfig; import org.apache.helix.model.InstanceConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class represents a possible allocation of the replication. * Note that any usage updates to the AssignableNode are not thread safe. */ public class AssignableNode implements Comparable<AssignableNode> { private static final Logger LOG = LoggerFactory.getLogger(AssignableNode.class.getName()); // Immutable Instance Properties private final String _instanceName; private final String _faultZone; // maximum number of the partitions that can be assigned to the instance. private final int _maxPartition; private final ImmutableSet<String> _instanceTags; private final ImmutableMap<String, List<String>> _disabledPartitionsMap; private final ImmutableMap<String, Integer> _maxAllowedCapacity; // Mutable (Dynamic) Instance Properties // A map of <resource name, <partition name, replica>> that tracks the replicas assigned to the // node. private Map<String, Map<String, AssignableReplica>> _currentAssignedReplicaMap; // A map of <capacity key, capacity value> that tracks the current available node capacity private Map<String, Integer> _remainingCapacity; /** * Update the node with a ClusterDataCache. This resets the current assignment and recalculates * currentCapacity. * NOTE: While this is required to be used in the constructor, this can also be used when the * clusterCache needs to be * refreshed. This is under the assumption that the capacity mappings of InstanceConfig and * ResourceConfig could * subject to change. If the assumption is no longer true, this function should become private. 
*/ AssignableNode(ClusterConfig clusterConfig, InstanceConfig instanceConfig, String instanceName) { _instanceName = instanceName; Map<String, Integer> instanceCapacity = fetchInstanceCapacity(clusterConfig, instanceConfig); _faultZone = computeFaultZone(clusterConfig, instanceConfig); _instanceTags = ImmutableSet.copyOf(instanceConfig.getTags()); _disabledPartitionsMap = ImmutableMap.copyOf(instanceConfig.getDisabledPartitionsMap()); // make a copy of max capacity _maxAllowedCapacity = ImmutableMap.copyOf(instanceCapacity); _remainingCapacity = new HashMap<>(instanceCapacity); _maxPartition = clusterConfig.getMaxPartitionsPerInstance(); _currentAssignedReplicaMap = new HashMap<>(); } /** * This function should only be used to assign a set of new partitions that are not allocated on * this node. It's because the any exception could occur at the middle of batch assignment and the * previous finished assignment cannot be reverted * Using this function avoids the overhead of updating capacity repeatedly. */ void assignInitBatch(Collection<AssignableReplica> replicas) { Map<String, Integer> totalPartitionCapacity = new HashMap<>(); for (AssignableReplica replica : replicas) { // TODO: the exception could occur in the middle of for loop and the previous added records cannot be reverted addToAssignmentRecord(replica); // increment the capacity requirement according to partition's capacity configuration. for (Map.Entry<String, Integer> capacity : replica.getCapacity().entrySet()) { totalPartitionCapacity.compute(capacity.getKey(), (key, totalValue) -> (totalValue == null) ? capacity.getValue() : totalValue + capacity.getValue()); } } // Update the global state after all single replications' calculation is done. for (String capacityKey : totalPartitionCapacity.keySet()) { updateRemainingCapacity(capacityKey, totalPartitionCapacity.get(capacityKey)); } } /** * Assign a replica to the node. * @param assignableReplica - the replica to be assigned */ void assign(AssignableReplica assignableReplica) { addToAssignmentRecord(assignableReplica); assignableReplica.getCapacity().entrySet().stream() .forEach(capacity -> updateRemainingCapacity(capacity.getKey(), capacity.getValue())); } /** * Release a replica from the node. * If the replication is not on this node, the assignable node is not updated. * @param replica - the replica to be released */ void release(AssignableReplica replica) throws IllegalArgumentException { String resourceName = replica.getResourceName(); String partitionName = replica.getPartitionName(); // Check if the release is necessary if (!_currentAssignedReplicaMap.containsKey(resourceName)) { LOG.warn("Resource {} is not on node {}. Ignore the release call.", resourceName, getInstanceName()); return; } Map<String, AssignableReplica> partitionMap = _currentAssignedReplicaMap.get(resourceName); if (!partitionMap.containsKey(partitionName) || !partitionMap.get(partitionName) .equals(replica)) { LOG.warn("Replica {} is not assigned to node {}. Ignore the release call.", replica.toString(), getInstanceName()); return; } AssignableReplica removedReplica = partitionMap.remove(partitionName); removedReplica.getCapacity().entrySet().stream() .forEach(entry -> updateRemainingCapacity(entry.getKey(), -1 * entry.getValue())); } /** * @return A set of all assigned replicas on the node. 
*/ Set<AssignableReplica> getAssignedReplicas() { return _currentAssignedReplicaMap.values().stream() .flatMap(replicaMap -> replicaMap.values().stream()).collect(Collectors.toSet()); } /** * @return The current assignment in a map of <resource name, set of partition names> */ Map<String, Set<String>> getAssignedPartitionsMap() { Map<String, Set<String>> assignmentMap = new HashMap<>(); for (String resourceName : _currentAssignedReplicaMap.keySet()) { assignmentMap.put(resourceName, _currentAssignedReplicaMap.get(resourceName).keySet()); } return assignmentMap; } /** * @param resource Resource name * @return A set of the current assigned replicas' partition names in the specified resource. */ public Set<String> getAssignedPartitionsByResource(String resource) { return _currentAssignedReplicaMap.getOrDefault(resource, Collections.emptyMap()).keySet(); } /** * @param resource Resource name * @return A set of the current assigned replicas' partition names with the top state in the * specified resource. */ Set<String> getAssignedTopStatePartitionsByResource(String resource) { return _currentAssignedReplicaMap.getOrDefault(resource, Collections.emptyMap()).entrySet() .stream().filter(partitionEntry -> partitionEntry.getValue().isReplicaTopState()) .map(partitionEntry -> partitionEntry.getKey()).collect(Collectors.toSet()); } /** * @return The total count of assigned top state partitions. */ public int getAssignedTopStatePartitionsCount() { return (int) _currentAssignedReplicaMap.values().stream() .flatMap(replicaMap -> replicaMap.values().stream()) .filter(AssignableReplica::isReplicaTopState).count(); } /** * @return The total count of assigned replicas. */ public int getAssignedReplicaCount() { return _currentAssignedReplicaMap.values().stream().mapToInt(Map::size).sum(); } /** * @return The current available capacity. */ public Map<String, Integer> getRemainingCapacity() { return _remainingCapacity; } /** * @return A map of <capacity category, capacity number> that describes the max capacity of the * node. */ public Map<String, Integer> getMaxCapacity() { return _maxAllowedCapacity; } /** * Return the most concerning capacity utilization number for evenly partition assignment. * The method dynamically calculates the projected highest utilization number among all the<|fim▁hole|> * @param newUsage the proposed new additional capacity usage. * @return The highest utilization number of the node among all the capacity category. */ public float getProjectedHighestUtilization(Map<String, Integer> newUsage) { float highestCapacityUtilization = 0; for (String capacityKey : _maxAllowedCapacity.keySet()) { float capacityValue = _maxAllowedCapacity.get(capacityKey); float utilization = (capacityValue - _remainingCapacity.get(capacityKey) + newUsage .getOrDefault(capacityKey, 0)) / capacityValue; highestCapacityUtilization = Math.max(highestCapacityUtilization, utilization); } return highestCapacityUtilization; } public String getInstanceName() { return _instanceName; } public Set<String> getInstanceTags() { return _instanceTags; } public String getFaultZone() { return _faultZone; } public boolean hasFaultZone() { return _faultZone != null; } /** * @return A map of <resource name, set of partition names> contains all the partitions that are * disabled on the node. */ public Map<String, List<String>> getDisabledPartitionsMap() { return _disabledPartitionsMap; } /** * @return The max partition count that are allowed to be allocated on the node. 
*/ public int getMaxPartition() { return _maxPartition; } /** * Computes the fault zone id based on the domain and fault zone type when topology is enabled. * For example, when * the domain is "zone=2, instance=testInstance" and the fault zone type is "zone", this function * returns "2". * If cannot find the fault zone type, this function leaves the fault zone id as the instance name. * Note the WAGED rebalancer does not require full topology tree to be created. So this logic is * simpler than the CRUSH based rebalancer. */ private String computeFaultZone(ClusterConfig clusterConfig, InstanceConfig instanceConfig) { if (!clusterConfig.isTopologyAwareEnabled()) { // Instance name is the default fault zone if topology awareness is false. return instanceConfig.getInstanceName(); } String topologyStr = clusterConfig.getTopology(); String faultZoneType = clusterConfig.getFaultZoneType(); if (topologyStr == null || faultZoneType == null) { LOG.debug("Topology configuration is not complete. Topology define: {}, Fault Zone Type: {}", topologyStr, faultZoneType); // Use the instance name, or the deprecated ZoneId field (if exists) as the default fault // zone. String zoneId = instanceConfig.getZoneId(); return zoneId == null ? instanceConfig.getInstanceName() : zoneId; } else { // Get the fault zone information from the complete topology definition. String[] topologyKeys = topologyStr.trim().split("/"); if (topologyKeys.length == 0 || Arrays.stream(topologyKeys) .noneMatch(type -> type.equals(faultZoneType))) { throw new HelixException( "The configured topology definition is empty or does not contain the fault zone type."); } Map<String, String> domainAsMap = instanceConfig.getDomainAsMap(); StringBuilder faultZoneStringBuilder = new StringBuilder(); for (String key : topologyKeys) { if (!key.isEmpty()) { // if a key does not exist in the instance domain config, apply the default domain value. faultZoneStringBuilder.append(domainAsMap.getOrDefault(key, "Default_" + key)); if (key.equals(faultZoneType)) { break; } else { faultZoneStringBuilder.append('/'); } } } return faultZoneStringBuilder.toString(); } } /** * @throws HelixException if the replica has already been assigned to the node. */ private void addToAssignmentRecord(AssignableReplica replica) { String resourceName = replica.getResourceName(); String partitionName = replica.getPartitionName(); if (_currentAssignedReplicaMap.containsKey(resourceName) && _currentAssignedReplicaMap .get(resourceName).containsKey(partitionName)) { throw new HelixException(String .format("Resource %s already has a replica with state %s from partition %s on node %s", replica.getResourceName(), replica.getReplicaState(), replica.getPartitionName(), getInstanceName())); } else { _currentAssignedReplicaMap.computeIfAbsent(resourceName, key -> new HashMap<>()) .put(partitionName, replica); } } private void updateRemainingCapacity(String capacityKey, int usage) { if (!_remainingCapacity.containsKey(capacityKey)) { //if the capacityKey belongs to replicas does not exist in the instance's capacity, // it will be treated as if it has unlimited capacity of that capacityKey return; } _remainingCapacity.put(capacityKey, _remainingCapacity.get(capacityKey) - usage); } /** * Get and validate the instance capacity from instance config. * @throws HelixException if any required capacity key is not configured in the instance config. 
*/ private Map<String, Integer> fetchInstanceCapacity(ClusterConfig clusterConfig, InstanceConfig instanceConfig) { Map<String, Integer> instanceCapacity = WagedValidationUtil.validateAndGetInstanceCapacity(clusterConfig, instanceConfig); // Remove all the non-required capacity items from the map. instanceCapacity.keySet().retainAll(clusterConfig.getInstanceCapacityKeys()); return instanceCapacity; } @Override public int hashCode() { return _instanceName.hashCode(); } @Override public int compareTo(AssignableNode o) { return _instanceName.compareTo(o.getInstanceName()); } @Override public String toString() { return _instanceName; } }<|fim▁end|>
 * capacity categories assuming the new capacity usage is added to the node.
 * For example, if the current node usage is {CPU: 0.9, MEM: 0.4, DISK: 0.6},
 * then this call shall return 0.9.
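[editor's note] The getProjectedHighestUtilization contract above is easiest to see numerically: with per-key usage ratios {CPU: 0.9, MEM: 0.4, DISK: 0.6}, the projected highest utilization is 0.9. A small Python rendering of that max-over-keys calculation; the capacities below are arbitrary illustrative numbers.

# Mirror of the projected-utilization math: for each capacity key take
# (used + proposed) / max, then keep the highest ratio across keys.
def projected_highest_utilization(max_cap, remaining, new_usage):
    highest = 0.0
    for key, cap in max_cap.items():
        used = cap - remaining[key] + new_usage.get(key, 0)
        highest = max(highest, used / cap)
    return highest


max_cap = {"CPU": 100, "MEM": 100, "DISK": 100}
remaining = {"CPU": 10, "MEM": 60, "DISK": 40}   # usage 0.9 / 0.4 / 0.6
assert projected_highest_utilization(max_cap, remaining, {}) == 0.9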
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin

from .models import Lesson, Series


class LessonAdmin(admin.ModelAdmin):
    pass


class SeriesAdmin(admin.ModelAdmin):
    pass

<|fim▁hole|><|fim▁end|>
admin.site.register(Lesson, LessonAdmin)
admin.site.register(Series, SeriesAdmin)
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export * from './MessagePipeFrontend.class';<|fim▁end|>
export * from './messaging.definitions';
export * from './MessagePipeBackend.class';
<|file_name|>Formatter.cpp<|end_file_name|><|fim▁begin|>#define BOOST_TEST_NO_LIB #include <boost/test/auto_unit_test.hpp> #include "dormouse-engine/essentials/Formatter.hpp" <|fim▁hole|>namespace { BOOST_AUTO_TEST_SUITE(FormatterTestSuite); BOOST_AUTO_TEST_CASE(FormatsEmptyString) { std::string s; Formatter f(Formatter::FormatterChars('$', '{', '}', '\\')); Formatter::FormatList result; f.format(result, s); BOOST_CHECK(result.empty()); } BOOST_AUTO_TEST_CASE(FormatsNoSpecial) { std::string s = "No special characters here ({} is not special if no dollar sign before)"; Formatter f(Formatter::FormatterChars('$', '{', '}', '\\')); Formatter::FormatList result; f.format(result, s); BOOST_REQUIRE(result.size() == 1); BOOST_CHECK_EQUAL(result.front().function, '\0'); BOOST_CHECK_EQUAL(result.front().opts, s); } BOOST_AUTO_TEST_CASE(FormatsAllEscaped) { std::string s = "No special characters here (neither \\$f nor \\${opts}f is not special if \\$ is preceded by \\\\)"; std::string escaped = "No special characters here (neither $f nor ${opts}f is not special if $ is preceded by \\)"; Formatter f(Formatter::FormatterChars('$', '{', '}', '\\')); Formatter::FormatList result; f.format(result, s); BOOST_REQUIRE(result.size() == 1); BOOST_CHECK_EQUAL(result.front().function, '\0'); BOOST_CHECK_EQUAL(result.front().opts, escaped); } BOOST_AUTO_TEST_CASE(FormatsOnlySpecial) { std::string s = "%1%[opts]2%3"; Formatter f(Formatter::FormatterChars('%', '[', ']', '\\')); Formatter::FormatList result; f.format(result, s); BOOST_REQUIRE(result.size() == 3); Formatter::FormatList::iterator it = result.begin(); BOOST_CHECK_EQUAL(it->function, '1'); BOOST_CHECK_EQUAL(it->opts, ""); ++it; BOOST_CHECK_EQUAL(it->function, '2'); BOOST_CHECK_EQUAL(it->opts, "opts"); ++it; BOOST_CHECK_EQUAL(it->function, '3'); BOOST_CHECK_EQUAL(it->opts, ""); } BOOST_AUTO_TEST_CASE(FormatsMixed) { std::string s = "This is a mixed %s with opts %(012\\(\\)3)d and an escaped \\% char"; Formatter f(Formatter::FormatterChars('%', '(', ')', '\\')); Formatter::FormatList result; f.format(result, s); BOOST_REQUIRE(result.size() == 5); Formatter::FormatList::iterator it = result.begin(); BOOST_CHECK_EQUAL(it->function, '\0'); BOOST_CHECK_EQUAL(it->opts, "This is a mixed "); ++it; BOOST_CHECK_EQUAL(it->function, 's'); BOOST_CHECK_EQUAL(it->opts, ""); ++it; BOOST_CHECK_EQUAL(it->function, '\0'); BOOST_CHECK_EQUAL(it->opts, " with opts "); ++it; BOOST_CHECK_EQUAL(it->function, 'd'); BOOST_CHECK_EQUAL(it->opts, "012()3"); ++it; BOOST_CHECK_EQUAL(it->function, '\0'); BOOST_CHECK_EQUAL(it->opts, " and an escaped % char"); } BOOST_AUTO_TEST_CASE(ThrowsOnMissingFunction) { Formatter f(Formatter::FormatterChars('$', '{', '}', '\\')); Formatter::FormatList result; std::string s = "$"; BOOST_CHECK_THROW(f.format(result, s), FormatterError); s = "${}"; BOOST_CHECK_THROW(f.format(result, s), FormatterError); s = "$$a"; BOOST_CHECK_THROW(f.format(result, s), FormatterError); } BOOST_AUTO_TEST_CASE(ThrowsOnUnclosedOpts) { Formatter f(Formatter::FormatterChars('$', '{', '}', '\\')); Formatter::FormatList result; std::string s = "${opts$n"; BOOST_CHECK_THROW(f.format(result, s), FormatterError); } BOOST_AUTO_TEST_CASE(ThrowsOnExtraEscape) { Formatter f(Formatter::FormatterChars('$', '{', '}', '!')); Formatter::FormatList result; std::string s = "Just a text with an extra ! 
char"; BOOST_CHECK_THROW(f.format(result, s), FormatterError); s = "Text and format with escaped {: $!{"; BOOST_CHECK_THROW(f.format(result, s), FormatterError); } BOOST_AUTO_TEST_SUITE_END(); } // anonymous namespace<|fim▁end|>
using namespace dormouse_engine::essentials;
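[editor's note] The Boost tests above pin down a small grammar: a special character introduces a one-character format function, an optional bracketed opts string precedes it, and an escape character protects any of the specials. A compact Python parse loop that satisfies the same cases; the ("\0", literal) chunk encoding mirrors the FormatList convention, everything else is my naming.

# Minimal parser for the ${opts}f-style grammar exercised above:
# special char, optional delimited opts, one-char function, escapes.
def parse(text, special="$", open_="{", close="}", esc="\\"):
    chunks, literal, i = [], "", 0
    while i < len(text):
        ch = text[i]
        if ch == esc:
            if i + 1 >= len(text):
                raise ValueError("dangling escape")
            literal += text[i + 1]
            i += 2
        elif ch == special:
            if literal:
                chunks.append(("\0", literal))
                literal = ""
            i += 1
            opts = ""
            if i < len(text) and text[i] == open_:
                end = text.index(close, i)  # ValueError if opts never closed
                opts, i = text[i + 1:end], end + 1
            if i >= len(text):
                raise ValueError("missing function char")
            chunks.append((text[i], opts))
            i += 1
        else:
            literal += ch
            i += 1
    if literal:
        chunks.append(("\0", literal))
    return chunks


assert parse("a $s b ${x}d") == [("\0", "a "), ("s", ""), ("\0", " b "), ("d", "x")]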
<|file_name|>patch_set.js<|end_file_name|><|fim▁begin|>"use strict"; // https://codereview.chromium.org/api/148223004/70001/?comments=true function PatchSet(issue, id, sequence) { this.files = []; // Array<PatchFile> this.sourceFiles = []; // Array<PatchFile> this.testFiles = []; // Array<PatchFile> this.tryJobResults = []; // Array<tryJobResults> this.created = ""; // Date this.messageCount = 0;<|fim▁hole|> this.issue = issue || null; this.owner = null // User this.message = ""; this.id = id || 0; this.sequence = sequence || 0; this.commit = false; this.mostRecent = false; this.active = false; } PatchSet.DETAIL_URL = "/api/{1}/{2}/?comments=true" PatchSet.REVERT_URL = "/api/{1}/{2}/revert"; PatchSet.prototype.getDetailUrl = function() { return PatchSet.DETAIL_URL.assign( encodeURIComponent(this.issue.id), encodeURIComponent(this.id)); }; PatchSet.prototype.getRevertUrl = function() { return PatchSet.REVERT_URL.assign( encodeURIComponent(this.issue.id), encodeURIComponent(this.id)); }; PatchSet.prototype.loadDetails = function() { var patchset = this; return loadJSON(this.getDetailUrl()).then(function(data) { patchset.parseData(data); return patchset; }); }; PatchSet.prototype.revert = function(options) { if (!options.reason) return Promise.reject(new Error("Must supply a reason")); var patchset = this; return this.createRevertData(options).then(function(data) { return sendFormData(patchset.getRevertUrl(), data); }); }; PatchSet.prototype.createRevertData = function(options) { return User.loadCurrentUser().then(function(user) { return { xsrf_token: user.xsrfToken, revert_reason: options.reason, revert_cq: options.commit ? "1" : "0", }; }); }; PatchSet.prototype.parseData = function(data) { var patchset = this; if (!this.issue || data.issue != this.issue.id || data.patchset != this.id) { throw new Error("Invalid patchset loaded " + data.issue + " != " + this.issue.id + " or " + data.patchset + " != " + this.id); } this.owner = new User(data.owner); this.message = data.message || ""; this.lastModified = Date.utc.create(data.modified); this.created = Date.utc.create(data.created); Object.keys(data.files || {}, function(name, value) { var file = new PatchFile(patchset, name); file.parseData(value); patchset.files.push(file); }); this.files.sort(PatchFile.compare); this.files.forEach(function(file) { if (file.isLayoutTest) this.testFiles.push(file); else this.sourceFiles.push(file); }, this); var tryResults = (data.try_job_results || []).groupBy("builder"); this.tryJobResults = Object.keys(tryResults) .sort() .map(function(builder) { var jobSet = new TryJobResultSet(builder); jobSet.results = tryResults[builder].map(function(resultData) { var result = new TryJobResult(); result.parseData(resultData); return result; }).reverse(); return jobSet; }); };<|fim▁end|>
this.draftCount = 0;
this.lastModified = ""; // Date
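[editor's note] PatchSet above builds its Rietveld API endpoints by substituting the issue and patchset ids into "/api/{1}/{2}/..." templates and posts an xsrf-protected form for reverts. The same two steps sketched in Python; the form field names are copied from the snippet, the rest is assumed.

# Sketch of the URL templating + revert form assembly from patch_set.js.
from urllib.parse import quote

DETAIL_URL = "/api/{issue}/{patchset}/?comments=true"
REVERT_URL = "/api/{issue}/{patchset}/revert"


def detail_url(issue_id, patchset_id):
    return DETAIL_URL.format(issue=quote(str(issue_id)),
                             patchset=quote(str(patchset_id)))


def revert_form(xsrf_token, reason, commit=False):
    if not reason:
        raise ValueError("Must supply a reason")
    return {
        "xsrf_token": xsrf_token,
        "revert_reason": reason,
        "revert_cq": "1" if commit else "0",
    }


assert detail_url(148223004, 70001) == "/api/148223004/70001/?comments=true"
assert revert_form("tok", "broke build")["revert_cq"] == "0"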
<|file_name|>MainController.ts<|end_file_name|><|fim▁begin|>import { BaseController } from "./BaseController";
import { Route } from "./BaseController";
import * as loadHtml from "./HtmlLoader";

export class MainController extends BaseController {
    main = (): void => {
        loadHtml.load(this.requestData, '/views/index.html', {});
    }
<|fim▁hole|>
    ];
}<|fim▁end|>
routes: Route[] = [
    new Route("/", this.main)
<|file_name|>JavaDocConfigurationImpl.java<|end_file_name|><|fim▁begin|>package com.github.setial.intellijjavadocs.configuration.impl; import com.github.setial.intellijjavadocs.configuration.JavaDocConfiguration; import com.github.setial.intellijjavadocs.exception.SetupTemplateException; import com.github.setial.intellijjavadocs.model.settings.JavaDocSettings; import com.github.setial.intellijjavadocs.model.settings.Level; import com.github.setial.intellijjavadocs.model.settings.Mode; import com.github.setial.intellijjavadocs.model.settings.Visibility; import com.github.setial.intellijjavadocs.template.DocTemplateManager; import com.intellij.openapi.components.PersistentStateComponent; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.components.State; import com.intellij.openapi.components.Storage; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.ui.Messages; import org.jdom.Element; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.HashSet; import java.util.Set; import static com.github.setial.intellijjavadocs.configuration.JavaDocConfiguration.COMPONENT_CONFIG_PATH; /** * The type Java doc configuration impl. * * @author Sergey Timofiychuk */ @State( name = JavaDocConfiguration.COMPONENT_NAME, storages = { @Storage(value = COMPONENT_CONFIG_PATH) } ) public class JavaDocConfigurationImpl implements JavaDocConfiguration, PersistentStateComponent<Element> { public static final String JAVADOCS_PLUGIN_TITLE_MSG = "Javadocs plugin"; private static final Logger LOGGER = Logger.getInstance(JavaDocConfigurationImpl.class); private JavaDocSettings settings; private DocTemplateManager templateManager; private boolean loadedStoredConfig = false; /** * Instantiates a new Java doc configuration object. 
*/ public JavaDocConfigurationImpl() { templateManager = ServiceManager.getService(DocTemplateManager.class); initSettings(); } @Override public JavaDocSettings getConfiguration() { return settings; } @Nullable @Override public Element getState() { Element root = new Element("JAVA_DOC_SETTINGS_PLUGIN"); if (settings != null) {<|fim▁hole|> } return root; } @Override public void loadState(@NotNull Element javaDocSettings) { settings = new JavaDocSettings(javaDocSettings); setupTemplates(); loadedStoredConfig = true; } private void initSettings() { if (!loadedStoredConfig) { // setup default values settings = new JavaDocSettings(); Set<Level> levels = new HashSet<>(); levels.add(Level.TYPE); levels.add(Level.METHOD); levels.add(Level.FIELD); Set<Visibility> visibilities = new HashSet<>(); visibilities.add(Visibility.PUBLIC); visibilities.add(Visibility.PROTECTED); visibilities.add(Visibility.DEFAULT); settings.getGeneralSettings().setOverriddenMethods(false); settings.getGeneralSettings().setSplittedClassName(true); settings.getGeneralSettings().setMode(Mode.UPDATE); settings.getGeneralSettings().setLevels(levels); settings.getGeneralSettings().setVisibilities(visibilities); settings.getTemplateSettings().setClassTemplates(templateManager.getClassTemplates()); settings.getTemplateSettings().setConstructorTemplates(templateManager.getConstructorTemplates()); settings.getTemplateSettings().setMethodTemplates(templateManager.getMethodTemplates()); settings.getTemplateSettings().setFieldTemplates(templateManager.getFieldTemplates()); } } @Override public void setupTemplates() { try { templateManager.setClassTemplates(settings.getTemplateSettings().getClassTemplates()); templateManager.setConstructorTemplates(settings.getTemplateSettings().getConstructorTemplates()); templateManager.setMethodTemplates(settings.getTemplateSettings().getMethodTemplates()); templateManager.setFieldTemplates(settings.getTemplateSettings().getFieldTemplates()); } catch (SetupTemplateException e) { LOGGER.error(e); Messages.showErrorDialog("Javadocs plugin is not available, cause: " + e.getMessage(), JAVADOCS_PLUGIN_TITLE_MSG); } } }<|fim▁end|>
settings.addToDom(root);
loadedStoredConfig = true;
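[editor's note] getState/loadState above implement IntelliJ's PersistentStateComponent contract: dump the settings into an XML element on save, rebuild them from that element on load. An ElementTree analogue of the round trip, purely illustrative and unrelated to the JDOM API the plugin actually uses:

# Illustrative save/load round trip in the style of getState/loadState.
import xml.etree.ElementTree as ET


def get_state(settings):
    root = ET.Element("JAVA_DOC_SETTINGS_PLUGIN")
    for key, value in settings.items():
        ET.SubElement(root, "option", name=key, value=str(value))
    return root


def load_state(root):
    return {opt.get("name"): opt.get("value") for opt in root.iter("option")}


state = get_state({"mode": "UPDATE", "splittedClassName": True})
assert load_state(state)["mode"] == "UPDATE"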
<|file_name|>AMLUtils.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2011-2013 Team XBMC * http://kodi.tv * * This Program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2, or (at your option) * any later version. * * This Program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with XBMC; see the file COPYING. If not, see * <http://www.gnu.org/licenses/>. * */ #include <unistd.h> #include <string.h> #include <stdio.h> #include <stdlib.h> #include <fcntl.h> #include <string> #include "AMLUtils.h" #include "utils/CPUInfo.h" #include "utils/log.h" #include "utils/SysfsUtils.h" #include "utils/StringUtils.h" #include "guilib/gui3d.h" #include "utils/RegExp.h" #include "filesystem/SpecialProtocol.h" #include "rendering/RenderSystem.h" #include "linux/fb.h" #include <sys/ioctl.h> bool aml_present() { static int has_aml = -1; if (has_aml == -1) { if (SysfsUtils::Has("/sys/class/audiodsp/digital_raw")) has_aml = 1; else has_aml = 0; if (has_aml) CLog::Log(LOGNOTICE, "AML device detected"); } return has_aml == 1; } bool aml_wired_present() { static int has_wired = -1; if (has_wired == -1) { std::string test; if (SysfsUtils::GetString("/sys/class/net/eth0/operstate", test) != -1) has_wired = 1; else has_wired = 0; } return has_wired == 1; } bool aml_permissions() { if (!aml_present()) return false; static int permissions_ok = -1; if (permissions_ok == -1) { permissions_ok = 1; if (!SysfsUtils::HasRW("/dev/amvideo")) { CLog::Log(LOGERROR, "AML: no rw on /dev/amvideo"); permissions_ok = 0; } if (!SysfsUtils::HasRW("/dev/amstream_mpts")) { CLog::Log(LOGERROR, "AML: no rw on /dev/amstream*"); permissions_ok = 0; } if (!SysfsUtils::HasRW("/sys/class/video/axis")) { CLog::Log(LOGERROR, "AML: no rw on /sys/class/video/axis"); permissions_ok = 0; } if (!SysfsUtils::HasRW("/sys/class/video/screen_mode")) { CLog::Log(LOGERROR, "AML: no rw on /sys/class/video/screen_mode"); permissions_ok = 0; } if (!SysfsUtils::HasRW("/sys/class/video/disable_video")) { CLog::Log(LOGERROR, "AML: no rw on /sys/class/video/disable_video"); permissions_ok = 0; } if (!SysfsUtils::HasRW("/sys/class/tsync/pts_pcrscr")) { CLog::Log(LOGERROR, "AML: no rw on /sys/class/tsync/pts_pcrscr"); permissions_ok = 0; } if (!SysfsUtils::HasRW("/dev/video10")) { CLog::Log(LOGERROR, "AML: no rw on /dev/video10"); permissions_ok = 0; } if (!SysfsUtils::HasRW("/sys/module/amlvideodri/parameters/freerun_mode")) { CLog::Log(LOGERROR, "AML: no rw on /sys/module/amlvideodri/parameters/freerun_mode"); permissions_ok = 0; } if (!SysfsUtils::HasRW("/sys/class/audiodsp/digital_raw")) { CLog::Log(LOGERROR, "AML: no rw on /sys/class/audiodsp/digital_raw"); } if (!SysfsUtils::HasRW("/sys/class/amhdmitx/amhdmitx0/config")) { CLog::Log(LOGERROR, "AML: no rw on /sys/class/amhdmitx/amhdmitx0/config"); } if (!SysfsUtils::HasRW("/sys/class/vfm/map")) { CLog::Log(LOGERROR, "AML: no rw on /sys/class/vfm/map"); } if (!SysfsUtils::HasRW("/sys/class/tsync/enable")) { CLog::Log(LOGERROR, "AML: no rw on /sys/class/tsync/enable"); } if (!SysfsUtils::HasRW("/sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq")) { CLog::Log(LOGERROR, "AML: no rw on 
/sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq"); } if (!SysfsUtils::HasRW("/sys/devices/system/cpu/cpu0/cpufreq/scaling_max_freq")) { CLog::Log(LOGERROR, "AML: no rw on /sys/devices/system/cpu/cpu0/cpufreq/scaling_max_freq"); } if (!SysfsUtils::HasRW("/sys/devices/system/cpu/cpu0/cpufreq/scaling_governor")) { CLog::Log(LOGERROR, "AML: no rw on /sys/devices/system/cpu/cpu0/cpufreq/scaling_governor"); } if (aml_has_frac_rate_policy() && !SysfsUtils::HasRW("/sys/class/amhdmitx/amhdmitx0/frac_rate_policy")) { CLog::Log(LOGERROR, "AML: no rw on /sys/class/amhdmitx/amhdmitx0/frac_rate_policy"); } } return permissions_ok == 1; } bool aml_support_hevc() { static int has_hevc = -1; if (has_hevc == -1) { std::string valstr; if(SysfsUtils::GetString("/sys/class/amstream/vcodec_profile", valstr) != 0) has_hevc = 0; else has_hevc = (valstr.find("hevc:") != std::string::npos) ? 1: 0; } return (has_hevc == 1); } bool aml_support_hevc_4k2k() { static int has_hevc_4k2k = -1; if (has_hevc_4k2k == -1) { CRegExp regexp; regexp.RegComp("hevc:.*4k"); std::string valstr; if (SysfsUtils::GetString("/sys/class/amstream/vcodec_profile", valstr) != 0) has_hevc_4k2k = 0; else has_hevc_4k2k = (regexp.RegFind(valstr) >= 0) ? 1 : 0; } return (has_hevc_4k2k == 1); } bool aml_support_hevc_10bit() { static int has_hevc_10bit = -1; if (has_hevc_10bit == -1) { CRegExp regexp; regexp.RegComp("hevc:.*10bit"); std::string valstr; if (SysfsUtils::GetString("/sys/class/amstream/vcodec_profile", valstr) != 0) has_hevc_10bit = 0; else has_hevc_10bit = (regexp.RegFind(valstr) >= 0) ? 1 : 0; } return (has_hevc_10bit == 1); } AML_SUPPORT_H264_4K2K aml_support_h264_4k2k() { static AML_SUPPORT_H264_4K2K has_h264_4k2k = AML_SUPPORT_H264_4K2K_UNINIT; if (has_h264_4k2k == AML_SUPPORT_H264_4K2K_UNINIT) { std::string valstr; if (SysfsUtils::GetString("/sys/class/amstream/vcodec_profile", valstr) != 0) has_h264_4k2k = AML_NO_H264_4K2K; else if (valstr.find("h264:4k") != std::string::npos) has_h264_4k2k = AML_HAS_H264_4K2K_SAME_PROFILE; else if (valstr.find("h264_4k2k:") != std::string::npos) has_h264_4k2k = AML_HAS_H264_4K2K; else has_h264_4k2k = AML_NO_H264_4K2K; } return has_h264_4k2k; } bool aml_support_vp9() { static int has_vp9 = -1; if (has_vp9 == -1) { CRegExp regexp; regexp.RegComp("vp9:.*compressed"); std::string valstr; if (SysfsUtils::GetString("/sys/class/amstream/vcodec_profile", valstr) != 0) has_vp9 = 0; else has_vp9 = (regexp.RegFind(valstr) >= 0) ? 1 : 0; } return (has_vp9 == 1); } bool aml_has_frac_rate_policy() { static int has_frac_rate_policy = -1; if (has_frac_rate_policy == -1) has_frac_rate_policy = SysfsUtils::Has("/sys/class/amhdmitx/amhdmitx0/frac_rate_policy"); return (has_frac_rate_policy == 1); } void aml_set_audio_passthrough(bool passthrough) { SysfsUtils::SetInt("/sys/class/audiodsp/digital_raw", passthrough ? 
2:0); } void aml_probe_hdmi_audio() { // Audio {format, channel, freq, cce} // {1, 7, 7f, 7} // {7, 5, 1e, 0} // {2, 5, 7, 0} // {11, 7, 7e, 1} // {10, 7, 6, 0} // {12, 7, 7e, 0} int fd = open("/sys/class/amhdmitx/amhdmitx0/edid", O_RDONLY); if (fd >= 0) { char valstr[1024] = {0}; read(fd, valstr, sizeof(valstr) - 1); valstr[strlen(valstr)] = '\0'; close(fd); std::vector<std::string> probe_str = StringUtils::Split(valstr, "\n"); for (std::vector<std::string>::const_iterator i = probe_str.begin(); i != probe_str.end(); ++i) { if (i->find("Audio") == std::string::npos) { for (std::vector<std::string>::const_iterator j = i + 1; j != probe_str.end(); ++j) { if (j->find("{1,") != std::string::npos) printf(" PCM found {1,\n"); else if (j->find("{2,") != std::string::npos) printf(" AC3 found {2,\n"); else if (j->find("{3,") != std::string::npos) printf(" MPEG1 found {3,\n"); else if (j->find("{4,") != std::string::npos) printf(" MP3 found {4,\n"); else if (j->find("{5,") != std::string::npos) printf(" MPEG2 found {5,\n"); else if (j->find("{6,") != std::string::npos) printf(" AAC found {6,\n"); else if (j->find("{7,") != std::string::npos) printf(" DTS found {7,\n"); else if (j->find("{8,") != std::string::npos) printf(" ATRAC found {8,\n"); else if (j->find("{9,") != std::string::npos) printf(" One_Bit_Audio found {9,\n"); else if (j->find("{10,") != std::string::npos) printf(" Dolby found {10,\n"); else if (j->find("{11,") != std::string::npos) printf(" DTS_HD found {11,\n"); else if (j->find("{12,") != std::string::npos) printf(" MAT found {12,\n"); else if (j->find("{13,") != std::string::npos) printf(" ATRAC found {13,\n"); else if (j->find("{14,") != std::string::npos) printf(" WMA found {14,\n"); else break; } break; } } } } int aml_axis_value(AML_DISPLAY_AXIS_PARAM param) { std::string axis; int value[8]; SysfsUtils::GetString("/sys/class/display/axis", axis); sscanf(axis.c_str(), "%d %d %d %d %d %d %d %d", &value[0], &value[1], &value[2], &value[3], &value[4], &value[5], &value[6], &value[7]); return value[param]; } bool aml_IsHdmiConnected() { int hpd_state; SysfsUtils::GetInt("/sys/class/amhdmitx/amhdmitx0/hpd_state", hpd_state); if (hpd_state == 2) { return 1; } return 0; } bool aml_mode_to_resolution(const char *mode, RESOLUTION_INFO *res) { if (!res) return false; res->iWidth = 0; res->iHeight= 0; if(!mode) return false; std::string fromMode = mode; StringUtils::Trim(fromMode); // strips, for example, 720p* to 720p // the * indicate the 'native' mode of the display if (StringUtils::EndsWith(fromMode, "*")) fromMode.erase(fromMode.size() - 1); if (StringUtils::EqualsNoCase(fromMode, "panel")) { res->iWidth = aml_axis_value(AML_DISPLAY_AXIS_PARAM_WIDTH); res->iHeight= aml_axis_value(AML_DISPLAY_AXIS_PARAM_HEIGHT); res->iScreenWidth = aml_axis_value(AML_DISPLAY_AXIS_PARAM_WIDTH); res->iScreenHeight= aml_axis_value(AML_DISPLAY_AXIS_PARAM_HEIGHT); res->fRefreshRate = 60; res->dwFlags = D3DPRESENTFLAG_PROGRESSIVE; } else if (StringUtils::EqualsNoCase(fromMode, "4k2ksmpte") || StringUtils::EqualsNoCase(fromMode, "smpte24hz")) { res->iWidth = 1920; res->iHeight= 1080; res->iScreenWidth = 4096; res->iScreenHeight= 2160; res->fRefreshRate = 24; res->dwFlags = D3DPRESENTFLAG_PROGRESSIVE; } else { int width = 0, height = 0, rrate = 60; char smode = 'p'; if (sscanf(fromMode.c_str(), "%dx%dp%dhz", &width, &height, &rrate) == 3) { smode = 'p'; } else if (sscanf(fromMode.c_str(), "%d%[ip]%dhz", &height, &smode, &rrate) >= 2) { switch (height) { case 480: case 576: width = 720; break; case 720: 
width = 1280; break; case 1080: width = 1920; break; case 2160: width = 3840; break; } } else if (sscanf(fromMode.c_str(), "%dcvbs", &height) == 1) { width = 720; smode = 'i'; rrate = (height == 576) ? 50 : 60; } else if (sscanf(fromMode.c_str(), "4k2k%d", &rrate) == 1) { width = 3840; height = 2160; smode = 'p'; } else { return false; } res->iWidth = (width < 3840) ? width : 1920; res->iHeight= (height < 2160) ? height : 1080; res->iScreenWidth = width; res->iScreenHeight = height; res->dwFlags = (smode == 'p') ? D3DPRESENTFLAG_PROGRESSIVE : D3DPRESENTFLAG_INTERLACED; switch (rrate) { case 23: case 29: case 59: res->fRefreshRate = (float)((rrate + 1)/1.001); break; default: res->fRefreshRate = (float)rrate; break; } } res->iScreen = 0; res->bFullScreen = true; res->iSubtitles = (int)(0.965 * res->iHeight); res->fPixelRatio = 1.0f; res->strId = fromMode; res->strMode = StringUtils::Format("%dx%d @ %.2f%s - Full Screen", res->iScreenWidth, res->iScreenHeight, res->fRefreshRate, res->dwFlags & D3DPRESENTFLAG_INTERLACED ? "i" : ""); return res->iWidth > 0 && res->iHeight> 0; } bool aml_get_native_resolution(RESOLUTION_INFO *res) { std::string mode; SysfsUtils::GetString("/sys/class/display/mode", mode); bool result = aml_mode_to_resolution(mode.c_str(), res); if (aml_has_frac_rate_policy()) { int fractional_rate; SysfsUtils::GetInt("/sys/class/amhdmitx/amhdmitx0/frac_rate_policy", fractional_rate); if (fractional_rate == 1) res->fRefreshRate /= 1.001; } return result; } bool aml_set_native_resolution(const RESOLUTION_INFO &res, std::string framebuffer_name, const int stereo_mode) { bool result = false; result = aml_set_display_resolution(res, framebuffer_name); aml_handle_scale(res); aml_handle_display_stereo_mode(stereo_mode); return result; } bool aml_probe_resolutions(std::vector<RESOLUTION_INFO> &resolutions) { std::string valstr, vesastr, dcapfile; dcapfile = CSpecialProtocol::TranslatePath("special://home/userdata/disp_cap"); if (SysfsUtils::GetString(dcapfile, valstr) < 0) { if (SysfsUtils::GetString("/sys/class/amhdmitx/amhdmitx0/disp_cap", valstr) < 0) return false; if (SysfsUtils::GetString("/sys/class/amhdmitx/amhdmitx0/vesa_cap", vesastr) == 0) valstr += "\n" + vesastr; } std::vector<std::string> probe_str = StringUtils::Split(valstr, "\n"); resolutions.clear(); RESOLUTION_INFO res; for (std::vector<std::string>::const_iterator i = probe_str.begin(); i != probe_str.end(); ++i) { if (((StringUtils::StartsWith(i->c_str(), "4k2k")) && (aml_support_h264_4k2k() > AML_NO_H264_4K2K)) || !(StringUtils::StartsWith(i->c_str(), "4k2k"))) { if (aml_mode_to_resolution(i->c_str(), &res)) resolutions.push_back(res); if (aml_has_frac_rate_policy()) { // Add fractional frame rates: 23.976, 29.97 and 59.94 Hz switch ((int)res.fRefreshRate) { case 24: case 30: case 60: res.fRefreshRate /= 1.001; res.strMode = StringUtils::Format("%dx%d @ %.2f%s - Full Screen", res.iScreenWidth, res.iScreenHeight, res.fRefreshRate, res.dwFlags & D3DPRESENTFLAG_INTERLACED ? 
"i" : ""); resolutions.push_back(res); break; } } } } return resolutions.size() > 0; } bool aml_get_preferred_resolution(RESOLUTION_INFO *res) { // check display/mode, it gets defaulted at boot if (!aml_get_native_resolution(res)) { // punt to 720p if we get nothing aml_mode_to_resolution("720p", res); } return true; } bool aml_set_display_resolution(const RESOLUTION_INFO &res, std::string framebuffer_name) { std::string mode = res.strId.c_str(); std::string cur_mode; SysfsUtils::GetString("/sys/class/display/mode", cur_mode); if (aml_has_frac_rate_policy()) { if (cur_mode == mode) SysfsUtils::SetString("/sys/class/display/mode", "null"); int fractional_rate = (res.fRefreshRate == floor(res.fRefreshRate)) ? 0 : 1; SysfsUtils::SetInt("/sys/class/amhdmitx/amhdmitx0/frac_rate_policy", fractional_rate); } else if (cur_mode == mode) { // Don't set the same mode as current return true; } SysfsUtils::SetString("/sys/class/display/mode", mode.c_str()); aml_set_framebuffer_resolution(res, framebuffer_name); return true; } void aml_setup_video_scaling(const char *mode) { SysfsUtils::SetInt("/sys/class/graphics/fb0/blank", 1); SysfsUtils::SetInt("/sys/class/graphics/fb0/free_scale", 0); SysfsUtils::SetInt("/sys/class/graphics/fb1/free_scale", 0); SysfsUtils::SetInt("/sys/class/ppmgr/ppscaler", 0); if (strstr(mode, "1080")) { SysfsUtils::SetString("/sys/class/graphics/fb0/request2XScale", "8"); SysfsUtils::SetString("/sys/class/graphics/fb1/scale_axis", "1280 720 1920 1080"); SysfsUtils::SetString("/sys/class/graphics/fb1/scale", "0x10001"); } else { SysfsUtils::SetString("/sys/class/graphics/fb0/request2XScale", "16 1280 720"); } SysfsUtils::SetInt("/sys/class/graphics/fb0/blank", 0); } void aml_handle_scale(const RESOLUTION_INFO &res) { if (res.iScreenWidth > res.iWidth && res.iScreenHeight > res.iHeight) aml_enable_freeScale(res); else aml_disable_freeScale(); } void aml_handle_display_stereo_mode(const int stereo_mode) { static std::string lastHdmiTxConfig = "3doff"; std::string command = "3doff"; switch (stereo_mode) { case RENDER_STEREO_MODE_SPLIT_VERTICAL:<|fim▁hole|> command = "3dlr"; break; case RENDER_STEREO_MODE_SPLIT_HORIZONTAL: command = "3dtb"; break; default: // nothing - command is already initialised to "3doff" break; } CLog::Log(LOGDEBUG, "AMLUtils::aml_handle_display_stereo_mode old mode %s new mode %s", lastHdmiTxConfig.c_str(), command.c_str()); // there is no way to read back current mode from sysfs // so we track state internal. Because even // when setting the same mode again - kernel driver // will initiate a new hdmi handshake which is not // what we want of course. // for 3d mode we are called 2 times and need to allow both calls // to succeed. Because the first call doesn't switch mode (i guessi its // timing issue between switching the refreshrate and switching to 3d mode // which needs to occure in the correct order, else switching refresh rate // might reset 3dmode). // So we set the 3d mode - if the last command is different from the current // command - or in case they are the same - we ensure that its not the 3doff // command that gets repeated here. 
if (lastHdmiTxConfig != command || command != "3doff") { CLog::Log(LOGDEBUG, "AMLUtils::aml_handle_display_stereo_mode setting new mode"); lastHdmiTxConfig = command; SysfsUtils::SetString("/sys/class/amhdmitx/amhdmitx0/config", command); } else { CLog::Log(LOGDEBUG, "AMLUtils::aml_handle_display_stereo_mode - no change needed"); } } void aml_enable_freeScale(const RESOLUTION_INFO &res) { char fsaxis_str[256] = {0}; sprintf(fsaxis_str, "0 0 %d %d", res.iWidth-1, res.iHeight-1); char waxis_str[256] = {0}; sprintf(waxis_str, "0 0 %d %d", res.iScreenWidth-1, res.iScreenHeight-1); SysfsUtils::SetInt("/sys/class/graphics/fb0/free_scale", 0); SysfsUtils::SetString("/sys/class/graphics/fb0/free_scale_axis", fsaxis_str); SysfsUtils::SetString("/sys/class/graphics/fb0/window_axis", waxis_str); SysfsUtils::SetInt("/sys/class/graphics/fb0/scale_width", res.iWidth); SysfsUtils::SetInt("/sys/class/graphics/fb0/scale_height", res.iHeight); SysfsUtils::SetInt("/sys/class/graphics/fb0/free_scale", 0x10001); } void aml_disable_freeScale() { // turn off frame buffer freescale SysfsUtils::SetInt("/sys/class/graphics/fb0/free_scale", 0); SysfsUtils::SetInt("/sys/class/graphics/fb1/free_scale", 0); } void aml_set_framebuffer_resolution(const RESOLUTION_INFO &res, std::string framebuffer_name) { aml_set_framebuffer_resolution(res.iWidth, res.iHeight, framebuffer_name); } void aml_set_framebuffer_resolution(int width, int height, std::string framebuffer_name) { int fd0; std::string framebuffer = "/dev/" + framebuffer_name; if ((fd0 = open(framebuffer.c_str(), O_RDWR)) >= 0) { struct fb_var_screeninfo vinfo; if (ioctl(fd0, FBIOGET_VSCREENINFO, &vinfo) == 0) { vinfo.xres = width; vinfo.yres = height; vinfo.xres_virtual = 1920; vinfo.yres_virtual = 2160; vinfo.bits_per_pixel = 32; vinfo.activate = FB_ACTIVATE_ALL; ioctl(fd0, FBIOPUT_VSCREENINFO, &vinfo); } close(fd0); } }<|fim▁end|>
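A minimal, self-contained C++ sketch (not part of the Kodi sources above) of how the sscanf patterns in aml_mode_to_resolution() decode a sysfs mode string such as "1080p60hz". The ModeInfo struct is a hypothetical stand-in for Kodi's RESOLUTION_INFO; note that the %[ip] conversion writes a NUL terminator after the matched characters, so this sketch keeps a small buffer for that field.

#include <cstdio>

// Hypothetical stand-in for the parsed fields of RESOLUTION_INFO.
struct ModeInfo { int width, height, rate; char scan; };

// Mirrors the "%d%[ip]%dhz" branch of aml_mode_to_resolution(): the height
// implies the CEA mode width, and 'p'/'i' selects progressive or interlaced.
static bool parse_mode(const char* mode, ModeInfo* out)
{
  int height = 0, rate = 60;
  char scan[4] = "p";  // %[ appends '\0', so use a buffer, not a single char
  if (std::sscanf(mode, "%d%3[ip]%dhz", &height, scan, &rate) < 2)
    return false;
  int width = 0;
  switch (height)
  {
    case 480: case 576: width = 720;  break;
    case 720:           width = 1280; break;
    case 1080:          width = 1920; break;
    case 2160:          width = 3840; break;
    default:            return false;
  }
  *out = { width, height, rate, scan[0] };
  return true;
}

int main()
{
  ModeInfo mi;
  if (parse_mode("1080p60hz", &mi))
    std::printf("%dx%d%c @ %dHz\n", mi.width, mi.height, mi.scan, mi.rate); // 1920x1080p @ 60Hz
  return 0;
}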
<|file_name|>api.py<|end_file_name|><|fim▁begin|>from better_zoom import BetterZoom from better_selecting_zoom import BetterSelectingZoom from broadcaster import BroadcasterTool from dataprinter import DataPrinter from data_label_tool import DataLabelTool from enable.tools.drag_tool import DragTool from draw_points_tool import DrawPointsTool from drag_zoom import DragZoom from highlight_tool import HighlightTool from image_inspector_tool import ImageInspectorTool, ImageInspectorOverlay from lasso_selection import LassoSelection from legend_tool import LegendTool from legend_highlighter import LegendHighlighter from line_inspector import LineInspector<|fim▁hole|>from pan_tool import PanTool from point_marker import PointMarker from range_selection import RangeSelection from range_selection_2d import RangeSelection2D from range_selection_overlay import RangeSelectionOverlay from regression_lasso import RegressionLasso, RegressionOverlay from save_tool import SaveTool from scatter_inspector import ScatterInspector from select_tool import SelectTool from simple_inspector import SimpleInspectorTool from tool_states import ZoomState, PanState, GroupedToolState, SelectedZoomState from tracking_pan_tool import TrackingPanTool from tracking_zoom import TrackingZoom from traits_tool import TraitsTool from zoom_tool import ZoomTool # EOF<|fim▁end|>
from line_segment_tool import LineSegmentTool
from move_tool import MoveTool
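A hedged usage sketch for the re-exports above, assuming this module is importable as chaco.tools.api (its usual home) and a working Chaco/Enable install. Construction only; embedding the plot in a Traits UI window is omitted.

import numpy as np

from chaco.api import ArrayPlotData, Plot
from chaco.tools.api import PanTool, ZoomTool

x = np.linspace(0, 10, 100)
data = ArrayPlotData(x=x, y=np.sin(x))
plot = Plot(data)
plot.plot(("x", "y"), type="line")

# Interaction tools exactly as re-exported by this api module.
plot.tools.append(PanTool(plot))
plot.overlays.append(ZoomTool(plot, tool_mode="box", always_on=False))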
<|file_name|>boss_twinemperors.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2014-2017 StormCore * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the * Free Software Foundation; either version 2 of the License, or (at your * option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program. If not, see <http://www.gnu.org/licenses/>. */ /* ScriptData SDName: Boss_Twinemperors SD%Complete: 95 SDComment: SDCategory: Temple of Ahn'Qiraj EndScriptData */ #include "ScriptMgr.h" #include "ScriptedCreature.h" #include "temple_of_ahnqiraj.h" #include "WorldPacket.h" #include "Item.h" #include "Spell.h" enum Spells { SPELL_HEAL_BROTHER = 7393, SPELL_TWIN_TELEPORT = 800, // CTRA watches for this spell to start its teleport timer SPELL_TWIN_TELEPORT_VISUAL = 26638, // visual SPELL_EXPLODEBUG = 804, SPELL_MUTATE_BUG = 802, SPELL_BERSERK = 26662, SPELL_UPPERCUT = 26007, SPELL_UNBALANCING_STRIKE = 26613, SPELL_SHADOWBOLT = 26006, SPELL_BLIZZARD = 26607, SPELL_ARCANEBURST = 568, }; enum Sound { SOUND_VL_AGGRO = 8657, //8657 - Aggro - To Late SOUND_VL_KILL = 8658, //8658 - Kill - You will not SOUND_VL_DEATH = 8659, //8659 - Death SOUND_VN_DEATH = 8660, //8660 - Death - Feel SOUND_VN_AGGRO = 8661, //8661 - Aggro - Let none SOUND_VN_KILL = 8662, //8661 - Kill - your fate }; enum Misc { PULL_RANGE = 50, ABUSE_BUG_RANGE = 20, VEKLOR_DIST = 20, // VL will not come to melee when attacking TELEPORTTIME = 30000 }; struct boss_twinemperorsAI : public ScriptedAI { boss_twinemperorsAI(Creature* creature): ScriptedAI(creature) { Initialize(); instance = creature->GetInstanceScript(); } void Initialize() { Heal_Timer = 0; // first heal immediately when they get close together Teleport_Timer = TELEPORTTIME; AfterTeleport = false; tspellcast = false; AfterTeleportTimer = 0; Abuse_Bug_Timer = urand(10000, 17000); BugsTimer = 2000; DontYellWhenDead = false; EnrageTimer = 15 * 60000; } InstanceScript* instance; uint32 Heal_Timer; uint32 Teleport_Timer; bool AfterTeleport; uint32 AfterTeleportTimer; bool DontYellWhenDead; uint32 Abuse_Bug_Timer, BugsTimer; bool tspellcast; uint32 EnrageTimer; virtual bool IAmVeklor() = 0; virtual void Reset() override = 0; virtual void CastSpellOnBug(Creature* target) = 0; void TwinReset() { Initialize(); me->ClearUnitState(UNIT_STATE_STUNNED); } Creature* GetOtherBoss() { return ObjectAccessor::GetCreature(*me, instance->GetGuidData(IAmVeklor() ? DATA_VEKNILASH : DATA_VEKLOR)); } void DamageTaken(Unit* /*done_by*/, uint32 &damage) override { Unit* pOtherBoss = GetOtherBoss(); if (pOtherBoss) { float dPercent = ((float)damage) / ((float)me->GetMaxHealth()); int odmg = (int)(dPercent * ((float)pOtherBoss->GetMaxHealth())); int ohealth = pOtherBoss->GetHealth()-odmg; pOtherBoss->SetHealth(ohealth > 0 ? 
ohealth : 0); if (ohealth <= 0) { pOtherBoss->setDeathState(JUST_DIED); pOtherBoss->SetFlag(OBJECT_DYNAMIC_FLAGS, UNIT_DYNFLAG_LOOTABLE); } } } void JustDied(Unit* /*killer*/) override { Creature* pOtherBoss = GetOtherBoss(); if (pOtherBoss) { pOtherBoss->SetHealth(0); pOtherBoss->setDeathState(JUST_DIED); pOtherBoss->SetFlag(OBJECT_DYNAMIC_FLAGS, UNIT_DYNFLAG_LOOTABLE); ENSURE_AI(boss_twinemperorsAI, pOtherBoss->AI())->DontYellWhenDead = true; } if (!DontYellWhenDead) // I hope AI is not threaded DoPlaySoundToSet(me, IAmVeklor() ? SOUND_VL_DEATH : SOUND_VN_DEATH); } void KilledUnit(Unit* /*victim*/) override { DoPlaySoundToSet(me, IAmVeklor() ? SOUND_VL_KILL : SOUND_VN_KILL); } void EnterCombat(Unit* who) override { DoZoneInCombat(); Creature* pOtherBoss = GetOtherBoss(); if (pOtherBoss) { /// @todo we should activate the other boss location so he can start attackning even if nobody // is near I dont know how to do that if (!pOtherBoss->IsInCombat()) { ScriptedAI* otherAI = ENSURE_AI(ScriptedAI, pOtherBoss->AI()); DoPlaySoundToSet(me, IAmVeklor() ? SOUND_VL_AGGRO : SOUND_VN_AGGRO); otherAI->AttackStart(who); otherAI->DoZoneInCombat(); } } } <|fim▁hole|> return; Creature* pOtherBoss = GetOtherBoss(); if (entry->Id != SPELL_HEAL_BROTHER || !pOtherBoss) return; // add health so we keep same percentage for both brothers uint32 mytotal = me->GetMaxHealth(), histotal = pOtherBoss->GetMaxHealth(); float mult = ((float)mytotal) / ((float)histotal); if (mult < 1) mult = 1.0f/mult; #define HEAL_BROTHER_AMOUNT 30000.0f uint32 largerAmount = (uint32)((HEAL_BROTHER_AMOUNT * mult) - HEAL_BROTHER_AMOUNT); if (mytotal > histotal) { uint32 h = me->GetHealth()+largerAmount; me->SetHealth(std::min(mytotal, h)); } else { uint32 h = pOtherBoss->GetHealth()+largerAmount; pOtherBoss->SetHealth(std::min(histotal, h)); } } void TryHealBrother(uint32 diff) { if (IAmVeklor()) // this spell heals caster and the other brother so let VN cast it return; if (Heal_Timer <= diff) { Unit* pOtherBoss = GetOtherBoss(); if (pOtherBoss && pOtherBoss->IsWithinDist(me, 60)) { DoCast(pOtherBoss, SPELL_HEAL_BROTHER); Heal_Timer = 1000; } } else Heal_Timer -= diff; } void TeleportToMyBrother() { Teleport_Timer = TELEPORTTIME; if (IAmVeklor()) return; // mechanics handled by veknilash so they teleport exactly at the same time and to correct coordinates Creature* pOtherBoss = GetOtherBoss(); if (pOtherBoss) { //me->MonsterYell("Teleporting ...", LANG_UNIVERSAL, 0); Position thisPos; thisPos.Relocate(me); Position otherPos; otherPos.Relocate(pOtherBoss); pOtherBoss->SetPosition(thisPos); me->SetPosition(otherPos); SetAfterTeleport(); ENSURE_AI(boss_twinemperorsAI, pOtherBoss->AI())->SetAfterTeleport(); } } void SetAfterTeleport() { me->InterruptNonMeleeSpells(false); DoStopAttack(); DoResetThreat(); DoCast(me, SPELL_TWIN_TELEPORT_VISUAL); me->AddUnitState(UNIT_STATE_STUNNED); AfterTeleport = true; AfterTeleportTimer = 2000; tspellcast = false; } bool TryActivateAfterTTelep(uint32 diff) { if (AfterTeleport) { if (!tspellcast) { me->ClearUnitState(UNIT_STATE_STUNNED); DoCast(me, SPELL_TWIN_TELEPORT); me->AddUnitState(UNIT_STATE_STUNNED); } tspellcast = true; if (AfterTeleportTimer <= diff) { AfterTeleport = false; me->ClearUnitState(UNIT_STATE_STUNNED); if (Unit* nearu = me->SelectNearestTarget(100)) { //DoYell(nearu->GetName(), LANG_UNIVERSAL, 0); AttackStart(nearu); me->AddThreat(nearu, 10000); } return true; } else { AfterTeleportTimer -= diff; // update important timers which would otherwise get skipped if (EnrageTimer > diff) 
EnrageTimer -= diff; else EnrageTimer = 0; if (Teleport_Timer > diff) Teleport_Timer -= diff; else Teleport_Timer = 0; return false; } } else { return true; } } void MoveInLineOfSight(Unit* who) override { if (!who || me->GetVictim()) return; if (me->CanCreatureAttack(who)) { float attackRadius = me->GetAttackDistance(who); if (attackRadius < PULL_RANGE) attackRadius = PULL_RANGE; if (me->IsWithinDistInMap(who, attackRadius) && me->GetDistanceZ(who) <= /*CREATURE_Z_ATTACK_RANGE*/7 /*there are stairs*/) { //if (who->HasStealthAura()) // who->RemoveSpellsCausingAura(SPELL_AURA_MOD_STEALTH); AttackStart(who); } } } Creature* RespawnNearbyBugsAndGetOne() { std::list<Creature*> lUnitList; me->GetCreatureListWithEntryInGrid(lUnitList, 15316, 150.0f); me->GetCreatureListWithEntryInGrid(lUnitList, 15317, 150.0f); if (lUnitList.empty()) return NULL; Creature* nearb = NULL; for (std::list<Creature*>::const_iterator iter = lUnitList.begin(); iter != lUnitList.end(); ++iter) { Creature* c = *iter; if (c) { if (c->isDead()) { c->Respawn(); c->setFaction(7); c->RemoveAllAuras(); } if (c->IsWithinDistInMap(me, ABUSE_BUG_RANGE)) { if (!nearb || (rand32() % 4) == 0) nearb = c; } } } return nearb; } void HandleBugs(uint32 diff) { if (BugsTimer < diff || Abuse_Bug_Timer <= diff) { Creature* c = RespawnNearbyBugsAndGetOne(); if (Abuse_Bug_Timer <= diff) { if (c) { CastSpellOnBug(c); Abuse_Bug_Timer = urand(10000, 17000); } else { Abuse_Bug_Timer = 1000; } } else { Abuse_Bug_Timer -= diff; } BugsTimer = 2000; } else { BugsTimer -= diff; Abuse_Bug_Timer -= diff; } } void CheckEnrage(uint32 diff) { if (EnrageTimer <= diff) { if (!me->IsNonMeleeSpellCast(true)) { DoCast(me, SPELL_BERSERK); EnrageTimer = 60*60000; } else EnrageTimer = 0; } else EnrageTimer-=diff; } }; class boss_veknilash : public CreatureScript { public: boss_veknilash() : CreatureScript("boss_veknilash") { } CreatureAI* GetAI(Creature* creature) const override { return GetInstanceAI<boss_veknilashAI>(creature); } struct boss_veknilashAI : public boss_twinemperorsAI { bool IAmVeklor() override {return false;} boss_veknilashAI(Creature* creature) : boss_twinemperorsAI(creature) { Initialize(); } void Initialize() { UpperCut_Timer = urand(14000, 29000); UnbalancingStrike_Timer = urand(8000, 18000); Scarabs_Timer = urand(7000, 14000); } uint32 UpperCut_Timer; uint32 UnbalancingStrike_Timer; uint32 Scarabs_Timer; void Reset() override { TwinReset(); Initialize(); //Added. Can be removed if its included in DB. 
me->ApplySpellImmune(0, IMMUNITY_DAMAGE, SPELL_SCHOOL_MASK_MAGIC, true); } void CastSpellOnBug(Creature* target) override { target->setFaction(14); target->AI()->AttackStart(me->getThreatManager().getHostilTarget()); target->AddAura(SPELL_MUTATE_BUG, target); target->SetFullHealth(); } void UpdateAI(uint32 diff) override { //Return since we have no target if (!UpdateVictim()) return; if (!TryActivateAfterTTelep(diff)) return; //UnbalancingStrike_Timer if (UnbalancingStrike_Timer <= diff) { DoCastVictim(SPELL_UNBALANCING_STRIKE); UnbalancingStrike_Timer = 8000 + rand32() % 12000; } else UnbalancingStrike_Timer -= diff; if (UpperCut_Timer <= diff) { Unit* randomMelee = SelectTarget(SELECT_TARGET_RANDOM, 0, NOMINAL_MELEE_RANGE, true); if (randomMelee) DoCast(randomMelee, SPELL_UPPERCUT); UpperCut_Timer = 15000 + rand32() % 15000; } else UpperCut_Timer -= diff; HandleBugs(diff); //Heal brother when 60yrds close TryHealBrother(diff); //Teleporting to brother if (Teleport_Timer <= diff) { TeleportToMyBrother(); } else Teleport_Timer -= diff; CheckEnrage(diff); DoMeleeAttackIfReady(); } }; }; class boss_veklor : public CreatureScript { public: boss_veklor() : CreatureScript("boss_veklor") { } CreatureAI* GetAI(Creature* creature) const override { return GetInstanceAI<boss_veklorAI>(creature); } struct boss_veklorAI : public boss_twinemperorsAI { bool IAmVeklor() override {return true;} boss_veklorAI(Creature* creature) : boss_twinemperorsAI(creature) { Initialize(); } void Initialize() { ShadowBolt_Timer = 0; Blizzard_Timer = urand(15000, 20000); ArcaneBurst_Timer = 1000; Scorpions_Timer = urand(7000, 14000); } uint32 ShadowBolt_Timer; uint32 Blizzard_Timer; uint32 ArcaneBurst_Timer; uint32 Scorpions_Timer; void Reset() override { TwinReset(); Initialize(); //Added. Can be removed if its included in DB. 
me->ApplySpellImmune(0, IMMUNITY_DAMAGE, SPELL_SCHOOL_MASK_NORMAL, true); } void CastSpellOnBug(Creature* target) override { target->setFaction(14); target->AddAura(SPELL_EXPLODEBUG, target); target->SetFullHealth(); } void UpdateAI(uint32 diff) override { //Return since we have no target if (!UpdateVictim()) return; // reset arcane burst after teleport - we need to do this because // when VL jumps to VN's location there will be a warrior who will get only 2s to run away // which is almost impossible if (AfterTeleport) ArcaneBurst_Timer = 5000; if (!TryActivateAfterTTelep(diff)) return; //ShadowBolt_Timer if (ShadowBolt_Timer <= diff) { if (!me->IsWithinDist(me->GetVictim(), 45.0f)) me->GetMotionMaster()->MoveChase(me->GetVictim(), VEKLOR_DIST, 0); else DoCastVictim(SPELL_SHADOWBOLT); ShadowBolt_Timer = 2000; } else ShadowBolt_Timer -= diff; //Blizzard_Timer if (Blizzard_Timer <= diff) { Unit* target = NULL; target = SelectTarget(SELECT_TARGET_RANDOM, 0, 45, true); if (target) DoCast(target, SPELL_BLIZZARD); Blizzard_Timer = 15000 + rand32() % 15000; } else Blizzard_Timer -= diff; if (ArcaneBurst_Timer <= diff) { if (Unit* mvic = SelectTarget(SELECT_TARGET_NEAREST, 0, NOMINAL_MELEE_RANGE, true)) { DoCast(mvic, SPELL_ARCANEBURST); ArcaneBurst_Timer = 5000; } } else ArcaneBurst_Timer -= diff; HandleBugs(diff); //Heal brother when 60yrds close TryHealBrother(diff); //Teleporting to brother if (Teleport_Timer <= diff) { TeleportToMyBrother(); } else Teleport_Timer -= diff; CheckEnrage(diff); //VL doesn't melee //DoMeleeAttackIfReady(); } void AttackStart(Unit* who) override { if (!who) return; if (who->isTargetableForAttack()) { // VL doesn't melee if (me->Attack(who, false)) { me->GetMotionMaster()->MoveChase(who, VEKLOR_DIST, 0); me->AddThreat(who, 0.0f); } } } }; }; void AddSC_boss_twinemperors() { new boss_veknilash(); new boss_veklor(); }<|fim▁end|>
void SpellHit(Unit* caster, const SpellInfo* entry) override
{
    if (caster == me)
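A self-contained C++ sketch (hypothetical types, not the TrinityCore API) of the twin-health arithmetic in DamageTaken() above: the hit is converted to a fraction of the victim's maximum health and the same fraction is removed from the other emperor, keeping both twins at equal health percentages.

#include <cstdint>
#include <cstdio>

// Minimal stand-in for one boss's health pool.
struct Boss { uint32_t health, maxHealth; };

// Mirror the same health *percentage* onto the twin, as DamageTaken() does.
void ShareDamage(Boss& hit, Boss& twin, uint32_t damage)
{
    float pct = static_cast<float>(damage) / static_cast<float>(hit.maxHealth);
    uint32_t mirrored = static_cast<uint32_t>(pct * static_cast<float>(twin.maxHealth));
    twin.health = (mirrored >= twin.health) ? 0 : twin.health - mirrored;
}

int main()
{
    Boss veknilash{1000000, 1000000};
    Boss veklor{800000, 800000};
    ShareDamage(veknilash, veklor, 100000);  // 10% of Vek'nilash's pool
    std::printf("Vek'lor: %u/%u\n", veklor.health, veklor.maxHealth);  // 720000/800000
    return 0;
}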
<|file_name|>month.go<|end_file_name|><|fim▁begin|>package chron import ( "time" "github.com/dustinevan/chron/dura" "fmt" "reflect" "database/sql/driver" "strings" ) type Month struct { time.Time } func NewMonth(year int, month time.Month) Month { return Month{time.Date(year, month, 1, 0, 0, 0, 0, time.UTC)} } func ThisMonth() Month { return Now().AsMonth() } func MonthOf(t time.Time) Month { t = t.UTC() return NewMonth(t.Year(), t.Month()) } func (m Month) AsYear() Year { return YearOf(m.Time) } func (m Month) AsMonth() Month { return m } func (m Month) AsDay() Day { return DayOf(m.Time) } func (m Month) AsHour() Hour { return HourOf(m.Time) } func (m Month) AsMinute() Minute { return MinuteOf(m.Time) } func (m Month) AsSecond() Second { return SecondOf(m.Time) } func (m Month) AsMilli() Milli { return MilliOf(m.Time) } func (m Month) AsMicro() Micro { return MicroOf(m.Time) } func (m Month) AsChron() Chron { return TimeOf(m.Time) } func (m Month) AsTime() time.Time { return m.Time } func (m Month) Increment(l dura.Time) Chron { return Chron{m.AddDate(l.Years(), l.Months(), l.Days()).Add(l.Duration())} } func (m Month) Decrement(l dura.Time) Chron { return Chron{m.AddDate(-1*l.Years(), -1*l.Months(), -1*l.Days()).Add(-1 * l.Duration())} } func (m Month) AddN(n int) Month { return Month{m.AddDate(0, n, 0)} } // span.Time implementation func (m Month) Start() Chron { return m.AsChron() } func (m Month) End() Chron { return m.AddN(1).Decrement(dura.Nano) } func (m Month) Contains(t Span) bool { return !m.Before(t) && !m.After(t) } func (m Month) Before(t Span) bool { return m.End().AsTime().Before(t.Start().AsTime()) } func (m Month) After(t Span) bool { return m.Start().AsTime().After(t.End().AsTime()) } func (m Month) Duration() dura.Time { return dura.Month } <|fim▁hole|>func (m Month) AddYears(y int) Month { return m.Increment(dura.Years(y)).AsMonth() } func (m Month) AddMonths(ms int) Month { return m.AddN(ms) } func (m Month) AddDays(d int) Day { return m.AsDay().AddN(d) } func (m Month) AddHours(h int) Hour { return m.AsHour().AddN(h) } func (m Month) AddMinutes(mi int) Minute { return m.AsMinute().AddN(mi) } func (m Month) AddSeconds(s int) Second { return m.AsSecond().AddN(s) } func (m Month) AddMillis(mi int) Milli { return m.AsMilli().AddN(mi) } func (m Month) AddMicros(mi int) Micro { return m.AsMicro().AddN(mi) } func (m Month) AddNanos(n int) Chron { return m.AsChron().AddN(n) } func (m *Month) Scan(value interface{}) error { if value == nil { *m = ZeroValue().AsMonth() return nil } if t, ok := value.(time.Time); ok { *m = MonthOf(t) return nil } return fmt.Errorf("unsupported Scan, storing %s into type *chron.Day", reflect.TypeOf(value)) } func (m Month) Value() (driver.Value, error) { // todo: error check the range. return m.Time, nil } func (m *Month) UnmarshalJSON(data []byte) error { if string(data) == "null" { return nil } s := strings.Trim(string(data), `"`) t, err := Parse(s) *m = MonthOf(t) return err }<|fim▁end|>
<|file_name|>royal-american.js<|end_file_name|><|fim▁begin|>var cheerio = require('cheerio') , request = require('request') , url = 'http://theroyalamerican.com/schedule/' , shows = [] , months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'] function royalamerican (done) { request(url, function(err, response, body) { var $ = cheerio.load(body) $('.gig').each(function(i, elem) { var price, time var $$ = cheerio.load(elem) var title = $$('.title').text().trim() if($$('.with').text().trim()) title += $$('.with').text().trim() var date = $$('.date').text().trim() var timePrice = $$('.details').text().trim().split('Show: ').join('').split('|') if(timePrice[0]) time = timePrice[0].trim() if(timePrice[1] && timePrice[1].trim().slice(0,1) === '$') price = timePrice[1].split('Cover')[0].trim() else price = '' var year = $('#gigs_left').children().attr('name')<|fim▁hole|> title: title, time: time.slice(0,5).trim(), price: price, url: $$('.details').first().find('a').attr('href'), date: date } shows.push(show) }) done(null, shows) }) } module.exports = royalamerican function normalizeDate(date, year) { var newDate = [] date = date.split(' ') var day = date.pop() if(day.length < 2) day = '0'+day var month = date.pop() month = (months.indexOf(month)+1).toString() if(month.length < 2) month = '0'+month year = year.split('_')[1] newDate.push(year) newDate.push(month) newDate.push(day) return newDate.join('-') }<|fim▁end|>
date = normalizeDate(date, year) var show = { venue: 'The Royal American', venueUrl: 'http://theroyalamerican.com/',
<|file_name|>config.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ## \file config.py # \brief python package for config # \author T. Lukaczyk, F. Palacios # \version 3.2.9 "eagle" # # SU2 Lead Developers: Dr. Francisco Palacios ([email protected]). # Dr. Thomas D. Economon ([email protected]). # # SU2 Developers: Prof. Juan J. Alonso's group at Stanford University. # Prof. Piero Colonna's group at Delft University of Technology. # Prof. Nicolas R. Gauger's group at Kaiserslautern University of Technology. # Prof. Alberto Guardone's group at Polytechnic University of Milan. # Prof. Rafael Palacios' group at Imperial College London. # # Copyright (C) 2012-2015 SU2, the open-source CFD code. # # SU2 is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # SU2 is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with SU2. If not, see <http://www.gnu.org/licenses/>. # ---------------------------------------------------------------------- # Imports # ---------------------------------------------------------------------- import os, sys, shutil, copy import numpy as np from ..util import bunch, ordered_bunch, switch from .tools import * from config_options import * try: from collections import OrderedDict except ImportError: from ..util.ordered_dict import OrderedDict inf = 1.0e20 # ---------------------------------------------------------------------- # Configuration Class # ---------------------------------------------------------------------- class Config(ordered_bunch): """ config = SU2.io.Config(filename="") Starts a config class, an extension of ordered_bunch() use 1: initialize by reading config file config = SU2.io.Config('filename') use 2: initialize from dictionary or bunch config = SU2.io.Config(param_dict) use 3: initialize empty config = SU2.io.Config() Parameters can be accessed by item or attribute ie: config['MESH_FILENAME'] or config.MESH_FILENAME Methods: read() - read from a config file write() - write to a config file (requires existing file) dump() - dump a raw config file unpack_dvs() - unpack a design vector diff() - returns the difference from another config dist() - computes the distance from another config """ _filename = 'config.cfg' def __init__(self,*args,**kwarg): # look for filename in inputs if args and isinstance(args[0],str): filename = args[0] args = args[1:] elif kwarg.has_key('filename'): filename = kwarg['filename'] del kwarg['filename'] else: filename = '' # initialize ordered bunch super(Config,self).__init__(*args,**kwarg) # read config if it exists if filename: try: self.read(filename) except: raise IOError , 'Could not find config file: %s' % filename self._filename = filename def read(self,filename): """ reads from a config file """ konfig = read_config(filename) self.update(konfig) def write(self,filename=''): """ updates an existing config file """ if not filename: filename = self._filename assert os.path.exists(filename) , 'must write over an existing config file' write_config(filename,self) def dump(self,filename=''): """ dumps all items in the config bunch, without comments """ if not filename: 
filename = self._filename dump_config(filename,self) def __getattr__(self,k): try: return super(Config,self).__getattr__(k) except AttributeError: raise AttributeError , 'Config parameter not found' def __getitem__(self,k): try: return super(Config,self).__getitem__(k) except KeyError: raise KeyError , 'Config parameter not found: %s' % k def unpack_dvs(self,dv_new,dv_old=None): """ updates config with design variable vectors will scale according to each DEFINITION_DV scale parameter Modifies: DV_KIND DV_MARKER DV_PARAM DV_VALUE_OLD DV_VALUE_NEW Inputs: dv_new - list or array of new dv values dv_old - optional, list or array of old dv values, defaults to zeros """ dv_new = copy.deepcopy(dv_new) dv_old = copy.deepcopy(dv_old) # handle unpacking cases def_dv = self['DEFINITION_DV'] n_dv = len(def_dv['KIND']) if not dv_old: dv_old = [0.0]*n_dv assert len(dv_new) == len(dv_old) , 'unexpected design vector length' # handle param param_dv = self['DV_PARAM'] # apply scale dv_scales = def_dv['SCALE'] dv_new = [ dv_new[i]*dv_scl for i,dv_scl in enumerate(dv_scales) ] dv_old = [ dv_old[i]*dv_scl for i,dv_scl in enumerate(dv_scales) ] # Change the parameters of the design variables self['DV_KIND'] = def_dv['KIND'] param_dv['PARAM'] = def_dv['PARAM'] param_dv['FFDTAG'] = def_dv['FFDTAG'] self.update({ 'DV_MARKER' : def_dv['MARKER'][0] , 'DV_VALUE_OLD' : dv_old , 'DV_VALUE_NEW' : dv_new }) def __eq__(self,konfig): return super(Config,self).__eq__(konfig) def __ne__(self,konfig): return super(Config,self).__ne__(konfig) def local_files(self): """ removes path prefix from all *_FILENAME params """ for key,value in self.iteritems(): if key.split('_')[-1] == 'FILENAME': self[key] = os.path.basename(value) def diff(self,konfig): """ compares self to another config Inputs: konfig - a second config Outputs: config_diff - a config containing only the differing keys, each with values of a list of the different config values. 
for example: config_diff.MATH_PROBLEM = ['DIRECT','ADJOINT'] """ keys = set([]) keys.update( self.keys() ) keys.update( konfig.keys() ) konfig_diff = Config() for key in keys: value1 = self.get(key,None) value2 = konfig.get(key,None) if not value1 == value2: konfig_diff[key] = [value1,value2] return konfig_diff def dist(self,konfig,keys_check='ALL'): """ calculates a distance to another config Inputs: konfig - a second config keys_check - optional, a list of keys to check Outputs: distance - a float Currently only works for DV_VALUE_NEW and DV_VALUE_OLD Returns a large value otherwise """ konfig_diff = self.diff(konfig) if keys_check == 'ALL': keys_check = konfig_diff.keys() distance = 0.0 for key in keys_check: if konfig_diff.has_key(key): val1 = konfig_diff[key][0] val2 = konfig_diff[key][1] if key in ['DV_VALUE_NEW', 'DV_VALUE_OLD']: val1 = np.array( val1 ) val2 = np.array( val2 ) this_diff = np.sqrt( np.sum( (val1-val2)**2 ) ) else: print 'Warning, unexpected config difference' this_diff = inf distance += this_diff #: if key different #: for each keys_check return distance def __repr__(self): #return '<Config> %s' % self._filename return self.__str__() def __str__(self): output = 'Config: %s' % self._filename for k,v in self.iteritems(): output += '\n %s= %s' % (k,v) return output #: class Config # ------------------------------------------------------------------- # Get SU2 Configuration Parameters # ------------------------------------------------------------------- def read_config(filename): """ reads a config file """ # initialize output dictionary data_dict = OrderedDict() input_file = open(filename) # process each line while 1: # read the line line = input_file.readline() if not line: break # remove line returns line = line.strip('\r\n') # make sure it has useful data if (not "=" in line) or (line[0] == '%'): continue # split across equals sign line = line.split("=",1) this_param = line[0].strip() this_value = line[1].strip() assert not data_dict.has_key(this_param) , ('Config file has multiple specifications of %s' % this_param ) for case in switch(this_param): # comma delimited lists of strings with or without paren's if case("MARKER_EULER") : pass if case("MARKER_FAR") : pass if case("MARKER_PLOTTING") : pass if case("MARKER_MONITORING") : pass if case("MARKER_SYM") : pass if case("DV_KIND") : # remove white space this_value = ''.join(this_value.split()) # remove parens this_value = this_value.strip('()') # split by comma data_dict[this_param] = this_value.split(",") break # semicolon delimited lists of comma delimited lists of floats if case("DV_PARAM"): # remove white space info_General = ''.join(this_value.split()) # split by semicolon info_General = info_General.split(';') # build list of dv params, convert string to float dv_Parameters = [] dv_FFDTag = [] for this_dvParam in info_General: this_dvParam = this_dvParam.strip('()') this_dvParam = this_dvParam.split(",") # if FFD change the first element to work with numbers and float(x) if data_dict["DV_KIND"][0] in ['FFD_SETTING','FFD_CONTROL_POINT','FFD_DIHEDRAL_ANGLE','FFD_TWIST_ANGLE','FFD_ROTATION','FFD_CAMBER','FFD_THICKNESS','FFD_CONTROL_POINT_2D','FFD_CAMBER_2D','FFD_THICKNESS_2D']: this_dvFFDTag = this_dvParam[0] this_dvParam[0] = '0' else: this_dvFFDTag = [] this_dvParam = [ float(x) for x in this_dvParam ] dv_FFDTag = dv_FFDTag + [this_dvFFDTag] dv_Parameters = dv_Parameters + [this_dvParam] # store in a dictionary dv_Definitions = { 'FFDTAG' : dv_FFDTag , 'PARAM' : dv_Parameters } data_dict[this_param] = 
dv_Definitions break # comma delimited lists of floats if case("DV_VALUE_OLD") : pass if case("DV_VALUE_NEW") : pass if case("DV_VALUE") : # remove white space this_value = ''.join(this_value.split()) # split by comma, map to float, store in dictionary data_dict[this_param] = map(float,this_value.split(",")) break # float parameters if case("MACH_NUMBER") : pass if case("AoA") : pass if case("FIN_DIFF_STEP") : pass if case("CFL_NUMBER") : pass if case("WRT_SOL_FREQ") : data_dict[this_param] = float(this_value) break # int parameters if case("NUMBER_PART") : pass if case("AVAILABLE_PROC") : pass if case("EXT_ITER") : pass if case("TIME_INSTANCES") : pass if case("UNST_ADJOINT_ITER") : pass if case("ITER_AVERAGE_OBJ") : pass if case("ADAPT_CYCLES") : data_dict[this_param] = int(this_value) break # unitary design variable definition if case("DEFINITION_DV"): # remove white space this_value = ''.join(this_value.split()) # split into unitary definitions info_Unitary = this_value.split(";") # process each Design Variable dv_Kind = []<|fim▁hole|> dv_Scale = [] dv_Markers = [] dv_FFDTag = [] dv_Parameters = [] for this_General in info_Unitary: if not this_General: continue # split each unitary definition into one general definition info_General = this_General.strip("()").split("|") # check for needed strip()? # split information for dv Kinds info_Kind = info_General[0].split(",") # pull processed dv values this_dvKind = get_dvKind( int( info_Kind[0] ) ) this_dvScale = float( info_Kind[1] ) this_dvMarkers = info_General[1].split(",") if this_dvKind=='MACH_NUMBER' or this_dvKind=='AOA': this_dvParameters = [] else: this_dvParameters = info_General[2].split(",") # if FFD change the first element to work with numbers and float(x), save also the tag if this_dvKind in ['FFD_SETTING','FFD_CONTROL_POINT','FFD_DIHEDRAL_ANGLE','FFD_TWIST_ANGLE','FFD_ROTATION','FFD_CAMBER','FFD_THICKNESS','FFD_CONTROL_POINT_2D','FFD_CAMBER_2D','FFD_THICKNESS_2D']: this_dvFFDTag = this_dvParameters[0] this_dvParameters[0] = '0' else: this_dvFFDTag = [] this_dvParameters = [ float(x) for x in this_dvParameters ] # add to lists dv_Kind = dv_Kind + [this_dvKind] dv_Scale = dv_Scale + [this_dvScale] dv_Markers = dv_Markers + [this_dvMarkers] dv_FFDTag = dv_FFDTag + [this_dvFFDTag] dv_Parameters = dv_Parameters + [this_dvParameters] # store in a dictionary dv_Definitions = { 'KIND' : dv_Kind , 'SCALE' : dv_Scale , 'MARKER' : dv_Markers , 'FFDTAG' : dv_FFDTag , 'PARAM' : dv_Parameters } # save to output dictionary data_dict[this_param] = dv_Definitions break # unitary objective definition if case('OPT_OBJECTIVE'): # remove white space this_value = ''.join(this_value.split()) # split by scale this_value = this_value.split("*") this_name = this_value[0] this_scale = 1.0 if len(this_value) > 1: this_scale = float( this_value[1] ) this_def = { this_name : {'SCALE':this_scale} } # save to output dictionary data_dict[this_param] = this_def break # unitary constraint definition if case('OPT_CONSTRAINT'): # remove white space this_value = ''.join(this_value.split()) # check for none case if this_value == 'NONE': data_dict[this_param] = {'EQUALITY':OrderedDict(), 'INEQUALITY':OrderedDict()} break # split definitions this_value = this_value.split(';') this_def = OrderedDict() for this_con in this_value: if not this_con: continue # if no definition # defaults this_obj = 'NONE' this_sgn = '=' this_scl = 1.0 this_val = 0.0 # split scale if present this_con = this_con.split('*') if len(this_con) > 1: this_scl = float( this_con[1] ) this_con = 
this_con[0] # find sign for this_sgn in ['<','>','=']: if this_sgn in this_con: break # split sign, store objective and value this_con = this_con.strip('()').split(this_sgn) assert len(this_con) == 2 , 'incorrect constraint definition' this_obj = this_con[0] this_val = float( this_con[1] ) # store in dictionary this_def[this_obj] = { 'SIGN' : this_sgn , 'VALUE' : this_val , 'SCALE' : this_scl } #: for each constraint definition # sort constraints by type this_sort = { 'EQUALITY' : OrderedDict() , 'INEQUALITY' : OrderedDict() } for key,value in this_def.iteritems(): if value['SIGN'] == '=': this_sort['EQUALITY'][key] = value else: this_sort['INEQUALITY'][key] = value #: for each definition # save to output dictionary data_dict[this_param] = this_sort break # otherwise # string parameters if case(): data_dict[this_param] = this_value break #: if case DEFINITION_DV #: for case #: for line #hack - twl if not data_dict.has_key('DV_VALUE_NEW'): data_dict['DV_VALUE_NEW'] = [0] if not data_dict.has_key('DV_VALUE_OLD'): data_dict['DV_VALUE_OLD'] = [0] if not data_dict.has_key('OPT_ITERATIONS'): data_dict['OPT_ITERATIONS'] = 100 if not data_dict.has_key('OPT_ACCURACY'): data_dict['OPT_ACCURACY'] = 1e-10 if not data_dict.has_key('BOUND_DV'): data_dict['BOUND_DV'] = 1e10 return data_dict #: def read_config() # ------------------------------------------------------------------- # Set SU2 Configuration Parameters # ------------------------------------------------------------------- def write_config(filename,param_dict): """ updates an existing config file """ temp_filename = "temp.cfg" shutil.copy(filename,temp_filename) output_file = open(filename,"w") # break pointers param_dict = copy.deepcopy(param_dict) for raw_line in open(temp_filename): # remove line returns line = raw_line.strip('\r\n') # make sure it has useful data if not "=" in line: output_file.write(raw_line) continue # split across equals sign line = line.split("=") this_param = line[0].strip() old_value = line[1].strip() # skip if parameter unwanted if not param_dict.has_key(this_param): output_file.write(raw_line) continue # start writing parameter new_value = param_dict[this_param] output_file.write(this_param + "= ") # handle parameter types for case in switch(this_param): # comma delimited list of floats if case("DV_VALUE_NEW") : pass if case("DV_VALUE_OLD") : pass if case("DV_VALUE") : n_lists = len(new_value) for i_value in range(n_lists): output_file.write("%s" % new_value[i_value]) if i_value+1 < n_lists: output_file.write(", ") break # comma delimited list of strings no paren's if case("DV_KIND") : pass if case("TASKS") : pass if case("GRADIENTS") : if not isinstance(new_value,list): new_value = [ new_value ] n_lists = len(new_value) for i_value in range(n_lists): output_file.write(new_value[i_value]) if i_value+1 < n_lists: output_file.write(", ") break # comma delimited list of strings inside paren's if case("MARKER_EULER") : pass if case("MARKER_FAR") : pass if case("MARKER_PLOTTING") : pass if case("MARKER_MONITORING") : pass if case("MARKER_SYM") : pass if case("DV_MARKER") : if not isinstance(new_value,list): new_value = [ new_value ] output_file.write("( ") n_lists = len(new_value) for i_value in range(n_lists): output_file.write(new_value[i_value]) if i_value+1 < n_lists: output_file.write(", ") output_file.write(" )") break # semicolon delimited lists of comma delimited lists if case("DV_PARAM") : assert isinstance(new_value['PARAM'],list) , 'incorrect specification of DV_PARAM' if not 
isinstance(new_value['PARAM'][0],list): new_value = [ new_value ] for i_value in range(len(new_value['PARAM'])): output_file.write("( ") this_param_list = new_value['PARAM'][i_value] this_ffd_list = new_value['FFDTAG'][i_value] n_lists = len(this_param_list) if this_ffd_list != []: output_file.write("%s, " % this_ffd_list) for j_value in range(1,n_lists): output_file.write("%s" % this_param_list[j_value]) if j_value+1 < n_lists: output_file.write(", ") else: for j_value in range(n_lists): output_file.write("%s" % this_param_list[j_value]) if j_value+1 < n_lists: output_file.write(", ") output_file.write(") ") if i_value+1 < len(new_value['PARAM']): output_file.write("; ") break # int parameters if case("NUMBER_PART") : pass if case("ADAPT_CYCLES") : pass if case("TIME_INSTANCES") : pass if case("AVAILABLE_PROC") : pass if case("UNST_ADJOINT_ITER") : pass if case("EXT_ITER") : output_file.write("%i" % new_value) break if case("DEFINITION_DV") : n_dv = len(new_value['KIND']) if not n_dv: output_file.write("NONE") for i_dv in range(n_dv): this_kind = new_value['KIND'][i_dv] output_file.write("( ") output_file.write("%i , " % get_dvID(this_kind) ) output_file.write("%s " % new_value['SCALE'][i_dv]) output_file.write("| ") # markers n_mark = len(new_value['MARKER'][i_dv]) for i_mark in range(n_mark): output_file.write("%s " % new_value['MARKER'][i_dv][i_mark]) if i_mark+1 < n_mark: output_file.write(", ") #: for each marker if not this_kind in ['AOA','MACH_NUMBER']: output_file.write(" | ") # params if this_kind in ['FFD_SETTING','FFD_CONTROL_POINT','FFD_DIHEDRAL_ANGLE','FFD_TWIST_ANGLE','FFD_ROTATION','FFD_CAMBER','FFD_THICKNESS','FFD_CONTROL_POINT_2D','FFD_CAMBER_2D','FFD_THICKNESS_2D']: n_param = len(new_value['PARAM'][i_dv]) output_file.write("%s , " % new_value['FFDTAG'][i_dv]) for i_param in range(1,n_param): output_file.write("%s " % new_value['PARAM'][i_dv][i_param]) if i_param+1 < n_param: output_file.write(", ") else: n_param = len(new_value['PARAM'][i_dv]) for i_param in range(n_param): output_file.write("%s " % new_value['PARAM'][i_dv][i_param]) if i_param+1 < n_param: output_file.write(", ") #: for each param output_file.write(" )") if i_dv+1 < n_dv: output_file.write("; ") #: for each dv break if case("OPT_OBJECTIVE"): assert len(new_value.keys())==1 , 'only one OPT_OBJECTIVE is currently supported' i_name = 0 for name,value in new_value.iteritems(): if i_name>0: output_file.write("; ") output_file.write( "%s * %s" % (name,value['SCALE']) ) i_name += 1 break if case("OPT_CONSTRAINT"): i_con = 0 for con_type in ['EQUALITY','INEQUALITY']: this_con = new_value[con_type] for name,value in this_con.iteritems(): if i_con>0: output_file.write("; ") output_file.write( "( %s %s %s ) * %s" % (name, value['SIGN'], value['VALUE'], value['SCALE']) ) i_con += 1 #: for each constraint #: for each constraint type if not i_con: output_file.write("NONE") break # default, assume string, integer or unformatted float if case(): output_file.write('%s' % new_value) break #: for case # remove from param dictionary del param_dict[this_param] # next line output_file.write("\n") #: for each line # check that all params were used for this_param in param_dict.keys(): if not this_param in ['JOB_NUMBER']: print ( 'Warning: Parameter %s not found in config file and was not written' % (this_param) ) output_file.close() os.remove( temp_filename ) #: def write_config() def dump_config(filename,config): ''' dumps a raw config file with all options in config and no comments ''' # HACK - twl if 
config.has_key('DV_VALUE_NEW'): config.DV_VALUE = config.DV_VALUE_NEW config_file = open(filename,'w') # write dummy file for key in config.keys(): config_file.write( '%s= 0 \n' % key ) config_file.close() # dump data write_config(filename,config)<|fim▁end|>
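A hedged usage sketch for the Config class above, in the same Python 2 idiom as the file; the .cfg filename is hypothetical, and write() expects the file to already exist.

import copy
import SU2

config = SU2.io.Config('inv_NACA0012.cfg')   # hypothetical config file

# Parameters are reachable as items or attributes.
config.MACH_NUMBER = 0.8
config['EXT_ITER'] = 250

# diff() keeps only the keys whose values differ between two configs.
other = copy.deepcopy(config)
other.MACH_NUMBER = 0.5
print config.diff(other)      # shows MACH_NUMBER= [0.8, 0.5]

config.write()                # update the existing file in place
config.dump('raw_config.cfg') # write a raw file without comments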