prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>links.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap; use core::PackageSet; use util::{CargoResult, human}; // Validate that there are no duplicated native libraries among packages and // that all packages with `links` also have a build script. pub fn validate(deps: &PackageSet) -> CargoResult<()> { let mut map = HashMap::new(); for dep in deps.iter() {<|fim▁hole|> match map.get(&lib) { Some(previous) => { return Err(human(format!("native library `{}` is being linked \ to by more than one package, and \ can only be linked to by one \ package\n\n {}\n {}", lib, previous, dep.package_id()))) } None => {} } if !dep.manifest().targets().iter().any(|t| t.is_custom_build()) { return Err(human(format!("package `{}` specifies that it links to \ `{}` but does not have a custom build \ script", dep.package_id(), lib))) } map.insert(lib, dep.package_id()); } Ok(()) }<|fim▁end|>
let lib = match dep.manifest().links() { Some(lib) => lib, None => continue, };
<|file_name|>iworkflow_license_pool.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright 2017 F5 Networks Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = { 'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.0' } DOCUMENTATION = ''' module: iworkflow_license_pool short_description: Manage license pools in iWorkflow. description: - Manage license pools in iWorkflow. version_added: 2.4 options: name: description: - Name of the license pool to create. required: True state: description: - Whether the license pool should exist, or not. A state of C(present) will attempt to activate the license pool if C(accept_eula) is set to C(yes). required: False default: present choices: - present - absent base_key: description: - Key that the license server uses to verify the functionality that you are entitled to license. This option is required if you are creating a new license. required: False default: None accept_eula: description: - Specifies that you accept the EULA that is part of iWorkflow. Note that this is required to activate the license pool. If this is not specified, or it is set to C(no), then the pool will remain in a state of limbo until you choose to accept the EULA. This option is required when updating a license. 
It is also suggested that you provide it when creating a license, but if you do not, the license will remain inactive and you will have to run this module again with this option set to C(yes) to activate it. required: False default: 'no' choices: - yes - no notes: - Requires the f5-sdk Python package on the host. This is as easy as pip install f5-sdk. extends_documentation_fragment: f5 requirements: - f5-sdk >= 2.3.0 - iWorkflow >= 2.1.0 author: - Tim Rupp (@caphrim007) ''' EXAMPLES = ''' - name: Create license pool iworkflow_license_pool: accept_eula: "yes" name: "my-lic-pool" base_key: "XXXXX-XXXXX-XXXXX-XXXXX-XXXXXXX" state: "present" server: "iwf.mydomain.com" password: "secret" user: "admin" validate_certs: "no" delegate_to: localhost ''' RETURN = ''' ''' import time from ansible.module_utils.basic import BOOLEANS from ansible.module_utils.f5_utils import ( AnsibleF5Client, AnsibleF5Parameters, F5ModuleError, HAS_F5SDK, iControlUnexpectedHTTPError ) class Parameters(AnsibleF5Parameters): api_map = { 'baseRegKey': 'base_key' } returnables = [] api_attributes = [ 'baseRegKey', 'state' ] updatables = [] def to_return(self): result = {} for returnable in self.returnables: result[returnable] = getattr(self, returnable) result = self._filter_params(result) return result def api_params(self): result = {} for api_attribute in self.api_attributes: if self.api_map is not None and api_attribute in self.api_map: result[api_attribute] = getattr(self, self.api_map[api_attribute]) else: result[api_attribute] = getattr(self, api_attribute) result = self._filter_params(result) return result @property def name(self): if self._values['name'] is None: return None name = str(self._values['name']).strip() if name == '': raise F5ModuleError( "You must specify a name for this module" ) return name class ModuleManager(object): def __init__(self, client): self.client = client self.have = None self.want = Parameters(self.client.module.params) self.changes = Parameters() def 
_set_changed_options(self): changed = {} for key in Parameters.returnables: if getattr(self.want, key) is not None: changed[key] = getattr(self.want, key) if changed: self.changes = Parameters(changed) def _update_changed_options(self): changed = {} for key in Parameters.updatables: if getattr(self.want, key) is not None: attr1 = getattr(self.want, key) attr2 = getattr(self.have, key) if attr1 != attr2: changed[key] = attr1 if changed: self.changes = Parameters(changed) return True return False def _pool_is_licensed(self): if self.have.state == 'LICENSED': return True return False def _pool_is_unlicensed_eula_unaccepted(self, current): if current.state != 'LICENSED' and not self.want.accept_eula: return True return False def exec_module(self): changed = False result = dict() state = self.want.state try: if state == "present": changed = self.present() elif state == "absent": changed = self.absent() except iControlUnexpectedHTTPError as e: raise F5ModuleError(str(e)) result.update(**self.changes.to_return()) result.update(dict(changed=changed)) return result def exists(self): collection = self.client.api.cm.shared.licensing.pools_s.get_collection( requests_params=dict( params="$filter=name+eq+'{0}'".format(self.want.name) ) ) if len(collection) == 1: return True elif len(collection) == 0: return False else: raise F5ModuleError( "Multiple license pools with the provided name were found!" 
) def present(self):<|fim▁hole|> def should_update(self): if self._pool_is_licensed(): return False if self._pool_is_unlicensed_eula_unaccepted(): return False return True def update(self): self.have = self.read_current_from_device() if not self.should_update(): return False if self.module.check_mode: return True self.update_on_device() return True def update_on_device(self): collection = self.client.api.cm.shared.licensing.pools_s.get_collection( requests_params=dict( params="$filter=name+eq+'{0}'".format(self.want.name) ) ) resource = collection.pop() resource.modify( state='RELICENSE', method='AUTOMATIC' ) return self._wait_for_license_pool_state_to_activate(resource) def create(self): self._set_changed_options() if self.client.check_mode: return True if self.want.base_key is None: raise F5ModuleError( "You must specify a 'base_key' when creating a license pool" ) self.create_on_device() return True def read_current_from_device(self): collection = self.client.api.cm.shared.licensing.pools_s.get_collection( requests_params=dict( params="$filter=name+eq+'{0}'".format(self.want.name) ) ) resource = collection.pop() result = resource.attrs return Parameters(result) def create_on_device(self): resource = self.client.api.cm.shared.licensing.pools_s.pool.create( name=self.want.name, baseRegKey=self.want.base_key, method="AUTOMATIC" ) return self._wait_for_license_pool_state_to_activate(resource) def _wait_for_license_pool_state_to_activate(self, pool): error_values = ['EXPIRED', 'FAILED'] # Wait no more than 5 minutes for x in range(1, 30): pool.refresh() if pool.state == 'LICENSED': return True elif pool.state == 'WAITING_FOR_EULA_ACCEPTANCE': pool.modify( eulaText=pool.eulaText, state='ACCEPTED_EULA' ) elif pool.state in error_values: raise F5ModuleError(pool.errorText) time.sleep(10) def absent(self): if self.exists(): return self.remove() return False def remove(self): if self.client.check_mode: return True self.remove_from_device() if self.exists(): raise 
F5ModuleError("Failed to delete the license pool") return True def remove_from_device(self): collection = self.client.api.cm.shared.licensing.pools_s.get_collection( requests_params=dict( params="$filter=name+eq+'{0}'".format(self.want.name) ) ) resource = collection.pop() if resource: resource.delete() class ArgumentSpec(object): def __init__(self): self.supports_check_mode = True self.argument_spec = dict( accept_eula=dict( type='bool', default='no', choices=BOOLEANS ), base_key=dict( required=False, no_log=True ), name=dict( required=True ), state=dict( required=False, default='present', choices=['absent', 'present'] ) ) self.f5_product_name = 'iworkflow' def main(): if not HAS_F5SDK: raise F5ModuleError("The python f5-sdk module is required") spec = ArgumentSpec() client = AnsibleF5Client( argument_spec=spec.argument_spec, supports_check_mode=spec.supports_check_mode, f5_product_name=spec.f5_product_name ) try: mm = ModuleManager(client) results = mm.exec_module() client.module.exit_json(**results) except F5ModuleError as e: client.module.fail_json(msg=str(e)) if __name__ == '__main__': main()<|fim▁end|>
if self.exists(): return self.update() else: return self.create()
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod application;<|fim▁hole|><|fim▁end|>
pub use application::Application; pub use application::ApplicationMessage;
<|file_name|>term_pygame.py<|end_file_name|><|fim▁begin|>import os import pygame import sys import threading, time from pygame.locals import * import logging log = logging.getLogger('pytality.term.pygame') log.debug("pygame version: %r", pygame.version.ver) """ A mapping of special keycodes into representative strings. Based off the keymap in WConio, but with 'alt', 'ctrl', and 'shift' stripped in order to be portable with the other pytality backends. """ key_map = { K_RETURN: 'enter', K_F1 : 'f1', K_F2 : 'f2', K_F3 : 'f3', K_F4 : 'f4', K_F5 : 'f5', K_F6 : 'f6', K_F7 : 'f7', K_F8 : 'f8', K_F9 : 'f9', K_F10 : 'f10', K_INSERT : 'ins', K_DELETE : 'del', K_HOME : 'home', K_END : 'end', K_PAGEDOWN : 'pgdn', K_PAGEUP : 'pgup', K_DOWN : 'down', K_LEFT : 'left', K_RIGHT : 'right', K_UP : 'up', } #image path #todo: figure out how I want to make this configurable if hasattr(sys, 'frozen'): base_path = os.path.join(os.path.realpath(os.path.dirname(sys.argv[0])), 'data') else: base_path = os.path.join(os.path.dirname(__file__), 'silverlight_html', 'images') #pixel dimensions of each cell W = 8 H = 12 #loaded sprite data sprites = {} #have we quit? 
quit = False #blinky cursor stuff cursor_thread = None replaced_character = None cursor_x = 0 cursor_y = 0 cursor_type = None class CursorThread(threading.Thread): def __init__(self, *args, **kwargs): super(CursorThread, self).__init__(*args, **kwargs) self.quitEvent = threading.Event() def run(self): blink = True while True: blink = not blink try: pygame.event.post(pygame.event.Event(USEREVENT, blink=blink)) except pygame.error: return if self.quitEvent.wait(timeout=0.5): break def init(use_cp437=True, blink=False): pygame.init() #There are several kinds of event we are patently not interested in pygame.event.set_blocked([ MOUSEBUTTONUP, JOYAXISMOTION, JOYBALLMOTION, JOYHATMOTION, JOYBUTTONDOWN, JOYBUTTONUP, #we only need KEYDOWN KEYUP ]) pygame.mouse.set_visible(False) #prepare the raw_getkey generator prepare_raw_getkey() global quit quit = False #spawn a blinky-cursor manager global cursor_thread, replaced_character, cursor_x, cursor_y, cursor_type cursor_x = 0 cursor_y = 0 replaced_character = None cursor_type = None if blink: cursor_thread = CursorThread() cursor_thread.daemon = True cursor_thread.start() def load_sprites(): if 'bg' in sprites: #we only need to load once return def load_image(key_name, *filepath): full_path = os.path.join(base_path, *filepath) surface = pygame.image.load(full_path).convert_alpha() sprites[key_name] = surface load_image('bg', 'colors.png') for color_id in range(16): load_image(color_id, 'char', '%s.png' % color_id) def blink_cursor(event): global replaced_character if event.blink: replace_character() else: restore_character() def replace_character(): global replaced_character if not cursor_type: return fg, bg, ch = get_at(cursor_x, cursor_y) replaced_character = (cursor_x, cursor_y, fg, bg, ch) new_fg = 15 if bg == 15: new_fg = 7 blit_at(cursor_x, cursor_y, new_fg, bg, cursor_type) pygame.display.flip() def restore_character(): global replaced_character if not replaced_character: return x, y, fg, bg, ch = replaced_character 
blit_at(x, y, fg, bg, ch) pygame.display.flip() replaced_character = None #---------------------------------------------------------------------------- #Actual functions def flip(): #keep the event queue happy for event in pygame.event.get([ #this should be all the event types we aren't blocking #and aren't about keyboard input QUIT, ACTIVEEVENT, VIDEORESIZE, VIDEOEXPOSE, USEREVENT ]): if event.type == QUIT: raise KeyboardInterrupt() elif event.type == USEREVENT: blink_cursor(event) else: #we don't actually care pass #flip the screen<|fim▁hole|> pygame.display.flip() def clear(): if quit: return screen.fill((0, 0, 0)) global cell_data cell_data = [ [ [0, 0, ' '] for cell in range(max_x) ] for row in range(max_y) ] def resize(width, height): global screen screen = pygame.display.set_mode((width*W, height*H)) #we don't use alpha, and turning it off makes it a tad faster screen.set_alpha(None) #load the console images to blit later load_sprites() #set our max dimensions global max_x, max_y max_x, max_y = width, height clear() flip() def reset(): pygame.display.quit() global quit quit = True if cursor_thread: cursor_thread.quitEvent.set() cursor_thread.join() def move_cursor(x, y): global cursor_x, cursor_y restore_character() cursor_x = x cursor_y = y replace_character() def set_title(title): pygame.display.set_caption(title) def set_cursor_type(i): global cursor_type cursor_map = { 0: None, 1: '_', 2: chr(0xDB) } restore_character() cursor_type = cursor_map[i] def cache_sprite(fg, bg, ch): bg_sprite = sprites['bg'] fg_sprite = sprites[fg] index = ord(ch) #coordinates on the bg sprite map bg_x = bg * W #coordinates on the fg sprite map fg_x = (index % 16) * W fg_y = int(index / 16) * H cell_sprite = pygame.Surface((W, H)) #voodoo: this helps a little bit. 
cell_sprite.set_alpha(None) #blit the background and foreground to the cell cell_sprite.blit(bg_sprite, dest=(0, 0), area=pygame.Rect(bg_x, 0, W, H)) cell_sprite.blit(fg_sprite, dest=(0, 0), area=pygame.Rect(fg_x, fg_y, W, H)) sprites[(fg, bg, ch)] = cell_sprite return cell_sprite def blit_at(x, y, fg, bg, ch): #blit one character to the screen. #because function calls are pricey, this is also inlined (ew) in draw_buffer, so the contents are kept short. #coordinates on the screen screen_x = x * W screen_y = y * H #cache each (bg, fg, index) cell we draw into a surface so it's easier to redraw. #it's a little bit of a memory waste, and takes longer on the first draw, but we're dealing with ascii here #so there's probably a lot of reuse. try: cell_sprite = sprites[(fg, bg, ch)] except KeyError: #make a new one cell_sprite = cache_sprite(fg, bg, ch) #blit the cell to the screen screen.blit(cell_sprite, dest=(screen_x, screen_y)) def draw_buffer(source, start_x, start_y): """ render the buffer to our backing. This is a hotpath, and there's more microoptimization here than i'd like, but FPS is kindof important. """ y = start_y #lookups we can cache into locals #i know, it's such a microoptimization, but this path qualifies as hot local_cell_data, local_sprites, local_screen_blit = cell_data, sprites, screen.blit local_W, local_H = W, H screen_width, screen_height = max_x, max_y source_width = source.width is_overlay = source.is_overlay for row in source._data: if y < 0: y += 1 continue if y >= screen_height: break x = start_x #do something analogous to row[:source.width] #but without the pointless copy that requires w = 0 for fg, bg, ch in row: if x >= screen_width or w >= source_width: break if x >= 0: #no need to blit if it's already identical old_data = local_cell_data[y][x] new_data = [fg, bg, ch] if new_data != old_data and not (is_overlay and ch == ' '): #draw it and remember the info for our cache #this used to call blit_at but now it's inline. 
try: cell_sprite = sprites[(fg, bg, ch)] except KeyError: #make a new one cell_sprite = cache_sprite(fg, bg, ch) #blit the cell to the screen local_screen_blit(cell_sprite, dest=(x*local_W, y*local_H)) #remember the info for the cache local_cell_data[y][x] = new_data x += 1 w += 1 y += 1 source.dirty = False return def get_at(x, y): if x < 0 or x >= max_x or y < 0 or y >= max_y: raise ValueError("get_at: Invalid coordinate (%r, %r)" % (x,y)) global cell_data return cell_data[y][x] def prepare_raw_getkey(): """ It looks like pygame fully intends for you to process _all_ keyboard input at the moment you look at the event queue. That won't do here. so we turn raw_getkey into a generator. Worse, pygame.event.wait() can't filter by type and removes the event from the queue, so we have to keep re-adding events we didn't want in the first place. Ugh. """ #this is weird - pygame turns off keyboard repeat by default, which you can re-enable #by setting a delay in ms, but "what the system normally does" is not an option. #it seems like 150ms delay and 15 keys-per-second is normalish. pygame.key.set_repeat(150, 1000 / 15) global raw_getkey def translate(event): if event.type == MOUSEMOTION: x, y = event.pos return ("mouse_motion", x / W, y / H) if event.type == KEYDOWN: log.debug("key event: %r", event.dict) if event.key in key_map: return key_map[event.key] return event.unicode if event.type == MOUSEBUTTONDOWN: x, y = event.pos return ("mouse_down", x / W, y / H) def keypump(): items = [] event_types = [MOUSEMOTION, KEYDOWN, MOUSEBUTTONDOWN] while True: if not items: if pygame.event.peek(event_types): #there's keyboard input pending! great! items.extend(pygame.event.get(event_types)) else: #there's no keyboard input pending, so we need to take a nap until there is. #if we get an event we dont care about, we have to put it back #but if we put it back, .wait() will give it right back to us #so we have to keep it around until we find what we want, then re-add it. #ugh. 
ignored_items = [] while True: item = pygame.event.wait() if item.type == USEREVENT: blink_cursor(item) elif item.type not in event_types: ignored_items.append(item) else: items.append(item) break for ignored_item in ignored_items: pygame.event.post(ignored_item) yield translate(items.pop(0)) #assign the generator's next() method as raw_getkey raw_getkey = keypump().next<|fim▁end|>
<|file_name|>ice-7868.rs<|end_file_name|><|fim▁begin|>#![warn(clippy::undocumented_unsafe_blocks)] #![allow(clippy::no_effect)] #[path = "auxiliary/ice-7868-aux.rs"] mod zero; <|fim▁hole|><|fim▁end|>
fn main() {}
<|file_name|>_component_store.py<|end_file_name|><|fim▁begin|>__all__ = [ 'ComponentStore', ] from pathlib import Path import copy import requests from typing import Callable from . import _components as comp from .structures import ComponentReference class ComponentStore: def __init__(self, local_search_paths=None, url_search_prefixes=None): self.local_search_paths = local_search_paths or ['.'] self.url_search_prefixes = url_search_prefixes or [] self._component_file_name = 'component.yaml' self._digests_subpath = 'versions/sha256' self._tags_subpath = 'versions/tags' def load_component_from_url(self, url): return comp.load_component_from_url(url) def load_component_from_file(self, path): return comp.load_component_from_file(path) def load_component(self, name, digest=None, tag=None): ''' Loads component local file or URL and creates a task factory function Search locations: <local-search-path>/<name>/component.yaml <url-search-prefix>/<name>/component.yaml If the digest is specified, then the search locations are: <local-search-path>/<name>/versions/sha256/<digest> <url-search-prefix>/<name>/versions/sha256/<digest> If the tag is specified, then the search locations are: <local-search-path>/<name>/versions/tags/<digest> <url-search-prefix>/<name>/versions/tags/<digest> Args: name: Component name used to search and load the component artifact containing the component definition. Component name usually has the following form: group/subgroup/component digest: Strict component version. SHA256 hash digest of the component artifact file. Can be used to load a specific component version so that the pipeline is reproducible. tag: Version tag. Can be used to load component version from a specific branch. The version of the component referenced by a tag can change in future. Returns: A factory function with a strongly-typed signature. Once called with the required arguments, the factory constructs a pipeline task instance (ContainerOp). 
''' #This function should be called load_task_factory since it returns a factory function. #The real load_component function should produce an object with component properties (e.g. name, description, inputs/outputs). #TODO: Change this function to return component spec object but it should be callable to construct tasks. component_ref = ComponentReference(name=name, digest=digest, tag=tag) component_ref = self._load_component_spec_in_component_ref(component_ref) return comp._create_task_factory_from_component_spec( component_spec=component_ref.spec, component_ref=component_ref, ) def _load_component_spec_in_component_ref( self, component_ref: ComponentReference, ) -> ComponentReference: '''Takes component_ref, finds the component spec and returns component_ref with .spec set to the component spec. See ComponentStore.load_component for the details of the search logic. ''' if component_ref.spec: return component_ref component_ref = copy.copy(component_ref) if component_ref.url: component_ref.spec = comp._load_component_spec_from_url(component_ref.url) return component_ref name = component_ref.name if not name: raise TypeError("name is required") if name.startswith('/') or name.endswith('/'): raise ValueError('Component name should not start or end with slash: "{}"'.format(name)) digest = component_ref.digest tag = component_ref.tag tried_locations = [] if digest is not None and tag is not None: raise ValueError('Cannot specify both tag and digest') if digest is not None: path_suffix = name + '/' + self._digests_subpath + '/' + digest elif tag is not None:<|fim▁hole|> #Trying local search paths for local_search_path in self.local_search_paths: component_path = Path(local_search_path, path_suffix) tried_locations.append(str(component_path)) if component_path.is_file(): # TODO: Verify that the content matches the digest (if specified). 
component_ref._local_path = str(component_path) component_ref.spec = comp._load_component_spec_from_file(str(component_path)) return component_ref #Trying URL prefixes for url_search_prefix in self.url_search_prefixes: url = url_search_prefix + path_suffix tried_locations.append(url) try: response = requests.get(url) #Does not throw exceptions on bad status, but throws on dead domains and malformed URLs. Should we log those cases? response.raise_for_status() except: continue if response.content: # TODO: Verify that the content matches the digest (if specified). component_ref.url = url component_ref.spec = comp._load_component_spec_from_yaml_or_zip_bytes(response.content) return component_ref raise RuntimeError('Component {} was not found. Tried the following locations:\n{}'.format(name, '\n'.join(tried_locations))) def _load_component_from_ref(self, component_ref: ComponentReference) -> Callable: component_ref = self._load_component_spec_in_component_ref(component_ref) return comp._create_task_factory_from_component_spec(component_spec=component_ref.spec, component_ref=component_ref) ComponentStore.default_store = ComponentStore( local_search_paths=[ '.', ], url_search_prefixes=[ 'https://raw.githubusercontent.com/kubeflow/pipelines/master/components/' ], )<|fim▁end|>
path_suffix = name + '/' + self._tags_subpath + '/' + tag #TODO: Handle symlinks in GIT URLs else: path_suffix = name + '/' + self._component_file_name
<|file_name|>install.py<|end_file_name|><|fim▁begin|>import zeit.cms.generation<|fim▁hole|> import zeit.calendar.calendar import zeit.calendar.interfaces @zeit.cms.generation.get_root def evolve(root): zeit.cms.generation.install.installLocalUtility( root, zeit.calendar.calendar.Calendar, 'calendar', zeit.calendar.interfaces.ICalendar)<|fim▁end|>
import zeit.cms.generation.install
<|file_name|>crawlForMovies.test.js<|end_file_name|><|fim▁begin|><|fim▁hole|>'use strict' /* globals describe, test, expect, jest */ const path = require('path') // This is the module we are testing const { crawlForMovies } = require('../crawlForMovies') const crawlParams = { rootDirectory: __dirname, searchDirCb: jest.fn(), movieFileCb: jest.fn() } describe('crawlForMovies', () => { test('discovers test movies without crashing', () => { return expect(crawlForMovies(crawlParams)).resolves.toBeUndefined() }) test('calls search directory callback during crawl', () => { const testDirs = crawlParams.searchDirCb.mock.calls.map(dir => { const { name } = path.parse(dir[0]) return name }) expect(testDirs).toMatchSnapshot() }) test('calls movie file callback during crawl', () => { const movies = crawlParams.movieFileCb.mock.calls.map(file => { const { name, ext } = path.parse(file[0]) return `${name}${ext}` }) expect(movies).toMatchSnapshot() }) })<|fim▁end|>
<|file_name|>env.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Ethcore (UK) Ltd. // This file is part of Parity. <|fim▁hole|>// Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. //! Vm environment. use hash::Address; use uint::Uint; /// Vm environment. #[derive(Debug, PartialEq, Deserialize)] pub struct Env { /// Address. #[serde(rename="currentCoinbase")] pub author: Address, /// Difficulty #[serde(rename="currentDifficulty")] pub difficulty: Uint, /// Gas limit. #[serde(rename="currentGasLimit")] pub gas_limit: Uint, /// Number. #[serde(rename="currentNumber")] pub number: Uint, /// Timestamp. #[serde(rename="currentTimestamp")] pub timestamp: Uint, } #[cfg(test)] mod tests { use serde_json; use vm::Env; #[test] fn env_deserialization() { let s = r#"{ "currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", "currentDifficulty" : "0x0100", "currentGasLimit" : "0x0f4240", "currentNumber" : "0x00", "currentTimestamp" : "0x01" }"#; let _deserialized: Env = serde_json::from_str(s).unwrap(); // TODO: validate all fields } }<|fim▁end|>
<|file_name|>nmcollector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python from noisemapper.mapper import * #from collectors.lib import utils ### Define the object mapper and start mapping def main():<|fim▁hole|># utils.drop_privileges() mapper = NoiseMapper() mapper.run() if __name__ == "__main__": main()<|fim▁end|>
<|file_name|>phases.go<|end_file_name|><|fim▁begin|>package build type Phase int const ( PhasePrivileged Phase = iota PhasePrivilegeDropped PhasePreInstall PhasePostInstall ) type PhaseCompileable interface {<|fim▁hole|>}<|fim▁end|>
InstructionsForPhase(phase Phase) []Instruction
<|file_name|>media_manipulator.py<|end_file_name|><|fim▁begin|>from typing import List from backend.common.cache_clearing import get_affected_queries from backend.common.manipulators.manipulator_base import ManipulatorBase from backend.common.models.cached_model import TAffectedReferences from backend.common.models.media import Media class MediaManipulator(ManipulatorBase[Media]): """ Handle Media database writes. """ @classmethod def getCacheKeysAndQueries(<|fim▁hole|> cls, affected_refs: TAffectedReferences ) -> List[get_affected_queries.TCacheKeyAndQuery]: return get_affected_queries.media_updated(affected_refs) @classmethod def updateMerge( cls, new_model: Media, old_model: Media, auto_union: bool = True ) -> Media: cls._update_attrs(new_model, old_model, auto_union) return old_model<|fim▁end|>
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>import { ArtworkExtraLinks_artwork } from "__generated__/ArtworkExtraLinks_artwork.graphql" import { AuctionTimerState } from "app/Components/Bidding/Components/Timer" import { navigate } from "app/navigation/navigate" import { partnerName } from "app/Scenes/Artwork/Components/ArtworkExtraLinks/partnerName" import { useSelectedTab } from "app/store/GlobalStore" import { sendEmail } from "app/utils/sendEmail" import { Schema, track } from "app/utils/track" import { Sans, Spacer } from "palette" import React from "react" import { Text, View } from "react-native" import { createFragmentContainer, graphql } from "react-relay" import { useTracking } from "react-tracking" export interface ArtworkExtraLinksProps { artwork: ArtworkExtraLinks_artwork auctionState: AuctionTimerState } @track() export class ArtworkExtraLinks extends React.Component<ArtworkExtraLinksProps> { handleReadOurFAQTap = () => { navigate(`/buy-now-feature-faq`) } @track({ action_name: Schema.ActionNames.AskASpecialist, action_type: Schema.ActionTypes.Tap, context_module: Schema.ContextModules.ArtworkExtraLinks, }) // @ts-expect-error STRICTNESS_MIGRATION --- 🚨 Unsafe legacy code 🚨 Please delete this and fix any type errors if you have time 🙏 handleAskASpecialistTap(emailAddress) { const { artwork } = this.props const mailtoSubject = `Inquiry on ${artwork.title}`.concat( artwork.artist && artwork.artist.name ? 
` by ${artwork.artist.name}` : "" ) sendEmail(emailAddress, { subject: mailtoSubject }) } @track({ action_name: Schema.ActionNames.AuctionsFAQ, action_type: Schema.ActionTypes.Tap, context_module: Schema.ContextModules.ArtworkExtraLinks, }) handleReadOurAuctionFAQsTap() { navigate(`/auction-faq`) return } @track({ action_name: Schema.ActionNames.ConditionsOfSale, action_type: Schema.ActionTypes.Tap, context_module: Schema.ContextModules.ArtworkExtraLinks, }) handleConditionsOfSaleTap() { navigate(`/conditions-of-sale`) } renderFAQAndSpecialist = () => { const { artwork: { isAcquireable, isOfferable, isInAuction, sale, isForSale }, auctionState, } = this.props if (isInAuction && sale && isForSale && auctionState !== AuctionTimerState.CLOSED) { return ( <> <Sans size="2" color="black60"> By placing a bid you agree to {partnerName(sale)}{" "} <Text style={{ textDecorationLine: "underline" }} onPress={() => this.handleConditionsOfSaleTap()} > Conditions of Sale </Text> . </Sans> <Spacer mb={1} /> <Sans size="2" color="black60"> Have a question?{" "} <Text style={{ textDecorationLine: "underline" }} onPress={() => this.handleReadOurAuctionFAQsTap()} > Read our auction FAQs </Text>{" "} or{" "} <Text style={{ textDecorationLine: "underline" }} onPress={() => this.handleAskASpecialistTap("[email protected]")} > ask a specialist </Text> . </Sans> </> ) } else if (isAcquireable || isOfferable) { return ( <Sans size="2" color="black60"> Have a question?{" "} <Text style={{ textDecorationLine: "underline" }} onPress={() => this.handleReadOurFAQTap()} > Read our FAQ </Text>{" "} or{" "} <Text style={{ textDecorationLine: "underline" }} onPress={() => this.handleAskASpecialistTap("[email protected]")} > ask a specialist </Text> . 
</Sans> ) } else { return null } } render() { const { artwork: { artists }, } = this.props // @ts-expect-error STRICTNESS_MIGRATION --- 🚨 Unsafe legacy code 🚨 Please delete this and fix any type errors if you have time 🙏 const consignableArtistsCount = artists.filter((artist) => artist.isConsignable).length // @ts-expect-error STRICTNESS_MIGRATION --- 🚨 Unsafe legacy code 🚨 Please delete this and fix any type errors if you have time 🙏 const artistName = artists && artists.length === 1 ? artists[0].name : null return ( <> {this.renderFAQAndSpecialist()} {!!consignableArtistsCount && ( <ConsignmentsLink artistName={consignableArtistsCount > 1 ? "these artists" : artistName ?? "this artist"} /> )} </> ) } } const ConsignmentsLink: React.FC<{ artistName: string }> = ({ artistName }) => { const isSellTab = useSelectedTab() === "sell"<|fim▁hole|> <Sans size="2" color="black60"> Want to sell a work by {artistName}?{" "} <Text style={{ textDecorationLine: "underline" }} onPress={() => { tracking.trackEvent({ action_name: Schema.ActionNames.ConsignWithArtsy, action_type: Schema.ActionTypes.Tap, context_module: Schema.ContextModules.ArtworkExtraLinks, }) navigate(isSellTab ? "/collections/my-collection/marketing-landing" : "/sales") }} > Consign with Artsy </Text> . </Sans> </View> ) } export const ArtworkExtraLinksFragmentContainer = createFragmentContainer(ArtworkExtraLinks, { artwork: graphql` fragment ArtworkExtraLinks_artwork on Artwork { isAcquireable isInAuction isOfferable title isForSale sale { isClosed isBenefit partner { name } } artists { isConsignable name } artist { name } } `, })<|fim▁end|>
const tracking = useTracking() return ( <View>
<|file_name|>arrays.js<|end_file_name|><|fim▁begin|>/* * Meran - MERAN UNLP is a ILS (Integrated Library System) wich provides Catalog, * Circulation and User's Management. It's written in Perl, and uses Apache2 * Web-Server, MySQL database and Sphinx 2 indexing. * Copyright (C) 2009-2013 Grupo de desarrollo de Meran CeSPI-UNLP * * This file is part of Meran. * * Meran is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Meran is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Meran. If not, see <http://www.gnu.org/licenses/>. */ define([],function(){ 'use strict' /** * Implements unique() using the browser's sort(). * * @param a * The array to sort and strip of duplicate values. * Warning: this array will be modified in-place. * @param compFn * A custom comparison function that accepts two values a and * b from the given array and returns -1, 0, 1 depending on * whether a < b, a == b, a > b respectively. * * If no compFn is provided, the algorithm will use the * browsers default sort behaviour and loose comparison to * detect duplicates. * @return * The given array. */ function sortUnique(a, compFn){ var i; if (compFn) { a.sort(compFn); for (i = 1; i < a.length; i++) { if (0 === compFn(a[i], a[i - 1])) { a.splice(i--, 1); }<|fim▁hole|> for (i = 1; i < a.length; i++) { // Use loosely typed comparsion if no compFn is given // to avoid sortUnique( [6, "6", 6] ) => [6, "6", 6] if (a[i] == a[i - 1]) { a.splice(i--, 1); } } } return a; } /** * Shallow comparison of two arrays. * * @param a, b * The arrays to compare. 
* @param equalFn * A custom comparison function that accepts two values a and * b from the given arrays and returns true or false for * equal and not equal respectively. * * If no equalFn is provided, the algorithm will use the strict * equals operator. * @return * True if all items in a and b are equal, false if not. */ function equal(a, b, equalFn) { var i = 0, len = a.length; if (len !== b.length) { return false; } if (equalFn) { for (; i < len; i++) { if (!equalFn(a[i], b[i])) { return false; } } } else { for (; i < len; i++) { if (a[i] !== b[i]) { return false; } } } return true; } /** * ECMAScript map replacement * See https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Array/map * And http://es5.github.com/#x15.4.4.19 * It's not exactly according to standard, but it does exactly what one expects. */ function map(a, fn) { var i, len, result = []; for (i = 0, len = a.length; i < len; i++) { result.push(fn(a[i])); } return result; } function mapNative(a, fn) { // Call map directly on the object instead of going through // Array.prototype.map. This avoids the problem that we may get // passed an array-like object (NodeList) which may cause an // error if the implementation of Array.prototype.map can only // deal with arrays (Array.prototype.map may be native or // provided by a javscript framework). return a.map(fn); } return { sortUnique: sortUnique, equal: equal, map: Array.prototype.map ? mapNative : map }; });<|fim▁end|>
} } else { a.sort();
<|file_name|>macros_main_return.rs<|end_file_name|><|fim▁begin|>use tests_build::tokio; <|fim▁hole|>#[tokio::main] async fn main() -> Result<(), ()> { return Ok(()); }<|fim▁end|>
<|file_name|>echo.rs<|end_file_name|><|fim▁begin|>extern crate getopts; use std::os; use std::old_io::{print, println}; use std::old_io::stdio; static VERSION: &'static str = "1.0.0"; fn main() { let args = os::args(); let ref program = args[0]; // Set possible flags. // The first argument to `optflag` is the short flag name. // The second argument is the long flag name. // The third argument is the help text. let opts = [ getopts::optflag("n", "", "do not output the trailing newline"), getopts::optflag("h", "help", "display this help and exit"), getopts::optflag("V", "version", "output version information and exit"), ]; let matches = match getopts::getopts(args.tail(), &opts) { Ok(m) => m, Err(f) => { println!("{}", f); os::set_exit_status(1); return; // The exit code is 0 (success) by default.<|fim▁hole|> if matches.opt_present("help") { //^ We could as well have used the short name: "h" println!("echo {} - display a line of text", VERSION); println!(""); println!("Usage:"); println!(" {} [SHORT-OPTION]... [STRING]...", program); println!(" {} LONG-OPTION", program); println!(""); println(getopts::usage("Echo the STRING(s) to standard output.", &opts) .as_slice()); return; } if matches.opt_present("version") { println!("echo version: {}", VERSION); return; } if !matches.free.is_empty() { //^ `matches.free` contains all the arguments that are not options. let string = matches.free.connect(" "); print(string.as_slice()); } if !matches.opt_present("n") { println!("") } else { stdio::flush(); } }<|fim▁end|>
// Any exit code other than 0 indicates failure. } };
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>import os from os.path import expanduser import sys import yaml def getSettings(): if not getSettings.settings: cwd = os.getcwd() path = cwd + '/.settings.yaml' if not os.path.isfile(path): path = cwd + '/.screeps_settings.yaml' if not os.path.isfile(path): path = expanduser('~') + '/.screeps_settings.yaml' if not os.path.isfile(path): path = '/vagrant/.screeps_settings.yaml' <|fim▁hole|> if not os.path.isfile(path): print 'no settings file found' sys.exit(-1) return False with open(path, 'r') as f: getSettings.settings = yaml.load(f) return getSettings.settings getSettings.settings = False<|fim▁end|>
<|file_name|>MonospaceText.test.tsx<|end_file_name|><|fim▁begin|>import { h, mount } from 'bore'; import * as expect from 'expect'; import { MonospaceText } from './index'; describe( MonospaceText.is, () => {<|fim▁hole|> describe( `Custom element`, () => { it( `should be registered`, () => { expect( customElements.get( MonospaceText.is ) ).toBe( MonospaceText ); } ); it( `should render via JSX IntrinsicElement`, () => { return mount( <bl-monospace-text /> ).wait(( element ) => { expect( element.node.localName ).toBe( MonospaceText.is ); } ); } ); it( `should render via JSX class`, () => { return mount( <bl-monospace-text /> ).wait(( element ) => { expect( element.has( '.c-text--mono' ) ).toBe( true ); } ); } ); } ); } );<|fim▁end|>
<|file_name|>internalproperty.cpp<|end_file_name|><|fim▁begin|>/**************************************************************************** ** ** Copyright (C) 2015 The Qt Company Ltd. ** Contact: http://www.qt.io/licensing ** ** This file is part of Qt Creator. ** ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and The Qt Company. For licensing terms and ** conditions see http://www.qt.io/terms-conditions. For further information ** use the contact form at http://www.qt.io/contact-us. ** ** GNU General Public License Usage ** Alternatively, this file may be used under the terms of the GNU ** General Public License version 3.0 as published by the Free Software ** Foundation and appearing in the file LICENSE.GPLv3 included in the ** packaging of this file. Please review the following information to ** ensure the GNU General Public License version 3.0 requirements will be ** met: http://www.gnu.org/copyleft/gpl.html. 
** ****************************************************************************/ #include "internalproperty.h" #include "internalbindingproperty.h" #include "internalvariantproperty.h" #include "internalnodelistproperty.h" #include "internalnodeproperty.h" #include "internalsignalhandlerproperty.h" #include "internalnode_p.h" namespace QmlDesigner { namespace Internal { // Creates invalid InternalProperty InternalProperty::InternalProperty() { } InternalProperty::~InternalProperty() { } InternalProperty::InternalProperty(const PropertyName &name, const InternalNode::Pointer &propertyOwner) : m_name(name), m_propertyOwner(propertyOwner) { Q_ASSERT_X(!name.isEmpty(), Q_FUNC_INFO, "Name of property cannot be empty"); } InternalProperty::Pointer InternalProperty::internalPointer() const { Q_ASSERT(!m_internalPointer.isNull()); return m_internalPointer.toStrongRef(); } void InternalProperty::setInternalWeakPointer(const Pointer &pointer) { Q_ASSERT(!pointer.isNull()); m_internalPointer = pointer; } bool InternalProperty::isValid() const { return m_propertyOwner && !m_name.isEmpty(); } PropertyName InternalProperty::name() const { return m_name; } bool InternalProperty::isBindingProperty() const { return false; } bool InternalProperty::isVariantProperty() const { return false; }<|fim▁hole|> QSharedPointer<InternalBindingProperty> InternalProperty::toBindingProperty() const { Q_ASSERT(internalPointer().dynamicCast<InternalBindingProperty>()); return internalPointer().staticCast<InternalBindingProperty>(); } bool InternalProperty::isNodeListProperty() const { return false; } bool InternalProperty::isNodeProperty() const { return false; } bool InternalProperty::isNodeAbstractProperty() const { return false; } bool InternalProperty::isSignalHandlerProperty() const { return false; } QSharedPointer<InternalVariantProperty> InternalProperty::toVariantProperty() const { Q_ASSERT(internalPointer().dynamicCast<InternalVariantProperty>()); return 
internalPointer().staticCast<InternalVariantProperty>(); } InternalNode::Pointer InternalProperty::propertyOwner() const { return m_propertyOwner.toStrongRef(); } QSharedPointer<InternalNodeListProperty> InternalProperty::toNodeListProperty() const { Q_ASSERT(internalPointer().dynamicCast<InternalNodeListProperty>()); return internalPointer().staticCast<InternalNodeListProperty>(); } QSharedPointer<InternalNodeProperty> InternalProperty::toNodeProperty() const { Q_ASSERT(internalPointer().dynamicCast<InternalNodeProperty>()); return internalPointer().staticCast<InternalNodeProperty>(); } QSharedPointer<InternalNodeAbstractProperty> InternalProperty::toNodeAbstractProperty() const { Q_ASSERT(internalPointer().dynamicCast<InternalNodeAbstractProperty>()); return internalPointer().staticCast<InternalNodeAbstractProperty>(); } QSharedPointer<InternalSignalHandlerProperty> InternalProperty::toSignalHandlerProperty() const { Q_ASSERT(internalPointer().dynamicCast<InternalSignalHandlerProperty>()); return internalPointer().staticCast<InternalSignalHandlerProperty>(); } void InternalProperty::remove() { propertyOwner()->removeProperty(name()); m_propertyOwner.clear(); } TypeName InternalProperty::dynamicTypeName() const { return m_dynamicType; } void InternalProperty::setDynamicTypeName(const TypeName &name) { m_dynamicType = name; } void InternalProperty::resetDynamicTypeName() { m_dynamicType.clear(); } } //namespace Internal } //namespace QmlDesigner<|fim▁end|>
<|file_name|>new.js<|end_file_name|><|fim▁begin|>var hook = require("../lib/resources/hook"); var hooks = require("hook.io-hooks"); var bodyParser = require('body-parser'); var mergeParams = require('merge-params'); var config = require('../config'); var themes = require('../lib/resources/themes'); var hooks = require('hook.io-hooks'); module['exports'] = function view (opts, callback) { var req = opts.request, res = opts.response; var $ = this.$, self = this; if (!req.isAuthenticated()) { req.session.redirectTo = "/new"; return res.redirect('/login'); } var user = req.session.user; var boot = { owner: user }; bodyParser()(req, res, function bodyParsed(){<|fim▁hole|> var params = req.resource.params; var gist = params.gist; if (req.method === "POST") { if (params.name.length === 0) { return res.end('Hook name is required!'); } // do not recreate hooks that already exist with that name params.owner = user || "Marak"; // hardcode Marak for testing if (typeof params.theme === 'string' && params.theme.length === 0) { delete params.theme; } if (typeof params.presenter === 'string' && params.presenter.length === 0) { delete params.presenter; } var query = { name: params.name, owner: req.session.user }; return hook.find(query, function(err, results){ if (results.length > 0) { var h = results[0]; return res.end('Hook already exists ' + '/' + h.owner + "/" + h.name); //return res.redirect('/' + h.owner + "/" + h.name + "?alreadyExists=true"); } params.cron = params.cronString; if (params.hookSource === "editor") { delete params.gist; params.source = params.codeEditor; } // TODO: filter params for only specified resource fields? 
return hook.create(params, function(err, result){ if (err) { return callback(null, err.message); } var h = result; req.hook = h; if (params.hookSource === "editor") { // the source of the hook is coming from the code editor return res.redirect('/' + h.owner + "/" + h.name + ""); } else { // the source of the hook is coming from a github gist opts.gist = gist; opts.req = opts.request; opts.res = opts.response; // fetch the hook from github and check if it has a schema / theme // if so, attach it to the hook document // TODO: can we remove this? it seems like this logic should be in the Hook.runHook execution chain... hook.fetchHookSourceCode(opts, function(err, code){ if (err) { return opts.res.end(err.message); } hook.attemptToRequireUntrustedHook(opts, function(err, _module){ if (err) { return opts.res.end(err.message) } h.mschema = _module.schema; h.theme = _module.theme; h.presenter = _module.presenter; h.save(function(){ // redirect to new hook friendly page return res.redirect('/' + h.owner + "/" + h.name + ""); //return callback(null, JSON.stringify(result, true, 2)); }); }); }); } }); }); } if (typeof req.session.gistLink === 'string') { // todo: after created, unset gistSource so it doesn't keep popping up $('.gist').attr('value', req.session.gistLink); } else { $('.gistStatus').remove(); } var services = hooks.services; var examples = {}; // pull out helloworld examples for every langauge hook.languages.forEach(function(l){ examples[l] = services['examples-' + l + '-helloworld']; }); boot.examples = examples; /* for (var e in examples) { for (var code in examples[e]) { // $('.services').append(examples[e][code]); } } */ self.parent.components.themeSelector.present({}, function(err, html){ var el = $('.themeSelector') el.html(html); var out = $.html(); out = out.replace('{{hook}}', JSON.stringify(boot, true, 2)); callback(null, out); }) }); };<|fim▁end|>
mergeParams(req, res, function(){});
<|file_name|>PySCUBA_design.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # PySCUBA/src/PySCUBA/PySCUBA_design.py # Author: Gregory Giecold for the GC Yuan Lab # Affiliation: Harvard University # Contact: [email protected]; [email protected] from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_MainWindow(object): def setupUi(self, MainWindow): self.setupMainWindow(MainWindow) self.centralwidget = QtGui.QWidget(MainWindow) self.centralwidget.setObjectName(_fromUtf8("centralwidget")) MainWindow.setCentralWidget(self.centralwidget) self.setupMenuBar(MainWindow) self.setupStatusBar(MainWindow) self.mainVerticalLayout = QtGui.QVBoxLayout(self.centralwidget) self.mainVerticalLayout.setObjectName(_fromUtf8("mainVerticalLayout")) self.gridLayout = QtGui.QGridLayout() self.gridLayout.setHorizontalSpacing(3) self.gridLayout.setVerticalSpacing(2) self.gridLayout.setObjectName(_fromUtf8("gridLayout")) self.hline1 = QtGui.QFrame(self.centralwidget) self.hline1.setFrameShadow(QtGui.QFrame.Raised) self.hline1.setLineWidth(7) self.hline1.setFrameShape(QtGui.QFrame.HLine) self.hline1.setObjectName(_fromUtf8("hline1")) self.hline1.raise_() self.cancelOkLayout = QtGui.QHBoxLayout() self.cancelOkLayout.setObjectName(_fromUtf8("cancelOkLayout")) self.hline2 = QtGui.QFrame(self.centralwidget) self.hline2.setFrameShadow(QtGui.QFrame.Raised) self.hline2.setLineWidth(7) self.hline2.setFrameShape(QtGui.QFrame.HLine) self.hline2.setObjectName(_fromUtf8("hline2")) self.hline2.raise_() self.graphicsVerticalLayout = QtGui.QVBoxLayout() self.graphicsVerticalLayout.setObjectName( 
_fromUtf8("graphicsVerticalLayout")) self.mainVerticalLayout.addLayout(self.gridLayout) self.mainVerticalLayout.addWidget(self.hline1) self.mainVerticalLayout.addLayout(self.cancelOkLayout) self.mainVerticalLayout.addWidget(self.hline2) self.mainVerticalLayout.addLayout(self.graphicsVerticalLayout) self.adornGridLayout(MainWindow) self.adornCancelOkLayout(MainWindow) self.adornGraphicsVerticalLayout(MainWindow) self.retranslateUi(MainWindow) QtCore.QMetaObject.connectSlotsByName(MainWindow) def setupMainWindow(self, MainWindow): MainWindow.setObjectName(_fromUtf8("MainWindow")) MainWindow.setWindowModality(QtCore.Qt.NonModal) MainWindow.setGeometry(150, 100, 564, 635) MainWindow.setMouseTracking(False) MainWindow.setFocusPolicy(QtCore.Qt.ClickFocus) MainWindow.setAutoFillBackground(False) MainWindow.setTabShape(QtGui.QTabWidget.Rounded) def setupMenuBar(self, MainWindow): self.actionExit = QtGui.QAction(MainWindow) self.actionExit.setObjectName(_fromUtf8("actionExit")) self.actionExit.setShortcut('Ctrl+Q') self.actionExit.setStatusTip('Exit application') self.actionExit.triggered.connect(QtGui.qApp.quit) self.actionCredits = QtGui.QAction(MainWindow) self.actionCredits.setObjectName(_fromUtf8("actionCredits")) self.actionCredits.setShortcut('Ctrl+I') self.actionCredits.setStatusTip('Display credits') self.actionCredits.triggered.connect(self.showCredits) self.actionHelp = QtGui.QAction(MainWindow) self.actionHelp.setObjectName(_fromUtf8("actionHelp")) self.actionHelp.setShortcut('Ctrl+H') self.actionHelp.setStatusTip('Help and documentation') self.actionHelp.triggered.connect(self.showDocumentation) self.menubar = QtGui.QMenuBar(MainWindow) self.fileMenu = self.menubar.addMenu('&File') self.fileMenu.addAction(self.actionExit) self.fileMenu = self.menubar.addMenu('&Credits') self.fileMenu.addAction(self.actionCredits) self.fileMenu = self.menubar.addMenu('&Help') self.fileMenu.addAction(self.actionHelp) self.menubar.setGeometry(QtCore.QRect(0, 0, 542, 23)) 
self.menubar.setDefaultUp(False) self.menubar.setNativeMenuBar(False) self.menubar.setObjectName(_fromUtf8("menubar")) MainWindow.setMenuBar(self.menubar) def setupStatusBar(self, MainWindow): self.statusbar = QtGui.QStatusBar(MainWindow) self.statusbar.showMessage( "Ready - Please select a dataset to process") self.statusbar.setObjectName(_fromUtf8("statusbar")) MainWindow.setStatusBar(self.statusbar) def showCredits(self, MainWindow): QtGui.QMessageBox.information(self, "Credits", "Author: Gregory Giecold\nAffiliation: Harvard " "University & DFCI\nContact: [email protected]\n" "GitHub: https://github.com/GGiecold\n") def showDocumentation(self, MainWindow): url = QtCore.QUrl("https://github.com/GGiecold/PySCUBA") if not QtGui.QDesktopServices.openUrl(url): QtGui.QMessageBox.warning(self, 'Help & Documentation', 'Could not open url to online documentation!') def adornGridLayout(self, MainWindow): self.datasetHorizontalLayout = QtGui.QHBoxLayout() self.datasetHorizontalLayout.setObjectName( _fromUtf8("datasetHorizontalLayout")) self.gridLayout.addLayout(self.datasetHorizontalLayout, 0, 0, 1, 1) self.datasetHorizontalLayout.addStretch(1) self.selectDatasetButton = QtGui.QPushButton(self.centralwidget) self.selectDatasetButton.setToolTip("Click this button to browse " "through\nyour computer's directories and select\na dataset " "to subject to a clustering\nand bifurcation analysis.") self.selectDatasetButton.setObjectName( _fromUtf8("selectDatasetButton")) self.datasetHorizontalLayout.addWidget(self.selectDatasetButton) self.datasetHorizontalLayout.addStretch(1) self.withinGridVerticalLayout_1 = QtGui.QVBoxLayout() self.withinGridVerticalLayout_1.setObjectName( _fromUtf8("withinGridVerticalLayout_1")) self.gridLayout.addLayout(self.withinGridVerticalLayout_1, 1, 0, 1, 1) self.dataTypeLabel = QtGui.QLabel(self.centralwidget) self.dataTypeLabel.setFrameShadow(QtGui.QFrame.Raised) self.dataTypeLabel.setLineWidth(5) 
self.dataTypeLabel.setAlignment(QtCore.Qt.AlignCenter) self.dataTypeLabel.setObjectName(_fromUtf8("dataTypeLabel")) self.withinGridVerticalLayout_1.addWidget(self.dataTypeLabel) self.dataTypeComboBox = QtGui.QComboBox(self.centralwidget) self.dataTypeComboBox.setToolTip("Does the file to process qualify " "as qPCR, RNAseq\nor flow or mass cytometry data?\nThe latter " "is expected to be in *.fcs format,\nwhereas the first two " "types should be delivered\nas rows of tab-separated entries.") self.dataTypeComboBox.setEditable(True) self.dataTypeComboBox.setObjectName(_fromUtf8("dataTypeComboBox")) self.dataTypeComboBox.addItem(_fromUtf8("")) self.dataTypeComboBox.addItem(_fromUtf8("")) self.dataTypeComboBox.addItem(_fromUtf8("")) self.withinGridVerticalLayout_1.addWidget(self.dataTypeComboBox) self.withinGridVerticalLayout_2 = QtGui.QVBoxLayout() self.withinGridVerticalLayout_2.setObjectName( _fromUtf8("withinGridVerticalLayout_2")) self.gridLayout.addLayout(self.withinGridVerticalLayout_2, 2, 0, 1, 1) self.clusterModeLabel = QtGui.QLabel(self.centralwidget) self.clusterModeLabel.setFrameShadow(QtGui.QFrame.Raised) self.clusterModeLabel.setLineWidth(5) self.clusterModeLabel.setAlignment(QtCore.Qt.AlignCenter) self.clusterModeLabel.setObjectName(_fromUtf8("clusterModeLabel")) self.withinGridVerticalLayout_2.addWidget(self.clusterModeLabel) self.clusterModeComboBox = QtGui.QComboBox(self.centralwidget) self.clusterModeComboBox.setToolTip("For each timestamp binned to " "a particular stage, PySCUBA proceeds\nto several rounds of " "aggregating the corresponding samples\ninto an optimal number " "of clusters.\nBy selecting 'PCA' from this menu, the clustering " "will be based on\na reduction of the original dataset to its " "first few principal components.\nA choice of 'PCA2' specifies " "that the principal components analysis\nwill be based on " "the samples that are part of the final stage\nof the " "temporal ordering.") self.clusterModeComboBox.setEditable(True) 
self.clusterModeComboBox.setObjectName( _fromUtf8("clusterModeComboBox")) self.clusterModeComboBox.addItem(_fromUtf8("")) self.clusterModeComboBox.addItem(_fromUtf8("")) self.clusterModeComboBox.addItem(_fromUtf8("")) self.withinGridVerticalLayout_2.addWidget(self.clusterModeComboBox) self.logCheckBox = QtGui.QCheckBox(self.centralwidget) self.logCheckBox.setChecked(True) self.logCheckBox.setObjectName(_fromUtf8("logCheckBox")) self.gridLayout.addWidget(self.logCheckBox, 0, 1, 1, 1) self.pseudotimeCheckBox = QtGui.QCheckBox(self.centralwidget) self.pseudotimeCheckBox.setToolTip("If your data is not endowed with " "temporal information of any kind, please\ndo check this box. " "PySCUBA will thereby run a principal curve analysis\nto infer a " "temporal ordering for each sample of your dataset.") self.pseudotimeCheckBox.setChecked(True) self.pseudotimeCheckBox.setObjectName(_fromUtf8("pseudotimeCheckBox")) self.gridLayout.addWidget(self.pseudotimeCheckBox, 1, 1, 1, 1) def adornCancelOkLayout(self, MainWindow): self.cancelOkLayout.addStretch(1) self.cancelButton = QtGui.QPushButton(self.centralwidget) self.cancelButton.setObjectName(_fromUtf8("cancelButton")) self.cancelOkLayout.addWidget(self.cancelButton) self.okButton = QtGui.QPushButton(self.centralwidget) self.okButton.setToolTip("Click this button to browse " "through\nyour computer's directories and select\na dataset " "to subject to a clustering\nand bifurcation analysis.") self.okButton.setObjectName(_fromUtf8("okButton")) self.cancelOkLayout.addWidget(self.okButton) self.cancelOkLayout.addStretch(1) def adornGraphicsVerticalLayout(self, MainWindow): self.scene = QtGui.QGraphicsScene(self.centralwidget) self.graphicsView = QtGui.QGraphicsView(self.scene) self.graphicsView.setFrameShadow(QtGui.QFrame.Raised) self.graphicsView.setLineWidth(3) self.graphicsView.setVerticalScrollBarPolicy( QtCore.Qt.ScrollBarAlwaysOn) self.graphicsView.setHorizontalScrollBarPolicy( QtCore.Qt.ScrollBarAlwaysOn) 
self.graphicsView.setTransformationAnchor( QtGui.QGraphicsView.AnchorUnderMouse) self.graphicsView.setResizeAnchor( QtGui.QGraphicsView.AnchorUnderMouse) self.graphicsView.setBackgroundBrush( QtGui.QBrush(QtGui.QColor(245,245,245))) self.graphicsView.setFrameShape(QtGui.QFrame.NoFrame) self.graphicsView.setObjectName(_fromUtf8("graphicsView")) self.graphicsVerticalLayout.addWidget(self.graphicsView) self.displayHorizontalLayout = QtGui.QHBoxLayout() self.displayHorizontalLayout.setObjectName( _fromUtf8("displayHorizontalLayout")) self.graphicsVerticalLayout.addLayout(self.displayHorizontalLayout) self.displayHorizontalLayout.addStretch(1) self.displayFileButton = QtGui.QPushButton(self.centralwidget) self.displayFileButton.setToolTip("Various files and figures will " "show up in this box as they are\nbeing produced by the PySCUBA " "analysis of your data.\nClick on any of those and it will be " "displayed in an adjacent\ngraphics box.") self.displayFileButton.setObjectName(_fromUtf8("displayFileButton")) self.displayHorizontalLayout.addWidget(self.displayFileButton) self.displayHorizontalLayout.addStretch(1) def retranslateUi(self, MainWindow): MainWindow.setWindowTitle(_translate("MainWindow", "PySCUBA - GC Yuan Lab", None)) self.selectDatasetButton.setText(_translate("MainWindow", "1. Select dataset to analyze", None)) self.dataTypeLabel.setText(_translate("MainWindow", "2. Specify type of data:", None)) self.dataTypeComboBox.setItemText(0, _translate("MainWindow", "RNASeq", None)) self.dataTypeComboBox.setItemText(1, _translate("MainWindow", "PCR", None)) self.dataTypeComboBox.setItemText(2, _translate("MainWindow", "cytometry", None)) self.clusterModeLabel.setText(_translate("MainWindow", "3. Choose cluster mode:", None)) self.clusterModeComboBox.setItemText(0, _translate("MainWindow", "None", None)) self.clusterModeComboBox.setItemText(1, _translate("MainWindow",<|fim▁hole|> self.logCheckBox.setText(_translate("MainWindow", "4. 
Apply a log-transform?", None)) self.pseudotimeCheckBox.setText(_translate("MainWindow", "5. Infer temporal ordering?", None)) self.cancelButton.setText(_translate("MainWindow", "Cancel", None)) self.okButton.setText(_translate("MainWindow", "Ok", None)) self.displayFileButton.setText(_translate("MainWindow", "Select file to display", None)) self.actionExit.setText(_translate("MainWindow", "Exit", None)) if __name__ == "__main__": import sys app = QtGui.QApplication(sys.argv) MainWindow = QtGui.QMainWindow() ui = Ui_MainWindow() ui.setupUi(MainWindow) MainWindow.show() sys.exit(app.exec_())<|fim▁end|>
"PCA", None)) self.clusterModeComboBox.setItemText(2, _translate("MainWindow", "PCA2", None))
<|file_name|>dag.py<|end_file_name|><|fim▁begin|># Copyright 2015 Palo Alto Networks, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import import logging import yaml import netaddr import os import re import collections import itertools import shutil import gevent import gevent.queue import gevent.event import pan.xapi from . import base from . import actorbase from . import table from .utils import utc_millisec LOG = logging.getLogger(__name__) SUBRE = re.compile("[^A-Za-z0-9_]") class DevicePusher(gevent.Greenlet): def __init__(self, device, prefix, watermark, attributes, persistent): super(DevicePusher, self).__init__() self.device = device self.xapi = pan.xapi.PanXapi( tag=self.device.get('tag', None), api_username=self.device.get('api_username', None), api_password=self.device.get('api_password', None), api_key=self.device.get('api_key', None), port=self.device.get('port', None), hostname=self.device.get('hostname', None), serial=self.device.get('serial', None) ) self.prefix = prefix self.attributes = attributes self.watermark = watermark self.persistent = persistent self.q = gevent.queue.Queue() def put(self, op, address, value): LOG.debug('adding %s:%s to device queue', op, address) self.q.put([op, address, value]) def _get_registered_ip_tags(self, ip): self.xapi.op( cmd='<show><object><registered-ip><ip>%s</ip></registered-ip></object></show>' % ip, vsys=self.device.get('vsys', None), cmd_xml=False ) entries = 
self.xapi.element_root.findall('./result/entry') if entries is None or len(entries) == 0: LOG.warning('%s: ip %s has no tags', self.device.get('hostname', None), ip) return None tags = [member.text for member in entries[0].findall('./tag/member') if member.text and member.text.startswith(self.prefix)] return tags def _get_all_registered_ips(self): cmd = ( '<show><object><registered-ip><tag><entry name="%s%s"/></tag></registered-ip></object></show>' % (self.prefix, self.watermark) ) self.xapi.op( cmd=cmd, vsys=self.device.get('vsys', None), cmd_xml=False ) entries = self.xapi.element_root.findall('./result/entry') if not entries: return for entry in entries: ip = entry.get("ip") yield ip, self._get_registered_ip_tags(ip) def _dag_message(self, type_, addresses): message = [ "<uid-message>", "<version>1.0</version>", "<type>update</type>", "<payload>" ] persistent = '' if type_ == 'register': persistent = ' persistent="%d"' % (1 if self.persistent else 0) message.append('<%s>' % type_) if addresses is not None and len(addresses) != 0: akeys = sorted(addresses.keys()) for a in akeys: message.append( '<entry ip="%s"%s>' % (a, persistent) ) tags = sorted(addresses[a]) if tags is not None: message.append('<tag>') for t in tags: message.append('<member>%s</member>' % t) message.append('</tag>') message.append('</entry>') message.append('</%s>' % type_) message.append('</payload></uid-message>') return ''.join(message) def _user_id(self, cmd=None): try: self.xapi.user_id(cmd=cmd, vsys=self.device.get('vsys', None)) except gevent.GreenletExit: raise except pan.xapi.PanXapiError as e: LOG.debug('%s', e) if 'already exists, ignore' in str(e): pass elif 'does not exist, ignore unreg' in str(e): pass elif 'Failed to register' in str(e): pass else: LOG.exception('XAPI exception in pusher for device %s: %s', self.device.get('hostname', None), str(e)) raise def _tags_from_value(self, value): result = [] def _tag(t, v): if type(v) == unicode: v = v.encode('ascii', 'replace') else: 
v = str(v) v = SUBRE.sub('_', v) tag = '%s%s_%s' % (self.prefix, t, v) return tag for t in self.attributes: if t in value: if t == 'confidence': confidence = value[t] if confidence < 50: tag = '%s%s_low' % (self.prefix, t) elif confidence < 75: tag = '%s%s_medium' % (self.prefix, t) else: tag = '%s%s_high' % (self.prefix, t) result.append(tag) else: LOG.debug('%s %s %s', t, value[t], type(value[t])) if isinstance(value[t], list): for v in value[t]: LOG.debug('%s', v) result.append(_tag(t, v)) else: result.append(_tag(t, value[t])) else: # XXX noop for this case? result.append('%s%s_unknown' % (self.prefix, t)) LOG.debug('%s', result) return set(result) # XXX eliminate duplicates def _push(self, op, address, value): tags = [] tags.append('%s%s' % (self.prefix, self.watermark)) tags += self._tags_from_value(value) if len(tags) == 0: tags = None msg = self._dag_message(op, {address: tags}) self._user_id(cmd=msg)<|fim▁hole|> def _init_resync(self): ctags = collections.defaultdict(set) while True: op, address, value = self.q.get() if op == 'EOI': break if op != 'init': raise RuntimeError( 'DevicePusher %s - wrong op %s received in init phase' % (self.device.get('hostname', None), op) ) ctags[address].add('%s%s' % (self.prefix, self.watermark)) for t in self._tags_from_value(value): ctags[address].add(t) LOG.debug('%s', ctags) register = collections.defaultdict(list) unregister = collections.defaultdict(list) for a, atags in self._get_all_registered_ips(): regtags = set() if atags is not None: for t in atags: regtags.add(t) added = ctags[a] - regtags removed = regtags - ctags[a] for t in added: register[a].append(t) for t in removed: unregister[a].append(t) ctags.pop(a) # ips not in firewall for a, atags in ctags.iteritems(): register[a] = atags LOG.debug('register %s', register) LOG.debug('unregister %s', unregister) # XXX use constant for chunk size if len(register) != 0: addrs = iter(register) for i in xrange(0, len(register), 1000): rmsg = self._dag_message( 
'register', {k: register[k] for k in itertools.islice(addrs, 1000)} ) self._user_id(cmd=rmsg) if len(unregister) != 0: addrs = iter(unregister) for i in xrange(0, len(unregister), 1000): urmsg = self._dag_message( 'unregister', {k: unregister[k] for k in itertools.islice(addrs, 1000)} ) self._user_id(cmd=urmsg) def _run(self): self._init_resync() while True: try: op, address, value = self.q.peek() self._push(op, address, value) self.q.get() # discard processed message except gevent.GreenletExit: break except pan.xapi.PanXapiError as e: LOG.exception('XAPI exception in pusher for device %s: %s', self.device.get('hostname', None), str(e)) raise class DagPusher(actorbase.ActorBaseFT): def __init__(self, name, chassis, config): self.devices = [] self.device_pushers = [] self.device_list_glet = None self.device_list_mtime = None self.ageout_glet = None self.last_ageout_run = None self.hup_event = gevent.event.Event() super(DagPusher, self).__init__(name, chassis, config) def configure(self): super(DagPusher, self).configure() self.device_list_path = self.config.get('device_list', None) if self.device_list_path is None: self.device_list_path = os.path.join( os.environ['MM_CONFIG_DIR'], '%s_device_list.yml' % self.name ) self.age_out = self.config.get('age_out', 3600) self.age_out_interval = self.config.get('age_out_interval', None) self.tag_prefix = self.config.get('tag_prefix', 'mmld_') self.tag_watermark = self.config.get('tag_watermark', 'pushed') self.tag_attributes = self.config.get( 'tag_attributes', ['confidence', 'direction'] ) self.persistent_registered_ips = self.config.get( 'persistent_registered_ips', True ) def _initialize_table(self, truncate=False): self.table = table.Table(self.name, truncate=truncate) self.table.create_index('_age_out') def initialize(self): self._initialize_table() def rebuild(self): self.rebuild_flag = True self._initialize_table(truncate=True) def reset(self): self._initialize_table(truncate=True) def _validate_ip(self, indicator, 
value): type_ = value.get('type', None) if type_ not in ['IPv4', 'IPv6']: LOG.error('%s - invalid indicator type, ignored: %s', self.name, type_) self.statistics['ignored'] += 1 return if '-' in indicator: i1, i2 = indicator.split('-', 1) if i1 != i2: LOG.error('%s - indicator range must be equal, ignored: %s', self.name, indicator) self.statistics['ignored'] += 1 return indicator = i1 try: address = netaddr.IPNetwork(indicator) except netaddr.core.AddrFormatError as e: LOG.error('%s - invalid IP address received, ignored: %s', self.name, e) self.statistics['ignored'] += 1 return if address.size != 1: LOG.error('%s - IP network received, ignored: %s', self.name, address) self.statistics['ignored'] += 1 return if type_ == 'IPv4' and address.version != 4 or \ type_ == 'IPv6' and address.version != 6: LOG.error('%s - IP version mismatch, ignored', self.name) self.statistics['ignored'] += 1 return return address @base._counting('update.processed') def filtered_update(self, source=None, indicator=None, value=None): address = self._validate_ip(indicator, value) if address is None: return current_value = self.table.get(str(address)) now = utc_millisec() age_out = now+self.age_out*1000 value['_age_out'] = age_out self.statistics['added'] += 1 self.table.put(str(address), value) LOG.debug('%s - #indicators: %d', self.name, self.length()) value.pop('_age_out') uflag = False if current_value is not None: for t in self.tag_attributes: cv = current_value.get(t, None) nv = value.get(t, None) if isinstance(cv, list) or isinstance(nv, list): uflag |= set(cv) != set(nv) else: uflag |= cv != nv LOG.debug('uflag %s current %s new %s', uflag, current_value, value) for p in self.device_pushers: if uflag: p.put('unregister', str(address), current_value) p.put('register', str(address), value) @base._counting('withdraw.processed') def filtered_withdraw(self, source=None, indicator=None, value=None): address = self._validate_ip(indicator, value) if address is None: return current_value = 
self.table.get(str(address)) if current_value is None: LOG.warning('%s - unknown indicator received, ignored: %s', self.name, address) self.statistics['ignored'] += 1 return current_value.pop('_age_out', None) self.statistics['removed'] += 1 self.table.delete(str(address)) LOG.debug('%s - #indicators: %d', self.name, self.length()) for p in self.device_pushers: p.put('unregister', str(address), current_value) def _age_out_run(self): while True: try: now = utc_millisec() LOG.debug('now: %s', now) for i, v in self.table.query(index='_age_out', to_key=now-1, include_value=True): LOG.debug('%s - %s %s aged out', self.name, i, v) for dp in self.device_pushers: dp.put( op='unregister', address=i, value=v ) self.statistics['aged_out'] += 1 self.table.delete(i) self.last_ageout_run = now LOG.debug('%s - #indicators: %d', self.name, self.length()) except gevent.GreenletExit: break except Exception: LOG.exception('Exception in _age_out_loop') try: gevent.sleep(self.age_out_interval) except gevent.GreenletExit: break def _spawn_device_pusher(self, device): dp = DevicePusher( device, self.tag_prefix, self.tag_watermark, self.tag_attributes, self.persistent_registered_ips ) dp.link_exception(self._device_pusher_died) for i, v in self.table.query(include_value=True): LOG.debug('%s - addding %s to init', self.name, i) dp.put('init', i, v) dp.put('EOI', None, None) return dp def _device_pusher_died(self, g): try: g.get() except gevent.GreenletExit: pass except Exception: LOG.exception('%s - exception in greenlet for %s, ' 'respawning in 60 seconds', self.name, g.device['hostname']) for idx in range(len(self.device_pushers)): if self.device_pushers[idx].device == g.device: break else: LOG.info('%s - device pusher for %s removed,' + ' respawning aborted', self.name, g.device['hostname']) g = None return dp = self._spawn_device_pusher(g.device) self.device_pushers[idx] = dp dp.start_later(60) def _load_device_list(self): with open(self.device_list_path, 'r') as dlf: dlist = 
yaml.safe_load(dlf) added = [d for i, d in enumerate(dlist) if d not in self.devices] removed = [i for i, d in enumerate(self.devices) if d not in dlist] dpushers = [] for d in dlist: if d in added: dp = self._spawn_device_pusher(d) dpushers.append(dp) else: idx = self.devices.index(d) dpushers.append(self.device_pushers[idx]) for idx in removed: self.device_pushers[idx].kill() self.device_pushers = dpushers self.devices = dlist for g in self.device_pushers: if g.value is None and not g.started: g.start() def _huppable_wait(self, wait_time): hup_called = self.hup_event.wait(timeout=wait_time) if hup_called: LOG.debug('%s - clearing poll event', self.name) self.hup_event.clear() def _device_list_monitor(self): if self.device_list_path is None: LOG.warning('%s - no device_list path configured', self.name) return while True: try: mtime = os.stat(self.device_list_path).st_mtime except OSError: LOG.debug('%s - error checking mtime of %s', self.name, self.device_list_path) self._huppable_wait(5) continue if mtime != self.device_list_mtime: self.device_list_mtime = mtime try: self._load_device_list() LOG.info('%s - device list loaded', self.name) except Exception: LOG.exception('%s - exception loading device list', self.name) self._huppable_wait(5) def mgmtbus_status(self): result = super(DagPusher, self).mgmtbus_status() result['devices'] = len(self.devices) return result def length(self, source=None): return self.table.num_indicators def start(self): super(DagPusher, self).start() if self.device_list_glet is not None: return self.device_list_glet = gevent.spawn_later( 2, self._device_list_monitor ) if self.age_out_interval is not None: self.ageout_glet = gevent.spawn(self._age_out_run) def stop(self): super(DagPusher, self).stop() if self.device_list_glet is None: return for g in self.device_pushers: g.kill() self.device_list_glet.kill() if self.ageout_glet is not None: self.ageout_glet.kill() self.table.close() def hup(self, source=None): LOG.info('%s - hup received, 
reload device list', self.name) self.hup_event.set() @staticmethod def gc(name, config=None): actorbase.ActorBaseFT.gc(name, config=config) shutil.rmtree(name, ignore_errors=True) device_list_path = None if config is not None: device_list_path = config.get('device_list', None) if device_list_path is None: device_list_path = os.path.join( os.environ['MM_CONFIG_DIR'], '{}_device_list.yml'.format(name) ) try: os.remove(device_list_path) except OSError: pass<|fim▁end|>
<|file_name|>alloc-file.js<|end_file_name|><|fim▁begin|>import { Model, belongsTo } from 'ember-cli-mirage'; export default Model.extend({<|fim▁hole|>});<|fim▁end|>
parent: belongsTo('alloc-file'),
<|file_name|>test_dummy.py<|end_file_name|><|fim▁begin|># coding=utf-8 """Dummy test. Pointless dummy test. """ from __future__ import absolute_import from __future__ import print_function from __future__ import division # import pysnapsync.server<|fim▁hole|> def inc(arg): """Return arg incremented by one.""" return arg + 1 def test_answer(): """Assert 3+1 == 4.""" assert inc(3) == 4<|fim▁end|>
# import pysnapsync.client
<|file_name|>settings.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|># For simplicity, this file contains only the most important settings by # default. All the other settings are documented here: # # http://doc.scrapy.org/en/latest/topics/settings.html # BOT_NAME = 'appstore' SPIDER_MODULES = ['appstore.spiders'] NEWSPIDER_MODULE = 'appstore.spiders' ITEM_PIPELINES = { 'appstore.pipelines.AppstorePipeline': 300, } DOWNLOAD_DELAY=5 # Crawl responsibly by identifying yourself (and your website) on the user-agent #USER_AGENT = 'appstore (+http://www.yourdomain.com)'<|fim▁end|>
# Scrapy settings for appstore project #
<|file_name|>Exp6_LineFollowing_IRSensors.py<|end_file_name|><|fim▁begin|>"""//*********************************************************************** * Exp6_LineFollowing_IRSensors -- RedBot Experiment 6 * * This code reads the three line following sensors on A3, A6, and A7 * and prints them out to the Serial Monitor. Upload this example to your * RedBot and open up the Serial Monitor by clicking the magnifying glass * in the upper-right hand corner. * * This sketch was written by SparkFun Electronics,with lots of help from * the Arduino community. This code is completely free for any use. * * 8 Oct 2013 M. Hord * Revised, 31 Oct 2014 B. Huang * Revices, 2 Oct 2015 L Mathews ***********************************************************************/""" <|fim▁hole|>import sys import signal from pymata_aio.pymata3 import PyMata3 from library.redbot import RedBotSensor WIFLY_IP_ADDRESS = None # Leave set as None if not using WiFly WIFLY_IP_ADDRESS = "10.0.1.18" # If using a WiFly on the RedBot, set the ip address here. if WIFLY_IP_ADDRESS: board = PyMata3(ip_address=WIFLY_IP_ADDRESS) else: # Use a USB cable to RedBot or an XBee connection instead of WiFly. COM_PORT = None # Use None for automatic com port detection, or set if needed i.e. 
"COM7" board = PyMata3(com_port=COM_PORT) LEFT_LINE_FOLLOWER = 3 # pin number assignments for each IR sensor CENTRE_LINE_FOLLOWER = 6 RIGHT_LINE_FOLLOWER = 7 IR_sensor_1 = RedBotSensor(board, LEFT_LINE_FOLLOWER) IR_sensor_2 = RedBotSensor(board, CENTRE_LINE_FOLLOWER) IR_sensor_3 = RedBotSensor(board, RIGHT_LINE_FOLLOWER) def signal_handler(sig, frame): """Helper method to shutdown the RedBot if Ctrl-c is pressed""" print('\nYou pressed Ctrl+C') if board is not None: board.send_reset() board.shutdown() sys.exit(0) def setup(): signal.signal(signal.SIGINT, signal_handler) print("Welcome to Experiment 6!") print("------------------------") def loop(): board.sleep(0.1) print("IR Sensor Readings: {}, {}, {}".format(IR_sensor_1.read(), IR_sensor_2.read(), IR_sensor_3.read())) if __name__ == "__main__": setup() while True: loop()<|fim▁end|>
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var gulp = require('gulp'); var gutil = require('gulp-util'); var bower = require('bower'); var concat = require('gulp-concat'); var sass = require('gulp-sass'); var minifyCss = require('gulp-minify'); var rename = require('gulp-rename'); var sh = require('shelljs'); var minify = require('gulp-minify'); var argv = require('yargs').argv; var mid = 0; for (arg in argv){ if (argv[arg] === true){ mid = parseInt(arg,16); // yeah, counting to f ;) } } var libs= [ "./www/lib/ionic/release/js/ionic.bundle.js", "./www/lib/moment/min/moment.min.js","./www/lib/angular-moment/angular-moment.min.js","./www/lib/moment/locale/fr.js","./www/lib/moment-timezone/builds/moment-timezone-with-data-2010-2020.min.js","./www/lib/openfb/openfb.js", "./www/lib/ngstorage/ngStorage.min.js","./www/lib/leaflet/dist/leaflet.js","./www/lib/angular-simple-logger/dist/angular-simple-logger.js","./www/lib/angular-leaflet-directive/dist/angular-leaflet-directive.min.js","./www/lib/ngCordova/dist/ng-cordova.min.js","./www/cordova.js" //, /* BUGGY */ "./www/lib/ng-walkthrough/ng-walkthrough.js" ] var paths = { sass: ['./scss/**/*.scss'], js: [ './www/js/*.js','./www/WhatTheFood/shared/*.js','./www/WhatTheFood/shared/**/*.js','./www/WhatTheFood/components/**/*.js' ], lib: libs, lib1: libs.slice(0,mid), mid : libs[mid], lib2: libs.slice(mid + 1) }; /* gulp.task('watch', function() { gulp.watch(paths.js, ['concat']); }); */ gulp.task('dbg', ['concat-lib1','concat-lib2','concat-mid']); gulp.task('concat-lib1',function(){ return gulp.src(paths.lib1) .pipe(concat('lib1-bundle.js')) .pipe(gulp.dest('./www/dist/')); }) gulp.task('concat-lib2',function(){ return gulp.src(paths.lib2)<|fim▁hole|>gulp.task('concat-mid',function(){ return gulp.src(paths.mid) .pipe(concat('mid-bundle.js')) .pipe(gulp.dest('./www/dist/')); }) gulp.task('concat', ['concat-src','concat-lib']); gulp.task('concat-all', ['minify'],function(){ return 
gulp.src(['./www/dist/lib-bundle-min.js','./www/dist/app-bundle-min.js']) .pipe(concat('bundle-min.js')) .pipe(gulp.dest('./www/dist/')); }) gulp.task('concat-src', function() { return gulp.src(paths.js) .pipe(concat('app-bundle.js')) .pipe(gulp.dest('./www/dist/')); }); gulp.task('concat-lib', function() { return gulp.src(paths.lib) .pipe(concat('lib-bundle.js')) .pipe(gulp.dest('./www/dist/')); }); gulp.task('minify', ['concat'],function() { return gulp.src(['./www/dist/app-bundle.js','./www/dist/lib-bundle.js']) .pipe(minify({mangle:false})) .pipe(gulp.dest('./www/dist')); }); gulp.task('sass', function(done) { gulp.src('./scss/ionic.app.scss') .pipe(sass()) .pipe(gulp.dest('./www/css/')) .pipe(minifyCss({ keepSpecialComments: 0 })) .pipe(rename({ extname: '.min.css' })) .pipe(gulp.dest('./www/css/')) .on('end', done); }); gulp.task('watch', function() { gulp.watch(paths.sass, ['sass']); }); gulp.task('install', ['git-check'], function() { return bower.commands.install() .on('log', function(data) { gutil.log('bower', gutil.colors.cyan(data.id), data.message); }); }); gulp.task('git-check', function(done) { if (!sh.which('git')) { console.log( ' ' + gutil.colors.red('Git is not installed.'), '\n Git, the version control system, is required to download Ionic.', '\n Download git here:', gutil.colors.cyan('http://git-scm.com/downloads') + '.', '\n Once git is installed, run \'' + gutil.colors.cyan('gulp install') + '\' again.' ); process.exit(1); } done(); }); gulp.task('concat-and-minify', ['concat','minify']); gulp.task('default', ['concat-and-minify']);<|fim▁end|>
.pipe(concat('lib2-bundle.js')) .pipe(gulp.dest('./www/dist/')); })
<|file_name|>TAToolsHandler.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # coding=utf-8 # # Copyright (c) 2013-2015 First Flamingo Enterprise B.V. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # TAToolsHandler.py # firstflamingo/treinenaapje # # Created by Berend Schotanus on 06-Feb-13. # from google.appengine.ext import db from google.appengine.api import memcache import webapp2 import math, logging from datetime import datetime, timedelta from ffe import markup from ffe.ffe_time import mark_cet, utc_from_cet, minutes_from_string, string_from_minutes from TASeries import TASeries from TAMission import TAMission, round_mission_offset from TAStop import TAStop, StopStatuses from TAScheduledPoint import Direction MENU_LIST = (('Home', '/console'), ('Series', '/console/series?page=1'), ('Missies zonder serie', '/console/missions?kind=orphans&page=1'), ('Stations', '/console/stations?page=1'), ('Rapportage', '/console/report')) FIRST_HALF = 0 SECND_HALF = 1 ORD_LABEL = ['Eerste', 'Tweede'] # URL Handlers class RepatternHandler(webapp2.RequestHandler): def get(self): series =TASeries.get(self.request.get('series')) self.results = [{}, {}] self.analyzeStops() self.doc = ToolsDocument('Patroontijden voor serie %s' % series.name) form = markup.form('/tools/repattern', 'post') form.add(markup.input('hidden', 'series', self.request.get('series'))) form.add(markup.heading(2, 'Heenrichting')) form.add(self.patternTimeTable(series, Direction.up)) form.add(markup.heading(2, 
'Terugrichting')) form.add(self.patternTimeTable(series, Direction.down)) form.add(markup.input('submit', value='pas aan')) self.doc.main.add(form) self.response.out.write(self.doc.write()) def post(self): series =TASeries.get(self.request.get('series')) self.doc = ToolsDocument('Aangepaste tijden voor serie %s' % series.name) processedObjects = [] processedPoints = {} table = self.doc.add_table('changestable', ['Station', 'A', 'V', 'A', 'V', '']) for index in range(len(series.points)): point = series.points[index] oldTimes = point.scheduled_times upArrival = self.request.get('arr_%d_%d' % (Direction.up, index)) upDeparture = self.request.get('dep_%d_%d' % (Direction.up, index)) downArrival = self.request.get('arr_%d_%d' % (Direction.down, index)) downDeparture = self.request.get('dep_%d_%d' % (Direction.down, index)) newTimes = (minutes_from_string(upArrival), minutes_from_string(upDeparture), minutes_from_string(downArrival), minutes_from_string(downDeparture)) row = table.add_row() row.add_to_cell(0, point.stationName) row.add_to_cell(1, upArrival) row.add_to_cell(2, upDeparture) row.add_to_cell(3, downArrival) row.add_to_cell(4, downDeparture) if oldTimes != newTimes: point.scheduled_times = newTimes processedPoints[point.id] = point processedObjects.append(point) row.add_to_cell(5, 'aangepast') series.cache_set() memcache.set_multi(processedPoints, namespace='TAScheduledPoint') db.put(processedObjects) self.response.out.write(self.doc.write()) def patternTimeTable(self, series, direction): table = markup.HTMLTable('timetable_%d' % direction, ['Station', 'A', 'V', 'meting', '#', 'delta', 'A', 'V']) indexes = range(len(series.points)) if direction == Direction.down: indexes.reverse() for index in indexes: point = series.points[index] station = point.station planArrival, planDeparture = point.times_in_direction(direction) row = table.add_row() row.add_to_cell(0, station.name) row.add_to_cell(1, string_from_minutes(planArrival)) row.add_to_cell(2, 
string_from_minutes(planDeparture)) stationDict = self.results[direction].get(station.id, None) if stationDict == None: departure, count = ('-', '-') delta = 0 else: departure, count = mostCommonItem(stationDict['v']) delta = departure - planDeparture departure = string_from_minutes(departure) row.add_to_cell(3, departure) row.add_to_cell(4, count) row.add_to_cell(5, delta) row.add_to_cell(6, markup.input('text', 'arr_%d_%d' % (direction, index), string_from_minutes(planArrival + delta), size=4)) row.add_to_cell(7, markup.input('text', 'dep_%d_%d' % (direction, index), string_from_minutes(planDeparture + delta), size=4)) return table def analyzeStops(self): series_id = self.request.get('series') query = db.Query(TAArchivedMission).filter('series_id =', series_id) for mission in query.fetch(50): if mission.up: direction = Direction.up else: direction = Direction.down for stop in mission.stopsList: stopKey = stop.station_id if stop.status == StopStatuses.planned: departureHist = self.histogram(direction, stopKey, 'v') difference = utc_from_cet(stop.departure) - correctedOffsetUTC(mission) self.addDataToHistogram(departureHist, difference.seconds // 60) delayHist = self.histogram(direction, stopKey, 'dv') self.addDataToHistogram(delayHist, int(stop.delay_dep)) platformHist = self.histogram(direction, stopKey, 'p') self.addDataToHistogram(platformHist, stop.platform) def stopDictionary(self, direction, stopKey): dictionary = self.results[direction].get(stopKey, None) if dictionary == None: dictionary = dict() self.results[direction][stopKey] = dictionary return dictionary def histogram(self, direction, stopKey, dataKey): stopDictionary = self.stopDictionary(direction, stopKey) dictionary = stopDictionary.get(dataKey, None) if dictionary == None: dictionary = dict() stopDictionary[dataKey] = dictionary return dictionary def addDataToHistogram(self, histogram, key): histogram[key] = histogram.get(key, 0) + 1 class ReoffsetHandler(webapp2.RequestHandler): tableTitles = 
('tijd', 'aantal', 'perc.') tableFormat = (':%02d', '%d', '%.1f%%') def get(self): series =TASeries.get(self.request.get('series')) self.doc = ToolsDocument('Herschik offsets serie %s' % series.name) self.writeReport(series) self.response.out.write(self.doc.write()) def post(self): series = TASeries.get(self.request.get('series')) self.deltaOffset = [int(self.request.get('offset_up')), int(self.request.get('offset_down'))] self.round = [int(self.request.get('round_up')), int(self.request.get('round_down'))] self.processedObjects = [] self.processedMissions = {} self.processedPoints = {} self.doc = ToolsDocument('Aangepaste offsets serie %s' % series.name) self.doc.main.add(markup.heading(2, 'Aangepaste patroontijden')) self.processPoints(series) self.doc.main.add(markup.heading(2, 'Aangepaste offsettijden')) table = self.doc.add_table('adapted_missions', ['Missie', 'Offset']) self.processMissions(series.all_mission_ids(Direction.up), Direction.up, table) self.processMissions(series.all_mission_ids(Direction.down), Direction.down, table) series.cache_set() self.saveChanges() # self.writeReport(series) self.response.out.write(self.doc.write()) def writeReport(self, series): self.departure = [series.first_point.upDeparture, series.last_point.downDeparture] self.startStation = [series.first_point.stationName, series.last_point.stationName] self.foundOffset = [None, None] self.doc.main.add(markup.heading(2, 'Heenrichting')) self.analyzeOffset(series.all_mission_ids(Direction.up)) self.reportOffset(FIRST_HALF, Direction.up) self.reportOffset(SECND_HALF, Direction.up) self.doc.main.add(markup.heading(2, 'Terugrichting')) self.analyzeOffset(series.all_mission_ids(Direction.down)) self.reportOffset(FIRST_HALF, Direction.down) self.reportOffset(SECND_HALF, Direction.down) if self.foundOffset[Direction.up] or self.foundOffset[Direction.down]: self.doc.main.add(markup.heading(2, 'Aanpassen')) self.proposeChanges() def analyzeOffset(self, missionIDs): self.offset = [None, None] 
self.data=[[], []] firstHalfHist = dict() firstHalfItems = 0 secondHalfHist = dict() secondHalfItems = 0 for missionID in missionIDs: mission = TAMission.get(missionID) num = mission.number if bool(num % 2): num -= 1 key = mission.offset.minute if bool(num % 4): firstHalfHist[key] = firstHalfHist.get(key, 0) + 1 firstHalfItems += 1 else: secondHalfHist[key] = secondHalfHist.get(key, 0) + 1 secondHalfItems += 1 self.generateData(FIRST_HALF, firstHalfHist, firstHalfItems) self.generateData(SECND_HALF, secondHalfHist, secondHalfItems) def generateData(self, halfHour, histogram, count): maxFrequency = 0 for key, value in histogram.iteritems(): self.data[halfHour].append((int(key), value, 100.0 * value/count)) if value > maxFrequency: maxFrequency = value self.offset[halfHour] = int(key) def reportOffset(self, halfHour, direction): if self.offset[halfHour] != None: self.doc.main.add(markup.heading(3, '%s halfuur :%02d' % (ORD_LABEL[halfHour], self.offset[halfHour]))) table = self.doc.add_table('table_%d' % (2 * direction + halfHour), self.tableTitles, self.tableFormat) table.fill_data(self.data[halfHour]) departure = self.offset[halfHour] + self.departure[direction] if departure >= 60: departure -= 60 self.offset[halfHour] -= 60 self.doc.add_paragraph('Vertrek uit %s: %d + %d = :%02d' % (self.startStation[direction], self.offset[halfHour], self.departure[direction], departure)) if self.foundOffset[direction] == None or self.offset[halfHour] < self.foundOffset[direction]: self.foundOffset[direction] = self.offset[halfHour] def proposeChanges(self): table = markup.HTMLTable('submit_table', ['', 'Offset', 'Afronden']) form = markup.form('/tools/reoffset', 'post') form.add(markup.input('hidden', 'series', self.request.get('series'))) form.add(table) self.doc.main.add(form) row = table.add_row() row.add_to_cell(0,'heen') row.add_to_cell(1, markup.input('text', 'offset_up', str(self.foundOffset[Direction.up]), size=6)) row.add_to_cell(2, markup.input('text', 'round_up', '3', 
size=6)) row = table.add_row() row.add_to_cell(0,'terug') row.add_to_cell(1, markup.input('text', 'offset_down', str(self.foundOffset[Direction.down]), size=6)) row.add_to_cell(2, markup.input('text', 'round_down', '3', size=6)) row = table.add_row() row.add_to_cell(0, markup.input('submit', value='pas aan')) def processPoints(self,series): table = self.doc.add_table('adapted_schedule', ['Station', 'Heen', 'Terug']) for point in series.points: # Change arrival and departure times: oldUp, oldDown = point.times_strings point.upArrival += self.deltaOffset[Direction.up] point.upDeparture += self.deltaOffset[Direction.up] point.downArrival += self.deltaOffset[Direction.down] point.downDeparture += self.deltaOffset[Direction.down] newUp, newDown = point.times_strings # Add point to queue for saveChanges self.processedPoints[point.id] = point self.processedObjects.append(point) # Report the changes: row = table.add_row() row.add_to_cell(0, point.stationName) row.add_to_cell(1, '[%s] %s [%s]' % (oldUp, change_string(self.deltaOffset[Direction.up]), newUp)) row.add_to_cell(2, '[%s] %s [%s]' % (oldDown, change_string(self.deltaOffset[Direction.down]), newDown)) def processMissions(self, missionIDs, direction, table): if self.deltaOffset[direction]: for missionID in missionIDs: # Change mission offset time: mission = TAMission.get(missionID) oldOffset = datetime(2002, 2, 2).replace(hour=mission.offset.hour, minute=mission.offset.minute) newOffset = round_mission_offset(oldOffset - timedelta(minutes=self.deltaOffset[direction]), self.round[direction]) mission.offset = newOffset.time() # Add mission to queue for saveChanges self.processedMissions[missionID] = mission self.processedObjects.append(mission) # Report the changes: row = table.add_row() row.add_to_cell(0, missionID) row.add_to_cell(1, '%s %s %s' % (oldOffset.strftime('%H:%M'), change_string(-self.deltaOffset[direction]), newOffset.strftime('%H:%M'))) def saveChanges(self): memcache.set_multi(self.processedPoints, 
namespace='TAScheduledPoint') memcache.set_multi(self.processedMissions, namespace='TAMission') db.put(self.processedObjects) # HTML Document class ToolsDocument(markup.HTMLDocument): def __init__(self, title, language='en'): markup.HTMLDocument.__init__(self, title, language) #Stylesheet style_element = markup.link('stylesheet', '/web/style.css') style_element.set_attribute('type', 'css') style_element.set_attribute('media', 'screen') self.head.add(style_element) #Header self.header = markup.XMLElement('header') self.header.add(markup.user_id()) self.header.add(markup.heading(1, title))<|fim▁hole|> #Paper with two columns: sidebar and main paper = markup.div('paper') self.main = markup.div('main_content') paper.add(self.main) self.sidebar = markup.element_with_id('aside', 'sidebar') self.sidebar.add(markup.main_menu(MENU_LIST)) paper.add(self.sidebar) paper.add(markup.div('pushbottom')) self.body.add(paper) #Footer self.footer = markup.XMLElement('footer') self.footer.add(markup.paragraph('First Flamingo Enterprise B.V.')) self.body.add(self.footer) def add_paragraph(self, paragraphText): self.main.add(markup.paragraph(paragraphText)) def add_reference(self, href, content): paragraph = markup.paragraph('') paragraph.add(markup.anchor(href, content)) self.main.add(paragraph) def add_table(self, name, columnTitles, format=None): table = markup.HTMLTable(name, columnTitles) if format != None: table.format = format self.main.add(table) return table def add_page_navigator(self, currentPage, lastPage, urlFormat): self.main.add(markup.page_navigator(currentPage, lastPage, urlFormat)) # Helper functions def change_string(number): if number < 0: return '- %d =' % -number else: return'+ %d =' % number def mostCommonItem(histogram): maxValue = 0 foundKey = None for key, value in histogram.iteritems(): if value > maxValue: foundKey = key maxValue = value return (foundKey, maxValue) def correctedOffsetUTC(archivedMission): ''' Replaces the offset time as stored in the 
TAArchivedMission with that from the corresponding TAMission, while retaining the date. ''' originalMission = TAMission.get('%s.%d' % (archivedMission.country, archivedMission.baseNumber)) offsetCET = mark_cet(datetime.combine(archivedMission.offset_CET.date(), originalMission.offset)) return utc_from_cet(offsetCET) # WSGI Application app = webapp2.WSGIApplication([('/tools/repattern.*', RepatternHandler), ('/tools/reoffset.*', ReoffsetHandler) ], debug=True)<|fim▁end|>
self.body.add(self.header)
<|file_name|>MapTraitArgument.java<|end_file_name|><|fim▁begin|>package edu.ucsd.arcum.interpreter.ast; import java.util.List; import java.util.Set; import org.eclipse.core.runtime.CoreException; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import edu.ucsd.arcum.exceptions.ArcumError; import edu.ucsd.arcum.exceptions.SourceLocation; import edu.ucsd.arcum.interpreter.ast.expressions.ConstraintExpression; import edu.ucsd.arcum.interpreter.query.EntityDataBase; import edu.ucsd.arcum.interpreter.query.OptionMatchTable; import edu.ucsd.arcum.util.StringUtil; public class MapTraitArgument extends MapNameValueBinding { private RequireMap map; private ConstraintExpression patternExpr; private List<FormalParameter> formals; // TODO: paramNames should be allowed to have the types explicit, just like // any other realize statement public MapTraitArgument(SourceLocation location, RequireMap map, String traitName, List<FormalParameter> formals, ConstraintExpression patternExpr) { super(location, traitName); this.map = map; this.patternExpr = patternExpr; this.formals = formals; } public void initializeValue(EntityDataBase edb, Option option, OptionMatchTable table) throws CoreException { StaticRealizationStatement pseudoStmt; OptionInterface optionIntf = option.getOptionInterface(); List<FormalParameter> allParams = optionIntf.getSingletonParameters(); List<FormalParameter> formals = null; for (FormalParameter param : allParams) { if (param.getIdentifier().equals(getName())) { formals = param.getTraitArguments(); break; } } if (formals == null) { ArcumError.fatalUserError(getLocation(), "Couldn't find %s", getName()); } <|fim▁hole|> List<StaticRealizationStatement> stmts = Lists.newArrayList(pseudoStmt); try { EntityDataBase.pushCurrentDataBase(edb); RealizationStatement.collectivelyRealizeStatements(stmts, edb, table); } finally { EntityDataBase.popMostRecentDataBase(); } } @Override public Object getValue() { return this; } @Override public 
String toString() { return String.format("%s(%s): %s", getName(), StringUtil.separate(formals), patternExpr.toString()); } public void checkUserDefinedPredicates(List<TraitSignature> tupleSets) { Set<String> names = Sets.newHashSet(); names.addAll(Lists.transform(formals, FormalParameter.getIdentifier)); patternExpr.checkUserDefinedPredicates(tupleSets, names); } }<|fim▁end|>
pseudoStmt = StaticRealizationStatement.makeNested(map, getName(), patternExpr, formals, this.getLocation()); pseudoStmt.typeCheckAndValidate(optionIntf);
<|file_name|>flow.go<|end_file_name|><|fim▁begin|>package surge import ( "fmt" "time" ) type applyCallback func(gwy NodeRunnerInterface, flow FlowInterface) //======================================================================== // // type FlowInterface // //======================================================================== type FlowInterface interface { String() string setOneArg(a interface{}) unicast() bool // more accessors GetCid() int64 GetSid() int64 GetTio() TioInterface GetRb() RateBucketInterface GetRepnum() int getbw() int64 setbw(bw int64) getoffset() int64 setoffset(ev *ReplicaDataEvent) incoffset(add int) bytesToWrite(ev *ReplicaDataEvent) int } //======================================================================== // // type Flow (short-lived: duration = <control> <one chunk> <ack>) // //======================================================================== type Flow struct { from NodeRunnerInterface to NodeRunnerInterface togroup GroupInterface cid int64 sid int64 tio TioInterface rb RateBucketInterface // refill at the tobandwidth rate tobandwidth int64 // bits/sec timeTxDone time.Time // time the last byte of the current packet is sent extension interface{} // protocol-specific flow extension offset int64 totalbytes int64 repnum int // replica num } //======================================================================== // c-tors and helpers //======================================================================== func NewFlow(f NodeRunnerInterface, chunkid int64, args ...interface{}) *Flow { printid := uqrand(chunkid) flow := &Flow{ from: f, cid: chunkid, sid: printid} for i := 0; i < len(args); i++ { flow.setOneArg(args[i]) } // must be the flow initiating tio if flow.tio.GetFlow() == nil { flow.tio.SetFlow(flow) } return flow } func (flow *Flow) setOneArg(a interface{}) { switch a.(type) { case int: flow.repnum = a.(int) case TioInterface: flow.tio = a.(TioInterface) case NodeRunnerInterface: flow.to = a.(NodeRunnerInterface) 
case GroupInterface: flow.togroup = a.(GroupInterface) default: assert(false, fmt.Sprintf("unexpected type: %#v", a)) } } func (flow *Flow) unicast() bool { return flow.to != nil && flow.togroup == nil } func (flow *Flow) String() string { f := flow.from.String() bwstr := fmt.Sprintf("%.2f", float64(flow.tobandwidth)/1000.0/1000.0/1000.0) var cstr string if flow.repnum != 0 { cstr = fmt.Sprintf("c#%d(%d)", flow.sid, flow.repnum) } else { cstr = fmt.Sprintf("c#%d", flow.sid) } if flow.unicast() { t := flow.to.String() return fmt.Sprintf("[flow %s=>%s[%s],offset=%d,bw=%sGbps]", f, t, cstr, flow.offset, bwstr) } t := flow.togroup.String() return fmt.Sprintf("[flow %s=>%s[%s],offset=%d,bw=%sGbps]", f, t, cstr, flow.offset, bwstr) } func (flow *Flow) getbw() int64 { return flow.tobandwidth } func (flow *Flow) setbw(bw int64) { flow.tobandwidth = bw } func (flow *Flow) getoffset() int64 { return flow.offset } func (flow *Flow) setoffset(ev *ReplicaDataEvent) { flow.offset = ev.offset } func (flow *Flow) incoffset(add int) { flow.offset += int64(add) }<|fim▁hole|>func (flow *Flow) GetRepnum() int { return flow.repnum } func (flow *Flow) bytesToWrite(ev *ReplicaDataEvent) int { if flow.offset < flow.totalbytes { return 0 } return int(flow.totalbytes) } //======================================================================== // // type FlowLong (long-lived unicast flow) // //======================================================================== type FlowLong struct { from NodeRunnerInterface to NodeRunnerInterface rb RateBucketInterface // refill at the tobandwidth rate tobandwidth int64 // bits/sec offset int64 // transmitted bytes timeTxDone time.Time // time the last byte of the current packet is sent } func (flow *FlowLong) setOneArg(a interface{}) { } func (flow *FlowLong) unicast() bool { return true } func (flow *FlowLong) String() string { f := flow.from.String() bwstr := fmt.Sprintf("%.2f", float64(flow.tobandwidth)/1000.0/1000.0/1000.0) t := flow.to.String() 
return fmt.Sprintf("[flow %s=>%s,bw=%sGbps]", f, t, bwstr) } func (flow *FlowLong) getbw() int64 { return flow.tobandwidth } func (flow *FlowLong) setbw(bw int64) { flow.tobandwidth = bw } func (flow *FlowLong) GetRb() RateBucketInterface { return flow.rb } func (flow *FlowLong) incoffset(add int) { flow.offset += int64(add) } // FIXME: remove from interface func (flow *FlowLong) getoffset() int64 { return 0 } func (flow *FlowLong) setoffset(ev *ReplicaDataEvent) { assert(false) } func (flow *FlowLong) GetCid() int64 { return 0 } func (flow *FlowLong) GetSid() int64 { return 0 } func (flow *FlowLong) GetTio() TioInterface { return nil } func (flow *FlowLong) GetRepnum() int { return 0 } func (flow *FlowLong) bytesToWrite(ev *ReplicaDataEvent) int { return 0 } //======================================================================== // // FlowDir - container: unidirectional flows many-others => myself // //======================================================================== type FlowDir struct { node NodeRunnerInterface flows map[NodeRunnerInterface]FlowInterface } func NewFlowDir(r NodeRunnerInterface, num int) *FlowDir { flows := make(map[NodeRunnerInterface]FlowInterface, num) return &FlowDir{r, flows} } func (fdir *FlowDir) insertFlow(flow *Flow) { assert(flow.unicast()) if fdir.node == flow.from { fdir.flows[flow.to] = flow } else { assert(fdir.node == flow.to) fdir.flows[flow.from] = flow } } func (fdir *FlowDir) deleteFlow(r NodeRunnerInterface) { delete(fdir.flows, r) } func (fdir *FlowDir) count() int { return len(fdir.flows) } func (fdir *FlowDir) get(r NodeRunnerInterface, mustexist bool) FlowInterface { flow, ok := fdir.flows[r] if ok { return flow } if mustexist { n := fdir.node.String() other := r.String() assertstr := fmt.Sprintf("flow %s<...>%s does not exist", n, other) assert(false, assertstr) } return nil } func (fdir *FlowDir) apply(f applyCallback) { for r, flow := range fdir.flows { f(r, flow) } }<|fim▁end|>
func (flow *Flow) GetCid() int64 { return flow.cid } func (flow *Flow) GetSid() int64 { return flow.sid } func (flow *Flow) GetTio() TioInterface { return flow.tio.GetTio() } func (flow *Flow) GetRb() RateBucketInterface { return flow.rb }
<|file_name|>test_affinity_groups_projects.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from marvin.cloudstackTestCase import cloudstackTestCase, unittest from marvin.cloudstackAPI import deleteAffinityGroup from marvin.lib.utils import (cleanup_resources, random_gen) from marvin.lib.base import (Account, Project, ServiceOffering, VirtualMachine, AffinityGroup, Domain) from marvin.lib.common import (get_zone, get_domain, get_template, list_hosts, list_virtual_machines, wait_for_cleanup) from nose.plugins.attrib import attr class Services: """Test Account Services """<|fim▁hole|> self.services = { "domain": { "name": "NonRootDomain" }, "domain_admin_account": { "email": "[email protected]", "firstname": "Test", "lastname": "User", "username": "doadmintest", "password": "password" }, "account": { "email": "[email protected]", "firstname": "Test", "lastname": "User", "username": "acc", "password": "password" }, "account_not_in_project": { "email": "[email protected]", "firstname": "Test", "lastname": "User", "username": "account_not_in_project", "password": "password" }, "project": { "name": "Project", "displaytext": "Project" }, "project2": { "name": "Project2", "displaytext": "Project2" }, 
"service_offering": { "name": "Tiny Instance", "displaytext": "Tiny Instance", "cpunumber": 1, "cpuspeed": 100, "memory": 64 }, "ostype": 'CentOS 5.3 (64-bit)', "host_anti_affinity": { "name": "", "type": "host anti-affinity" }, "virtual_machine" : { } } class TestCreateAffinityGroup(cloudstackTestCase): """ Test various scenarios for Create Affinity Group API for projects """ @classmethod def setUpClass(cls): cls.testClient = super(TestCreateAffinityGroup, cls).getClsTestClient() cls.api_client = cls.testClient.getApiClient() cls.services = Services().services #Get Zone, Domain and templates cls.rootdomain = get_domain(cls.api_client) cls.domain = Domain.create(cls.api_client, cls.services["domain"]) cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests()) cls.template = get_template( cls.api_client, cls.zone.id, cls.services["ostype"] ) cls.services["virtual_machine"]["zoneid"] = cls.zone.id cls.services["template"] = cls.template.id cls.services["zoneid"] = cls.zone.id cls.domain_admin_account = Account.create( cls.api_client, cls.services["domain_admin_account"], domainid=cls.domain.id, admin=True ) cls.domain_api_client = cls.testClient.getUserApiClient(cls.domain_admin_account.name, cls.domain.name, 2) cls.account = Account.create( cls.api_client, cls.services["account"], domainid=cls.domain.id ) cls.account_api_client = cls.testClient.getUserApiClient(cls.account.name, cls.domain.name, 0) cls.account_not_in_project = Account.create( cls.api_client, cls.services["account_not_in_project"], domainid=cls.domain.id ) cls.account_not_in_project_api_client = cls.testClient.getUserApiClient(cls.account_not_in_project.name, cls.domain.name, 0) cls.project = Project.create( cls.api_client, cls.services["project"], account=cls.domain_admin_account.name, domainid=cls.domain_admin_account.domainid ) cls.project2 = Project.create( cls.api_client, cls.services["project2"], account=cls.domain_admin_account.name, domainid=cls.domain_admin_account.domainid ) 
cls.debug("Created project with ID: %s" % cls.project.id) cls.debug("Created project2 with ID: %s" % cls.project2.id) # Add user to the project cls.project.addAccount( cls.api_client, cls.account.name ) cls.service_offering = ServiceOffering.create( cls.api_client, cls.services["service_offering"], domainid=cls.account.domainid ) cls._cleanup = [] return def setUp(self): self.apiclient = self.testClient.getApiClient() self.dbclient = self.testClient.getDbConnection() self.cleanup = [] def tearDown(self): try: # #Clean up, terminate the created instance, volumes and snapshots cleanup_resources(self.apiclient, self.cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) return @classmethod def tearDownClass(cls): try: #Clean up, terminate the created templates cls.domain.delete(cls.api_client, cleanup=True) cleanup_resources(cls.api_client, cls._cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) def create_aff_grp(self, api_client=None, aff_grp=None, aff_grp_name=None, projectid=None): if not api_client: api_client = self.api_client if aff_grp is None: aff_grp = self.services["host_anti_affinity"] if aff_grp_name is None: aff_grp["name"] = "aff_grp_" + random_gen(size=6) else: aff_grp["name"] = aff_grp_name if projectid is None: projectid = self.project.id try: return AffinityGroup.create(api_client, aff_grp, None, None, projectid) except Exception as e: raise Exception("Error: Creation of Affinity Group failed : %s" % e) @attr(tags=["simulator", "basic", "advanced"], required_hardware="false") def test_01_admin_create_aff_grp_for_project(self): """ Test create affinity group as admin in project @return: """ aff_grp = self.create_aff_grp() self.debug("Created Affinity Group: %s" % aff_grp.name) list_aff_grps = AffinityGroup.list(self.api_client, id=aff_grp.id) self.assert_(isinstance(list_aff_grps, list) and len(list_aff_grps) > 0) self.assert_(list_aff_grps[0].id == aff_grp.id) 
self.assert_(list_aff_grps[0].projectid == self.project.id) self.cleanup.append(aff_grp) @attr(tags=["simulator", "basic", "advanced"], required_hardware="false") def test_02_doadmin_create_aff_grp_for_project(self): """ Test create affinity group as domain admin for projects @return: """ aff_grp = self.create_aff_grp(api_client=self.domain_api_client) list_aff_grps = AffinityGroup.list(self.domain_api_client, id=aff_grp.id) self.assert_(isinstance(list_aff_grps, list) and len(list_aff_grps) > 0) self.assert_(list_aff_grps[0].id == aff_grp.id) self.assert_(list_aff_grps[0].projectid == self.project.id) self.cleanup.append(aff_grp) @attr(tags=["vogxn", "simulator", "basic", "advanced"], required_hardware="false") def test_03_user_create_aff_grp_for_project(self): """ Test create affinity group as user for projects @return: """ aff_grp = self.create_aff_grp(api_client=self.account_api_client) list_aff_grps = AffinityGroup.list(self.api_client, id=aff_grp.id) self.assert_(isinstance(list_aff_grps, list) and len(list_aff_grps) > 0) self.assert_(list_aff_grps[0].id == aff_grp.id) self.assert_(list_aff_grps[0].projectid == self.project.id) self.cleanup.append(aff_grp) @attr(tags=["simulator", "basic", "advanced"], required_hardware="false") def test_4_user_create_aff_grp_existing_name_for_project(self): """ Test create affinity group that exists (same name) for projects @return: """ failed_aff_grp = None aff_grp = self.create_aff_grp(api_client=self.account_api_client) with self.assertRaises(Exception): failed_aff_grp = self.create_aff_grp(api_client=self.account_api_client,aff_grp_name = aff_grp.name) if failed_aff_grp: self.cleanup.append(failed_aff_grp) self.cleanup.append(aff_grp) class TestListAffinityGroups(cloudstackTestCase): @classmethod def setUpClass(cls): cls.testClient = super(TestListAffinityGroups, cls).getClsTestClient() cls.api_client = cls.testClient.getApiClient() cls.services = Services().services #Get Zone, Domain and templates cls.rootdomain = 
get_domain(cls.api_client) cls.domain = Domain.create(cls.api_client, cls.services["domain"]) cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests()) cls.template = get_template( cls.api_client, cls.zone.id, cls.services["ostype"] ) cls.services["virtual_machine"]["zoneid"] = cls.zone.id cls.services["template"] = cls.template.id cls.services["zoneid"] = cls.zone.id cls.domain_admin_account = Account.create( cls.api_client, cls.services["domain_admin_account"], domainid=cls.domain.id, admin=True ) cls.domain_api_client = cls.testClient.getUserApiClient(cls.domain_admin_account.name, cls.domain.name, 2) cls.account = Account.create( cls.api_client, cls.services["account"], domainid=cls.domain.id ) cls.account_api_client = cls.testClient.getUserApiClient(cls.account.name, cls.domain.name, 0) cls.account_not_in_project = Account.create( cls.api_client, cls.services["account_not_in_project"], domainid=cls.domain.id ) cls.account_not_in_project_api_client = cls.testClient.getUserApiClient(cls.account_not_in_project.name, cls.domain.name, 0) cls.project = Project.create( cls.api_client, cls.services["project"], account=cls.domain_admin_account.name, domainid=cls.domain_admin_account.domainid ) cls.project2 = Project.create( cls.api_client, cls.services["project2"], account=cls.domain_admin_account.name, domainid=cls.domain_admin_account.domainid ) cls.debug("Created project with ID: %s" % cls.project.id) cls.debug("Created project2 with ID: %s" % cls.project2.id) # Add user to the project cls.project.addAccount( cls.api_client, cls.account.name ) cls.service_offering = ServiceOffering.create( cls.api_client, cls.services["service_offering"], domainid=cls.account.domainid ) cls._cleanup = [] return def setUp(self): self.apiclient = self.testClient.getApiClient() self.dbclient = self.testClient.getDbConnection() self.cleanup = [] def tearDown(self): try: # #Clean up, terminate the created instance, volumes and snapshots cleanup_resources(self.api_client, 
self.cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) return @classmethod def tearDownClass(cls): try: cls.domain.delete(cls.api_client, cleanup=True) cleanup_resources(cls.api_client, cls._cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) def create_aff_grp(self, api_client=None, aff_grp=None, aff_grp_name=None, projectid=None): if not api_client: api_client = self.api_client if aff_grp is None: aff_grp = self.services["host_anti_affinity"] if aff_grp_name is None: aff_grp["name"] = "aff_grp_" + random_gen(size=6) else: aff_grp["name"] = aff_grp_name if projectid is None: projectid = self.project.id try: return AffinityGroup.create(api_client, aff_grp, None, None, projectid) except Exception as e: raise Exception("Error: Creation of Affinity Group failed : %s" % e) def create_vm_in_aff_grps(self, api_client=None, ag_list=[], projectid=None): self.debug('Creating VM in AffinityGroups=%s' % ag_list) if api_client is None: api_client = self.api_client if projectid is None: projectid = self.project.id vm = VirtualMachine.create( api_client, self.services["virtual_machine"], projectid=projectid, templateid=self.template.id, serviceofferingid=self.service_offering.id, affinitygroupnames=ag_list ) self.debug('Created VM=%s in Affinity Group=%s' % (vm.id, tuple(ag_list))) list_vm = list_virtual_machines(api_client, id=vm.id, projectid=projectid) self.assertEqual(isinstance(list_vm, list), True,"Check list response returns a valid list") self.assertNotEqual(len(list_vm),0, "Check VM available in List Virtual Machines") vm_response = list_vm[0] self.assertEqual(vm_response.state, 'Running',msg="VM is not in Running state") self.assertEqual(vm_response.projectid, projectid,msg="VM is not in project") return vm, vm_response.hostid @attr(tags=["simulator", "basic", "advanced"], required_hardware="false") def test_01_list_aff_grps_for_vm(self): """ List affinity group for a vm for projects 
""" aff_grps = [] aff_grps.append(self.create_aff_grp(self.domain_api_client, projectid=self.project.id)) vm, hostid = self.create_vm_in_aff_grps(self.account_api_client,ag_list=[aff_grps[0].name]) list_aff_grps = AffinityGroup.list(self.api_client,virtualmachineid=vm.id) self.assertEqual(list_aff_grps[0].name, aff_grps[0].name,"Listing Affinity Group by VM id failed") self.assertEqual(list_aff_grps[0].projectid, self.project.id,"Listing Affinity Group by VM id failed, vm was not in project") vm.delete(self.api_client) #Wait for expunge interval to cleanup VM wait_for_cleanup(self.apiclient, ["expunge.delay", "expunge.interval"]) self.cleanup.append(aff_grps[0]) @attr(tags=["simulator", "basic", "advanced"], required_hardware="false") def test_02_list_multiple_aff_grps_for_vm(self): """ List multiple affinity groups associated with a vm for projects """ aff_grp_01 = self.create_aff_grp(self.account_api_client) aff_grp_02 = self.create_aff_grp(self.account_api_client) aff_grps_names = [aff_grp_01.name, aff_grp_02.name] vm, hostid = self.create_vm_in_aff_grps(ag_list=aff_grps_names) list_aff_grps = AffinityGroup.list(self.api_client, virtualmachineid=vm.id) list_aff_grps_names = [list_aff_grps[0].name, list_aff_grps[1].name] aff_grps_names.sort() list_aff_grps_names.sort() self.assertEqual(aff_grps_names, list_aff_grps_names,"One of the Affinity Groups is missing %s" % list_aff_grps_names) vm.delete(self.api_client) #Wait for expunge interval to cleanup VM wait_for_cleanup(self.apiclient, ["expunge.delay", "expunge.interval"]) self.cleanup.append(aff_grp_01) self.cleanup.append(aff_grp_02) @attr(tags=["simulator", "basic", "advanced"], required_hardware="false") def test_03_list_aff_grps_by_id(self): """ List affinity groups by id for projects """ aff_grp = self.create_aff_grp(self.account_api_client) list_aff_grps = AffinityGroup.list(self.account_api_client, id=aff_grp.id, projectid=self.project.id) self.assertEqual(list_aff_grps[0].id, aff_grp.id,"Listing Affinity 
Group by id failed") with self.assertRaises(Exception): AffinityGroup.list(self.account_not_in_project_api_client, id=aff_grp.id, projectid=self.project.id) self.cleanup.append(aff_grp) @attr(tags=["simulator", "basic", "advanced"], required_hardware="false") def test_04_list_aff_grps_by_name(self): """ List Affinity Groups by name for projects """ aff_grp = self.create_aff_grp(self.account_api_client) list_aff_grps = AffinityGroup.list(self.account_api_client, name=aff_grp.name, projectid=self.project.id) self.assertEqual(list_aff_grps[0].name, aff_grp.name,"Listing Affinity Group by name failed") with self.assertRaises(Exception): AffinityGroup.list(self.account_not_in_project_api_client, id=aff_grp.id, projectid=self.project.id) self.cleanup.append(aff_grp) @attr(tags=["simulator", "basic", "advanced"], required_hardware="false") def test_05_list_aff_grps_by_non_existing_id(self): """ List Affinity Groups by non-existing id for projects """ aff_grp = self.create_aff_grp(self.account_api_client) list_aff_grps = AffinityGroup.list(self.account_api_client, id=1234, projectid=self.project.id) self.assertEqual(list_aff_grps, None, "Listing Affinity Group by non-existing id succeeded.") self.cleanup.append(aff_grp) @attr(tags=["simulator", "basic", "advanced"], required_hardware="false") def test_06_list_aff_grps_by_non_existing_name(self): """ List Affinity Groups by non-existing name for projects """ aff_grp = self.create_aff_grp(self.account_api_client) list_aff_grps = AffinityGroup.list(self.account_api_client, name="inexistantName", projectid=self.project.id) self.assertEqual(list_aff_grps, None, "Listing Affinity Group by non-existing name succeeded.") self.cleanup.append(aff_grp) @attr(tags=["simulator", "basic", "advanced"], required_hardware="false") def test_07_list_all_vms_in_aff_grp(self): """ List affinity group should list all for a vms associated with that group for projects """ aff_grp = self.create_aff_grp(self.account_api_client) vm1, hostid1 = 
self.create_vm_in_aff_grps(ag_list=[aff_grp.name]) vm2, hostid2 = self.create_vm_in_aff_grps(ag_list=[aff_grp.name]) list_aff_grps = AffinityGroup.list(self.api_client, id=aff_grp.id, projectid=self.project.id) self.assertEqual(list_aff_grps[0].name, aff_grp.name, "Listing Affinity Group by id failed") self.assertEqual(list_aff_grps[0].virtualmachineIds[0], vm1.id, "List affinity group response.virtualmachineIds for group: %s doesn't contain vmid : %s" % (aff_grp.name, vm1.id)) self.assertEqual(list_aff_grps[0].virtualmachineIds[1], vm2.id, "List affinity group response.virtualmachineIds for group: %s doesn't contain vmid : %s" % (aff_grp.name, vm2.id)) vm1.delete(self.api_client) vm2.delete(self.api_client) #Wait for expunge interval to cleanup VM wait_for_cleanup(self.apiclient, ["expunge.delay", "expunge.interval"]) self.cleanup.append(aff_grp) class TestDeleteAffinityGroups(cloudstackTestCase): @classmethod def setUpClass(cls): cls.testClient = super(TestDeleteAffinityGroups, cls).getClsTestClient() cls.api_client = cls.testClient.getApiClient() cls.services = Services().services #Get Zone, Domain and templates cls.rootdomain = get_domain(cls.api_client) cls.domain = Domain.create(cls.api_client, cls.services["domain"]) cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests()) cls.template = get_template( cls.api_client, cls.zone.id, cls.services["ostype"] ) cls.services["virtual_machine"]["zoneid"] = cls.zone.id cls.services["template"] = cls.template.id cls.services["zoneid"] = cls.zone.id cls.domain_admin_account = Account.create( cls.api_client, cls.services["domain_admin_account"], domainid=cls.domain.id, admin=True ) cls.domain_api_client = cls.testClient.getUserApiClient(cls.domain_admin_account.name, cls.domain.name, 2) cls.account = Account.create( cls.api_client, cls.services["account"], domainid=cls.domain.id ) cls.account_api_client = cls.testClient.getUserApiClient(cls.account.name, cls.domain.name, 0) cls.account_not_in_project = 
Account.create( cls.api_client, cls.services["account_not_in_project"], domainid=cls.domain.id ) cls.account_not_in_project_api_client = cls.testClient.getUserApiClient(cls.account_not_in_project.name, cls.domain.name, 0) cls.project = Project.create( cls.api_client, cls.services["project"], account=cls.domain_admin_account.name, domainid=cls.domain_admin_account.domainid ) cls.project2 = Project.create( cls.api_client, cls.services["project2"], account=cls.domain_admin_account.name, domainid=cls.domain_admin_account.domainid ) cls.debug("Created project with ID: %s" % cls.project.id) cls.debug("Created project2 with ID: %s" % cls.project2.id) # Add user to the project cls.project.addAccount( cls.api_client, cls.account.name ) cls.service_offering = ServiceOffering.create( cls.api_client, cls.services["service_offering"], domainid=cls.account.domainid ) cls._cleanup = [] return def setUp(self): self.apiclient = self.testClient.getApiClient() self.dbclient = self.testClient.getDbConnection() self.cleanup = [] def tearDown(self): try: # #Clean up, terminate the created instance, volumes and snapshots cleanup_resources(self.api_client, self.cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) return @classmethod def tearDownClass(cls): try: cls.domain.delete(cls.api_client, cleanup=True) cleanup_resources(cls.api_client, cls._cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) def create_aff_grp(self, api_client=None, aff_grp=None, aff_grp_name=None, projectid=None): if not api_client: api_client = self.api_client if aff_grp is None: aff_grp = self.services["host_anti_affinity"] if aff_grp_name is None: aff_grp["name"] = "aff_grp_" + random_gen(size=6) else: aff_grp["name"] = aff_grp_name if projectid is None: projectid = self.project.id try: return AffinityGroup.create(api_client, aff_grp, None, None, projectid) except Exception as e: raise Exception("Error: Creation of Affinity Group 
failed : %s" % e) def create_vm_in_aff_grps(self, api_client=None, ag_list=[], projectid=None): self.debug('Creating VM in AffinityGroups=%s' % ag_list) if api_client is None: api_client = self.api_client if projectid is None: projectid = self.project.id vm = VirtualMachine.create( api_client, self.services["virtual_machine"], projectid=projectid, templateid=self.template.id, serviceofferingid=self.service_offering.id, affinitygroupnames=ag_list ) self.debug('Created VM=%s in Affinity Group=%s' % (vm.id, tuple(ag_list))) list_vm = list_virtual_machines(self.api_client, id=vm.id, projectid=projectid) self.assertEqual(isinstance(list_vm, list), True,"Check list response returns an invalid list %s" % list_vm) self.assertNotEqual(len(list_vm),0, "Check VM available in TestDeployVMAffinityGroups") self.assertEqual(list_vm[0].id, vm.id,"Listed vm does not have the same ids") vm_response = list_vm[0] self.assertEqual(vm.state, 'Running',msg="VM is not in Running state") self.assertEqual(vm.projectid, projectid,msg="VM is not in project") self.assertNotEqual(vm_response.hostid, None, "Host id was null for vm %s" % vm_response) return vm, vm_response.hostid def delete_aff_group(self, apiclient, **kwargs): cmd = deleteAffinityGroup.deleteAffinityGroupCmd() [setattr(cmd, k, v) for k, v in kwargs.items()] return apiclient.deleteAffinityGroup(cmd) @attr(tags=["simulator", "basic", "advanced"], required_hardware="false") def test_01_delete_aff_grp_by_id(self): """ #Delete Affinity Group by id. 
""" aff_grp1 = self.create_aff_grp(self.account_api_client) aff_grp2 = self.create_aff_grp(self.account_api_client) aff_grp1.delete(self.account_api_client) with self.assertRaises(Exception): list_aff_grps = AffinityGroup.list(self.api_client, id=aff_grp1.id) self.cleanup.append(aff_grp2) @attr(tags=["simulator", "basic", "advanced"], required_hardware="false") def test_02_delete_aff_grp_by_id_another_user(self): """ #Delete Affinity Group by id should fail for user not in project """ aff_grp1 = self.create_aff_grp(self.account_api_client) aff_grp2 = self.create_aff_grp(self.account_api_client) with self.assertRaises(Exception): aff_grp1.delete(self.account_not_in_project_api_client) self.cleanup.append(aff_grp1) self.cleanup.append(aff_grp2) class TestUpdateVMAffinityGroups(cloudstackTestCase): @classmethod def setUpClass(cls): cls.testClient = super(TestUpdateVMAffinityGroups, cls).getClsTestClient() cls.api_client = cls.testClient.getApiClient() cls.services = Services().services #Get Zone, Domain and templates cls.rootdomain = get_domain(cls.api_client) cls.domain = Domain.create(cls.api_client, cls.services["domain"]) cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests()) cls.template = get_template( cls.api_client, cls.zone.id, cls.services["ostype"] ) cls.services["virtual_machine"]["zoneid"] = cls.zone.id cls.services["template"] = cls.template.id cls.services["zoneid"] = cls.zone.id cls.domain_admin_account = Account.create( cls.api_client, cls.services["domain_admin_account"], domainid=cls.domain.id, admin=True ) cls.domain_api_client = cls.testClient.getUserApiClient(cls.domain_admin_account.name, cls.domain.name, 2) cls.account = Account.create( cls.api_client, cls.services["account"], domainid=cls.domain.id ) cls.account_api_client = cls.testClient.getUserApiClient(cls.account.name, cls.domain.name, 0) cls.account_not_in_project = Account.create( cls.api_client, cls.services["account_not_in_project"], domainid=cls.domain.id ) 
cls.account_not_in_project_api_client = cls.testClient.getUserApiClient(cls.account_not_in_project.name, cls.domain.name, 0) cls.project = Project.create( cls.api_client, cls.services["project"], account=cls.domain_admin_account.name, domainid=cls.domain_admin_account.domainid ) cls.project2 = Project.create( cls.api_client, cls.services["project2"], account=cls.domain_admin_account.name, domainid=cls.domain_admin_account.domainid ) cls.debug("Created project with ID: %s" % cls.project.id) cls.debug("Created project2 with ID: %s" % cls.project2.id) # Add user to the project cls.project.addAccount( cls.api_client, cls.account.name ) cls.service_offering = ServiceOffering.create( cls.api_client, cls.services["service_offering"], domainid=cls.account.domainid ) cls._cleanup = [] return def setUp(self): self.apiclient = self.testClient.getApiClient() self.dbclient = self.testClient.getDbConnection() self.cleanup = [] def tearDown(self): try: # #Clean up, terminate the created instance, volumes and snapshots cleanup_resources(self.api_client, self.cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) return @classmethod def tearDownClass(cls): try: cls.domain.delete(cls.api_client, cleanup=True) cleanup_resources(cls.api_client, cls._cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) def create_aff_grp(self, api_client=None, aff_grp=None, aff_grp_name=None, projectid=None): if not api_client: api_client = self.api_client if aff_grp is None: aff_grp = self.services["host_anti_affinity"] if aff_grp_name is None: aff_grp["name"] = "aff_grp_" + random_gen(size=6) else: aff_grp["name"] = aff_grp_name if projectid is None: projectid = self.project.id try: return AffinityGroup.create(api_client, aff_grp, None, None, projectid) except Exception as e: raise Exception("Error: Creation of Affinity Group failed : %s" % e) def create_vm_in_aff_grps(self, api_client=None, ag_list=[], projectid=None): 
self.debug('Creating VM in AffinityGroups=%s' % ag_list) if api_client is None: api_client = self.api_client if projectid is None: projectid = self.project.id vm = VirtualMachine.create( api_client, self.services["virtual_machine"], projectid=projectid, templateid=self.template.id, serviceofferingid=self.service_offering.id, affinitygroupnames=ag_list ) self.debug('Created VM=%s in Affinity Group=%s' % (vm.id, tuple(ag_list))) list_vm = list_virtual_machines(self.api_client, id=vm.id, projectid=projectid) self.assertEqual(isinstance(list_vm, list), True,"Check list response returns an invalid list %s" % list_vm) self.assertNotEqual(len(list_vm),0, "Check VM available in TestDeployVMAffinityGroups") self.assertEqual(list_vm[0].id, vm.id,"Listed vm does not have the same ids") vm_response = list_vm[0] self.assertEqual(vm.state, 'Running',msg="VM is not in Running state") self.assertEqual(vm.projectid, projectid,msg="VM is not in project") self.assertNotEqual(vm_response.hostid, None, "Host id was null for vm %s" % vm_response) return vm, vm_response.hostid @attr(tags=["simulator", "basic", "advanced", "multihost"], required_hardware="false") def test_01_update_aff_grp_by_ids(self): """ Update the list of affinityGroups by using affinity groupids """ aff_grp1 = self.create_aff_grp(self.account_api_client) aff_grp2 = self.create_aff_grp(self.account_api_client) vm1, hostid1 = self.create_vm_in_aff_grps(ag_list=[aff_grp1.name]) vm2, hostid2 = self.create_vm_in_aff_grps(ag_list=[aff_grp1.name]) vm1.stop(self.api_client) list_aff_grps = AffinityGroup.list(self.api_client, projectid=self.project.id) self.assertEqual(len(list_aff_grps), 2 , "2 affinity groups should be present") vm1.update_affinity_group(self.api_client,affinitygroupids=[list_aff_grps[0].id,list_aff_grps[1].id]) list_aff_grps = AffinityGroup.list(self.api_client,virtualmachineid=vm1.id) list_aff_grps_names = [list_aff_grps[0].name, list_aff_grps[1].name] aff_grps_names = [aff_grp1.name, aff_grp2.name] 
aff_grps_names.sort() list_aff_grps_names.sort() self.assertEqual(aff_grps_names, list_aff_grps_names,"One of the Affinity Groups is missing %s" % list_aff_grps_names) vm1.start(self.api_client) vm_status = VirtualMachine.list(self.api_client, id=vm1.id) self.assertNotEqual(vm_status[0].hostid, hostid2, "The virtual machine started on host %s violating the host anti-affinity rule" %vm_status[0].hostid) vm1.delete(self.api_client) vm2.delete(self.api_client) #Wait for expunge interval to cleanup VM wait_for_cleanup(self.apiclient, ["expunge.delay", "expunge.interval"]) aff_grp1.delete(self.api_client) aff_grp2.delete(self.api_client) class TestDeployVMAffinityGroups(cloudstackTestCase): @classmethod def setUpClass(cls): cls.testClient = super(TestDeployVMAffinityGroups, cls).getClsTestClient() cls.api_client = cls.testClient.getApiClient() cls.services = Services().services #Get Zone, Domain and templates cls.rootdomain = get_domain(cls.api_client) cls.domain = Domain.create(cls.api_client, cls.services["domain"]) cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests()) cls.template = get_template( cls.api_client, cls.zone.id, cls.services["ostype"] ) cls.services["virtual_machine"]["zoneid"] = cls.zone.id cls.services["template"] = cls.template.id cls.services["zoneid"] = cls.zone.id cls.domain_admin_account = Account.create( cls.api_client, cls.services["domain_admin_account"], domainid=cls.domain.id, admin=True ) cls.domain_api_client = cls.testClient.getUserApiClient(cls.domain_admin_account.name, cls.domain.name, 2) cls.account = Account.create( cls.api_client, cls.services["account"], domainid=cls.domain.id ) cls.account_api_client = cls.testClient.getUserApiClient(cls.account.name, cls.domain.name, 0) cls.account_not_in_project = Account.create( cls.api_client, cls.services["account_not_in_project"], domainid=cls.domain.id ) cls.account_not_in_project_api_client = cls.testClient.getUserApiClient(cls.account_not_in_project.name, cls.domain.name, 
0) cls.project = Project.create( cls.api_client, cls.services["project"], account=cls.domain_admin_account.name, domainid=cls.domain_admin_account.domainid ) cls.project2 = Project.create( cls.api_client, cls.services["project2"], account=cls.domain_admin_account.name, domainid=cls.domain_admin_account.domainid ) cls.debug("Created project with ID: %s" % cls.project.id) cls.debug("Created project2 with ID: %s" % cls.project2.id) # Add user to the project cls.project.addAccount( cls.api_client, cls.account.name ) cls.service_offering = ServiceOffering.create( cls.api_client, cls.services["service_offering"], domainid=cls.account.domainid ) cls._cleanup = [] return def setUp(self): self.apiclient = self.testClient.getApiClient() self.dbclient = self.testClient.getDbConnection() self.cleanup = [] def tearDown(self): try: # #Clean up, terminate the created instance, volumes and snapshots cleanup_resources(self.api_client, self.cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) return @classmethod def tearDownClass(cls): try: cls.domain.delete(cls.api_client, cleanup=True) cleanup_resources(cls.api_client, cls._cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) def create_aff_grp(self, api_client=None, aff_grp=None, aff_grp_name=None, projectid=None): if not api_client: api_client = self.api_client if aff_grp is None: aff_grp = self.services["host_anti_affinity"] if aff_grp_name is None: aff_grp["name"] = "aff_grp_" + random_gen(size=6) else: aff_grp["name"] = aff_grp_name if projectid is None: projectid = self.project.id try: return AffinityGroup.create(api_client, aff_grp, None, None, projectid) except Exception as e: raise Exception("Error: Creation of Affinity Group failed : %s" % e) def create_vm_in_aff_grps(self, api_client=None, ag_list=[], projectid=None): self.debug('Creating VM in AffinityGroups=%s' % ag_list) if api_client is None: api_client = self.api_client if projectid 
is None: projectid = self.project.id vm = VirtualMachine.create( api_client, self.services["virtual_machine"], projectid=projectid, templateid=self.template.id, serviceofferingid=self.service_offering.id, affinitygroupnames=ag_list ) self.debug('Created VM=%s in Affinity Group=%s' % (vm.id, tuple(ag_list))) list_vm = list_virtual_machines(self.api_client, id=vm.id, projectid=projectid) self.assertEqual(isinstance(list_vm, list), True,"Check list response returns an invalid list %s" % list_vm) self.assertNotEqual(len(list_vm),0, "Check VM available in TestDeployVMAffinityGroups") self.assertEqual(list_vm[0].id, vm.id,"Listed vm does not have the same ids") vm_response = list_vm[0] self.assertEqual(vm.state, 'Running',msg="VM is not in Running state") self.assertEqual(vm.projectid, projectid,msg="VM is not in project") self.assertNotEqual(vm_response.hostid, None, "Host id was null for vm %s" % vm_response) return vm, vm_response.hostid @attr(tags=["simulator", "basic", "advanced", "multihost"], required_hardware="false") def test_01_deploy_vm_anti_affinity_group(self): """ test DeployVM in anti-affinity groups deploy VM1 and VM2 in the same host-anti-affinity groups Verify that the vms are deployed on separate hosts """ aff_grp = self.create_aff_grp(self.account_api_client) vm1, hostid1 = self.create_vm_in_aff_grps(self.account_api_client,ag_list=[aff_grp.name]) vm2, hostid2 = self.create_vm_in_aff_grps(self.account_api_client, ag_list=[aff_grp.name]) self.assertNotEqual(hostid1, hostid2, msg="Both VMs of affinity group %s are on the same host: %s , %s, %s, %s" % (aff_grp.name, vm1, hostid1, vm2, hostid2)) vm1.delete(self.api_client) vm2.delete(self.api_client) wait_for_cleanup(self.api_client, ["expunge.delay", "expunge.interval"]) self.cleanup.append(aff_grp) @attr(tags=["simulator", "basic", "advanced", "multihost"], required_hardware="false") def test_02_deploy_vm_anti_affinity_group_fail_on_not_enough_hosts(self): """ test DeployVM in anti-affinity groups with 
more vms than hosts. """ hosts = list_hosts(self.api_client, type="routing") aff_grp = self.create_aff_grp(self.account_api_client) vms = [] for host in hosts: vms.append(self.create_vm_in_aff_grps(self.account_api_client,ag_list=[aff_grp.name])) vm_failed = None with self.assertRaises(Exception): vm_failed = self.create_vm_in_aff_grps(self.account_api_client,ag_list=[aff_grp.name]) self.assertEqual(len(hosts), len(vms), "Received %s and %s " % (hosts, vms)) if vm_failed: vm_failed.expunge(self.api_client) self.cleanup.append(aff_grp)<|fim▁end|>
def __init__(self):
<|file_name|>test_csiactobs.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python # ========================================================================== # This scripts performs unit tests for the csiactobs script # # Copyright (C) 2016-2018 Juergen Knoedlseder # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # ========================================================================== import gammalib import cscripts from testing import test # =============================== # # Test class for csiactobs script # # =============================== # class Test(test): """ Test class for csiactobs script This test class makes unit tests for the csiactobs script by using it from the command line and from Python. 
""" # Constructor def __init__(self): """ Constructor """ # Call base class constructor test.__init__(self) # Set data members self._datapath = self._datadir + '/iactdata' self._runlist = self._datadir + '/iact_runlist.dat' # Return return # Set test functions def set(self): """ Set all test functions """ # Set test name self.name('csiactobs') # Append tests self.append(self._test_cmd, 'Test csiactobs on command line') self.append(self._test_python, 'Test csiactobs from Python') # Return return # Test csiactobs on command line def _test_cmd(self): """ Test csiactobs on the command line """ # Set script name csiactobs = self._script('csiactobs') # Setup csiactobs command cmd = csiactobs+' datapath="'+self._datapath+'"'+ \ ' prodname="unit-test"'+ \ ' infile="'+self._runlist+'"'+ \ ' bkgpars=1'+\ ' outobs="csiactobs_obs_cmd1.xml"'+ \ ' outmodel="csiactobs_bgd_cmd1.xml"'+ \ ' logfile="csiactobs_cmd1.log" chatter=1' # Check if execution was successful self.test_assert(self._execute(cmd) == 0, 'Check successful execution from command line') # Check observation definition XML file self._check_obsdef('csiactobs_obs_cmd1.xml', 6) # Check model definition XML file self._check_moddef('csiactobs_bgd_cmd1.xml', 6) # Setup csiactobs command cmd = csiactobs+' datapath="data_path_that_does_not_exist"'+ \ ' prodname="unit-test"'+ \ ' infile="'+self._runlist+'"'+ \ ' bkgpars=1'+\ ' outobs="csiactobs_obs_cmd2.xml"'+ \ ' outmodel="csiactobs_bgd_cmd2.xml"'+ \ ' logfile="csiactobs_cmd2.log" debug=yes debug=yes'+ \ ' chatter=1' # Check if execution failed self.test_assert(self._execute(cmd, success=False) != 0, 'Check invalid input datapath when executed from command line') # Setup csiactobs command cmd = csiactobs+' datapath="'+self._datapath+'"'+ \ ' prodname="unit-test-doesnt-exist"'+ \ ' infile="'+self._runlist+'"'+ \ ' bkgpars=1'+\ ' outobs="csiactobs_obs_cmd3.xml"'+ \ ' outmodel="csiactobs_bgd_cmd3.xml"'+ \ ' logfile="csiactobs_cmd3.log" debug=yes debug=yes'+ \ ' chatter=1' # 
Check if execution failed self.test_assert(self._execute(cmd, success=False) != 0, 'Check invalid input prodname when executed from command line') # Check csiactobs --help self._check_help(csiactobs) # Return return # Test csiactobs from Python def _test_python(self): """ Test csiactobs from Python """ # Allocate empty csiactobs script iactobs = cscripts.csiactobs() # Check that empty csiactobs sciript has an empty observation container # and energy boundaries self.test_value(iactobs.obs().size(), 0, 'Check that empty csiactobs has an empty observation container') self.test_value(iactobs.ebounds().size(), 0, 'Check that empty csiactobs has empty energy bins') # Check that saving saves an empty model definition file iactobs['outobs'] = 'csiactobs_obs_py0.xml' iactobs['outmodel'] = 'csiactobs_bgd_py0.xml' iactobs['logfile'] = 'csiactobs_py0.log' iactobs.logFileOpen() iactobs.save() # Check empty observation definition XML file self._check_obsdef('csiactobs_obs_py0.xml', 0) # Check empty model definition XML file self._check_moddef('csiactobs_bgd_py0.xml', 0) # Check that clearing does not lead to an exception or segfault #iactobs.clear() # Set-up csiactobs iactobs = cscripts.csiactobs() iactobs['datapath'] = self._datapath iactobs['prodname'] = 'unit-test' iactobs['infile'] = self._runlist iactobs['bkgpars'] = 1 iactobs['outobs'] = 'csiactobs_obs_py1.xml' iactobs['outmodel'] = 'csiactobs_bgd_py1.xml' iactobs['logfile'] = 'csiactobs_py1.log' iactobs['chatter'] = 2 # Run csiactobs script and save run list iactobs.logFileOpen() # Make sure we get a log file iactobs.run() iactobs.save() # Check observation definition XML file self._check_obsdef('csiactobs_obs_py1.xml', 6) # Check model definition XML file self._check_moddef('csiactobs_bgd_py1.xml', 6) # Create test runlist<|fim▁hole|> # Set-up csiactobs using a runlist with 2 background parameters iactobs = cscripts.csiactobs() iactobs['datapath'] = self._datapath iactobs['prodname'] = 'unit-test' iactobs['bkgpars'] = 2 
iactobs['outobs'] = 'csiactobs_obs_py2.xml' iactobs['outmodel'] = 'csiactobs_bgd_py2.xml' iactobs['logfile'] = 'csiactobs_py2.log' iactobs['chatter'] = 3 iactobs.runlist(runlist) # Run csiactobs script and save run list iactobs.logFileOpen() # Make sure we get a log file iactobs.run() iactobs.save() # Test return functions self.test_value(iactobs.obs().size(), 2, 'Check number of observations in container') self.test_value(iactobs.ebounds().size(), 0, 'Check number of energy boundaries') # Check observation definition XML file self._check_obsdef('csiactobs_obs_py2.xml',2) # Check model definition XML file self._check_moddef('csiactobs_bgd_py2.xml',2) # Set-up csiactobs with a large number of free parameters and "aeff" # background iactobs = cscripts.csiactobs() iactobs['datapath'] = self._datapath iactobs['prodname'] = 'unit-test' iactobs['infile'] = self._runlist iactobs['bkgpars'] = 8 iactobs['bkg_mod_hiera'] = 'aeff' iactobs['outobs'] = 'csiactobs_obs_py3.xml' iactobs['outmodel'] = 'csiactobs_bgd_py3.xml' iactobs['logfile'] = 'csiactobs_py3.log' iactobs['chatter'] = 4 # Execute csiactobs script iactobs.execute() # Check observation definition XML file self._check_obsdef('csiactobs_obs_py3.xml',6) # Check model definition XML file self._check_moddef('csiactobs_bgd_py3.xml',6) # Set-up csiactobs with a "gauss" background and "inmodel" parameter iactobs = cscripts.csiactobs() iactobs['datapath'] = self._datapath iactobs['inmodel'] = self._model iactobs['prodname'] = 'unit-test' iactobs['infile'] = self._runlist iactobs['bkgpars'] = 1 iactobs['bkg_mod_hiera'] = 'gauss' iactobs['outobs'] = 'NONE' iactobs['outmodel'] = 'NONE' iactobs['logfile'] = 'csiactobs_py4.log' iactobs['chatter'] = 4 # Run csiactobs script iactobs.logFileOpen() # Make sure we get a log file iactobs.run() # Check number of observations self.test_value(iactobs.obs().size(), 6, 'Check number of observations in container') # Check number of models self.test_value(iactobs.obs().models().size(), 8, 
'Check number of models in container') # Set-up csiactobs with a "gauss" background and "inmodel" parameter iactobs = cscripts.csiactobs() iactobs['datapath'] = self._datapath iactobs['inmodel'] = self._model iactobs['prodname'] = 'unit-test' iactobs['infile'] = self._runlist iactobs['bkgpars'] = 1 iactobs['bkg_mod_hiera'] = 'irf' iactobs['outobs'] = 'NONE' iactobs['outmodel'] = 'NONE' iactobs['logfile'] = 'csiactobs_py4.log' iactobs['chatter'] = 4 # Run csiactobs script iactobs.logFileOpen() # Make sure we get a log file iactobs.run() # Check number of observations self.test_value(iactobs.obs().size(), 5, 'Check number of observations in container') # Check number of models self.test_value(iactobs.obs().models().size(), 7, 'Check number of models in container') # Return return # Check observation definition XML file def _check_obsdef(self, filename, obs_expected): """ Check observation definition XML file """ # Load observation definition XML file obs = gammalib.GObservations(filename) # Check number of observations self.test_value(obs.size(), obs_expected, 'Check for '+str(obs_expected)+' observations in XML file') # If there are observations in the XML file then check their content if obs_expected > 0: # Get response rsp = obs[0].response() # Test response self.test_value(obs[0].eventfile().file(), 'events_0.fits.gz', 'Check event file name') self.test_value(obs[0].eventfile().extname(), 'EVENTS', 'Check event extension name') self.test_value(rsp.aeff().filename().file(), 'irf_file.fits.gz', 'Check effective area file name') self.test_value(rsp.aeff().filename().extname(), 'EFFECTIVE AREA', 'Check effective area extension name') self.test_value(rsp.psf().filename().file(), 'irf_file.fits.gz', 'Check point spread function file name') self.test_value(rsp.psf().filename().extname(), 'POINT SPREAD FUNCTION', 'Check point spread function extension name') self.test_value(rsp.edisp().filename().file(), 'irf_file.fits.gz', 'Check energy dispersion file name') 
self.test_value(rsp.edisp().filename().extname(), 'ENERGY DISPERSION', 'Check energy dispersion extension name') self.test_value(rsp.background().filename().file(), 'irf_file.fits.gz', 'Check background file name') self.test_value(rsp.background().filename().extname(), 'BACKGROUND', 'Check background extension name') # Return return # Check model XML file def _check_moddef(self, filename, models_expected): """ Check model definition XML file """ # Load model definition XML file models = gammalib.GModels(filename) # Check number of models self.test_value(models.size(), models_expected, 'Check for '+str(models_expected)+' models in XML file') # Return return<|fim▁end|>
runlist = ['15000','15001']
<|file_name|>5396-fnUpdate-arrays-mData.js<|end_file_name|><|fim▁begin|>// DATA_TEMPLATE: empty_table oTest.fnStart("5396 - fnUpdate with 2D arrays for a single row"); $(document).ready(function () { $('#example thead tr').append('<th>6</th>'); $('#example thead tr').append('<th>7</th>'); $('#example thead tr').append('<th>8</th>'); $('#example thead tr').append('<th>9</th>'); $('#example thead tr').append('<th>10</th>'); var aDataSet = [ [ "1", "홍길동", "1154315", "etc1", [ [ "[email protected]", "2011-03-04" ], [ "[email protected]", "2009-07-06" ], [ "[email protected]", ",hide" ], [ "[email protected]", "" ] ], "2011-03-04", "show" ], [ "2", "홍길순", "2154315", "etc2", [ [ "[email protected]", "2009-09-26" ], [ "[email protected]", "2009-05-21,hide" ], [ "[email protected]", "2010-03-05" ], [ "[email protected]", ",hide" ], [ "[email protected]", "2010-03-05" ] ], "2010-03-05", "show" ] ] var oTable = $('#example').dataTable({ "aaData": aDataSet, "aoColumns": [ { "mData": "0"}, { "mData": "1"}, { "mData": "2"}, { "mData": "3"}, { "mData": "4.0.0"},<|fim▁hole|> { "mData": "5"}, { "mData": "6"} ] }); oTest.fnTest( "Initialisation", null, function () { return $('#example tbody tr:eq(0) td:eq(0)').html() == '1'; } ); oTest.fnTest( "Update row", function () { $('#example').dataTable().fnUpdate([ "0", "홍길순", "2154315", "etc2", [ [ "[email protected]", "2009-09-26" ], [ "[email protected]", "2009-05-21,hide" ], [ "[email protected]", "2010-03-05" ], [ "[email protected]", ",hide" ], [ "[email protected]", "2010-03-05" ] ], "2010-03-05", "show" ], 1); }, function () { return $('#example tbody tr:eq(0) td:eq(0)').html() == '0'; } ); oTest.fnTest( "Original row preserved", null, function () { return $('#example tbody tr:eq(1) td:eq(0)').html() == '1'; } ); oTest.fnComplete(); });<|fim▁end|>
{ "mData": "4.0.1"}, { "mData": "4.1.0"}, { "mData": "4.1.1"},
<|file_name|>combustible.py<|end_file_name|><|fim▁begin|># -*-coding:Utf-8 -* # Copyright (c) 2010-2017 NOEL-BARON Léo # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT # OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """Fichier contenant le type combustible.""" from primaires.interpreteur.editeur.entier import Entier from primaires.interpreteur.editeur.selection import Selection<|fim▁hole|> """Type d'objet: combustible. 
""" nom_type = "combustible" def __init__(self, cle=""): """Constructeur de l'objet""" BaseType.__init__(self, cle) self.terrains = [] self.rarete = 1 self.qualite = 2 # Editeurs self.etendre_editeur("t", "terrains", Selection, self, "terrains", list(importeur.salle.terrains.keys())) self.etendre_editeur("r", "rareté", Entier, self, "rarete", 1, 10) self.etendre_editeur("a", "qualité", Entier, self, "qualite", 1, 10) @property def aff_terrains(self): return ", ".join(self.terrains) if self.terrains else "aucun" def travailler_enveloppes(self, enveloppes): """Travail sur les enveloppes""" l_terrains = sorted(type(self).importeur.salle.terrains.keys()) terrains = enveloppes["t"] terrains.apercu = "{objet.aff_terrains}" terrains.prompt = "Entrez un terrain : " terrains.aide_courte = \ "Entrez les |ent|terrains|ff| où l'on peut trouver ce " \ "combustible.\n\nTerrains disponibles : {}.\n\n" \ "Terrains actuels : {{objet.aff_terrains}}".format( ", ".join(l_terrains)) rarete = enveloppes["r"] rarete.apercu = "{objet.rarete}" rarete.prompt = "Rareté du combustible : " rarete.aide_courte = \ "Entrez la |ent|rareté|ff| du combustible, entre |cmd|1|ff| " \ "(courant) et |cmd|10|ff| (rare).\n\n" \ "Rareté actuelle : {objet.rarete}" qualite = enveloppes["a"] qualite.apercu = "{objet.qualite}" qualite.prompt = "Qualité du combustible : " qualite.aide_courte = \ "Entrez la |ent|qualité|ff| du combustible, entre |cmd|1|ff| " \ "(mauvais) et |cmd|10|ff| (très bon).\n\n" \ "Qualité actuelle : {objet.qualite}"<|fim▁end|>
from primaires.objet.types.base import BaseType class Combustible(BaseType):
<|file_name|>issue-21475.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // run-pass #![allow(unused_imports)] // pretty-expanded FIXME #23616 use m::{START, END}; fn main() { match 42 { m::START..=m::END => {}, 0..=m::END => {}, m::START..=59 => {}, _ => {}, } } mod m { pub const START: u32 = 4; pub const END: u32 = 14; }<|fim▁end|>
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT.
<|file_name|>test_config_basic.py<|end_file_name|><|fim▁begin|>import h2o, h2o_config l = h2o_config.setup_test_config(test_config_json='test_config.json') print "\nsetup_test_config returns list of test config objs:", l<|fim▁hole|>print "\nHow to reference.." for i, obj in enumerate(h2o_config.configs): print "keys in config", i, ":", obj.__dict__.keys() print h2o_config.configs[0].trees for t in h2o_config.configs: print "\nTest config_name:", t.config_name print "trees:", t.trees print "params:", t.params print "params['timeoutSecs']:", t.params['timeoutSecs']<|fim▁end|>
# Here are some ways to reference the config state that the json created
<|file_name|>optimizer.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Transforms a display list to produce a visually-equivalent, but cheaper-to-render, one. use display_list::{DisplayItem, DisplayList, StackingContext}; use collections::dlist::DList; use geom::rect::Rect; use servo_util::geometry::{mod, Au}; use sync::Arc; /// Transforms a display list to produce a visually-equivalent, but cheaper-to-render, one. pub struct DisplayListOptimizer { /// The visible rect in page coordinates. visible_rect: Rect<Au>, } impl DisplayListOptimizer { /// Creates a new display list optimizer object. `visible_rect` specifies the visible rect in /// page coordinates. pub fn new(visible_rect: &Rect<f32>) -> DisplayListOptimizer { DisplayListOptimizer { visible_rect: geometry::f32_rect_to_au_rect(*visible_rect), } } /// Optimizes the given display list, returning an equivalent, but cheaper-to-paint, one. pub fn optimize(self, display_list: &DisplayList) -> DisplayList { let mut result = DisplayList::new(); self.add_in_bounds_display_items(&mut result.background_and_borders, display_list.background_and_borders.iter()); self.add_in_bounds_display_items(&mut result.block_backgrounds_and_borders, display_list.block_backgrounds_and_borders.iter()); self.add_in_bounds_display_items(&mut result.floats, display_list.floats.iter()); self.add_in_bounds_display_items(&mut result.content, display_list.content.iter()); self.add_in_bounds_stacking_contexts(&mut result.children, display_list.children.iter()); result } /// Adds display items that intersect the visible rect to `result_list`. 
fn add_in_bounds_display_items<'a,I>(&self, result_list: &mut DList<DisplayItem>, mut display_items: I) where I: Iterator<&'a DisplayItem> { for display_item in display_items { if self.visible_rect.intersects(&display_item.base().bounds) && self.visible_rect.intersects(&display_item.base().clip_rect) { result_list.push_back((*display_item).clone()) } }<|fim▁hole|> } /// Adds child stacking contexts whose boundaries intersect the visible rect to `result_list`. fn add_in_bounds_stacking_contexts<'a,I>(&self, result_list: &mut DList<Arc<StackingContext>>, mut stacking_contexts: I) where I: Iterator<&'a Arc<StackingContext>> { for stacking_context in stacking_contexts { if self.visible_rect.intersects(&stacking_context.bounds) && self.visible_rect.intersects(&stacking_context.clip_rect) { result_list.push_back((*stacking_context).clone()) } } } }<|fim▁end|>
<|file_name|>proc_txt.py<|end_file_name|><|fim▁begin|>import numpy as np import sys import os from utils import * from utils_procs import * # extract letters and spaces, and transform to lower case # how: python proc_txt.py input_file_name # read input args # _, input_fname = 'temp', 'poetry_2' _, input_fname = sys.argv # constant input_path = '../story/' train_test_ratio = .9 def get_word_level_rep(text, output_path, train_test_ratio): # convert to word-level representation indices, _, words_dict = text_2_one_hot(text) index_string = list_of_int_to_int_string(indices)<|fim▁hole|> # save .npz word file and its dictionary save_list_of_int_to_npz(indices, words_dict, output_path, train_test_ratio) save_dict(words_dict, output_path) # write to output file - char level write2file(text, 'chars_we.txt', output_path) [text] = remove_end_markers([text]) write2file(text, 'chars_woe.txt', output_path) # read input text file input_path = os.path.join(input_path,input_fname) print('Input text from <%s>' % os.path.abspath(input_path)) input_file_path = os.path.join(input_path, input_fname + '.txt') input_file = open(input_file_path, 'r') text = input_file.read() # get output dir name and makr output dirs output_path = input_path make_output_cond_dirs(output_path) # remove pun markers... text = str2cleanstr(text) # create shuffling text_shufw = shuffle_words_in_state(text) text_shufs = shuffle_states_in_story(text) # conver to lower case [text, text_shufw, text_shufs] = to_lower_case([text, text_shufw, text_shufs]) # save word level representation get_word_level_rep(text, os.path.join(output_path, 'shuffle_none'), train_test_ratio) get_word_level_rep(text_shufw, os.path.join(output_path, 'shuffle_words'), train_test_ratio) get_word_level_rep(text_shufs, os.path.join(output_path, 'shuffle_states'), train_test_ratio)<|fim▁end|>
<|file_name|>cache_update.go<|end_file_name|><|fim▁begin|>package dao import ( "context" "fmt" "strconv" "go-common/app/service/main/relation/model" gmc "go-common/library/cache/memcache" "go-common/library/cache/redis" "go-common/library/log" "go-common/library/time" ) const ( _prefixFollowings = "at_" _prefixTags = "tags_" // user tag info. ) func tagsKey(mid int64) string { return _prefixTags + strconv.FormatInt(mid, 10) } func followingsKey(mid int64) string { return _prefixFollowings + strconv.FormatInt(mid, 10) } // ==== redis === // AddFollowingCache add following cache. func (d *Dao) AddFollowingCache(c context.Context, mid int64, following *model.Following) (err error) { var ( ok bool key = followingsKey(mid) ) conn := d.relRedis.Get(c) if ok, err = redis.Bool(conn.Do("EXPIRE", key, d.relExpire)); err != nil { log.Error("redis.Bool(conn.Do(EXPIRE, %s)) error(%v)", key, err) } else if ok { var ef []byte if ef, err = d.encode(following.Attribute, following.MTime, following.Tag, following.Special); err != nil { return } if _, err = conn.Do("HSET", key, following.Mid, ef); err != nil { log.Error("conn.Do(HSET, %s, %d) error(%v)", key, following.Mid, err) } } conn.Close() return } // DelFollowing del following cache. 
func (d *Dao) DelFollowing(c context.Context, mid int64, following *model.Following) (err error) { var ( ok bool key = followingsKey(mid) ) conn := d.relRedis.Get(c) if ok, err = redis.Bool(conn.Do("EXPIRE", key, d.relExpire)); err != nil { log.Error("redis.Bool(conn.Do(EXPIRE, %s)) error(%v)", key, err) } else if ok { if _, err = conn.Do("HDEL", key, following.Mid); err != nil { log.Error("conn.Do(HDEL, %s, %d) error(%v)", key, following.Mid, err) } } conn.Close() return } // encode func (d *Dao) encode(attribute uint32, mtime time.Time, tagids []int64, special int32) (res []byte, err error) { ft := &model.FollowingTags{Attr: attribute, Ts: mtime, TagIds: tagids, Special: special} return ft.Marshal() } // ===== memcache ===== const ( _prefixFollowing = "pb_a_" _prefixTagCount = "rs_tmtc_%d" // key of relation tag by mid & tag's count ) func followingKey(mid int64) string { return _prefixFollowing + strconv.FormatInt(mid, 10) } func tagCountKey(mid int64) string { return fmt.Sprintf(_prefixTagCount, mid) } // DelFollowingCache delete following cache. func (d *Dao) DelFollowingCache(c context.Context, mid int64) (err error) { return d.delFollowingCache(c, followingKey(mid)) } // delFollowingCache delete following cache. func (d *Dao) delFollowingCache(c context.Context, key string) (err error) {<|fim▁hole|> } else { log.Error("conn.Delete(%s) error(%v)", key, err) } } conn.Close() return } // DelTagCountCache del tag count cache. 
func (d *Dao) DelTagCountCache(c context.Context, mid int64) (err error) { conn := d.mc.Get(c) if err = conn.Delete(tagCountKey(mid)); err != nil { if err == gmc.ErrNotFound { err = nil } else { log.Error("conn.Delete(%s) error(%v)", tagCountKey(mid), err) } } conn.Close() return } // DelTagsCache is func (d *Dao) DelTagsCache(c context.Context, mid int64) (err error) { conn := d.mc.Get(c) if err = conn.Delete(tagsKey(mid)); err != nil { if err == gmc.ErrNotFound { err = nil } else { log.Error("conn.Delete(%s) error(%v)", tagCountKey(mid), err) } } conn.Close() return }<|fim▁end|>
conn := d.mc.Get(c) if err = conn.Delete(key); err != nil { if err == gmc.ErrNotFound { err = nil
<|file_name|>Style.ts<|end_file_name|><|fim▁begin|>import { Type } from '@ephox/katamari'; // some elements, such as mathml, don't have style attributes // others, such as angular elements, have style attributes that aren't a CSSStyleDeclaration const isSupported = (dom: Node): dom is HTMLStyleElement =><|fim▁hole|> (dom as HTMLStyleElement).style !== undefined && Type.isFunction((dom as HTMLStyleElement).style.getPropertyValue); export { isSupported };<|fim▁end|>
// eslint-disable-next-line @typescript-eslint/unbound-method
<|file_name|>librarylogger.py<|end_file_name|><|fim▁begin|># Copyright 2008-2015 Nokia Solutions and Networks #<|fim▁hole|># # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Implementation of the public test library logging API. This is exposed via :py:mod:`robot.api.logger`. Implementation must reside here to avoid cyclic imports. """ import sys import threading from robot.errors import DataError from robot.utils import unic, encode_output from .logger import LOGGER from .loggerhelper import Message LOGGING_THREADS = ('MainThread', 'RobotFrameworkTimeoutThread') def write(msg, level, html=False): # Callable messages allow lazy logging internally, but we don't want to # expose this functionality publicly. See the following issue for details: # http://code.google.com/p/robotframework/issues/detail?id=1505 if callable(msg): msg = unic(msg) if level.upper() not in ('TRACE', 'DEBUG', 'INFO', 'HTML', 'WARN', 'ERROR'): raise DataError("Invalid log level '%s'." % level) if threading.currentThread().getName() in LOGGING_THREADS: LOGGER.log_message(Message(msg, level, html)) def trace(msg, html=False): write(msg, 'TRACE', html) def debug(msg, html=False): write(msg, 'DEBUG', html) def info(msg, html=False, also_console=False): write(msg, 'INFO', html) if also_console: console(msg) def warn(msg, html=False): write(msg, 'WARN', html) def error(msg, html=False): write(msg, 'ERROR', html) def console(msg, newline=True, stream='stdout'): msg = unic(msg) if newline: msg += '\n' stream = sys.__stdout__ if stream.lower() != 'stderr' else sys.__stderr__ stream.write(encode_output(msg)) stream.flush()<|fim▁end|>
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at
<|file_name|>TroubleshootingGuide.tsx<|end_file_name|><|fim▁begin|>/** * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * @format */ <|fim▁hole|>import React from 'react'; import {NoDevices} from '../NoDevices'; export function TroubleshootingGuide(_props: { showGuide: boolean; devicesDetected: number; }) { if (_props.devicesDetected == 0) return <NoDevices />; else { return <></>; } }<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/** * Core dependencies. */ var path = require('path'); var dirname = path.dirname; /** * Create path. * * @param {String} pattern * @returns {Object} * @api private */ function createPattern(pattern) { return { pattern: pattern, included: true, served: true, watched: false }; } /** * Insert hydro into the loaded files. * * @param {Array} files * @api public */ function init(config) { var hydroConfig = config.hydro || {}; var hydroJs = hydroConfig.path || dirname(dirname(require.resolve('hydro'))) + '/dist/hydro.js'; var before = hydroConfig.before || []; config.files.unshift(createPattern(__dirname + '/adapter.js')); config.files.unshift(createPattern(hydroJs)); <|fim▁hole|> /** * Inject. */ init.$inject = [ 'config' ]; /** * Primary export. */ module.exports = { 'framework:hydro': [ 'factory', init ] };<|fim▁end|>
before.reverse().forEach(function(file) { config.files.unshift(createPattern(path.resolve(file))); }); }
<|file_name|>EmployeeWebSocketHandler.java<|end_file_name|><|fim▁begin|>package com.baeldung.webflux; import static java.time.LocalDateTime.now; import static java.util.UUID.randomUUID; import java.time.Duration; import org.springframework.stereotype.Component; import org.springframework.web.reactive.socket.WebSocketHandler; import org.springframework.web.reactive.socket.WebSocketSession; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; <|fim▁hole|>@Component("EmployeeWebSocketHandler") public class EmployeeWebSocketHandler implements WebSocketHandler { ObjectMapper om = new ObjectMapper(); @Override public Mono<Void> handle(WebSocketSession webSocketSession) { Flux<String> employeeCreationEvent = Flux.generate(sink -> { EmployeeCreationEvent event = new EmployeeCreationEvent(randomUUID().toString(), now().toString()); try { sink.next(om.writeValueAsString(event)); } catch (JsonProcessingException e) { sink.error(e); } }); return webSocketSession.send(employeeCreationEvent .map(webSocketSession::textMessage) .delayElements(Duration.ofSeconds(1))); } }<|fim▁end|>
import reactor.core.publisher.Flux; import reactor.core.publisher.Mono;
<|file_name|>hermite.py<|end_file_name|><|fim▁begin|># Hermite polynomials H_n(x) on the real line for n=0,1,2,3,4 f0 = lambda x: hermite(0,x) f1 = lambda x: hermite(1,x) f2 = lambda x: hermite(2,x) f3 = lambda x: hermite(3,x)<|fim▁hole|><|fim▁end|>
f4 = lambda x: hermite(4,x) plot([f0,f1,f2,f3,f4],[-2,2],[-25,25])
<|file_name|>test_dialect.py<|end_file_name|><|fim▁begin|># coding: utf-8 import datetime from sqlalchemy import bindparam from sqlalchemy import Column from sqlalchemy import DateTime from sqlalchemy import func from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy import Table from sqlalchemy import testing from sqlalchemy.dialects import mysql from sqlalchemy.engine.url import make_url from sqlalchemy.testing import engines from sqlalchemy.testing import eq_ from sqlalchemy.testing import expect_warnings from sqlalchemy.testing import fixtures from sqlalchemy.testing import mock from ...engine import test_execute class DialectTest(fixtures.TestBase): __backend__ = True __only_on__ = "mysql" @testing.combinations( (None, "cONnection was kILLEd", "InternalError", "pymysql", True), (None, "cONnection aLREady closed", "InternalError", "pymysql", True), (None, "something broke", "InternalError", "pymysql", False), (2006, "foo", "OperationalError", "mysqldb", True), (2006, "foo", "OperationalError", "pymysql", True), (2007, "foo", "OperationalError", "mysqldb", False), (2007, "foo", "OperationalError", "pymysql", False), ) def test_is_disconnect( self, arg0, message, exc_cls_name, dialect_name, is_disconnect ): class Error(Exception): pass dbapi = mock.Mock() dbapi.Error = Error dbapi.ProgrammingError = type("ProgrammingError", (Error,), {}) dbapi.OperationalError = type("OperationalError", (Error,), {}) dbapi.InterfaceError = type("InterfaceError", (Error,), {}) dbapi.InternalError = type("InternalError", (Error,), {}) dialect = getattr(mysql, dialect_name).dialect(dbapi=dbapi) error = getattr(dbapi, exc_cls_name)(arg0, message) eq_(dialect.is_disconnect(error, None, None), is_disconnect) def test_ssl_arguments_mysqldb(self): from sqlalchemy.dialects.mysql import mysqldb dialect = mysqldb.dialect() self._test_ssl_arguments(dialect) def test_ssl_arguments_oursql(self): from sqlalchemy.dialects.mysql import oursql dialect = oursql.dialect() 
self._test_ssl_arguments(dialect) def _test_ssl_arguments(self, dialect): kwarg = dialect.create_connect_args( make_url( "mysql://scott:tiger@localhost:3306/test" "?ssl_ca=/ca.pem&ssl_cert=/cert.pem&ssl_key=/key.pem" ) )[1] # args that differ among mysqldb and oursql for k in ("use_unicode", "found_rows", "client_flag"): kwarg.pop(k, None) eq_( kwarg, { "passwd": "tiger", "db": "test", "ssl": { "ca": "/ca.pem", "cert": "/cert.pem", "key": "/key.pem", }, "host": "localhost", "user": "scott", "port": 3306, }, ) @testing.combinations( ("compress", True), ("connect_timeout", 30), ("read_timeout", 30), ("write_timeout", 30), ("client_flag", 1234), ("local_infile", 1234), ("use_unicode", False), ("charset", "hello"), ) def test_normal_arguments_mysqldb(self, kwarg, value): from sqlalchemy.dialects.mysql import mysqldb dialect = mysqldb.dialect() connect_args = dialect.create_connect_args( make_url( "mysql://scott:tiger@localhost:3306/test" "?%s=%s" % (kwarg, value) ) ) eq_(connect_args[1][kwarg], value) def test_mysqlconnector_buffered_arg(self): from sqlalchemy.dialects.mysql import mysqlconnector dialect = mysqlconnector.dialect() kw = dialect.create_connect_args( make_url("mysql+mysqlconnector://u:p@host/db?buffered=true") )[1] eq_(kw["buffered"], True) kw = dialect.create_connect_args( make_url("mysql+mysqlconnector://u:p@host/db?buffered=false") )[1] eq_(kw["buffered"], False) kw = dialect.create_connect_args( make_url("mysql+mysqlconnector://u:p@host/db") )[1] eq_(kw["buffered"], True) def test_mysqlconnector_raise_on_warnings_arg(self): from sqlalchemy.dialects.mysql import mysqlconnector dialect = mysqlconnector.dialect() kw = dialect.create_connect_args( make_url( "mysql+mysqlconnector://u:p@host/db?raise_on_warnings=true" ) )[1] eq_(kw["raise_on_warnings"], True) kw = dialect.create_connect_args( make_url( "mysql+mysqlconnector://u:p@host/db?raise_on_warnings=false" ) )[1] eq_(kw["raise_on_warnings"], False) kw = dialect.create_connect_args( 
make_url("mysql+mysqlconnector://u:p@host/db") )[1] assert "raise_on_warnings" not in kw @testing.only_on("mysql") def test_random_arg(self): dialect = testing.db.dialect kw = dialect.create_connect_args( make_url("mysql://u:p@host/db?foo=true") )[1] eq_(kw["foo"], "true") @testing.only_on("mysql") @testing.skip_if("mysql+mysqlconnector", "totally broken for the moment") @testing.fails_on("mysql+oursql", "unsupported") def test_special_encodings(self): for enc in ["utf8mb4", "utf8"]: eng = engines.testing_engine( options={"connect_args": {"charset": enc, "use_unicode": 0}} ) conn = eng.connect() eq_(conn.dialect._connection_charset, enc) def test_no_show_variables(self): from sqlalchemy.testing import mock engine = engines.testing_engine() def my_execute(self, statement, *args, **kw): if statement.startswith("SHOW VARIABLES"): statement = "SELECT 1 FROM DUAL WHERE 1=0" return real_exec(self, statement, *args, **kw) real_exec = engine._connection_cls.exec_driver_sql with mock.patch.object( engine._connection_cls, "exec_driver_sql", my_execute ): with expect_warnings( "Could not retrieve SQL_MODE; please ensure the " "MySQL user has permissions to SHOW VARIABLES" ): engine.connect() def test_no_default_isolation_level(self): from sqlalchemy.testing import mock engine = engines.testing_engine() real_isolation_level = testing.db.dialect.get_isolation_level def fake_isolation_level(connection): connection = mock.Mock( cursor=mock.Mock( return_value=mock.Mock( fetchone=mock.Mock(return_value=None) ) ) ) return real_isolation_level(connection) with mock.patch.object( engine.dialect, "get_isolation_level", fake_isolation_level ): with expect_warnings( "Could not retrieve transaction isolation level for MySQL " "connection." 
): engine.connect() def test_autocommit_isolation_level(self): c = testing.db.connect().execution_options( isolation_level="AUTOCOMMIT" ) assert c.exec_driver_sql("SELECT @@autocommit;").scalar() c = c.execution_options(isolation_level="READ COMMITTED") assert not c.exec_driver_sql("SELECT @@autocommit;").scalar() def test_isolation_level(self): values = [ "READ UNCOMMITTED", "READ COMMITTED", "REPEATABLE READ", "SERIALIZABLE", ] for value in values: c = testing.db.connect().execution_options(isolation_level=value) eq_(testing.db.dialect.get_isolation_level(c.connection), value) class ParseVersionTest(fixtures.TestBase): @testing.combinations( ((10, 2, 7), "10.2.7-MariaDB", (10, 2, 7, "MariaDB"), True), ( (10, 2, 7), "5.6.15.10.2.7-MariaDB", (5, 6, 15, 10, 2, 7, "MariaDB"), True, ), ((10, 2, 10), "10.2.10-MariaDB", (10, 2, 10, "MariaDB"), True), ((5, 7, 20), "5.7.20", (5, 7, 20), False), ((5, 6, 15), "5.6.15", (5, 6, 15), False), ( (10, 2, 6), "10.2.6.MariaDB.10.2.6+maria~stretch-log", (10, 2, 6, "MariaDB", 10, 2, "6+maria~stretch", "log"), True, ), ( (10, 1, 9), "10.1.9-MariaDBV1.0R050D002-20170809-1522", (10, 1, 9, "MariaDB", "V1", "0R050D002", 20170809, 1522), True, ), ) def test_mariadb_normalized_version( self, expected, raw_version, version, is_mariadb ): dialect = mysql.dialect() eq_(dialect._parse_server_version(raw_version), version) dialect.server_version_info = version eq_(dialect._mariadb_normalized_version_info, expected) assert dialect._is_mariadb is is_mariadb @testing.combinations( (True, (10, 2, 7, "MariaDB")), (True, (5, 6, 15, 10, 2, 7, "MariaDB")), (False, (10, 2, 10, "MariaDB")), (False, (5, 7, 20)), (False, (5, 6, 15)), (True, (10, 2, 6, "MariaDB", 10, 2, "6+maria~stretch", "log")), ) def test_mariadb_check_warning(self, expect_, version): dialect = mysql.dialect() dialect.server_version_info = version if expect_: with expect_warnings( ".*before 10.2.9 has known issues regarding " "CHECK constraints" ): dialect._warn_for_known_db_issues() 
else: dialect._warn_for_known_db_issues() class RemoveUTCTimestampTest(fixtures.TablesTest): """This test exists because we removed the MySQL dialect's override of the UTC_TIMESTAMP() function, where the commit message for this feature stated that "it caused problems with executemany()". Since no example was provided, we are trying lots of combinations here. [ticket:3966] """ __only_on__ = "mysql" __backend__ = True @classmethod def define_tables(cls, metadata): Table( "t", metadata, Column("id", Integer, primary_key=True), Column("x", Integer), Column("data", DateTime), ) Table( "t_default", metadata, Column("id", Integer, primary_key=True), Column("x", Integer), Column("idata", DateTime, default=func.utc_timestamp()), Column("udata", DateTime, onupdate=func.utc_timestamp()), ) def test_insert_executemany(self): with testing.db.connect() as conn: conn.execute( self.tables.t.insert().values(data=func.utc_timestamp()), [{"x": 5}, {"x": 6}, {"x": 7}], ) def test_update_executemany(self): with testing.db.connect() as conn: timestamp = datetime.datetime(2015, 4, 17, 18, 5, 2) conn.execute( self.tables.t.insert(), [ {"x": 5, "data": timestamp}, {"x": 6, "data": timestamp}, {"x": 7, "data": timestamp}, ], ) conn.execute( self.tables.t.update() .values(data=func.utc_timestamp()) .where(self.tables.t.c.x == bindparam("xval")), [{"xval": 5}, {"xval": 6}, {"xval": 7}], ) def test_insert_executemany_w_default(self): with testing.db.connect() as conn: conn.execute( self.tables.t_default.insert(), [{"x": 5}, {"x": 6}, {"x": 7}] ) def test_update_executemany_w_default(self): with testing.db.connect() as conn: timestamp = datetime.datetime(2015, 4, 17, 18, 5, 2) conn.execute( self.tables.t_default.insert(), [ {"x": 5, "idata": timestamp}, {"x": 6, "idata": timestamp}, {"x": 7, "idata": timestamp}, ], ) conn.execute( self.tables.t_default.update() .values(idata=func.utc_timestamp()) .where(self.tables.t_default.c.x == bindparam("xval")), [{"xval": 5}, {"xval": 6}, {"xval": 7}], ) 
class SQLModeDetectionTest(fixtures.TestBase): __only_on__ = "mysql" __backend__ = True def _options(self, modes): def connect(con, record): cursor = con.cursor() cursor.execute("set sql_mode='%s'" % (",".join(modes))) e = engines.testing_engine( options={ "pool_events": [ (connect, "first_connect"), (connect, "connect"), ] } ) return e def test_backslash_escapes(self): engine = self._options(["NO_BACKSLASH_ESCAPES"]) c = engine.connect() assert not engine.dialect._backslash_escapes c.close() engine.dispose() engine = self._options([]) c = engine.connect() assert engine.dialect._backslash_escapes c.close() engine.dispose() def test_ansi_quotes(self): engine = self._options(["ANSI_QUOTES"]) c = engine.connect() assert engine.dialect._server_ansiquotes c.close() engine.dispose() def test_combination(self): engine = self._options(["ANSI_QUOTES,NO_BACKSLASH_ESCAPES"]) c = engine.connect() assert engine.dialect._server_ansiquotes assert not engine.dialect._backslash_escapes c.close() engine.dispose() class ExecutionTest(fixtures.TestBase): """Various MySQL execution special cases.""" __only_on__ = "mysql" __backend__ = True def test_charset_caching(self): engine = engines.testing_engine() cx = engine.connect() meta = MetaData() charset = engine.dialect._detect_charset(cx) meta.reflect(cx) eq_(cx.dialect._connection_charset, charset) cx.close() def test_sysdate(self): d = testing.db.scalar(func.sysdate())<|fim▁hole|> assert isinstance(d, datetime.datetime) class AutocommitTextTest(test_execute.AutocommitTextTest): __only_on__ = "mysql" def test_load_data(self): self._test_keyword("LOAD DATA STUFF") def test_replace(self): self._test_keyword("REPLACE THING")<|fim▁end|>
<|file_name|>places.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
PLACES = ["Virginia", "Lexington", "Washington", "New York"]
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Defines the Weld abstract syntax tree. //! //! Most of Weld's optimizations occur over the AST, which doubles as a "High-Level IR." The AST //! captures the expressions in Weld using a tree data structure. pub use self::ast::*; // Various convinience methods on the AST. pub use self::builder::NewExpr; pub use self::cmp::CompareIgnoringSymbols; pub use self::hash::HashIgnoringSymbols; pub use self::pretty_print::{PrettyPrint, PrettyPrintConfig}; pub use self::type_inference::InferTypes; pub use self::uniquify::Uniquify; pub mod prelude; mod ast;<|fim▁hole|>mod builder; mod cmp; mod hash; mod pretty_print; mod type_inference; mod uniquify;<|fim▁end|>
<|file_name|>rustwin.rs<|end_file_name|><|fim▁begin|>// Wraps GLUT in an event queue using Rust enums // hides use of callbacks acessing a global, allows users to poll input events from // a user implemented mainloop. // provides a tablet/console oriented MVC framework //use r3d::Vec2f; use common::*; #[deriving(Show,Clone,PartialEq)] pub struct Window{handle:i32} // todo - a trait object ? pub type Buttons=i32; pub type Key_t=i32; pub type Modifiers=i32; pub type Milliseconds=i64; // milliseconds pub type ScreenPos=(i32,i32); pub type Vec2d=(i32,i32); pub type TouchId=i32; pub type Frames=i32; pub type FramesPerSec=f32; #[deriving(Show,Clone)] pub enum WinEvent { EventNone, MouseMotion(Window, Buttons,ScreenPos), MouseButtonDown(Window,Buttons,ScreenPos), MouseButtonUp(Window,Buttons,ScreenPos), KeyDown(Window,Key_t,Modifiers,ScreenPos), KeyUp(Window,Key_t,Modifiers,ScreenPos), Accelerometer(Window,(i32,i32,i32)), JoypadEvent(Window,Vec2d,Vec2d,Buttons), MultiTouchDown(Window,(TouchId,ScreenPos)), MultiTouchUp(Window,(TouchId,ScreenPos)), MultiTouchMove(Window,(TouchId,ScreenPos)), WindowMove(Window, ScreenPos), WindowResize(Window,ScreenPos,ScreenPos), WindowFocusIn(Window), WindowFocusOut(Window), WindowClose(Window), WindowShow(Window), WindowHide(Window) } /* impl ToStr for WinEvent { fn to_str(&self)->~str { format!("{:?}",*self) } } */ pub enum Mode { Verbose, Silent } pub enum Placement { Fullscreen, Default, PlaceAt(ScreenPos,ScreenPos) } static g_root_window:Window =Window{handle:0}; pub fn init() { unsafe { g_rustwin.head=0; g_rustwin.tail=0; init_window(); init_input(); } } static mut g_init_input:bool=false; pub fn get_event()->WinEvent { unsafe { if g_init_input==false {init_input() } glut::glutMainLoopEvent(); // println!("pop event from {:?}", g_rustwin); // println!("pos={:?}",g_cursor); let mut rw=&mut g_rustwin; if rw.head==rw.tail { EventNone } else { let ev=rw.event_queue.get(rw.tail).unwrap().clone(); rw.tail=(rw.tail+1)&255; ev } } } /// 
Sample main loop wrapper. Possible to write a mainloop easier than this. pub fn run_loop<APP>(process_event:&mut |ev:WinEvent|, on_idle:&mut |win:Window|) { loop { while { let ev=get_event(); match ev { EventNone=>false, _=>{(*process_event)(ev);true} } } {}; (*on_idle)(g_root_window); // todo - use ... } } pub fn is_root_window(w:&Window)->bool { *w==g_root_window } pub fn get_key_state(k:char)->i32 { unsafe { if g_keys[(k as uint) & 255] {1} else {0} } } pub struct Joypad { sticks:[Vec2f,..2], buttons:i32, press:i32, unpress:i32 // rumble ? tilt ? accelerometers? } impl Joypad { pub fn new()->Joypad { Joypad{ sticks:[zero(),zero()], buttons:0, press:0, unpress:0 } } } // todo - read actual joypads. ///////////////////////////////////////////////////// struct RustWin { event_queue:[WinEvent,..256], head:uint, tail:uint, focus_window:Option<Window>, // windows:~[Window] } static mut g_rustwin:RustWin=RustWin{ event_queue:[EventNone,..256], head:0, tail:0, focus_window:None, // windows:~[] }; /* unsafe fn get_rust_win<'a>()->*mut RustWin { println!("rustwin={:?}", g_rustwin); match g_rustwin { None=>ptr::mut_null(), Some(ref x)=> { let rw=&**x as *RustWin as *mut RustWin; println!("rustwin ret={:?}", rw); rw } } } */ fn push_event(ev:WinEvent) { unsafe { // let rw = get_rust_win(); // println!("rustwin={:?}", rw); // println!("rustwin evq={:?}", (*rw).event_queue); // println!("event={:?}", ev); // println!("push_event rustwin={:?}",g_rustwin); // (*rw).event_queue.push_front(ev); let rw=&mut g_rustwin; // println!("push event{:?}",ev); rw.event_queue[rw.head]=ev; rw.head=(rw.head+1)&255; if rw.head==rw.tail { rw.head=(rw.head-1)&255} } } fn ev_println(s:String) { // silent } fn curr_win()->Window { unsafe {match g_rustwin.focus_window {Some(w)=>w, None=>g_root_window}} } static mut g_prev_buttons:i32=0; static mut g_curr_buttons:i32=0; static mut g_key_modifiers:i32=0; static mut g_cursor:ScreenPos=(0,0); static mut g_drag_start:ScreenPos=(0,0); static mut 
g_prev_cursor:ScreenPos=(0,0); static mut g_keys:[bool,..256]=[false,..256]; pub struct MouseState { pos:ScreenPos, delta:ScreenPos, drag_start:ScreenPos, buttons:Buttons, press:Buttons, unpress:Buttons } fn sub2d<T:Num>((x0,y0):(T,T),(x1,y1):(T,T))->(T,T){ (x0-x1,y0-y1)} fn add2d<T:Num>((x0,y0):(T,T),(x1,y1):(T,T))->(T,T){ (x0+x1,y0+y1)} pub fn peek_mouse()->MouseState{ unsafe { MouseState{ pos:g_cursor, delta:sub2d(g_cursor,g_prev_cursor), drag_start:g_drag_start, buttons:g_curr_buttons, press:(g_curr_buttons^g_prev_buttons)&g_curr_buttons, unpress:(g_curr_buttons^g_prev_buttons)&g_prev_buttons, } } } pub fn get_mouse()->MouseState{ unsafe { let ms=peek_mouse(); g_prev_buttons=g_curr_buttons; g_prev_cursor=g_cursor; ms } } fn special_key_to_mask(k:c_int)->i32 { println!("special key: {}",k); 0 } extern "C" fn tablet_motion(x: c_int,y: c_int) { println!("TabletMotion {}",(x,y)); } extern "C" fn tablet_button(button:c_int,state:c_int,x:c_int,y:c_int) { println!("TabletButton:{} pos {}",(button,state),(x,y)); } extern "C" fn mouse(button:c_int,state:c_int,x:c_int,y:c_int) { let mask = 1<<(button as uint); unsafe { if state==0 { g_drag_start=g_cursor; } g_cursor = (x,y); g_curr_buttons=match state{ 0=>g_curr_buttons|mask, _=>g_curr_buttons& !mask }; push_event( match state { 0=>MouseButtonUp(curr_win(),button,(x,y)), _=>MouseButtonDown(curr_win(),button,(x,y)), } ); } ev_println(format!("MouseButton:{} at {}",(button,state),(x,y))); } extern "C" fn motion(x:c_int,y:c_int){ unsafe { g_cursor=(x,y); ev_println(format!("MousePos:{}",(x,y))); push_event(MouseMotion(curr_win(), g_curr_buttons,(x,y))); } } extern "C" fn passive_motion(x:c_int,y:c_int){ unsafe { g_cursor=(x,y); ev_println(format!("MousePos:{}",(x,y))); push_event(MouseMotion(curr_win(), g_curr_buttons, (x,y))); } } extern "C" fn keyboard(k:c_uchar,x:c_int,y:c_int){ unsafe { ev_println(format!("keyDown:{} at{}",k,(x,y))); g_keys[k as uint]=true; push_event(KeyDown(curr_win(), k as 
Key_t,g_key_modifiers,(x,y))); } } extern "C" fn keyboard_up(k:c_uchar,x:c_int,y:c_int){ unsafe { ev_println(format!("keyUp:{} at {}",k,(x,y))); g_keys[k as uint]=false; push_event(KeyUp(curr_win(),k as Key_t,g_key_modifiers, (x,y))); } } extern "C" fn special(k:c_int,x:c_int,y:c_int) { ev_println(format!("specialKeyDown:{} at{}",k,(x,y))); // todo - translate special key into modifiers, thru glut enum unsafe { g_key_modifiers|=special_key_to_mask(k); push_event(KeyDown(curr_win(),k as Key_t,g_key_modifiers, (x,y))); } } extern "C" fn special_up(k:c_int,x:c_int,y:c_int) { ev_println(format!("specialKeyUp:{} at {}",k,(x,y))); // todo - translate special key into modifiers, thru glut enum unsafe{ g_key_modifiers&=special_key_to_mask(k); push_event(KeyUp(curr_win(), k as Key_t,g_key_modifiers,(x,y))); } } pub fn init_input() { unsafe { g_init_input=true; glut::glutMouseFunc(mouse); glut::glutMotionFunc(motion); glut::glutPassiveMotionFunc(passive_motion); glut::glutKeyboardFunc(keyboard); glut::glutKeyboardUpFunc(keyboard_up); glut::glutSpecialFunc(special); glut::glutSpecialUpFunc(special_up); glut::glutTabletMotionFunc(tablet_motion); glut::glutTabletButtonFunc(tablet_button); } } fn init_window() { unsafe { glut::glutInit(&mut 0 as *mut c_int,0 as *const *const c_char); glut::glutInitDisplayMode(GLUT_RGB|GLUT_DOUBLE|GLUT_DEPTH|GLUT_MULTISAMPLE); glut::glutInitWindowSize(640,480); glut::glutInitWindowPosition(0,0); glut::glutSetKeyRepeat(0); // c_glut_init(); glut::glutCreateWindow(c_str("rust window")); glut::glutPopWindow(); glut::glutIdleFunc(null_func as *const u8); glut::glutDisplayFunc(null_func as *const u8); gl::glEnable(GL_DEPTH_TEST); } } pub enum ScreenChange { ScContinue, ScReplace(Box<Screen+ 'static>), ScPush(Box<Screen+ 'static>), ScRoot(Box<Screen+ 'static>), ScPop, ScCycleNext, ScCyclePrev }<|fim▁hole|> fn display_create(&mut self){} fn display_destroy(&mut self){} fn on_select(&mut self){} fn on_deselect(&mut self){} fn render(&self) {} fn 
update(&mut self)->ScreenChange {ScContinue} fn win_event(&mut self, ev: WinEvent)->ScreenChange{ScContinue} fn dump(&self){} } extern "C" fn null_func() {}<|fim▁end|>
pub trait Screen {
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import axios from 'axios' const basePath = '/api' axios.defaults.xsrfCookieName = 'csrftoken' axios.defaults.xsrfHeaderName = 'X-CSRFToken' export default { Utils: { getYearsChoiceList: (app, model) => axios({ url: `${basePath}/${app}/${model}/years/`, method: 'GET' }), getModelOrderedList: (app, model, ordering = '', page = 1, query_string = '') => axios({<|fim▁hole|> method: 'GET' }), getModelList: (app, model, page = 1, query_string = '') => axios({ url: `${basePath}/${app}/${model}/?page=${page}${query_string}`, method: 'GET' }), getModel: (app, model, id) => axios({ url: `${basePath}/${app}/${model}/${id}/`, method: 'GET' }), getModelAction: (app, model, id, action) => axios({ url: `${basePath}/${app}/${model}/${id}/${action}/`, method: 'GET' }), getModelListAction: (app, model, action, page = 1) => axios({ url: `${basePath}/${app}/${model}/${action}/?page=${page}`, method: 'GET' }), getByMetadata: (m, query_string = '') => axios({ url: `${basePath}/${m.app}/${m.model}/${m.id}${m.id !== '' ? '/' : ''}${m.action}${m.action !== '' ? '/' : ''}${query_string !== '' ? '?' : ''}${query_string}`, method: 'GET' }) } }<|fim▁end|>
url: `${basePath}/${app}/${model}/?o=${ordering}&page=${page}${query_string}`,
<|file_name|>main.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>}<|fim▁end|>
fn main() { let rust = "rust"; println!("{}", rust);
<|file_name|>flashErrors.js<|end_file_name|><|fim▁begin|>module.exports = { Errors: {<|fim▁hole|> set: (req, error_type) => (req.appSession.flashError = error_type), clear: (req) => { if (!req.appSession.flashError) return; const error_type = req.appSession.flashError; delete req.appSession.flashError; return error_type; }, };<|fim▁end|>
WrongAccount: "wrong-acount", },
<|file_name|>matrix_store.cpp<|end_file_name|><|fim▁begin|>/* * Copyright 2014 Open Connectome Project (http://openconnecto.me) * Written by Da Zheng ([email protected]) *<|fim▁hole|> * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "matrix_store.h" #include "local_matrix_store.h" #include "mem_matrix_store.h" #include "EM_dense_matrix.h" #include "EM_object.h" #include "matrix_config.h" #include "local_mem_buffer.h" namespace fm { namespace detail { std::atomic<size_t> matrix_store::mat_counter; matrix_store::ptr matrix_store::create(size_t nrow, size_t ncol, matrix_layout_t layout, const scalar_type &type, int num_nodes, bool in_mem, safs::safs_file_group::ptr group) { if (in_mem) return mem_matrix_store::create(nrow, ncol, layout, type, num_nodes); else return EM_matrix_store::create(nrow, ncol, layout, type, group); } matrix_store::matrix_store(size_t nrow, size_t ncol, bool in_mem, const scalar_type &_type): type(_type) { this->nrow = nrow; this->ncol = ncol; this->in_mem = in_mem; this->entry_size = type.get_size(); this->cache_portion = true; } size_t matrix_store::get_num_portions() const { std::pair<size_t, size_t> chunk_size = get_portion_size(); if (is_wide()) return ceil(((double) get_num_cols()) / chunk_size.second); else return ceil(((double) get_num_rows()) / chunk_size.first); } local_matrix_store::ptr matrix_store::get_portion(size_t id) { size_t start_row; size_t start_col; size_t num_rows; size_t num_cols; std::pair<size_t, size_t> chunk_size = get_portion_size(); if (is_wide()) { start_row = 0; start_col = chunk_size.second * id; num_rows = get_num_rows(); num_cols = 
std::min(chunk_size.second, get_num_cols() - start_col); } else { start_row = chunk_size.first * id; start_col = 0; num_rows = std::min(chunk_size.first, get_num_rows() - start_row); num_cols = get_num_cols(); } return get_portion(start_row, start_col, num_rows, num_cols); } local_matrix_store::const_ptr matrix_store::get_portion(size_t id) const { size_t start_row; size_t start_col; size_t num_rows; size_t num_cols; std::pair<size_t, size_t> chunk_size = get_portion_size(); if (is_wide()) { start_row = 0; start_col = chunk_size.second * id; num_rows = get_num_rows(); num_cols = std::min(chunk_size.second, get_num_cols() - start_col); } else { start_row = chunk_size.first * id; start_col = 0; num_rows = std::min(chunk_size.first, get_num_rows() - start_row); num_cols = get_num_cols(); } return get_portion(start_row, start_col, num_rows, num_cols); } namespace { class reset_op: public set_operate { const scalar_type &type; size_t entry_size; public: reset_op(const scalar_type &_type): type(_type) { this->entry_size = type.get_size(); } virtual void set(void *arr, size_t num_eles, off_t row_idx, off_t col_idx) const { memset(arr, 0, num_eles * entry_size); } virtual const scalar_type &get_type() const { return type; } virtual set_operate::const_ptr transpose() const { return set_operate::const_ptr(); } }; class set_task: public thread_task { detail::local_matrix_store::ptr local_store; const set_operate &op; public: set_task(detail::local_matrix_store::ptr local_store, const set_operate &_op): op(_op) { this->local_store = local_store; } void run() { local_store->set_data(op); } }; /* * These two functions define the length and portion size for 1D partitioning * on a matrix. */ static inline size_t get_tot_len(const matrix_store &mat) { return mat.is_wide() ? mat.get_num_cols() : mat.get_num_rows(); } static inline size_t get_portion_size(const matrix_store &mat) { return mat.is_wide() ? 
mat.get_portion_size().second : mat.get_portion_size().first; } class EM_mat_setdata_dispatcher: public EM_portion_dispatcher { const set_operate &op; matrix_store &to_mat; public: EM_mat_setdata_dispatcher(matrix_store &store, const set_operate &_op); virtual void create_task(off_t global_start, size_t length); }; EM_mat_setdata_dispatcher::EM_mat_setdata_dispatcher(matrix_store &store, const set_operate &_op): EM_portion_dispatcher(get_tot_len(store), fm::detail::get_portion_size(store)), op(_op), to_mat(store) { } void EM_mat_setdata_dispatcher::create_task(off_t global_start, size_t length) { size_t global_start_row, global_start_col; size_t num_rows, num_cols; if (to_mat.is_wide()) { global_start_row = 0; global_start_col = global_start; num_rows = to_mat.get_num_rows(); num_cols = length; } else { global_start_row = global_start; global_start_col = 0; num_rows = length; num_cols = to_mat.get_num_cols(); } local_matrix_store::ptr buf; if (to_mat.store_layout() == matrix_layout_t::L_COL) buf = local_matrix_store::ptr(new local_buf_col_matrix_store( global_start_row, global_start_col, num_rows, num_cols, to_mat.get_type(), -1)); else buf = local_matrix_store::ptr(new local_buf_row_matrix_store( global_start_row, global_start_col, num_rows, num_cols, to_mat.get_type(), -1)); buf->set_data(op); to_mat.write_portion_async(buf, global_start_row, global_start_col); } } void matrix_store::reset_data() { set_data(reset_op(get_type())); } void matrix_store::set_data(const set_operate &op) { size_t num_chunks = get_num_portions(); if (is_in_mem() && num_chunks == 1) { local_matrix_store::ptr buf; if (store_layout() == matrix_layout_t::L_ROW) buf = local_matrix_store::ptr(new local_buf_row_matrix_store(0, 0, get_num_rows(), get_num_cols(), get_type(), -1)); else buf = local_matrix_store::ptr(new local_buf_col_matrix_store(0, 0, get_num_rows(), get_num_cols(), get_type(), -1)); buf->set_data(op); write_portion_async(buf, 0, 0); // After computation, some matrices buffer 
local portions in the thread, // we should try to clean these local portions. These local portions // may contain pointers to some matrices that don't exist any more. // We also need to clean them to reduce memory consumption. // We might want to keep the memory buffer for I/O on dense matrices. if (matrix_conf.is_keep_mem_buf()) detail::local_mem_buffer::clear_bufs( detail::local_mem_buffer::MAT_PORTION); else detail::local_mem_buffer::clear_bufs(); } else if (is_in_mem()) { detail::mem_thread_pool::ptr mem_threads = detail::mem_thread_pool::get_global_mem_threads(); for (size_t i = 0; i < num_chunks; i++) { detail::local_matrix_store::ptr local_store = get_portion(i); int node_id = local_store->get_node_id(); // If the local matrix portion is not assigned to any node, // assign the tasks in round robin fashion. if (node_id < 0) node_id = i % mem_threads->get_num_nodes(); mem_threads->process_task(node_id, new set_task(local_store, op)); } mem_threads->wait4complete(); } else { mem_thread_pool::ptr threads = mem_thread_pool::get_global_mem_threads(); EM_mat_setdata_dispatcher::ptr dispatcher( new EM_mat_setdata_dispatcher(*this, op)); EM_matrix_store *em_this = dynamic_cast<EM_matrix_store *>(this); assert(em_this); em_this->start_stream(); for (size_t i = 0; i < threads->get_num_threads(); i++) { io_worker_task *task = new io_worker_task(dispatcher); const EM_object *obj = dynamic_cast<const EM_object *>(this); task->register_EM_obj(const_cast<EM_object *>(obj)); threads->process_task(i % threads->get_num_nodes(), task); } threads->wait4complete(); em_this->end_stream(); } } matrix_stream::ptr matrix_stream::create(matrix_store::ptr store) { if (store->is_in_mem()) { mem_matrix_store::ptr mem_store = std::dynamic_pointer_cast<mem_matrix_store>(store); if (mem_store == NULL) { BOOST_LOG_TRIVIAL(error) << "The in-mem matrix store isn't writable"; return matrix_stream::ptr(); } else return mem_matrix_stream::create(mem_store); } else { EM_matrix_store::ptr em_store 
= std::dynamic_pointer_cast<EM_matrix_store>(store); if (em_store == NULL) { BOOST_LOG_TRIVIAL(error) << "The ext-mem matrix store isn't writable"; return matrix_stream::ptr(); } else return EM_matrix_stream::create(em_store); } } matrix_store::const_ptr matrix_store::get_cols( const std::vector<off_t> &idxs) const { matrix_store::const_ptr tm = transpose(); matrix_store::const_ptr rows = tm->get_rows(idxs); if (rows == NULL) return matrix_store::const_ptr(); else return rows->transpose(); } matrix_store::const_ptr matrix_store::get_cols(off_t start, off_t end) const { if (start < 0 || end < 0 || end - start < 0) { BOOST_LOG_TRIVIAL(error) << "invalid range for selecting columns"; return matrix_store::const_ptr(); } std::vector<off_t> idxs(end - start); for (size_t i = 0; i < idxs.size(); i++) idxs[i] = start + i; return get_cols(idxs); } matrix_store::const_ptr matrix_store::get_rows(off_t start, off_t end) const { if (start < 0 || end < 0 || end - start < 0) { BOOST_LOG_TRIVIAL(error) << "invalid range for selecting rows"; return matrix_store::const_ptr(); } std::vector<off_t> idxs(end - start); for (size_t i = 0; i < idxs.size(); i++) idxs[i] = start + i; return get_rows(idxs); } bool matrix_store::share_data(const matrix_store &store) const { // By default, we can use data id to determine if two matrices have // the same data. 
return get_data_id() == store.get_data_id() && get_data_id() != INVALID_MAT_ID; } matrix_append::matrix_append(matrix_store::ptr store) { this->res = store; q.resize(1000); last_append = -1; written_eles = 0; empty_portion = local_matrix_store::const_ptr(new local_buf_row_matrix_store( 0, 0, 0, 0, store->get_type(), -1, false)); } void matrix_append::write_async(local_matrix_store::const_ptr portion, off_t seq_id) { if (seq_id <= last_append) { BOOST_LOG_TRIVIAL(error) << "Append a repeated portion"; return; } if (portion == NULL) portion = empty_portion; std::vector<local_matrix_store::const_ptr> data; lock.lock(); // Add the new portion to the queue. If the queue is too small, // we should resize the queue first. off_t loc = seq_id - last_append - 1; assert(loc >= 0); if ((size_t) loc >= q.size()) q.resize(q.size() * 2); q[loc] = portion; off_t start_loc = -1; if (q.front()) start_loc = written_eles; // Get the portions from the queue. while (q.front()) { auto mat = q.front(); // If the portion isn't empty. if (mat->get_num_rows() > 0 && mat->get_num_cols() > 0) data.push_back(mat); q.pop_front(); q.push_back(local_matrix_store::const_ptr()); last_append++; written_eles += mat->get_num_rows() * mat->get_num_cols(); } lock.unlock(); for (size_t i = 0; i < data.size(); i++) { assert(start_loc >= 0); // TODO this works if the result matrix is stored in memory. if (res->is_wide()) { off_t start_row = 0; off_t start_col = start_loc / res->get_num_rows(); res->write_portion_async(data[i], start_row, start_col); } else { off_t start_row = start_loc / res->get_num_cols(); off_t start_col = 0; res->write_portion_async(data[i], start_row, start_col); } start_loc += data[i]->get_num_rows() * data[i]->get_num_cols(); } } matrix_append::~matrix_append() { for (size_t i = 0; i < q.size(); i++) assert(q[i] == NULL); } void matrix_append::flush() { for (size_t i = 0; i < q.size(); i++) assert(q[i] == NULL); } } }<|fim▁end|>
* This file is part of FlashMatrix. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
<|file_name|>jwt-optional.hook.ts<|end_file_name|><|fim▁begin|>// 3p import { HookDecorator } from '@foal/core'; // FoalTS<|fim▁hole|> * Hook factory to authenticate users using JSON Web Tokens. * * The hook does not return any error when no user could be authenticated. * * If `options.cookie` is not defined, the hook expects the JWT to be included in the * `Authorization` header using the `Bearer` schema. Once the token is verified and decoded, * `ctx.user` is set with the payload (by default) or a custom object (see `options.user`). * * The content of the header should look like the following: Authorization: Bearer <token> * * @export * @param {JWTOptions} [options={}] - Hook options. * @param {FetchUser} [options.user] - A function that takes an id * and returns the corresponding user. If no user could be found, it returns undefined. * @param {(token: string) => boolean|Promise<boolean>} [options.blacklist] - A function that takes a token * and returns true or false. If the returned value is true, then the hook returns a 401 error. * @param {boolean} [options.cookie] - If true, the hook expects the token to be sent in a cookie * named `auth`. You can change the cookie name with the key `settings.jwt.cookie.name` in the configuration. * @param {VerifyOptions} [verifyOptions={}] - Options of the `jsonwebtoken` package. * @returns {HookDecorator} The hook. */ export function JWTOptional(options: JWTOptions = {}, verifyOptions: VerifyOptions = {}): HookDecorator { return JWT(false, options, verifyOptions); }<|fim▁end|>
import { JWT, JWTOptions, VerifyOptions } from './jwt.hook'; /**
<|file_name|>i2c_esp.py<|end_file_name|><|fim▁begin|># i2c_esp.py Test program for asi2c.py # Tests Responder on ESP8266 # The MIT License (MIT) # # Copyright (c) 2018 Peter Hinch # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE.<|fim▁hole|># sync X11 - 5 # ack Y8 - 4 # gnd - gnd import uasyncio as asyncio from machine import Pin, I2C import asi2c import ujson i2c = I2C(scl=Pin(0),sda=Pin(2)) # software I2C syn = Pin(5) ack = Pin(4) chan = asi2c.Responder(i2c, syn, ack) async def receiver(): sreader = asyncio.StreamReader(chan) await chan.ready() print('started') for _ in range(5): # Test flow control res = await sreader.readline() print('Received', ujson.loads(res)) await asyncio.sleep(4) while True: res = await sreader.readline() print('Received', ujson.loads(res)) async def sender(): swriter = asyncio.StreamWriter(chan, {}) txdata = [0, 0] while True: await swriter.awrite(''.join((ujson.dumps(txdata), '\n'))) txdata[1] += 1 await asyncio.sleep_ms(1500) loop = 
asyncio.get_event_loop() loop.create_task(receiver()) loop.create_task(sender()) try: loop.run_forever() finally: chan.close() # for subsequent runs<|fim▁end|>
# pyb esp8266 # scl X9 - 0 # sda X10 - 2
<|file_name|>ErrorProvider.js<|end_file_name|><|fim▁begin|><|fim▁hole|> errors: [], }); const ErrorProvider = ({ children }) => { const [errors, setErrors] = useState([]); return ( <ErrorContext.Provider value={{ errors, setErrors, }} > {children} </ErrorContext.Provider> ); }; ErrorProvider.propTypes = { children: PropTypes.node, }; export { ErrorContext, ErrorProvider };<|fim▁end|>
import React, { createContext, useState } from 'react'; import PropTypes from 'prop-types'; const ErrorContext = createContext({
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- __all__ = ["photometry", "epic", "Data", "Inject", "Likelihood", "Summary", "FP", "fit_traptransit"] <|fim▁hole|>from . import photometry, epic from .data import Data from .inject import Inject from .likelihood import Likelihood from .summary import Summary from .fp import FP from .traptransit import fit_traptransit<|fim▁end|>
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|>from rest_framework import status from rest_framework.exceptions import APIException, ParseError def json_api_exception_handler(exc, context): """ Custom exception handler that returns errors object as an array """ # Import inside method to avoid errors when the OSF is loaded without Django from rest_framework.views import exception_handler response = exception_handler(exc, context) # Error objects may have the following members. Title removed to avoid clash with node "title" errors. top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta'] errors = [] if response: message = response.data if isinstance(message, dict): for error_key, error_description in message.iteritems(): if error_key in top_level_error_keys: errors.append({error_key: error_description}) else: if isinstance(error_description, basestring): error_description = [error_description] errors.extend([{'source': {'pointer': '/data/attributes/' + error_key}, 'detail': reason} for reason in error_description]) else: if isinstance(message, basestring): message = [message] errors.extend([{'detail': error} for error in message]) response.data = {'errors': errors} return response # Custom Exceptions the Django Rest Framework does not support class Gone(APIException): status_code = status.HTTP_410_GONE default_detail = ('The requested resource is no longer available.') <|fim▁hole|> """Raised when client passes an invalid filter in the querystring.""" default_detail = 'Querystring contains an invalid filter.'<|fim▁end|>
class InvalidFilterError(ParseError):
<|file_name|>straight_line_trajectory.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, print_function, division from six.moves import range, map, filter, zip from six import iteritems from collections import deque, defaultdict from .polygon import is_same_direction, line_intersection from .surface_objects import SaddleConnection # Vincent question: # using deque has the disadvantage of losing the initial points # ideally doig # my_line[i] # we should always access to the same element # I wanted to be able to flow backward thus inserting at the beginning of a list. # Perhaps it would be better to model this on a deque-like class that is indexed by # all integers rather than just the non-negative ones? Do you know of such # a class? Alternately, we could store an offset. def get_linearity_coeff(u, v): r""" Given the two 2-dimensional vectors ``u`` and ``v``, return ``a`` so that ``v = a*u`` If the vectors are not colinear, a ``ValueError`` is raised. EXAMPLES:: sage: from flatsurf.geometry.straight_line_trajectory import get_linearity_coeff sage: V = VectorSpace(QQ,2) sage: get_linearity_coeff(V((1,0)), V((2,0))) 2 sage: get_linearity_coeff(V((2,0)), V((1,0))) 1/2 sage: get_linearity_coeff(V((0,1)), V((0,2))) 2 sage: get_linearity_coeff(V((0,2)), V((0,1))) 1/2 sage: get_linearity_coeff(V((1,2)), V((-2,-4))) -2 sage: get_linearity_coeff(V((1,1)), V((-1,1))) Traceback (most recent call last): ... 
ValueError: non colinear """ if u[0]: a = v[0]/u[0] if v[1] != a*u[1]: raise ValueError("non colinear") return a elif v[0]: raise ValueError("non colinear") elif u[1]: return v[1]/u[1] else: raise ValueError("zero vector") class SegmentInPolygon: r""" Maximal segment in a polygon of a similarity surface EXAMPLES:: sage: from flatsurf import * sage: from flatsurf.geometry.straight_line_trajectory import SegmentInPolygon sage: s = similarity_surfaces.example() sage: v = s.tangent_vector(0, (1/3,-1/4), (0,1)) sage: SegmentInPolygon(v) Segment in polygon 0 starting at (1/3, -1/3) and ending at (1/3, 0) """ def __init__(self, start, end=None): if not end is None: # WARNING: here we assume that both start and end are on the # boundary self._start = start self._end = end else: self._end = start.forward_to_polygon_boundary() self._start = self._end.forward_to_polygon_boundary() def __eq__(self, other):<|fim▁hole|> self._end == other._end def __ne__(self, other): return type(self) is not type(other) or \ self._start != other._start or \ self._end != other._end def __repr__(self): r""" TESTS:: sage: from flatsurf import * sage: from flatsurf.geometry.straight_line_trajectory import SegmentInPolygon sage: s = similarity_surfaces.example() sage: v = s.tangent_vector(0, (0,0), (3,-1)) sage: SegmentInPolygon(v) Segment in polygon 0 starting at (0, 0) and ending at (2, -2/3) """ return "Segment in polygon {} starting at {} and ending at {}".format( self.polygon_label(), self.start().point(), self.end().point()) def start(self): r""" Return the tangent vector associated to the start of a trajectory pointed forward. """ return self._start def start_is_singular(self): return self._start.is_based_at_singularity() def end(self): r""" Return a TangentVector associated to the end of a trajectory, pointed backward. 
""" return self._end def end_is_singular(self): return self._end.is_based_at_singularity() def is_edge(self): if not self.start_is_singular() or not self.end_is_singular(): return False vv=self.start().vector() vertex=self.start().vertex() ww=self.start().polygon().edge(vertex) from flatsurf.geometry.polygon import is_same_direction return is_same_direction(vv,ww) def edge(self): if not self.is_edge(): raise ValueError("Segment asked for edge when not an edge") return self.start().vertex() def polygon_label(self): return self._start.polygon_label() def invert(self): return SegmentInPolygon(self._end, self._start) def next(self): r""" Return the next segment obtained by continuing straight through the end point. EXAMPLES:: sage: from flatsurf import * sage: from flatsurf.geometry.straight_line_trajectory import SegmentInPolygon sage: s = similarity_surfaces.example() sage: s.polygon(0) Polygon: (0, 0), (2, -2), (2, 0) sage: s.polygon(1) Polygon: (0, 0), (2, 0), (1, 3) sage: v = s.tangent_vector(0, (0,0), (3,-1)) sage: seg = SegmentInPolygon(v) sage: seg Segment in polygon 0 starting at (0, 0) and ending at (2, -2/3) sage: seg.next() Segment in polygon 1 starting at (2/3, 2) and ending at (14/9, 4/3) """ if self.end_is_singular(): raise ValueError("Cannot continue from singularity") return SegmentInPolygon(self._end.invert()) def previous(self): if self.end_is_singular(): raise ValueError("Cannot continue from singularity") return SegmentInPolygon(self._start.invert()).invert() # DEPRECATED STUFF THAT WILL BE REMOVED def start_point(self): from sage.misc.superseded import deprecation deprecation(1, "do not use start_point but start().point()") return self._start.point() def start_direction(self): from sage.misc.superseded import deprecation deprecation(1, "do not use start_direction but start().vector()") return self._start.vector() def end_point(self): from sage.misc.superseded import deprecation deprecation(1, "do not use end_point but end().point()") return 
self._end.point() def end_direction(self): from sage.misc.superseded import deprecation deprecation(1, "do not use end_direction but end().vector()") return self._end.vector() class AbstractStraightLineTrajectory: r""" You need to implement: - ``def segment(self, i)`` - ``def segments(self)`` """ def surface(self): raise NotImplementedError def __repr__(self): start = self.segment(0).start() end = self.segment(-1).end() return "Straight line trajectory made of {} segments from {} in polygon {} to {} in polygon {}".format( self.combinatorial_length(), start.point(), start.polygon_label(), end.point(), end.polygon_label()) def plot(self, *args, **options): r""" Plot this trajectory by converting to a graphical trajectory. If any arguments are provided in `*args` it must be only one argument containing a GraphicalSurface. The keyword arguments in `**options` are passed on to :func:`GraphicalStraightLineTrajectory.plot`. EXAMPLES:: sage: from flatsurf import * sage: T = translation_surfaces.square_torus() sage: v = T.tangent_vector(0, (0,0), (5,7)) sage: L = v.straight_line_trajectory() sage: L.plot() # not tested (problem with matplotlib font caches on Travis) Graphics object consisting of 1 graphics primitive sage: L.plot(color='red') # not tested (problem with matplotlib font caches on Travis) Graphics object consisting of 1 graphics primitive """ if len(args) > 1: raise ValueError("SimilaritySurface.plot() can take at most one non-keyword argument.") if len(args)==1: from flatsurf.graphical.surface import GraphicalSurface if not isinstance(args[0], GraphicalSurface): raise ValueError("If an argument is provided, it must be a GraphicalSurface.") return self.graphical_trajectory(graphical_surface = args[0]).plot(**options) return self.graphical_trajectory().plot(**options) def graphical_trajectory(self, graphical_surface=None, **options): r""" Returns a ``GraphicalStraightLineTrajectory`` corresponding to this trajectory in the provided ``GraphicalSurface``. 
""" from flatsurf.graphical.straight_line_trajectory import GraphicalStraightLineTrajectory if graphical_surface is None: graphical_surface = self.surface().graphical_surface() return GraphicalStraightLineTrajectory(self, graphical_surface, **options) def cylinder(self): r""" If this is a closed orbit, return the associated maximal cylinder. Raises a ValueError if this trajectory is not closed. EXAMPLES:: sage: from flatsurf import * sage: s = translation_surfaces.regular_octagon() sage: v = s.tangent_vector(0,(1/2,0),(sqrt(2),1)) sage: traj = v.straight_line_trajectory() sage: traj.flow(4) sage: traj.is_closed() True sage: cyl = traj.cylinder() sage: cyl.area() # a = sqrt(2) a + 1 sage: cyl.holonomy() (3*a + 4, 2*a + 3) sage: cyl.edges() (2, 3, 3, 2, 4) """ # Note may not be defined. if not self.is_closed(): raise ValueError("Cylinder is only defined for closed straight-line trajectories.") from .surface_objects import Cylinder coding = self.coding() label = coding[0][0] edges = [ e for l,e in coding[1:] ] edges.append(self.surface().opposite_edge(coding[0][0],coding[0][1])[1]) return Cylinder(self.surface(), label, edges) def coding(self, alphabet=None): r""" Return the coding of this trajectory with respect to the sides of the polygons INPUT: - ``alphabet`` -- an optional dictionary ``(lab,nb) -> letter``. If some labels are avoided then these crossings are ignored. 
EXAMPLES:: sage: from flatsurf import * sage: t = translation_surfaces.square_torus() sage: v = t.tangent_vector(0, (1/2,0), (5,6)) sage: l = v.straight_line_trajectory() sage: alphabet = {(0,0): 'a', (0,1): 'b', (0,2):'a', (0,3): 'b'} sage: l.coding() [(0, 0), (0, 1)] sage: l.coding(alphabet) ['a', 'b'] sage: l.flow(10); l.flow(-10) sage: l.coding() [(0, 2), (0, 1), (0, 2), (0, 1), (0, 2), (0, 1), (0, 2), (0, 1), (0, 2)] sage: print(''.join(l.coding(alphabet))) ababababa sage: v = t.tangent_vector(0, (1/2,0), (7,13)) sage: l = v.straight_line_trajectory() sage: l.flow(10); l.flow(-10) sage: print(''.join(l.coding(alphabet))) aabaabaababaabaabaab For a closed trajectory, the last label (corresponding also to the starting point) is not considered:: sage: v = t.tangent_vector(0, (1/5,1/7), (1,1)) sage: l = v.straight_line_trajectory() sage: l.flow(10) sage: l.is_closed() True sage: l.coding(alphabet) ['a', 'b'] Check that the saddle connections that are obtained in the torus get the expected coding:: sage: for _ in range(10): ....: x = ZZ.random_element(1,30) ....: y = ZZ.random_element(1,30) ....: x,y = x/gcd(x,y), y/gcd(x,y) ....: v = t.tangent_vector(0, (0,0), (x,y)) ....: l = v.straight_line_trajectory() ....: l.flow(200); l.flow(-200) ....: w = ''.join(l.coding(alphabet)) ....: assert Word(w+'ab'+w).is_balanced() ....: assert Word(w+'ba'+w).is_balanced() ....: assert w.count('a') == y-1 ....: assert w.count('b') == x-1 """ ans = [] segments = self.segments() s = segments[0] start = s.start() if start._position._position_type == start._position.EDGE_INTERIOR: p = s.polygon_label() e = start._position.get_edge() lab = (p,e) if alphabet is None else alphabet.get((p,e)) if lab is not None: ans.append(lab) for i in range(len(segments)-1): s = segments[i] end = s.end() p = s.polygon_label() e = end._position.get_edge() lab = (p,e) if alphabet is None else alphabet.get((p,e)) if lab is not None: ans.append(lab) s = segments[-1] end = s.end() if 
end._position._position_type == end._position.EDGE_INTERIOR and \ end.invert() != start: p = s.polygon_label() e = end._position.get_edge() lab = (p,e) if alphabet is None else alphabet.get((p,e)) if lab is not None: ans.append(lab) return ans def initial_tangent_vector(self): return self.segment(0).start() def terminal_tangent_vector(self): return self.segment(-1).end() def intersects(self, traj, count_singularities = False): r""" Return true if this trajectory intersects the other trajectory. """ try: next(self.intersections(traj, count_singularities = count_singularities)) except StopIteration: return False return True def intersections(self, traj, count_singularities = False, include_segments = False): r""" Return the set of SurfacePoints representing the intersections of this trajectory with the provided trajectory or SaddleConnection. Singularities will be included only if count_singularities is set to True. If include_segments is True, it iterates over triples consisting of the SurfacePoint, and two sets. The first set consists of segments of this trajectory that contain the point and the second set consists of segments of traj that contain the point. EXAMPLES:: sage: from flatsurf import * sage: s=translation_surfaces.square_torus() sage: traj1 = s.tangent_vector(0,(1/2,0),(1,1)).straight_line_trajectory() sage: traj1.flow(3) sage: traj1.is_closed() True sage: traj2 = s.tangent_vector(0,(1/2,0),(-1,1)).straight_line_trajectory() sage: traj2.flow(3) sage: traj2.is_closed() True sage: sum(1 for _ in traj1.intersections(traj2)) 2 """ # Partition the segments making up the trajectories by label. 
if isinstance(traj,SaddleConnection): traj = traj.trajectory() lab_to_seg1 = {} for seg1 in self.segments(): label = seg1.polygon_label() if label in lab_to_seg1: lab_to_seg1[label].append(seg1) else: lab_to_seg1[label] = [seg1] lab_to_seg2 = {} for seg2 in traj.segments(): label = seg2.polygon_label() if label in lab_to_seg2: lab_to_seg2[label].append(seg2) else: lab_to_seg2[label] = [seg2] intersection_points = set() if include_segments: segments={} for label,seg_list_1 in iteritems(lab_to_seg1): if label in lab_to_seg2: seg_list_2 = lab_to_seg2[label] for seg1 in seg_list_1: for seg2 in seg_list_2: x = line_intersection(seg1.start().point(), seg1.start().point()+seg1.start().vector(), seg2.start().point(), seg2.start().point()+seg2.start().vector()) if x is not None: pos = self._s.polygon(seg1.polygon_label()).get_point_position(x) if pos.is_inside() and (count_singularities or not pos.is_vertex()): new_point = self._s.surface_point(seg1.polygon_label(),x) if new_point not in intersection_points: intersection_points.add(new_point) if include_segments: segments[new_point]=({seg1},{seg2}) else: yield new_point elif include_segments: segments[new_point][0].append(seg1) segments[new_point][1].append(seg2) if include_segments: for x in iteritems(segments): yield x class StraightLineTrajectory(AbstractStraightLineTrajectory): r""" Straight-line trajectory in a similarity surface. 
EXAMPLES:: # Demonstrate the handling of edges sage: from flatsurf import * sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectory sage: p = SymmetricGroup(2)('(1,2)') sage: s = translation_surfaces.origami(p,p) sage: traj = StraightLineTrajectory(s.tangent_vector(1,(0,0),(1,0))) sage: traj Straight line trajectory made of 1 segments from (0, 0) in polygon 1 to (1, 1) in polygon 2 sage: traj.is_saddle_connection() True sage: traj2 = StraightLineTrajectory(s.tangent_vector(1,(0,0),(0,1))) sage: traj2 Straight line trajectory made of 1 segments from (1, 0) in polygon 2 to (0, 1) in polygon 1 sage: traj2.is_saddle_connection() True """ def __init__(self, tangent_vector): self._segments = deque() seg = SegmentInPolygon(tangent_vector) self._segments.append(seg) self._setup_forward() self._setup_backward() self._s=tangent_vector.surface() def surface(self): return self._s def segment(self, i): r""" EXAMPLES:: sage: from flatsurf import * sage: O = translation_surfaces.regular_octagon() sage: v = O.tangent_vector(0, (1,1), (33,45)) sage: L = v.straight_line_trajectory() sage: L.segment(0) Segment in polygon 0 starting at (4/15, 0) and ending at (11/26*a + 1, 15/26*a + 1) sage: L.flow(-1) sage: L.segment(0) Segment in polygon 0 starting at (-1/2*a, 7/22*a + 7/11) and ending at (4/15, a + 1) sage: L.flow(1) sage: L.segment(2) Segment in polygon 0 starting at (-1/13*a, 1/13*a) and ending at (9/26*a + 11/13, 17/26*a + 15/13) """ return self.segments()[i] def combinatorial_length(self): return len(self.segments()) def segments(self): return self._segments def _setup_forward(self): v = self.terminal_tangent_vector() if v.is_based_at_singularity(): self._forward = None else: self._forward = v.invert() def _setup_backward(self): v = self.initial_tangent_vector() if v.is_based_at_singularity(): self._backward = None else: self._backward = v.invert() def is_forward_separatrix(self): return self._forward is None def is_backward_separatrix(self): return 
self._backward is None def is_saddle_connection(self): return (self._forward is None) and (self._backward is None) def is_closed(self): r""" Test whether this is a closed trajectory. By convention, by a closed trajectory we mean a trajectory without any singularities. .. SEEALSO:: :meth:`is_saddle_connection` EXAMPLES: An example in a cone surface covered by the torus:: sage: from flatsurf import * sage: p = polygons.square() sage: s = Surface_list(base_ring=p.base_ring()) sage: s.add_polygon(p,[(0,3),(0,2),(0,1),(0,0)]) 0 sage: s.set_immutable() sage: t = RationalConeSurface(s) sage: v = t.tangent_vector(0, (1/2,0), (1/3,7/5)) sage: l = v.straight_line_trajectory() sage: l.is_closed() False sage: l.flow(100) sage: l.is_closed() True sage: v = t.tangent_vector(0, (1/2,0), (1/3,2/5)) sage: l = v.straight_line_trajectory() sage: l.flow(100) sage: l.is_closed() False sage: l.is_saddle_connection() False sage: l.flow(-100) sage: l.is_saddle_connection() True """ return (not self.is_forward_separatrix()) and \ self._forward.differs_by_scaling(self.initial_tangent_vector()) def flow(self, steps): r""" Append or preprend segments to the trajectory. If steps is positive, attempt to append this many segments. If steps is negative, attempt to prepend this many segments. Will fail gracefully the trajectory hits a singularity or closes up. 
EXAMPLES:: sage: from flatsurf import * sage: s = similarity_surfaces.example() sage: v = s.tangent_vector(0, (1,-1/2), (3,-1)) sage: traj = v.straight_line_trajectory() sage: traj Straight line trajectory made of 1 segments from (1/4, -1/4) in polygon 0 to (2, -5/6) in polygon 0 sage: traj.flow(1) sage: traj Straight line trajectory made of 2 segments from (1/4, -1/4) in polygon 0 to (61/36, 11/12) in polygon 1 sage: traj.flow(-1) sage: traj Straight line trajectory made of 3 segments from (15/16, 45/16) in polygon 1 to (61/36, 11/12) in polygon 1 """ while steps>0 and \ (not self.is_forward_separatrix()) and \ (not self.is_closed()): self._segments.append(SegmentInPolygon(self._forward)) self._setup_forward() steps -= 1 while steps<0 and \ (not self.is_backward_separatrix()) and \ (not self.is_closed()): self._segments.appendleft(SegmentInPolygon(self._backward).invert()) self._setup_backward() steps += 1 class StraightLineTrajectoryTranslation(AbstractStraightLineTrajectory): r""" Straight line trajectory in a translation surface. This is similar to :class:`StraightLineTrajectory` but implemented using interval exchange maps. It should be faster than the implementation via segments and flowing in polygons. This class only stores a list of triples ``(p, e, x)`` where: - ``p`` is a label of a polygon - ``e`` is the number of some edge in ``p`` - ``x`` is the position of the point in ``e`` (be careful that it is not necessarily a number between 0 and 1. 
It is given relatively to the length of the induced interval in the iet) (see the methods :meth:`_prev` and :meth:`_next`) """ def __init__(self, tangent_vector): t = tangent_vector.polygon_label() self._vector = tangent_vector.vector() self._s = tangent_vector.surface() seg = SegmentInPolygon(tangent_vector) if seg.is_edge(): self._points = None self._edge = seg return start = seg.start() pos = start._position if pos._position_type == pos.EDGE_INTERIOR: i = pos.get_edge() elif pos._position_type == pos.VERTEX: i = pos.get_vertex() else: raise RuntimeError("PROBLEM!") p = start.polygon_label() poly = self._s.polygon(p) T = self._get_iet(p) x = get_linearity_coeff(poly.vertex(i+1) - poly.vertex(i), start.point() - poly.vertex(i)) x *= T.length_bot(i) self._points = deque() # we store triples (lab, edge, rel_pos) self._points.append((p, i, x)) def _next(self, p, e, x): r""" Return the image of ``(p, e, x)`` EXAMPLES:: sage: from flatsurf import * sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectoryTranslation sage: S = SymmetricGroup(3) sage: r = S('(1,2)') sage: u = S('(1,3)') sage: o = translation_surfaces.origami(r,u) sage: v = o.tangent_vector(1, (1/3,1/7), (5,13)) sage: L = StraightLineTrajectoryTranslation(v) sage: t0 = (1,0,1/3) sage: t1 = L._next(*t0) sage: t2 = L._next(*t1) sage: t0,t1,t2 ((1, 0, 1/3), (3, 0, 16/3), (1, 0, 31/3)) sage: assert L._previous(*t2) == t1 sage: assert L._previous(*t1) == t0 """ e, x = self._get_iet(p).forward_image(e, x) p, e = self._s.opposite_edge(p, e) return (p, e, x) def _previous(self, p, e, x): r""" Return the preimage of ``(p, e, x)`` """ p, e = self._s.opposite_edge(p, e) e, x = self._get_iet(p).backward_image(e, x) return (p, e, x) def combinatorial_length(self): if self._points is None: return 1 return len(self._points) def _get_iet(self, label): polygon = self._s.polygon(label) try: return self._iets[polygon] except AttributeError: self._iets = {polygon: polygon.flow_map(self._vector)} 
except KeyError: self._iets[polygon] = polygon.flow_map(self._vector) return self._iets[polygon] def segment(self, i): r""" EXAMPLES:: sage: from flatsurf import * sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectoryTranslation sage: O = translation_surfaces.regular_octagon() sage: v = O.tangent_vector(0, (1,1), (33,45)) sage: L = StraightLineTrajectoryTranslation(v) sage: L.segment(0) Segment in polygon 0 starting at (4/15, 0) and ending at (11/26*a + 1, 15/26*a + 1) sage: L.flow(-1) sage: L.segment(0) Segment in polygon 0 starting at (-1/2*a, 7/22*a + 7/11) and ending at (4/15, a + 1) sage: L.flow(1) sage: L.segment(2) Segment in polygon 0 starting at (-1/13*a, 1/13*a) and ending at (9/26*a + 11/13, 17/26*a + 15/13) """ if self._points is None: return self._edge lab, e0, x0 = self._points[i] iet = self._get_iet(lab) e1, x1 = iet.forward_image(e0, x0) poly = self._s.polygon(lab) l0 = iet.length_bot(e0) l1 = iet.length_top(e1) point0 = poly.vertex(e0) + poly.edge(e0) * x0/l0 point1 = poly.vertex(e1) + poly.edge(e1) * (l1-x1)/l1 v0 = self._s.tangent_vector(lab, point0, self._vector, ring=self._vector.base_ring()) v1 = self._s.tangent_vector(lab, point1, -self._vector, ring=self._vector.base_ring()) return SegmentInPolygon(v0,v1) def segments(self): r""" EXAMPLES:: sage: from flatsurf import * sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectoryTranslation sage: s = translation_surfaces.square_torus() sage: v = s.tangent_vector(0, (0,0), (1,1+AA(5).sqrt()), ring=AA) sage: L = StraightLineTrajectoryTranslation(v) sage: L.flow(2) sage: L.segments() [Segment in polygon 0 starting at (0, 0) and ending at (0.3090169943749474?, 1), Segment in polygon 0 starting at (0.3090169943749474?, 0) and ending at (0.618033988749895?, 1), Segment in polygon 0 starting at (0.618033988749895?, 0) and ending at (0.9270509831248423?, 1)] """ return [self.segment(i) for i in range(self.combinatorial_length())] def 
is_closed(self): if self._points is None: raise NotImplementedError return self._points[0] == self._next(*self._points[-1]) def is_forward_separatrix(self): if self._points is None: return True p1,e1,x1 = self._next(*self._points[-1]) return x1.is_zero() def is_backward_separatrix(self): return self._points is None or self._points[0][2].is_zero() def is_saddle_connection(self): r""" EXAMPLES:: sage: from flatsurf import * sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectoryTranslation sage: torus = translation_surfaces.square_torus() sage: v = torus.tangent_vector(0, (1/2,1/2), (1,1)) sage: S = StraightLineTrajectoryTranslation(v) sage: S.is_saddle_connection() True sage: v = torus.tangent_vector(0, (1/3,2/3), (1,2)) sage: S = StraightLineTrajectoryTranslation(v) sage: S.is_saddle_connection() False sage: S.flow(1) sage: S.is_saddle_connection() True """ return self._points is None or (self.is_forward_separatrix() and self.is_backward_separatrix()) def flow(self, steps): if self._points is None: return if steps > 0: t = self._points[-1] for i in range(steps): t = self._next(*t) if t == self._points[0] or t[2].is_zero(): break self._points.append(t) elif steps < 0: t = self._points[0] for i in range(-steps): if t[2].is_zero(): break t = self._previous(*t) if t == self._points[-1]: # closed curve or backward separatrix break self._points.appendleft(t)<|fim▁end|>
return type(self) is type(other) and \ self._start == other._start and \
<|file_name|>index.js<|end_file_name|><|fim▁begin|>module.exports = { audioFilter: require('./audioFilter'),<|fim▁hole|>}<|fim▁end|>
destination: require('./destination'), filename: require('./filename'), multer: require('./multer')
<|file_name|>ons-back-button.js<|end_file_name|><|fim▁begin|>import Ember from 'ember'; import layout from '../templates/components/ons-back-button'; export default Ember.Component.extend({<|fim▁hole|>});<|fim▁end|>
layout, tagName: 'ons-back-button', attributeBindings: ['modifier']
<|file_name|>MedianFilter.py<|end_file_name|><|fim▁begin|>import numpy as np from scipy.signal import medfilt import manager.operations.method as method from manager.operations.methodsteps.confirmation import Confirmation from manager.exceptions import VoltPyNotAllowed class MedianFilter(method.ProcessingMethod): can_be_applied = True _steps = [ { 'class': Confirmation, 'title': 'Apply median filter', 'desc': 'Press Forward to apply Median Filter.', }, ] description = """ Median filter is smoothing algorithm similar to the Savitzky-Golay, however instead of fitting of the polynomial, the middle point of the window is moved to the value of median of the points in the window. The median filter is most usefull for removal of spikes from the signal (single point, large amplitude errors). """ @classmethod def __str__(cls): return "Median Filter" def apply(self, user, dataset): if self.model.completed is not True: raise VoltPyNotAllowed('Incomplete procedure.') self.__perform(dataset) def __perform(self, dataset): for cd in dataset.curves_data.all():<|fim▁hole|> def finalize(self, user): self.__perform(self.model.dataset) self.model.step = None self.model.completed = True self.model.save() return True<|fim▁end|>
yvec = cd.yVector newyvec = medfilt(yvec) dataset.updateCurve(self.model, cd, newyvec) dataset.save()
<|file_name|>KeyspaceParams.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.schema; import java.util.Map; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.MoreObjects; import com.google.common.base.Objects; /** * An immutable class representing keyspace parameters (durability and replication). */ public final class KeyspaceParams { public static final boolean DEFAULT_DURABLE_WRITES = true; /** * This determines durable writes for the {@link org.apache.cassandra.config.SchemaConstants#SCHEMA_KEYSPACE_NAME} * and {@link org.apache.cassandra.config.SchemaConstants#SYSTEM_KEYSPACE_NAME} keyspaces, * the only reason it is not final is for commitlog unit tests. It should only be changed for testing purposes. 
*/ @VisibleForTesting public static boolean DEFAULT_LOCAL_DURABLE_WRITES = true; public enum Option { DURABLE_WRITES, REPLICATION; @Override public String toString() { return name().toLowerCase(); } } public final boolean durableWrites; public final ReplicationParams replication; public KeyspaceParams(boolean durableWrites, ReplicationParams replication) { this.durableWrites = durableWrites; this.replication = replication; } public static KeyspaceParams create(boolean durableWrites, Map<String, String> replication) { return new KeyspaceParams(durableWrites, ReplicationParams.fromMap(replication)); } public static KeyspaceParams local() {<|fim▁hole|> public static KeyspaceParams simple(int replicationFactor) { return new KeyspaceParams(true, ReplicationParams.simple(replicationFactor)); } public static KeyspaceParams simpleTransient(int replicationFactor) { return new KeyspaceParams(false, ReplicationParams.simple(replicationFactor)); } public static KeyspaceParams nts(Object... args) { return new KeyspaceParams(true, ReplicationParams.nts(args)); } public void validate(String name) { replication.validate(name); } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof KeyspaceParams)) return false; KeyspaceParams p = (KeyspaceParams) o; return durableWrites == p.durableWrites && replication.equals(p.replication); } @Override public int hashCode() { return Objects.hashCode(durableWrites, replication); } @Override public String toString() { return MoreObjects.toStringHelper(this) .add(Option.DURABLE_WRITES.toString(), durableWrites) .add(Option.REPLICATION.toString(), replication) .toString(); } }<|fim▁end|>
return new KeyspaceParams(DEFAULT_LOCAL_DURABLE_WRITES, ReplicationParams.local()); }
<|file_name|>Sidebar.ts<|end_file_name|><|fim▁begin|>import { FieldSchema, ValueSchema } from '@ephox/boulder'; import { Fun, Optional, Result } from '@ephox/katamari'; export interface SidebarInstanceApi { element: () => HTMLElement; } export interface SidebarSpec { icon?: string; tooltip?: string; onShow?: (api: SidebarInstanceApi) => void; onSetup?: (api: SidebarInstanceApi) => (api: SidebarInstanceApi) => void; onHide?: (api: SidebarInstanceApi) => void; } export interface Sidebar { icon: Optional<string>; tooltip: Optional<string>; onShow: (api: SidebarInstanceApi) => void; onSetup: (api: SidebarInstanceApi) => (api: SidebarInstanceApi) => void; onHide: (api: SidebarInstanceApi) => void; } export const sidebarSchema = ValueSchema.objOf([ FieldSchema.optionString('icon'), FieldSchema.optionString('tooltip'), FieldSchema.defaultedFunction('onShow', Fun.noop), FieldSchema.defaultedFunction('onHide', Fun.noop), FieldSchema.defaultedFunction('onSetup', () => Fun.noop) ]);<|fim▁hole|><|fim▁end|>
export const createSidebar = (spec: SidebarSpec): Result<Sidebar, ValueSchema.SchemaError<any>> => ValueSchema.asRaw('sidebar', sidebarSchema, spec);
<|file_name|>hero-button.component.avoid.ts<|end_file_name|><|fim▁begin|>// #docregion import { Component, EventEmitter } from '@angular/core'; // #docregion example /* avoid */ <|fim▁hole|>@Component({ selector: 'toh-hero-button', template: `<button></button>`, inputs: [ 'label' ], outputs: [ 'change' ] }) export class HeroButtonComponent { change = new EventEmitter<any>(); label: string; } // #enddocregion example<|fim▁end|>
<|file_name|>test_copy_in_out_cpp.cpp<|end_file_name|><|fim▁begin|>/*************************************************************************************/ /* Copyright 2009 Barcelona Supercomputing Center */ /* */ /* This file is part of the NANOS++ library. */ /* */ /* NANOS++ is free software: you can redistribute it and/or modify */ /* it under the terms of the GNU Lesser General Public License as published by */ /* the Free Software Foundation, either version 3 of the License, or */ /* (at your option) any later version. */ /* */ /* NANOS++ is distributed in the hope that it will be useful, */ /* but WITHOUT ANY WARRANTY; without even the implied warranty of */ /* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */ /* GNU Lesser General Public License for more details. */ /* */ /* You should have received a copy of the GNU Lesser General Public License */ /* along with NANOS++. If not, see <http://www.gnu.org/licenses/>. */ /*************************************************************************************/ /* <testinfo> test_generator=gens/core-generator </testinfo> */ #include "config.hpp" #include <iostream> #include "smpprocessor.hpp" #include "system.hpp" #include "copydata.hpp" #include <string.h> #include <unistd.h> using namespace std; using namespace nanos; using namespace nanos::ext; typedef struct { int a; std::string b; } hello_world_args; void hello_world ( void *args ); void hello_world ( void *args ) { WD *wd = getMyThreadSafe()->getCurrentWD(); hello_world_args *hargs = ( hello_world_args * ) args;<|fim▁hole|> if ( (void *)cd[0].getAddress() != (void *)&(hargs->a) ) { std::cout << "Error: CopyData address '" << cd[0].getAddress() << "' does not match argument with address '" << &(hargs->a) << "'." << std::endl; abort(); } else { std::cout << "Checking for CopyData address correctness... 
PASS" << std::endl; } if ( (void *)( (char *)hargs + (unsigned long)cd[1].getAddress() ) != (void *) &(hargs->b) ) { std::cout << "Error: CopyData address '" << cd[1].getAddress() << "' does not match argument with address '" << &(hargs->b) << "'." << std::endl; abort(); } else { std::cout << "Checking for CopyData address correctness... PASS" << std::endl; } if ( cd[0].getSize() != sizeof(hargs->a) ) { std::cout << "Error: CopyData size '" << cd[0].getSize() << "' does not match argument with size '" << sizeof((hargs->b)) << "'." << std::endl; abort(); } else { std::cout << "Checking for CopyData size correctness... PASS" << std::endl; } if ( cd[1].getSize() != sizeof(hargs->b) ) { std::cout << "Error: CopyData size '" << cd[1].getSize() << "' does not match argument with size '" << sizeof((hargs->b)) << "'." << std::endl; abort(); } else { std::cout << "Checking for CopyData size correctness... PASS" << std::endl; } if ( !cd[0].isInput() ) { std::cout << "Error: CopyData was supposed to be input." << std::endl; abort(); } else { std::cout << "Checking for CopyData direction correctness... PASS" << std::endl; } if ( !cd[1].isOutput() ) { std::cout << "Error: CopyData was supposed to be output." << std::endl; abort(); } else { std::cout << "Checking for CopyData direction correctness... PASS" << std::endl; } if ( !cd[0].isShared() ) { std::cout << "Error: CopyData was supposed to be NANOS_SHARED." << std::endl; abort(); } else { std::cout << "Checking for CopyData sharing... PASS" << std::endl; } if ( !cd[1].isPrivate() ) { std::cout << "Error: CopyData was supposed to be NANOS_PRIVATE." << std::endl; abort(); } else { std::cout << "Checking for CopyData sharing... 
PASS" << std::endl; } } int main ( int argc, char **argv ) { const char *a = "alex"; hello_world_args *data = new hello_world_args(); data->a = 1; data->b = a; nanos_region_dimension_internal_t dims[2]; dims[0] = (nanos_region_dimension_internal_t) {sizeof(data->a), 0, sizeof(data->a)}; dims[1] = (nanos_region_dimension_internal_t) {sizeof(data->b), 0, sizeof(data->b)}; CopyData cd[2] = { CopyData( (uint64_t)&data->a, NANOS_SHARED, true, false, 1, &dims[0], 0 ), CopyData( (uint64_t)&data->b, NANOS_PRIVATE, true, true, 1, &dims[1], 0 ) }; WD * wd = new WD( new SMPDD( hello_world ), sizeof( hello_world_args ), __alignof__( hello_world_args ), data, 2, cd ); WG *wg = getMyThreadSafe()->getCurrentWD(); wg->addWork( *wd ); sys.setupWD(*wd, (nanos::WD *) wg); sys.submit( *wd ); usleep( 500 ); wg->waitCompletion(); }<|fim▁end|>
CopyData* cd = wd->getCopies();
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin from .models import BackgroundImages, Widget class WidgetAdmin(admin.ModelAdmin): list_display = ('name', 'link', 'is_featured') ordering = ('-id',) <|fim▁hole|>class BackgroundAdmin(admin.ModelAdmin): list_display = ('name', 'created_at') ordering = ('-id',) admin.site.register(Widget, WidgetAdmin) admin.site.register(BackgroundImages, BackgroundAdmin)<|fim▁end|>
<|file_name|>fake_apiserver.go<|end_file_name|><|fim▁begin|>// Code generated by client-gen. DO NOT EDIT. package fake import ( "context" configv1 "github.com/openshift/api/config/v1" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" labels "k8s.io/apimachinery/pkg/labels" schema "k8s.io/apimachinery/pkg/runtime/schema" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" testing "k8s.io/client-go/testing" ) // FakeAPIServers implements APIServerInterface type FakeAPIServers struct { Fake *FakeConfigV1 } var apiserversResource = schema.GroupVersionResource{Group: "config.openshift.io", Version: "v1", Resource: "apiservers"} var apiserversKind = schema.GroupVersionKind{Group: "config.openshift.io", Version: "v1", Kind: "APIServer"} // Get takes name of the aPIServer, and returns the corresponding aPIServer object, and an error if there is any. func (c *FakeAPIServers) Get(ctx context.Context, name string, options v1.GetOptions) (result *configv1.APIServer, err error) { obj, err := c.Fake. Invokes(testing.NewRootGetAction(apiserversResource, name), &configv1.APIServer{}) if obj == nil { return nil, err } return obj.(*configv1.APIServer), err } // List takes label and field selectors, and returns the list of APIServers that match those selectors. func (c *FakeAPIServers) List(ctx context.Context, opts v1.ListOptions) (result *configv1.APIServerList, err error) { obj, err := c.Fake. Invokes(testing.NewRootListAction(apiserversResource, apiserversKind, opts), &configv1.APIServerList{}) if obj == nil { return nil, err } label, _, _ := testing.ExtractFromListOptions(opts) if label == nil { label = labels.Everything() } list := &configv1.APIServerList{ListMeta: obj.(*configv1.APIServerList).ListMeta} for _, item := range obj.(*configv1.APIServerList).Items { if label.Matches(labels.Set(item.Labels)) { list.Items = append(list.Items, item) } } return list, err } // Watch returns a watch.Interface that watches the requested aPIServers. 
func (c *FakeAPIServers) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { return c.Fake. InvokesWatch(testing.NewRootWatchAction(apiserversResource, opts)) } // Create takes the representation of a aPIServer and creates it. Returns the server's representation of the aPIServer, and an error, if there is any. func (c *FakeAPIServers) Create(ctx context.Context, aPIServer *configv1.APIServer, opts v1.CreateOptions) (result *configv1.APIServer, err error) { obj, err := c.Fake. Invokes(testing.NewRootCreateAction(apiserversResource, aPIServer), &configv1.APIServer{}) if obj == nil { return nil, err } return obj.(*configv1.APIServer), err } // Update takes the representation of a aPIServer and updates it. Returns the server's representation of the aPIServer, and an error, if there is any. func (c *FakeAPIServers) Update(ctx context.Context, aPIServer *configv1.APIServer, opts v1.UpdateOptions) (result *configv1.APIServer, err error) { obj, err := c.Fake. Invokes(testing.NewRootUpdateAction(apiserversResource, aPIServer), &configv1.APIServer{}) if obj == nil { return nil, err } return obj.(*configv1.APIServer), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). func (c *FakeAPIServers) UpdateStatus(ctx context.Context, aPIServer *configv1.APIServer, opts v1.UpdateOptions) (*configv1.APIServer, error) {<|fim▁hole|> obj, err := c.Fake. Invokes(testing.NewRootUpdateSubresourceAction(apiserversResource, "status", aPIServer), &configv1.APIServer{}) if obj == nil { return nil, err } return obj.(*configv1.APIServer), err } // Delete takes name of the aPIServer and deletes it. Returns an error if one occurs. func (c *FakeAPIServers) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { _, err := c.Fake. 
Invokes(testing.NewRootDeleteAction(apiserversResource, name), &configv1.APIServer{}) return err } // DeleteCollection deletes a collection of objects. func (c *FakeAPIServers) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { action := testing.NewRootDeleteCollectionAction(apiserversResource, listOpts) _, err := c.Fake.Invokes(action, &configv1.APIServerList{}) return err } // Patch applies the patch and returns the patched aPIServer. func (c *FakeAPIServers) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *configv1.APIServer, err error) { obj, err := c.Fake. Invokes(testing.NewRootPatchSubresourceAction(apiserversResource, name, pt, data, subresources...), &configv1.APIServer{}) if obj == nil { return nil, err } return obj.(*configv1.APIServer), err }<|fim▁end|>
<|file_name|>frontend.go<|end_file_name|><|fim▁begin|>package main import ( "net/http" "os" "os/signal" "syscall" "go.uber.org/zap" "github.com/rakyll/statik/fs" "gopkg.in/urfave/cli.v1" "context" "time" "fmt" _ "github.com/jirwin/ipfs-archive/frontend/statik" "github.com/jirwin/ipfs-archive/version" ) func main() { app := cli.NewApp() app.Name = "ipfs-archive-frontend" app.Usage = "ipfs-archive frontend service" app.Version = version.Version app.Flags = []cli.Flag{ cli.StringFlag{ Name: "address,a", Usage: "The address to listen on", }, } app.Action = run app.Run(os.Args) } func run(cliCtx *cli.Context) error { ctx := context.Background() logger, err := zap.NewProduction() if err != nil { panic(err) } if !cliCtx.IsSet("address") { cli.ShowAppHelpAndExit(cliCtx, -1) } statikFS, err := fs.New() if err != nil { logger.Error(err.Error()) return cli.NewExitError(err.Error(), -1) } stop := make(chan os.Signal, 2) signal.Notify(stop, os.Interrupt, syscall.SIGTERM) mux := http.NewServeMux() mux.Handle("/", http.FileServer(statikFS)) addr := cliCtx.String("address") server := &http.Server{ Addr: addr, Handler: mux, }<|fim▁hole|> if err := server.ListenAndServe(); err != nil { logger.Fatal(err.Error(), zap.Error(err)) } }() <-stop logger.Info("Shutting down server.") shutdownCtx, _ := context.WithTimeout(ctx, time.Second*10) server.Shutdown(shutdownCtx) logger.Info("Server shutdown.") return nil }<|fim▁end|>
logger.Info(fmt.Sprintf("Listening on %s", addr)) go func() {
<|file_name|>eval_kanungo_est.py<|end_file_name|><|fim▁begin|>import env import numpy as np import metaomr<|fim▁hole|>import metaomr.kanungo as kan from metaomr.page import Page import glob import pandas as pd import itertools import os.path import sys from datetime import datetime from random import random, randint IDEAL = [path for path in sorted(glob.glob('testset/modern/*.png')) if 'nostaff' not in path] def random_params(): if random() < 0.25: nu = 0 else: nu = random() * 0.05 if random() < 0.25: a0 = a = 0 else: a0 = random() * 0.2 a = 0.5 + random() * 2 if random() < 0.25: b0 = b = 0 else: b0 = random() * 0.2 b = 0.5 + random() * 2 k = randint(0, 4) return nu, a0, a, b0, b, k columns = pd.MultiIndex.from_product([['real', 'estimate'], 'nu a0 a b0 b k'.split()]) columns = columns.append(pd.MultiIndex.from_product([['estimate'],['stat','time','status','nfev']])) cols = [] results = [] fun = 'ks' method = 'Nelder-Mead' for image in IDEAL: name = os.path.basename(image).split('.')[0] page, = metaomr.open(image) kimg = kan.KanungoImage(kan.normalized_page(page)[0]) for i in xrange(3): params = random_params() synth = Page(kimg.degrade(params)) synth.staff_dist = 8 for maxfev in [25, 50]: start = datetime.now() est_params = kan.est_parameters(synth, test_fn=kan.test_hists_ks if fun == 'ks' else kan.test_hists_chisq, opt_method=method, maxfev=maxfev) end = datetime.now() cols.append((name, fun, maxfev, i)) results.append(list(params) + list(est_params.x) + [est_params.fun, (end - start).total_seconds(), est_params.status, est_params.nfev]) sys.stderr.write('.') res = pd.DataFrame(results, columns=columns) res.index = pd.MultiIndex.from_tuples(cols) res.index.names = 'doc test maxfev num'.split() res.to_csv('kanungo_eval.csv') sys.stderr.write('\n')<|fim▁end|>
<|file_name|>GuiHandler.java<|end_file_name|><|fim▁begin|>package com.timotheteus.raincontrol.handlers; import com.timotheteus.raincontrol.tileentities.IGUITile; import com.timotheteus.raincontrol.tileentities.TileEntityInventoryBase; import net.minecraft.client.Minecraft; import net.minecraft.client.gui.GuiScreen; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.inventory.IInventory; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; import net.minecraftforge.fml.common.network.IGuiHandler; import net.minecraftforge.fml.network.FMLPlayMessages; import net.minecraftforge.fml.network.NetworkHooks; import javax.annotation.Nullable; public class GuiHandler implements IGuiHandler { public static GuiScreen openGui(FMLPlayMessages.OpenContainer openContainer) { BlockPos pos = openContainer.getAdditionalData().readBlockPos(); // new GUIChest(type, (IInventory) Minecraft.getInstance().player.inventory, (IInventory) Minecraft.getInstance().world.getTileEntity(pos)); TileEntityInventoryBase te = (TileEntityInventoryBase) Minecraft.getInstance().world.getTileEntity(pos); if (te != null) { return te.createGui(Minecraft.getInstance().player); } return null; } //TODO can remove these, I think @Nullable @Override public Object getServerGuiElement(int ID, EntityPlayer player, World world, int x, int y, int z) { BlockPos pos = new BlockPos(x, y, z); TileEntity te = world.getTileEntity(pos); if (te instanceof IGUITile) { return ((IGUITile) te).createContainer(player); } return null; } @Nullable @Override public Object getClientGuiElement(int ID, EntityPlayer player, World world, int x, int y, int z) { BlockPos pos = new BlockPos(x, y, z); TileEntity te = world.getTileEntity(pos); if (te instanceof IGUITile) { return ((IGUITile) te).createGui(player); } return null; } <|fim▁hole|><|fim▁end|>
}
<|file_name|>grooveoff_it.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.0" language="it_IT"> <context> <name>AboutDialog</name> <message> <location filename="../src/grooveoff/AboutDialog.ui" line="14"/> <source>About GrooveOff</source> <translation>Informazioni su GrooveOff</translation> </message> <message> <location filename="../src/grooveoff/AboutDialog.ui" line="192"/> <source>Close</source> <translation>Chiudi</translation> </message> <message> <location filename="../src/grooveoff/AboutDialog.cpp" line="55"/> <source>GrooveOff</source> <translation>GrooveOff</translation> </message> <message> <location filename="../src/grooveoff/AboutDialog.cpp" line="57"/> <source>Version </source> <translation>Versione </translation> </message> <message> <location filename="../src/grooveoff/AboutDialog.cpp" line="61"/> <source>Offline Grooveshark.com music.</source> <translation>Salva la musica di Grooveshark.com.</translation> </message> <message> <location filename="../src/grooveoff/AboutDialog.cpp" line="62"/> <source>GrooveOff can access the huge Grooveshark database through its &lt;a href=&quot;http://developers.grooveshark.com/docs/public_api/v3/&quot;&gt;public api&lt;/a&gt;.</source> <translation>GrooveOff accede all&apos;enorme database di Grooveshark attaverso la sua &lt;a href=&quot;http://developers.grooveshark.com/docs/public_api/v3/&quot;&gt;api pubblica&lt;/a&gt;.</translation> </message> <message> <location filename="../src/grooveoff/AboutDialog.cpp" line="63"/> <source>&lt;b&gt;Author&lt;/b&gt;: </source> <translation>&lt;b&gt;Autore&lt;/b&gt;: </translation> </message> <message> <location filename="../src/grooveoff/AboutDialog.cpp" line="64"/> <source>&lt;b&gt;License&lt;/b&gt;: </source> <translation>&lt;b&gt;Licenza&lt;/b&gt;: </translation> </message> <message utf8="true"> <location filename="../src/grooveoff/AboutDialog.cpp" line="66"/> <source>GrooveOff is not affiliated with Grooveshark™ and has 
not been reviewed or officially approved by Grooveshark.com. The author is not responsible for any violations this application does to Grooveshark&apos;s TOS. The author is not related to Grooveshark™ in any way! Support the Artists You Like by Buying Their Music.</source> <translation>GrooveOff non è affiliato a Grooveshark™ e non è stato sottoposto a verifica o approvazione ufficiale da parte di Grooveshark.com. L&apos;autore non è responsabile per eventuali violazioni ai termini d&apos;uso di Grooveshark. L&apos;autore non è coinvolto in alcun modo con Grooveshark™! Supporta i tuoi artisti preferiti acquistando la loro musica.</translation> </message> <message> <location filename="../src/grooveoff/AboutDialog.cpp" line="104"/> <source>Donation to Grooveoff&apos;s author</source> <translation>Donazione all&apos;autore di Grooveoff</translation> </message> <message> <location filename="../src/grooveoff/AboutDialog.cpp" line="65"/> <source>Disclaimer:</source> <translation>Dichiarazione di non responsabilità:</translation> </message> </context> <context> <name>ActionCollection</name> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="56"/> <source>&amp;Exit</source> <translation>&amp;Esci</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="172"/> <source>Close GrooveOff</source> <translation>Chiudi GrooveOff</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="63"/> <source>&amp;Donate</source> <translation>&amp;Dona</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="173"/> <source>Donate with PayPal</source> <translation>Dona con PayPal</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="86"/> <source>&amp;Compact Layout</source> <translation>Disposizione &amp;compatta</translation> </message> <message> <location 
filename="../src/grooveoff/ActionCollection.cpp" line="94"/> <source>&amp;Wide Layout</source> <translation>Disposizione &amp;estesa</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="102"/> <source>&amp;Mini Player</source> <translation></translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="117"/> <source>&amp;Stop all downloads</source> <translation>Ferma &amp;tutti gli scaricamenti</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="124"/> <source>&amp;Remove canceled/failed downloads</source> <translation>Rimuovi gli scaricamenti &amp;cancellati/falliti</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="176"/> <source>Removes all finished downloads preserving files on disk</source> <translation>Rimuove tutti gli scaricamenti terminati preservando i file su disco</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="75"/> <source>Configure &amp;GrooveOff...</source> <translation>Configura &amp;GrooveOff...</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="110"/> <source>Get &amp;new token...</source> <translation>Ottieni &amp;nuovo token...</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="131"/> <source>Clear &amp;finished</source> <translation>Elimina &amp;terminati</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="138"/> <source>&amp;About GrooveOff</source> <translation>&amp;Informazioni su GrooveOff</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="146"/> <source>About &amp;Qt</source> <translation>Informazioni su &amp;Qt</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="190"/> 
<source>&amp;Playlist</source> <translation>&amp;Scaletta</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="194"/> <source>&amp;File</source> <translation>&amp;File</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="212"/> <source>&amp;Downloads</source> <translation>&amp;Scaricamenti</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="223"/> <source>&amp;View</source> <translation>&amp;Visualizza</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="229"/> <source>&amp;Settings</source> <translation>&amp;Impostazioni</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="233"/> <source>&amp;Help</source> <translation>&amp;Aiuto</translation> </message> <message> <location filename="../src/grooveoff/ActionCollection.cpp" line="252"/> <source>Main Menu</source> <translation>Menu principale</translation> </message> </context> <context> <name>ConfigDialog</name> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="14"/> <source>Configure - GrooveOff</source> <translation>Configura - GrooveOff</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="50"/> <location filename="../src/grooveoff/ConfigDialog.cpp" line="146"/> <source>General</source> <translation>Generale</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="84"/> <source>Session</source> <translation>Sessione</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="90"/> <source>Save and restore session</source> <translation>Salva e ripristina la sessione</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="100"/> <source>Save also failed/aborted</source> <translation>Salva anche 
falliti/annullati</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="116"/> <source>History</source> <translation>Cronologia</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="122"/> <source>Save searches history</source> <translation>Salva cronologia ricerche</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="140"/> <source>History size:</source> <translation>Dimensione cronologia:</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="175"/> <source>Destination</source> <translation>Cartella di salvataggio</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="181"/> <source>Save last destination path</source> <translation>Ricorda l&apos;ultima cartella di salvataggio</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="191"/> <source>Naming pattern</source> <translation>Schema nome</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="234"/> <location filename="../src/grooveoff/ConfigDialog.cpp" line="147"/> <source>Performance</source> <translation>Prestazioni</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="268"/> <source>Covers</source> <translation>Copertine</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="274"/> <source>Load covers</source> <translation>Scarica copertine</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="284"/> <source>Empty covers cache on exit</source> <translation>Svuota la cache delle copertine all&apos;uscita</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="294"/> <source>Results</source> <translation>Risultati</translation> </message> <message> <location 
filename="../src/grooveoff/ConfigDialog.ui" line="308"/> <source>Limit search results:</source> <translation>Limita i risultati di ricerca:</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="343"/> <source>(0 means unlimited results)</source> <translation>(0 significa risultati illimitati)</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="356"/> <source>Downloads</source> <translation>Scaricamenti</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="364"/> <source>Max simultaneous downloads:</source> <translation>Numero massimo di scaricamenti simultanei:</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="426"/> <source>Default values</source> <translation>Valori predefiniti</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="446"/> <source>OK</source> <translation></translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="453"/> <source>Apply</source> <translation>Applica</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.ui" line="460"/> <source>Cancel</source> <translation>Annulla</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.cpp" line="71"/> <source>%title</source> <translation>%titolo</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.cpp" line="73"/> <source>%artist</source> <translation>%artista</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.cpp" line="75"/> <source>%album</source> <translation>%album</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.cpp" line="77"/> <source>%track</source> <translation>%traccia</translation> </message> <message> <location filename="../src/grooveoff/ConfigDialog.cpp" line="79"/> 
<source>%time</source> <translation>%durata</translation> </message> </context> <context> <name>DownloadItem</name> <message> <location filename="../src/grooveoff/DownloadItem.cpp" line="136"/> <source>Play</source> <translation>Riproduci</translation> </message> <message> <location filename="../src/grooveoff/DownloadItem.cpp" line="151"/> <source>Open folder</source> <translation>Apri cartella</translation> </message> <message> <location filename="../src/grooveoff/DownloadItem.cpp" line="253"/> <source>Queued</source> <translation>In coda</translation> </message> <message> <location filename="../src/grooveoff/DownloadItem.cpp" line="139"/> <source>Remove track from queue</source> <translation>Rimuovi traccia dalla coda</translation> </message> <message> <location filename="../src/grooveoff/DownloadItem.cpp" line="142"/> <source>Stop track download</source> <translation>Ferma scaricamento traccia</translation> </message> <message> <location filename="../src/grooveoff/DownloadItem.cpp" line="145"/> <source>Delete track from disk</source> <translation>Elimina la traccia dal disco</translation> </message> <message> <location filename="../src/grooveoff/DownloadItem.cpp" line="148"/> <source>Redownload track</source> <translation>Scarica nuovamente la traccia</translation> </message> <message> <location filename="../src/grooveoff/DownloadItem.cpp" line="332"/> <source>Aborted</source> <translation>Annullato</translation> </message> <message> <location filename="../src/grooveoff/DownloadItem.cpp" line="353"/> <source>Network or Server error</source> <translation>Errore di rete o del server</translation> </message> <message> <location filename="../src/grooveoff/DownloadItem.cpp" line="477"/> <source>Abort deletion</source> <translation>Annulla eliminazione</translation> </message> </context> <context> <name>MainWindow</name> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="236"/> <source>GrooveOff</source> <translation>GrooveOff</translation> 
</message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="241"/> <location filename="../src/grooveoff/MainWindow.cpp" line="578"/> <source>Connecting...</source> <translation>Connessione...</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="264"/> <source>Current save folder</source> <translation>Cartella di salvataggio corrente</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="246"/> <source>Search:</source> <translation>Cerca:</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="239"/> <source>Offline Grooveshark.com music</source> <translation>Salva la musica di Grooveshark.com</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="250"/> <location filename="../src/grooveoff/MainWindow.cpp" line="251"/> <source>Search for songs, artists, genres, playlists</source> <translation>Cerca canzoni, artisti, generi, playlist</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="255"/> <source>Start search</source> <translation>Inizia ricerca</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="258"/> <source>Main menu</source> <translation>Menu principale</translation><|fim▁hole|> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="261"/> <source>Save in:</source> <translation>Salva in:</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="274"/> <source>Select save folder</source> <translation>Seleziona cartella di salvataggio</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="282"/> <source>Download all tracks</source> <translation>Scarica tutte le tracce</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="487"/> <source>Select Directory</source> 
<translation>Seleziona cartella</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="547"/> <source>All Artists</source> <translation>Tutti gli artisti</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="549"/> <location filename="../src/grooveoff/MainWindow.cpp" line="1020"/> <source>All Albums</source> <translation>Tutti gli album</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="552"/> <source>Querying...</source> <translation>In attesa...</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="606"/> <source>Connected to Grooveshark</source> <translation>Connesso a Grooveshark</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="607"/> <source>You&apos;re connected to Grooveshark!</source> <translation>Sei connesso a Grooveshark!</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="614"/> <location filename="../src/grooveoff/MainWindow.cpp" line="622"/> <source>Connection error!!</source> <translation>Errore di connessione!!</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="737"/> <source>This song is already in queue.</source> <translation>Questa canzone è già in coda.</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="788"/> <source>The destination folder is not writable. Select a valid path</source> <translation>La cartella di destinazione non è scrivibile. 
Seleziona un percorso valido</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="981"/> <source>Donation to Grooveoff&apos;s author</source> <translation>Donazione all&apos;autore di Grooveoff</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="736"/> <location filename="../src/grooveoff/MainWindow.cpp" line="755"/> <location filename="../src/grooveoff/MainWindow.cpp" line="787"/> <location filename="../src/grooveoff/MainWindow.cpp" line="815"/> <source>Attention</source> <translation>Attenzione</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="756"/> <source>The destination folder does not exists. Select a valid path</source> <translation>La cartella di destinazione non esiste. Selezionare un percorso valido</translation> </message> <message numerus="yes"> <location filename="../src/grooveoff/MainWindow.cpp" line="633"/> <location filename="../src/grooveoff/MainWindow.cpp" line="650"/> <location filename="../src/grooveoff/MainWindow.cpp" line="1013"/> <source>%n song(s) found</source> <translation> <numerusform>%n brano trovato</numerusform> <numerusform>%n brani trovati</numerusform> </translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="773"/> <source>Overwrite File?</source> <translation>Sovrascrivere il file?</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="774"/> <source>A file named &quot;%1&quot; already exists. Are you sure you want to overwrite it?</source> <translation>Un file chiamato «%1» esiste già. 
Sei sicuro di volerlo sovrascrivere?</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="816"/> <source>Can&apos;t create destination path:</source> <translation>Non posso creare la cartella di destinazione:</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="816"/> <source>Aborting...</source> <translation>Annullo...</translation> </message> <message> <location filename="../src/grooveoff/MainWindow.cpp" line="965"/> <source>Offline</source> <translation>Non in linea</translation> </message> </context> <context> <name>MatchItem</name> <message> <location filename="../src/grooveoff/MatchItem.cpp" line="117"/> <source>Download again</source> <translation>Scarica di nuovo</translation> </message> <message> <location filename="../src/grooveoff/MatchItem.cpp" line="120"/> <source>Download</source> <translation>Scarica</translation> </message> </context> <context> <name>PlayerWidget</name> <message> <location filename="../src/grooveoff/PlayerWidget.cpp" line="101"/> <source>Bit Rate</source> <translation></translation> </message> <message> <location filename="../src/grooveoff/PlayerWidget.cpp" line="102"/> <source>Sample Rate</source> <translation></translation> </message> <message> <location filename="../src/grooveoff/PlayerWidget.cpp" line="207"/> <source>Pause</source> <translation>Pausa</translation> </message> <message> <location filename="../src/grooveoff/PlayerWidget.cpp" line="349"/> <source>Play</source> <translation>Riproduci</translation> </message> </context> </TS><|fim▁end|>
</message>
<|file_name|>deriving-global.rs<|end_file_name|><|fim▁begin|>// xfail-fast #7103 `extern mod` does not work on check-fast // xfail-pretty - does not converge // Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. extern mod extra; // {En,De}codable mod submod { // if any of these are implemented without global calls for any // function calls, then being in a submodule will (correctly) // cause errors about unrecognised module `std` (or `extra`) #[deriving(Eq, Ord, TotalEq, TotalOrd, IterBytes, Clone, DeepClone, ToStr, Rand, Encodable, Decodable)] enum A { A1(uint), A2(int) } #[deriving(Eq, Ord, TotalEq, TotalOrd, IterBytes, Clone, DeepClone, ToStr, Rand, Encodable, Decodable)] struct B { x: uint, y: int } <|fim▁hole|> IterBytes, Clone, DeepClone, ToStr, Rand, Encodable, Decodable)] struct C(uint, int); } fn main() {}<|fim▁end|>
#[deriving(Eq, Ord, TotalEq, TotalOrd,
<|file_name|>naive_bayes.py<|end_file_name|><|fim▁begin|>import numpy import numpy.linalg def training(inputs, minvar=0.1): """Trains a naive-bayes classifier using inputs Returns means and variances of the classifiers """ return numpy.mean(inputs, axis=0), numpy.maximum(minvar, numpy.var(inputs, axis=0)) def gaussian(input, mu, sigma2): """Calculates gaussian value for each input in the array """ return (1/ (2*numpy.sqrt(3.14*sigma2))) * \ numpy.exp( - ((input-mu)**2)/(2*sigma2)) def likelihood(inputs, means, variances): """Minimum distances between inputs and any reference Each element should be in a row! """ out = numpy.ones(inputs.shape[0]) for j in xrange(inputs.shape[1]): if variances[j] != 0: out = out * \ (gaussian (inputs[:,j], means[j], variances[j])) return out def naive_bayes(test, train): """Implements the whole naive bayes flow. Returns a likelihood array """ m, v = training(train) return likelihood(test, m, v) def naive_bayes_multidimensional(test, train): """Naive bayes analysis keeping dimensions isolated """ m, v = training(train) out = numpy.ones( (test.shape) ) for i in xrange(test.shape[0]): for j in xrange(test.shape[1]): out[i,j] = out[i,j] * \ (gaussian (test[i,j], m[j], v[j])) return out # a = numpy.array([[2, 4, 6], [4, 3, 2], [5, -2, -1], [10, 11, 12], [15, 20, 31]]) # b = numpy.array([[2, 4, 2], [4, 3, 1.5]]) # m, v = training(b) # print m, v<|fim▁hole|># print out<|fim▁end|>
# print likelihood(a, m, v) # out = naive_bayes_multidimensional(a, b) # out = (out / numpy.max(out)) * (out > 0.01)
<|file_name|>get_account_urls.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from keyring import get_password from boto.iam.connection import IAMConnection import lib.LoadBotoConfig as BotoConfig from sys import exit envs = ['dev', 'qa', 'staging', 'demo', 'prod'] for env in envs: id = BotoConfig.config.get(env, 'aws_access_key_id') key = get_password(BotoConfig.config.get(env, 'keyring'), id)<|fim▁hole|> conn = IAMConnection(aws_access_key_id=id, aws_secret_access_key=key) print(conn.get_signin_url())<|fim▁end|>
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var gulp = require("gulp"), del = require("del"), ts = require("gulp-typescript"), tsProject = ts.createProject("tsconfig.json") typedoc = require("gulp-typedoc"); var compileTS = function () { return tsProject.src()<|fim▁hole|> .js.pipe(gulp.dest("app")); }; gulp.task("doc", function() { return gulp .src(["src/**/*.ts"]) .pipe(typedoc({ module: "commonjs", target: "es5", out: "docs/", name: "My project title" })) ; }); gulp.task("ts", compileTS); gulp.task("cleanup", function() { return del([__dirname + "/app"]); }); gulp.task("default", ["cleanup", "ts"], function () { compileTS(); gulp.watch("src/**/*.ts", ["ts"]); });<|fim▁end|>
.pipe(tsProject())
<|file_name|>ovirt_disks.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (c) 2016 Red Hat, Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'version': '1.0'} DOCUMENTATION = ''' --- module: ovirt_disks short_description: "Module to manage Virtual Machine and floating disks in oVirt" version_added: "2.2" author: "Ondra Machacek (@machacekondra)" description: - "Module to manage Virtual Machine and floating disks in oVirt." options: id: description: - "ID of the disk to manage. Either C(id) or C(name) is required." name: description: - "Name of the disk to manage. Either C(id) or C(name)/C(alias) is required." aliases: ['alias'] vm_name: description: - "Name of the Virtual Machine to manage. Either C(vm_id) or C(vm_name) is required if C(state) is I(attached) or I(detached)." vm_id: description: - "ID of the Virtual Machine to manage. Either C(vm_id) or C(vm_name) is required if C(state) is I(attached) or I(detached)."<|fim▁hole|> state: description: - "Should the Virtual Machine disk be present/absent/attached/detached." choices: ['present', 'absent', 'attached', 'detached'] default: 'present' image_path: description: - "Path to disk image, which should be uploaded." - "Note that currently we support only compability version 0.10 of the qcow disk." 
- "Note that you must have an valid oVirt engine CA in your system trust store or you must provide it in C(ca_file) parameter." - "Note that there is no reliable way to achieve idempotency, so if you want to upload the disk even if the disk with C(id) or C(name) exists, then please use C(force) I(true). If you will use C(force) I(false), which is default, then the disk image won't be uploaded." version_added: "2.3" size: description: - "Size of the disk. Size should be specified using IEC standard units. For example 10GiB, 1024MiB, etc." - "Size can be only increased, not decreased." interface: description: - "Driver of the storage interface." choices: ['virtio', 'ide', 'virtio_scsi'] default: 'virtio' format: description: - Specify format of the disk. - If (cow) format is used, disk will by created as sparse, so space will be allocated for the volume as needed, also known as I(thin provision). - If (raw) format is used, disk storage will be allocated right away, also known as I(preallocated). - Note that this option isn't idempotent as it's not currently possible to change format of the disk via API. choices: ['raw', 'cow'] storage_domain: description: - "Storage domain name where disk should be created. By default storage is chosen by oVirt engine." storage_domains: description: - "Storage domain names where disk should be copied." - "C(**IMPORTANT**)" - "There is no reliable way to achieve idempotency, so every time you specify this parameter the disks are copied, so please handle your playbook accordingly to not copy the disks all the time. This is valid only for VM and floating disks, template disks works as expected." version_added: "2.3" force: description: - "Please take a look at C(image_path) documentation to see the correct usage of this parameter." version_added: "2.3" profile: description: - "Disk profile name to be attached to disk. By default profile is chosen by oVirt engine." bootable: description: - "I(True) if the disk should be bootable. 
By default when disk is created it isn't bootable." shareable: description: - "I(True) if the disk should be shareable. By default when disk is created it isn't shareable." logical_unit: description: - "Dictionary which describes LUN to be directly attached to VM:" - "C(address) - Address of the storage server. Used by iSCSI." - "C(port) - Port of the storage server. Used by iSCSI." - "C(target) - iSCSI target." - "C(lun_id) - LUN id." - "C(username) - CHAP Username to be used to access storage server. Used by iSCSI." - "C(password) - CHAP Password of the user to be used to access storage server. Used by iSCSI." - "C(storage_type) - Storage type either I(fcp) or I(iscsi)." extends_documentation_fragment: ovirt ''' EXAMPLES = ''' # Examples don't contain auth parameter for simplicity, # look at ovirt_auth module to see how to reuse authentication: # Create and attach new disk to VM - ovirt_disks: name: myvm_disk vm_name: rhel7 size: 10GiB format: cow interface: virtio # Attach logical unit to VM rhel7 - ovirt_disks: vm_name: rhel7 logical_unit: target: iqn.2016-08-09.brq.str-01:omachace id: 1IET_000d0001 address: 10.34.63.204 interface: virtio # Detach disk from VM - ovirt_disks: state: detached name: myvm_disk vm_name: rhel7 size: 10GiB format: cow interface: virtio # Upload local image to disk and attach it to vm: # Since Ansible 2.3 - ovirt_disks: name: mydisk vm_name: myvm interface: virtio size: 10GiB format: cow image_path: /path/to/mydisk.qcow2 storage_domain: data ''' RETURN = ''' id: description: "ID of the managed disk" returned: "On success if disk is found." type: str sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c disk: description: "Dictionary of all the disk attributes. Disk attributes can be found on your oVirt instance at following url: https://ovirt.example.com/ovirt-engine/api/model#types/disk." returned: "On success if disk is found and C(vm_id) or C(vm_name) wasn't passed." 
disk_attachment: description: "Dictionary of all the disk attachment attributes. Disk attachment attributes can be found on your oVirt instance at following url: https://ovirt.example.com/ovirt-engine/api/model#types/disk_attachment." returned: "On success if disk is found and C(vm_id) or C(vm_name) was passed and VM was found." ''' import os import time import traceback import ssl from httplib import HTTPSConnection try: from urllib.parse import urlparse except ImportError: from urlparse import urlparse try: import ovirtsdk4.types as otypes except ImportError: pass from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ovirt import ( BaseModule, check_sdk, check_params, create_connection, convert_to_bytes, equal, follow_link, ovirt_full_argument_spec, search_by_name, wait, ) def _search_by_lun(disks_service, lun_id): """ Find disk by LUN ID. """ res = [ disk for disk in disks_service.list(search='disk_type=lun') if ( disk.lun_storage.id == lun_id ) ] return res[0] if res else None def upload_disk_image(connection, module): size = os.path.getsize(module.params['image_path']) transfers_service = connection.system_service().image_transfers_service() transfer = transfers_service.add( otypes.ImageTransfer( image=otypes.Image( id=module.params['id'], ) ) ) transfer_service = transfers_service.image_transfer_service(transfer.id) try: # After adding a new transfer for the disk, the transfer's status will be INITIALIZING. # Wait until the init phase is over. The actual transfer can start when its status is "Transferring". 
while transfer.phase == otypes.ImageTransferPhase.INITIALIZING: time.sleep(module.params['poll_interval']) transfer = transfer_service.get() # Set needed headers for uploading: upload_headers = { 'Authorization': transfer.signed_ticket, } proxy_url = urlparse(transfer.proxy_url) context = ssl.create_default_context() auth = module.params['auth'] if auth.get('insecure'): context.check_hostname = False context.verify_mode = ssl.CERT_NONE elif auth.get('ca_file'): context.load_verify_locations(cafile=auth.get('ca_file')) proxy_connection = HTTPSConnection( proxy_url.hostname, proxy_url.port, context=context, ) with open(module.params['image_path'], "rb") as disk: chunk_size = 1024 * 1024 * 8 pos = 0 while pos < size: transfer_service.extend() upload_headers['Content-Range'] = "bytes %d-%d/%d" % (pos, min(pos + chunk_size, size) - 1, size) proxy_connection.request( 'PUT', proxy_url.path, disk.read(chunk_size), headers=upload_headers, ) r = proxy_connection.getresponse() if r.status >= 400: raise Exception("Failed to upload disk image.") pos += chunk_size finally: transfer_service.finalize() while transfer.phase in [ otypes.ImageTransferPhase.TRANSFERRING, otypes.ImageTransferPhase.FINALIZING_SUCCESS, ]: time.sleep(module.params['poll_interval']) transfer = transfer_service.get() if transfer.phase in [ otypes.ImageTransferPhase.UNKNOWN, otypes.ImageTransferPhase.FINISHED_FAILURE, otypes.ImageTransferPhase.FINALIZING_FAILURE, otypes.ImageTransferPhase.CANCELLED, ]: raise Exception( "Error occured while uploading image. 
The transfer is in %s" % transfer.phase ) if module.params.get('logical_unit'): disks_service = connection.system_service().disks_service() wait( service=disks_service.service(module.params['id']), condition=lambda d: d.status == otypes.DiskStatus.OK, wait=module.params['wait'], timeout=module.params['timeout'], ) return True class DisksModule(BaseModule): def build_entity(self): logical_unit = self._module.params.get('logical_unit') return otypes.Disk( id=self._module.params.get('id'), name=self._module.params.get('name'), description=self._module.params.get('description'), format=otypes.DiskFormat( self._module.params.get('format') ) if self._module.params.get('format') else None, sparse=self._module.params.get('format') != 'raw', provisioned_size=convert_to_bytes( self._module.params.get('size') ), storage_domains=[ otypes.StorageDomain( name=self._module.params.get('storage_domain'), ), ], shareable=self._module.params.get('shareable'), lun_storage=otypes.HostStorage( type=otypes.StorageType( logical_unit.get('storage_type', 'iscsi') ), logical_units=[ otypes.LogicalUnit( address=logical_unit.get('address'), port=logical_unit.get('port', 3260), target=logical_unit.get('target'), id=logical_unit.get('id'), username=logical_unit.get('username'), password=logical_unit.get('password'), ) ], ) if logical_unit else None, ) def update_storage_domains(self, disk_id): changed = False disk_service = self._service.service(disk_id) disk = disk_service.get() sds_service = self._connection.system_service().storage_domains_service() # We don't support move&copy for non file based storages: if disk.storage_type != otypes.DiskStorageType.IMAGE: return changed # Initiate move: if self._module.params['storage_domain']: new_disk_storage = search_by_name(sds_service, self._module.params['storage_domain']) changed = self.action( action='move', entity=disk, action_condition=lambda d: new_disk_storage.id != d.storage_domains[0].id, wait_condition=lambda d: d.status == 
otypes.DiskStatus.OK, storage_domain=otypes.StorageDomain( id=new_disk_storage.id, ), post_action=lambda _: time.sleep(self._module.params['poll_interval']), )['changed'] if self._module.params['storage_domains']: for sd in self._module.params['storage_domains']: new_disk_storage = search_by_name(sds_service, sd) changed = changed or self.action( action='copy', entity=disk, action_condition=( lambda disk: new_disk_storage.id not in [sd.id for sd in disk.storage_domains] ), wait_condition=lambda disk: disk.status == otypes.DiskStatus.OK, storage_domain=otypes.StorageDomain( id=new_disk_storage.id, ), )['changed'] return changed def _update_check(self, entity): return ( equal(self._module.params.get('description'), entity.description) and equal(convert_to_bytes(self._module.params.get('size')), entity.provisioned_size) and equal(self._module.params.get('shareable'), entity.shareable) ) class DiskAttachmentsModule(DisksModule): def build_entity(self): return otypes.DiskAttachment( disk=super(DiskAttachmentsModule, self).build_entity(), interface=otypes.DiskInterface( self._module.params.get('interface') ) if self._module.params.get('interface') else None, bootable=self._module.params.get('bootable'), active=True, ) def update_check(self, entity): return ( super(DiskAttachmentsModule, self)._update_check(follow_link(self._connection, entity.disk)) and equal(self._module.params.get('interface'), str(entity.interface)) and equal(self._module.params.get('bootable'), entity.bootable) ) def main(): argument_spec = ovirt_full_argument_spec( state=dict( choices=['present', 'absent', 'attached', 'detached'], default='present' ), id=dict(default=None), name=dict(default=None, aliases=['alias']), vm_name=dict(default=None), vm_id=dict(default=None), size=dict(default=None), interface=dict(default=None,), storage_domain=dict(default=None), storage_domains=dict(default=None, type='list'), profile=dict(default=None), format=dict(default='cow', choices=['raw', 'cow']), 
bootable=dict(default=None, type='bool'), shareable=dict(default=None, type='bool'), logical_unit=dict(default=None, type='dict'), image_path=dict(default=None), force=dict(default=False, type='bool'), ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, ) check_sdk(module) check_params(module) try: disk = None state = module.params['state'] connection = create_connection(module.params.get('auth')) disks_service = connection.system_service().disks_service() disks_module = DisksModule( connection=connection, module=module, service=disks_service, ) lun = module.params.get('logical_unit') if lun: disk = _search_by_lun(disks_service, lun.get('id')) ret = None # First take care of creating the VM, if needed: if state == 'present' or state == 'detached' or state == 'attached': ret = disks_module.create( entity=disk, result_state=otypes.DiskStatus.OK if lun is None else None, ) is_new_disk = ret['changed'] ret['changed'] = ret['changed'] or disks_module.update_storage_domains(ret['id']) # We need to pass ID to the module, so in case we want detach/attach disk # we have this ID specified to attach/detach method: module.params['id'] = ret['id'] if disk is None else disk.id # Upload disk image in case it's new disk or force parameter is passed: if module.params['image_path'] and (is_new_disk or module.params['force']): uploaded = upload_disk_image(connection, module) ret['changed'] = ret['changed'] or uploaded elif state == 'absent': ret = disks_module.remove() # If VM was passed attach/detach disks to/from the VM: if module.params.get('vm_id') is not None or module.params.get('vm_name') is not None and state != 'absent': vms_service = connection.system_service().vms_service() # If `vm_id` isn't specified, find VM by name: vm_id = module.params['vm_id'] if vm_id is None: vm_id = getattr(search_by_name(vms_service, module.params['vm_name']), 'id', None) if vm_id is None: module.fail_json( msg="VM don't exists, please create it first." 
) disk_attachments_service = vms_service.vm_service(vm_id).disk_attachments_service() disk_attachments_module = DiskAttachmentsModule( connection=connection, module=module, service=disk_attachments_service, changed=ret['changed'] if ret else False, ) if state == 'present' or state == 'attached': ret = disk_attachments_module.create() if lun is None: wait( service=disk_attachments_service.service(ret['id']), condition=lambda d:follow_link(connection, d.disk).status == otypes.DiskStatus.OK, wait=module.params['wait'], timeout=module.params['timeout'], ) elif state == 'detached': ret = disk_attachments_module.remove() module.exit_json(**ret) except Exception as e: module.fail_json(msg=str(e), exception=traceback.format_exc()) finally: connection.close(logout=False) if __name__ == "__main__": main()<|fim▁end|>
<|file_name|>formTemplateModel.js<|end_file_name|><|fim▁begin|>/**<|fim▁hole|> * @copyright (c) 2017 WP Ninjas * @since 3.0 */ define( [], function() { var model = Backbone.Model.extend( { defaults: { objectType: 'template', id: 'none', title: 'unknown' }, initialize: function() { this.set( 'desc', this.get( 'template-desc' ) ); } } ); return model; } );<|fim▁end|>
* Model that represents our form template. * * @package Ninja Forms client
<|file_name|>UserEtherpadInfoService.java<|end_file_name|><|fim▁begin|>/* * Code contributed to the Learning Layers project * http://www.learning-layers.eu * Development is partly funded by the FP7 Programme of the European * Commission under Grant Agreement FP7-ICT-318209. * Copyright (c) 2016, Karlsruhe University of Applied Sciences. * For a list of contributors see the AUTHORS file at the top-level directory * of this distribution. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|> * limitations under the License. */ package de.hska.ld.etherpad.service; import de.hska.ld.etherpad.persistence.domain.UserEtherpadInfo; public interface UserEtherpadInfoService { public UserEtherpadInfo save(UserEtherpadInfo userEtherpadInfo); public UserEtherpadInfo findById(Long id); public void storeSessionForUser(String sessionId, String groupId, Long validUntil, UserEtherpadInfo userEtherpadInfo); public void storeAuthorIdForCurrentUser(String authorId); public UserEtherpadInfo getUserEtherpadInfoForCurrentUser(); public UserEtherpadInfo findByAuthorId(String authorId); UserEtherpadInfo findBySessionId(String sessionId); }<|fim▁end|>
* See the License for the specific language governing permissions and
<|file_name|>patcher.rs<|end_file_name|><|fim▁begin|>use std::io::prelude::*; use std::fs; use std::cell::RefCell; use std::fs::File; use std::collections::HashMap; use rustc_serialize::json; use rustc_serialize::hex::ToHex; use parser; use parser::usbr; use parser::{Request, Source}; #[derive(RustcDecodable)] struct PatchMetadata { p_type: String, vendor_id: u16, product_id: u16, request: u8, requesttype: u8, patch_id: u32, // id of this patch min_matches: u16, // min number of matches before patch_id is considered a match } #[derive(RustcDecodable)] struct Patch { meta: PatchMetadata, data: String, // hex-encoded } pub struct Patcher { patches: Vec<Patch>, counts: RefCell<HashMap<u32, u16>>, } impl PatchMetadata { fn matches_descriptor(&self, header: &usbr::ControlPacketHeader) -> bool { self.request == header.request && self.requesttype == header.requesttype } } impl Patcher { pub fn new(dir_path: &str) -> Patcher { let mut patcher = Patcher { patches: vec![], counts: RefCell::new(HashMap::new()) }; for entry in fs::read_dir(dir_path).unwrap() { let mut file = match File::open(&entry.unwrap().path()) { Ok(file) => file, Err(e) => panic!("[E000-Patcher] Could not open file {}", e), }; // read file<|fim▁hole|> let patch: Patch = json::decode(&json_line).unwrap(); // Insert count entry if !patcher.counts.borrow().contains_key(&patch.meta.patch_id) { patcher.counts.borrow_mut().insert(patch.meta.patch_id, patch.meta.min_matches); } // Insert patch into our list patcher.patches.push(patch); } patcher } fn check_control_packet(&self, req: &Request) -> bool { let h_ptr = req.type_header.as_ptr() as *const usbr::ControlPacketHeader; let h: &usbr::ControlPacketHeader = unsafe { &*h_ptr }; for patch in &self.patches { if patch.meta.p_type != "control" { continue; } if patch.meta.matches_descriptor(h) { // The metadata matches, time to compare the data let data_hex: String = req.data[..].to_hex(); if data_hex.len() >= patch.data.len() && data_hex.contains(&patch.data) { 
let mut count: u16 = *self.counts.borrow().get(&patch.meta.patch_id).unwrap(); count -= 1; if count == 0 { error!("[E001-Patcher] matched at least {} signatures", patch.meta.min_matches); return false; } self.counts.borrow_mut().insert(patch.meta.patch_id, count); } } } true } fn check_bulk_packet(&self, req: &Request) -> bool { for patch in &self.patches { if patch.meta.p_type != "bulk" { continue; } // The metadata matches, time to compare the data let data_hex: String = req.data[..].to_hex(); if data_hex.len() >= patch.data.len() && data_hex.contains(&patch.data) { let mut count: u16 = *self.counts.borrow().get(&patch.meta.patch_id).unwrap(); count -= 1; if count == 0 { error!("[E002-Patcher] matched at least {} signatures", patch.meta.min_matches); return false; } self.counts.borrow_mut().insert(patch.meta.patch_id, count); } } true } fn check_connect(&self, req: &Request) -> bool { let h_ptr = req.type_header.as_ptr() as *const usbr::ConnectHeader; let h: &usbr::ConnectHeader = unsafe { &*h_ptr }; for patch in &self.patches { if patch.meta.p_type != "connect" { continue; } if patch.meta.vendor_id == h.vendor_id && patch.meta.product_id == h.product_id { error!("[E003-Patcher] malicious device found {:x}:{:x}", h.vendor_id, h.product_id); return false; } } true } } impl parser::HasHandlers for Patcher { fn handle_control_packet(&self, _: Source, req: Request) -> (u8, Vec<Request>) { if self.check_control_packet(&req) { (0, vec![req]) } else { (1, vec![req]) } } fn handle_bulk_packet(&self, _: Source, req: Request) -> (u8, Vec<Request>) { if self.check_bulk_packet(&req) { (0, vec![req]) } else { (1, vec![req]) } } fn handle_connect(&self, _: Source, req: Request) -> (u8, Vec<Request>) { if self.check_connect(&req) { (0, vec![req]) } else { (1, vec![req]) } } // TODO: Implement below // // fn handle_int_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) { // (0, vec![req]) // } // // fn handle_iso_packet(&self, source: Source, req: Request) -> (u8, 
Vec<Request>) { // (0, vec![req]) // } // // fn handle_buffered_bulk_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) { // (0, vec![req]) // } // }<|fim▁end|>
let mut json_line = String::new(); file.read_to_string(&mut json_line).unwrap(); // decode file
<|file_name|>OnboardingContainer.tsx<|end_file_name|><|fim▁begin|>import { useEffect } from 'react'; import { useModals } from '@proton/components'; import DriveOnboardingModal from '../components/onboarding/DriveOnboardingModal'; import DriveContainerBlurred from './DriveContainerBlurred'; interface Props { onDone: () => void; } const OnboardingContainer = ({ onDone }: Props) => { const { createModal } = useModals(); useEffect(() => {<|fim▁hole|> createModal(<DriveOnboardingModal onDone={onDone} />); }, []); return <DriveContainerBlurred />; }; export default OnboardingContainer;<|fim▁end|>
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import {Bounds, parseBounds, parseDocumentSize} from './css/layout/bounds'; import {COLORS, isTransparent, parseColor} from './css/types/color'; import {CloneConfigurations, CloneOptions, DocumentCloner, WindowOptions} from './dom/document-cloner'; import {isBodyElement, isHTMLElement, parseTree} from './dom/node-parser'; import {CacheStorage} from './core/cache-storage'; import {CanvasRenderer, RenderConfigurations, RenderOptions} from './render/canvas/canvas-renderer'; import {ForeignObjectRenderer} from './render/canvas/foreignobject-renderer'; import {Context, ContextOptions} from './core/context'; export type Options = CloneOptions & WindowOptions & RenderOptions & ContextOptions & { backgroundColor: string | null; foreignObjectRendering: boolean; removeContainer?: boolean; }; const html2canvas = (element: HTMLElement, options: Partial<Options> = {}): Promise<HTMLCanvasElement> => { return renderElement(element, options); }; export default html2canvas; if (typeof window !== 'undefined') { CacheStorage.setContext(window); } const renderElement = async (element: HTMLElement, opts: Partial<Options>): Promise<HTMLCanvasElement> => { if (!element || typeof element !== 'object') { return Promise.reject('Invalid element provided as first argument'); } const ownerDocument = element.ownerDocument; if (!ownerDocument) { throw new Error(`Element is not attached to a Document`); } const defaultView = ownerDocument.defaultView; if (!defaultView) { throw new Error(`Document is not attached to a Window`); } const resourceOptions = { allowTaint: opts.allowTaint ?? false, imageTimeout: opts.imageTimeout ?? 15000, proxy: opts.proxy, useCORS: opts.useCORS ?? false }; const contextOptions = { logging: opts.logging ?? true, cache: opts.cache, ...resourceOptions }; const windowOptions = { windowWidth: opts.windowWidth ?? defaultView.innerWidth, windowHeight: opts.windowHeight ?? defaultView.innerHeight, scrollX: opts.scrollX ?? 
defaultView.pageXOffset, scrollY: opts.scrollY ?? defaultView.pageYOffset }; const windowBounds = new Bounds( windowOptions.scrollX, windowOptions.scrollY, windowOptions.windowWidth, windowOptions.windowHeight ); const context = new Context(contextOptions, windowBounds); const foreignObjectRendering = opts.foreignObjectRendering ?? false; const cloneOptions: CloneConfigurations = { allowTaint: opts.allowTaint ?? false, onclone: opts.onclone, ignoreElements: opts.ignoreElements, inlineImages: foreignObjectRendering, copyStyles: foreignObjectRendering }; context.logger.debug( `Starting document clone with size ${windowBounds.width}x${ windowBounds.height } scrolled to ${-windowBounds.left},${-windowBounds.top}` ); const documentCloner = new DocumentCloner(context, element, cloneOptions); const clonedElement = documentCloner.clonedReferenceElement; if (!clonedElement) { return Promise.reject(`Unable to find element in cloned iframe`); } const container = await documentCloner.toIFrame(ownerDocument, windowBounds); const {width, height, left, top} = isBodyElement(clonedElement) || isHTMLElement(clonedElement) ? parseDocumentSize(clonedElement.ownerDocument) : parseBounds(context, clonedElement); const backgroundColor = parseBackgroundColor(context, clonedElement, opts.backgroundColor); const renderOptions: RenderConfigurations = { canvas: opts.canvas, backgroundColor, scale: opts.scale ?? defaultView.devicePixelRatio ?? 1, x: (opts.x ?? 0) + left, y: (opts.y ?? 0) + top, width: opts.width ?? Math.ceil(width), height: opts.height ?? 
Math.ceil(height) }; let canvas; if (foreignObjectRendering) { context.logger.debug(`Document cloned, using foreign object rendering`); const renderer = new ForeignObjectRenderer(context, renderOptions); canvas = await renderer.render(clonedElement); } else { context.logger.debug( `Document cloned, element located at ${left},${top} with size ${width}x${height} using computed rendering` ); context.logger.debug(`Starting DOM parsing`); const root = parseTree(context, clonedElement); if (backgroundColor === root.styles.backgroundColor) { root.styles.backgroundColor = COLORS.TRANSPARENT; } context.logger.debug( `Starting renderer for element at ${renderOptions.x},${renderOptions.y} with size ${renderOptions.width}x${renderOptions.height}` ); const renderer = new CanvasRenderer(context, renderOptions); canvas = await renderer.render(root); } if (opts.removeContainer ?? true) { if (!DocumentCloner.destroy(container)) { context.logger.error(`Cannot detach cloned iframe as it is not in the DOM anymore`); } } context.logger.debug(`Finished rendering`); return canvas; }; const parseBackgroundColor = (context: Context, element: HTMLElement, backgroundColorOverride?: string | null) => { const ownerDocument = element.ownerDocument; // http://www.w3.org/TR/css3-background/#special-backgrounds const documentBackgroundColor = ownerDocument.documentElement ? parseColor(context, getComputedStyle(ownerDocument.documentElement).backgroundColor as string) : COLORS.TRANSPARENT; const bodyBackgroundColor = ownerDocument.body ? parseColor(context, getComputedStyle(ownerDocument.body).backgroundColor as string) : COLORS.TRANSPARENT; const defaultBackgroundColor =<|fim▁hole|> : 0xffffffff; return element === ownerDocument.documentElement ? isTransparent(documentBackgroundColor) ? isTransparent(bodyBackgroundColor) ? defaultBackgroundColor : bodyBackgroundColor : documentBackgroundColor : defaultBackgroundColor; };<|fim▁end|>
typeof backgroundColorOverride === 'string' ? parseColor(context, backgroundColorOverride) : backgroundColorOverride === null ? COLORS.TRANSPARENT
<|file_name|>Func_readContinuousCharacterData.cpp<|end_file_name|><|fim▁begin|>#include <stddef.h> #include <set> #include <sstream> #include <string> #include <vector> #include "ArgumentRule.h" #include "ConstantNode.h" #include "ContinuousCharacterData.h" #include "Func_readContinuousCharacterData.h" #include "ModelVector.h" #include "NclReader.h" #include "RbException.h" #include "RbFileManager.h" #include "RlBoolean.h" #include "RlContinuousCharacterData.h" #include "RlString.h" #include "RlUserInterface.h" #include "AbstractCharacterData.h" #include "Argument.h" #include "ArgumentRules.h" #include "DagNode.h" #include "DeterministicNode.h" #include "DynamicNode.h" #include "IndirectReferenceFunction.h" #include "ModelObject.h" #include "RbBoolean.h" #include "RbVector.h" #include "RbVectorImpl.h" #include "RevObject.h" #include "RevPtr.h" #include "RevVariable.h" #include "RlConstantNode.h" #include "RlFunction.h" #include "TypeSpec.h" #include "TypedDagNode.h" #include "TypedFunction.h" #include "UserFunctionNode.h" using namespace RevLanguage; /** * The clone function is a convenience function to create proper copies of inherited objected. * E.g. a.clone() will create a clone of the correct type even if 'a' is of derived type 'b'. * * \return A new copy of the process. 
*/ Func_readContinuousCharacterData* Func_readContinuousCharacterData::clone( void ) const { return new Func_readContinuousCharacterData( *this ); } /** Execute function */ RevPtr<RevVariable> Func_readContinuousCharacterData::execute( void ) { // get the information from the arguments for reading the file const RlString& fn = static_cast<const RlString&>( args[0].getVariable()->getRevObject() ); bool return_as_vector = static_cast<const RlBoolean&>( args[1].getVariable()->getRevObject() ).getValue(); // check that the file/path name has been correctly specified RevBayesCore::RbFileManager my_file_manager( fn.getValue() ); if ( my_file_manager.testFile() == false && my_file_manager.testDirectory() == false ) { std::string errorStr = ""; my_file_manager.formatError(errorStr); throw RbException("Could not find file or path with name \"" + fn.getValue() + "\""); } // set up a vector of strings containing the name or names of the files to be read std::vector<std::string> vector_of_file_names; if ( my_file_manager.isDirectory() ) { my_file_manager.setStringWithNamesOfFilesInDirectory(vector_of_file_names); } else { vector_of_file_names.push_back( my_file_manager.getFullFileName() ); } // get the global instance of the NCL reader and clear warnings from its warnings buffer RevBayesCore::NclReader reader = RevBayesCore::NclReader(); // the vector of matrices; ModelVector<ContinuousCharacterData> *m = new ModelVector<ContinuousCharacterData>(); // the return value RevObject* ret_val = NULL; // Set up a map with the file name to be read as the key and the file type as the value. Note that we may not // read all of the files in the string called "vectorOfFileNames" because some of them may not be in a format // that can be read. 
size_t num_files_read = 0; for (std::vector<std::string>::iterator p = vector_of_file_names.begin(); p != vector_of_file_names.end(); ++p) { bool is_interleaved = false; std::string my_file_type = "unknown"; std::string data_type = "unknown"; if (reader.isNexusFile(*p) == true) { my_file_type = "nexus"; } else if (reader.isPhylipFile(*p, data_type, is_interleaved) == true) { my_file_type = "phylip"; } else if (reader.isFastaFile(*p, data_type) == true) { my_file_type = "fasta"; } int num_matrices_read_for_this_file = 0; if (my_file_type != "unknown") { std::string suffix = "|" + data_type; if ( my_file_type == "phylip" ) { if (is_interleaved == true)<|fim▁hole|> suffix += "|interleaved"; } else { suffix += "|noninterleaved"; } } else if ( my_file_type == "fasta" ) { suffix += "|noninterleaved"; } else { suffix += "|unknown"; } my_file_type += suffix; // read the content of the file now std::vector<RevBayesCore::AbstractCharacterData*> m_i = reader.readMatrices( *p, my_file_type ); for (std::vector<RevBayesCore::AbstractCharacterData*>::iterator it = m_i.begin(); it != m_i.end(); it++) { data_type = (*it)->getDataType(); // Assume success; correct below if failure num_matrices_read_for_this_file++; if ( data_type == "Continuous" ) { RevBayesCore::ContinuousCharacterData *coreM = static_cast<RevBayesCore::ContinuousCharacterData *>( *it ); ContinuousCharacterData mCC = ContinuousCharacterData (coreM ); m->push_back( mCC ); } else { num_matrices_read_for_this_file--; throw RbException("Unknown data type \"" + data_type + "\"."); } } } else { reader.addWarning("Unknown file type"); } if (num_matrices_read_for_this_file > 0) { num_files_read++; } } // print summary of results of file reading to the user if (my_file_manager.isDirectory() == true) { std::stringstream o2; if ( num_files_read == 0 ) { o2 << "Failed to read any files from directory '" << fn.getValue() << "'"; } else if ( num_files_read == 1 ) { if ( m->size() == 1 ) { o2 << "Successfully read one file with 
one character matrix from directory '" << fn.getValue() << "'"; ret_val = new ContinuousCharacterData( (*m)[0] ); delete m; } else { o2 << "Successfully read one file with " << m->size() << " character matrices from directory '" << fn.getValue() << "'"; ret_val = m; } } else { o2 << "Successfully read " << num_files_read << " files with " << m->size() << " character matrices from directory '" << fn.getValue() << "'"; ret_val = m; } RBOUT(o2.str()); std::set<std::string> my_warnings = reader.getWarnings(); if ( vector_of_file_names.size() - num_files_read > 0 && my_warnings.size() > 0 ) { std::stringstream o3; if (vector_of_file_names.size() - num_files_read == 1) { o3 << "Did not read a file for the following "; } else { o3 << "Did not read " << vector_of_file_names.size() - num_files_read << " files for the following "; } if (my_warnings.size() == 1) { o3 << "reason:"; } else { o3 << "reasons:"; } RBOUT(o3.str()); for (std::set<std::string>::iterator it = my_warnings.begin(); it != my_warnings.end(); it++) { RBOUT("* "+(*it)); } } } else { if (m->size() == 1) { RBOUT("Successfully read one character matrix from file '" + fn.getValue() + "'"); // set the return value if ( return_as_vector == false ) { ret_val = new ContinuousCharacterData( (*m)[0] ); delete m; } else { ret_val = m; } } else if (m->size() > 1) { std::stringstream o3; o3 << "Successfully read " << m->size() << " character matrices from file '" << fn.getValue() << "'"; RBOUT(o3.str()); // set the return value ret_val = m; } else { std::stringstream o3; o3 << "Error reading file '" << fn << "'"; RBOUT(o3.str()); RBOUT("No data matrix was read."); std::set<std::string> my_warnings = reader.getWarnings(); if ( my_warnings.size() > 0 ) { for (std::set<std::string>::iterator it = my_warnings.begin(); it != my_warnings.end(); it++) { RBOUT("Error: " + (*it)); } } } } return new RevVariable( ret_val ); } /** Get argument rules */ const ArgumentRules& Func_readContinuousCharacterData::getArgumentRules( void ) 
const { static ArgumentRules argumentRules = ArgumentRules(); static bool rules_set = false; if (!rules_set) { argumentRules.push_back( new ArgumentRule( "file", RlString::getClassTypeSpec(), "The name of the file or directory for the character data matrices.", ArgumentRule::BY_VALUE, ArgumentRule::ANY ) ); argumentRules.push_back( new ArgumentRule( "alwaysReturnAsVector", RlBoolean::getClassTypeSpec(), "Should we return this object as a vector even if it is just a single matrix?", ArgumentRule::BY_VALUE, ArgumentRule::ANY, new RlBoolean(false) ) ); rules_set = true; } return argumentRules; } /** Get Rev type of object */ const std::string& Func_readContinuousCharacterData::getClassType(void) { static std::string rev_type = "Func_readContinuousCharacterData"; return rev_type; } /** Get class type spec describing type of object */ const TypeSpec& Func_readContinuousCharacterData::getClassTypeSpec(void) { static TypeSpec rev_type_spec = TypeSpec( getClassType(), new TypeSpec( Function::getClassTypeSpec() ) ); return rev_type_spec; } /** * Get the primary Rev name for this function. */ std::string Func_readContinuousCharacterData::getFunctionName( void ) const { // create a name variable that is the same for all instance of this class std::string f_name = "readContinuousCharacterData"; return f_name; } /** Get type spec */ const TypeSpec& Func_readContinuousCharacterData::getTypeSpec( void ) const { static TypeSpec type_spec = getClassTypeSpec(); return type_spec; } /** Get return type */ const TypeSpec& Func_readContinuousCharacterData::getReturnType( void ) const { static TypeSpec return_typeSpec = ModelVector<ContinuousCharacterData>::getClassTypeSpec(); return return_typeSpec; }<|fim▁end|>
{
<|file_name|>helper.hpp<|end_file_name|><|fim▁begin|>#pragma once #include <QtGlobal> class Helper { public: <|fim▁hole|> ); static char decodeByte( char *buffer, quint16 position, quint8 positionOffset = 0 ); };<|fim▁end|>
static void encodeByte( char *buffer, quint16 position, char byte, quint8 positionOffset = 0
<|file_name|>CalDAV.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2010 Mark Liversedge ([email protected]) * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., 51 * Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ #include "CalDAV.h" #include "MainWindow.h" #include "Athlete.h" CalDAV::CalDAV(Context *context) : context(context), mode(None) { nam = new QNetworkAccessManager(this); connect(nam, SIGNAL(finished(QNetworkReply*)), this, SLOT(requestReply(QNetworkReply*))); connect(nam, SIGNAL(authenticationRequired(QNetworkReply*,QAuthenticator*)), this, SLOT(userpass(QNetworkReply*,QAuthenticator*))); // connect(nam, SIGNAL(sslErrors(QNetworkReply*,QList<QSslError>)), this, // SLOT(sslErrors(QNetworkReply*,QList<QSslError>))); } // // GET event directory listing // bool CalDAV::download() { QString url = appsettings->cvalue(context->athlete->cyclist, GC_DVURL, "").toString(); if (url == "") return false; // not configured QNetworkRequest request = QNetworkRequest(QUrl(url)); QByteArray *queryText = new QByteArray( "<?xml version=\"1.0\" encoding=\"utf-8\" ?>" "<C:calendar-query xmlns:D=\"DAV:\"" " xmlns:C=\"urn:ietf:params:xml:ns:caldav\">" " <D:prop>" " <D:getetag/>" " <C:calendar-data/>" " </D:prop>" " <C:filter>" " <C:comp-filter name=\"VCALENDAR\">" " <C:comp-filter name=\"VEVENT\">" " <C:time-range end=\"20200101T000000Z\" start=\"20000101T000000Z\"/>" " 
</C:comp-filter>" " </C:comp-filter>" " </C:filter>" "</C:calendar-query>\r\n"); request.setRawHeader("Depth", "0"); request.setRawHeader("Content-Type", "application/xml; charset=\"utf-8\""); request.setRawHeader("Content-Length", (QString("%1").arg(queryText->size())).toLatin1()); QBuffer *query = new QBuffer(queryText); mode = Events; QNetworkReply *reply = nam->sendCustomRequest(request, "REPORT", query); if (reply->error() != QNetworkReply::NoError) { QMessageBox::warning(context->mainWindow, tr("CalDAV REPORT url error"), reply->errorString()); mode = None; return false; } return true; } // // Get OPTIONS available // bool CalDAV::options() { QString url = appsettings->cvalue(context->athlete->cyclist, GC_DVURL, "").toString(); if (url == "") return false; // not configured QNetworkRequest request = QNetworkRequest(QUrl(url)); QByteArray *queryText = new QByteArray("<?xml version=\"1.0\" encoding=\"utf-8\" ?>" "<D:options xmlns:D=\"DAV:\">" " <C:calendar-home-set xmlns:C=\"urn:ietf:params:xml:ns:caldav\"/>" "</D:options>"); request.setRawHeader("Depth", "0"); request.setRawHeader("Content-Type", "text/xml; charset=\"utf-8\""); request.setRawHeader("Content-Length", (QString("%1").arg(queryText->size())).toLatin1()); QBuffer *query = new QBuffer(queryText); mode = Options; QNetworkReply *reply = nam->sendCustomRequest(request, "OPTIONS", query); if (reply->error() != QNetworkReply::NoError) { QMessageBox::warning(context->mainWindow, tr("CalDAV OPTIONS url error"), reply->errorString()); mode = None; return false; } return true; } // // Get URI Properties via PROPFIND // bool CalDAV::propfind() { QString url = appsettings->cvalue(context->athlete->cyclist, GC_DVURL, "").toString(); if (url == "") return false; // not configured QNetworkRequest request = QNetworkRequest(QUrl(url)); QByteArray *queryText = new QByteArray( "<?xml version=\"1.0\" encoding=\"utf-8\" ?>" "<D:propfind xmlns:D=\"DAV:\"" " xmlns:C=\"urn:ietf:params:xml:ns:caldav\">" " <D:prop>" " 
<D:displayname/>" " <C:calendar-timezone/> " " <C:supported-calendar-component-set/> " " </D:prop>" "</D:propfind>\r\n"); request.setRawHeader("Content-Type", "text/xml; charset=\"utf-8\""); request.setRawHeader("Content-Length", (QString("%1").arg(queryText->size())).toLatin1()); request.setRawHeader("Depth", "0"); QBuffer *query = new QBuffer(queryText); mode = PropFind; QNetworkReply *reply = nam->sendCustomRequest(request, "PROPFIND" , query); if (reply->error() != QNetworkReply::NoError) { QMessageBox::warning(context->mainWindow, tr("CalDAV OPTIONS url error"), reply->errorString()); mode = None; return false; } return true; } // // REPORT of "all" VEVENTS // bool CalDAV::report() { QString url = appsettings->cvalue(context->athlete->cyclist, GC_DVURL, "").toString(); if (url == "") return false; // not configured QNetworkRequest request = QNetworkRequest(QUrl(url)); QByteArray *queryText = new QByteArray("<x1:calendar-query xmlns:x1=\"urn:ietf:params:xml:ns:caldav\">" "<x0:prop xmlns:x0=\"DAV:\">" "<x0:getetag/>" "<x1:calendar-data/>" "</x0:prop>" "<x1:filter>" "<x1:comp-filter name=\"VCALENDAR\">" "<x1:comp-filter name=\"VEVENT\">" "<x1:time-range end=\"21001231\" start=\"20000101T000000Z\"/>" "</x1:comp-filter>" "</x1:comp-filter>" "</x1:filter>" "</x1:calendar-query>"); QBuffer *query = new QBuffer(queryText); mode = Report; QNetworkReply *reply = nam->sendCustomRequest(request, "REPORT", query); if (reply->error() != QNetworkReply::NoError) { QMessageBox::warning(context->mainWindow, tr("CalDAV REPORT url error"), reply->errorString()); mode = None; return false; } return true; } // utility function to create a VCALENDAR from a single RideItem static icalcomponent *createEvent(RideItem *rideItem) { // calendar icalcomponent *root = icalcomponent_new(ICAL_VCALENDAR_COMPONENT); // calendar version icalproperty *version = icalproperty_new_version("2.0"); icalcomponent_add_property(root, version); icalcomponent *event = 
icalcomponent_new(ICAL_VEVENT_COMPONENT); // // Unique ID // QString id = rideItem->ride()->id(); if (id == "") { id = QUuid::createUuid().toString() + "@" + "goldencheetah.org"; rideItem->ride()->setId(id); rideItem->notifyRideMetadataChanged(); rideItem->setDirty(true); // need to save this! } icalproperty *uid = icalproperty_new_uid(id.toLatin1()); icalcomponent_add_property(event, uid); // // START DATE // struct icaltimetype atime; QDateTime utc = rideItem->dateTime.toUTC(); atime.year = utc.date().year(); atime.month = utc.date().month(); atime.day = utc.date().day(); atime.hour = utc.time().hour(); atime.minute = utc.time().minute(); atime.second = utc.time().second(); atime.is_utc = 1; // this is UTC is_utc is redundant but kept for completeness atime.is_date = 0; // this is a date AND time atime.is_daylight = 0; // no daylight savings - its UTC atime.zone = icaltimezone_get_utc_timezone(); // set UTC timezone icalproperty *dtstart = icalproperty_new_dtstart(atime); icalcomponent_add_property(event, dtstart); // // DURATION // // override values? QMap<QString,QString> lookup; lookup = rideItem->ride()->metricOverrides.value("workout_time"); int secs = lookup.value("value", "0.0").toDouble(); // from last - first timestamp? 
if (!rideItem->ride()->dataPoints().isEmpty() && rideItem->ride()->dataPoints().last() != NULL) { if (!secs) secs = rideItem->ride()->dataPoints().last()->secs; } // ok, got secs so now create in vcard struct icaldurationtype dur; dur.is_neg = 0; dur.days = dur.weeks = 0; dur.hours = secs/3600; dur.minutes = secs%3600/60; dur.seconds = secs%60; icalcomponent_set_duration(event, dur); // set title & description QString title = rideItem->ride()->getTag("Title", ""); // *new* 'special' metadata field if (title == "") title = rideItem->ride()->getTag("Sport", "") + " Workout"; icalcomponent_set_summary(event, title.toLatin1()); // set description using standard stuff icalcomponent_set_description(event, rideItem->ride()->getTag("Calendar Text", "").toLatin1()); // attach ridefile // google doesn't support attachments yet. There is a labs option to use google docs // but it is only available to Google Apps customers. // put the event into root icalcomponent_add_component(root, event); return root; } // extract <calendar-data> entries and concatenate // into a single string. This is from a query response // where the VEVENTS are embedded within an XML document static QString extractComponents(QString document) { QString returning = ""; // parse the document and extract the multistatus node (there is only one of those) QDomDocument doc; if (document == "" || doc.setContent(document) == false) return ""; QDomNode multistatus = doc.documentElement(); if (multistatus.isNull()) return ""; // Google Calendar retains the namespace prefix in the results // Apple MobileMe doesn't. This means the element names will // possibly need a prefix... 
QString Dprefix = ""; QString Cprefix = ""; if (multistatus.nodeName().startsWith("D:")) { Dprefix = "D:"; Cprefix = "C:"; } // read all the responses within the multistatus for (QDomNode response = multistatus.firstChildElement(Dprefix + "response"); response.nodeName() == (Dprefix + "response"); response = response.nextSiblingElement(Dprefix + "response")) { // skate over the nest of crap to get at the calendar-data QDomNode propstat = response.firstChildElement(Dprefix + "propstat"); QDomNode prop = propstat.firstChildElement(Dprefix + "prop"); QDomNode calendardata = prop.firstChildElement(Cprefix + "calendar-data"); // extract the calendar entry - top and tail the other crap QString text = calendardata.toElement().text(); int start = text.indexOf("BEGIN:VEVENT"); int stop = text.indexOf("END:VEVENT"); if (start == -1 || stop == -1) continue; returning += text.mid(start, stop-start+10) + "\n"; } return returning; } // // PUT a ride item // bool CalDAV::upload(RideItem *rideItem) { // is this a valid ride? 
if (!rideItem || !rideItem->ride()) return false; QString url = appsettings->cvalue(context->athlete->cyclist, GC_DVURL, "").toString(); if (url == "") return false; // not configured // lets upload to calendar url += rideItem->fileName; url += ".ics"; // form the request QNetworkRequest request = QNetworkRequest(QUrl(url)); request.setRawHeader("Content-Type", "text/calendar"); request.setRawHeader("Content-Length", "xxxx"); // create the ICal event icalcomponent *vcard = createEvent(rideItem); QByteArray vcardtext(icalcomponent_as_ical_string(vcard)); icalcomponent_free(vcard); mode = Put; QNetworkReply *reply = nam->put(request, vcardtext); if (reply->error() != QNetworkReply::NoError) { mode = None; QMessageBox::warning(context->mainWindow, tr("CalDAV Calendar url error"), reply->errorString());<|fim▁hole|> return false; } return true; } // // All queries/commands respond here // void CalDAV::requestReply(QNetworkReply *reply) { QString response = reply->readAll(); switch (mode) { case Report: case Events: context->athlete->rideCalendar->refreshRemote(extractComponents(response)); break; default: case Options: case PropFind: case Put: //nothing at the moment break; } mode = None; } // // Provide user credentials, called when receive a 401 // void CalDAV::userpass(QNetworkReply*,QAuthenticator*a) { QString user = appsettings->cvalue(context->athlete->cyclist, GC_DVUSER, "").toString(); QString pass = appsettings->cvalue(context->athlete->cyclist, GC_DVPASS, "").toString(); a->setUser(user); a->setPassword(pass); } // // Trap SSL errors, does nothing ... for now // void CalDAV::sslErrors(QNetworkReply*,QList<QSslError>&) { }<|fim▁end|>
<|file_name|>vector_plot.py<|end_file_name|><|fim▁begin|>import numpy as np from numpy import ma from matplotlib import pyplot as plt from mpl_toolkits.basemap import Basemap as bm from mpl_toolkits.basemap import addcyclic import palettable class vector_plot: def __init__(self, ucompos, vcompos): self.ucompos = ucompos self.vcompos = vcompos <|fim▁hole|> self.windspeed = np.sqrt(np.power(self.uanoms, 2) + np.power(self.vanoms, 2)) def plotmap(self, domain = [0., 360., -90., 90.], res='c', stepp=2, scale=20): latitudes = self.windspeed.latitudes.data longitudes = self.windspeed.longitudes.data m = bm(projection='cyl',llcrnrlat=latitudes.min(),urcrnrlat=latitudes.max(),\ llcrnrlon=longitudes.min(),urcrnrlon=longitudes.max(),\ lat_ts=0, resolution=res) lons, lats = np.meshgrid(longitudes, latitudes) cmap = palettable.colorbrewer.sequential.Oranges_9.mpl_colormap f, ax = plt.subplots(figsize=(10,6)) m.ax = ax x, y = m(lons, lats) im = m.pcolormesh(lons, lats, self.windspeed.data, cmap=cmap) cb = m.colorbar(im) cb.set_label('wind speed (m/s)', fontsize=14) Q = m.quiver(x[::stepp,::stepp], y[::stepp,::stepp], \ self.uanoms.data[::stepp,::stepp], self.vanoms.data[::stepp,::stepp], \ pivot='middle', scale=scale) l,b,w,h = ax.get_position().bounds qk = plt.quiverkey(Q, l+w-0.1, b-0.03, 5, "5 m/s", labelpos='E', fontproperties={'size':14}, coordinates='figure') m.drawcoastlines() return f<|fim▁end|>
self.uanoms = self.ucompos.dset['composite_anomalies'] self.vanoms = self.vcompos.dset['composite_anomalies']
<|file_name|>drumkv1widget_elements.cpp<|end_file_name|><|fim▁begin|>// drumkv1widget_elements.cpp // /**************************************************************************** Copyright (C) 2012-2020, rncbc aka Rui Nuno Capela. All rights reserved. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. *****************************************************************************/ #include "drumkv1widget_elements.h" #include "drumkv1widget.h" #include "drumkv1_ui.h" #include "drumkv1_sample.h" #include <QApplication> #include <QHeaderView> #include <QFileInfo> #include <QMimeData> #include <QDrag> #include <QUrl> #include <QIcon> #include <QPixmap> #include <QTimer> #include <QDragEnterEvent> #include <QDragMoveEvent> #include <QDropEvent> //---------------------------------------------------------------------------- // drumkv1widget_elements_model -- List model. // Constructor. 
drumkv1widget_elements_model::drumkv1widget_elements_model ( drumkv1_ui *pDrumkUi, QObject *pParent ) : QAbstractItemModel(pParent), m_pDrumkUi(pDrumkUi) { QIcon icon; icon.addPixmap( QPixmap(":/images/ledOff.png"), QIcon::Normal, QIcon::Off); icon.addPixmap( QPixmap(":/images/ledOn.png"), QIcon::Normal, QIcon::On); m_pixmaps[0] = new QPixmap( icon.pixmap(12, 12, QIcon::Normal, QIcon::Off)); m_pixmaps[1] = new QPixmap( icon.pixmap(12, 12, QIcon::Normal, QIcon::On)); m_headers << tr("Element") << tr("Sample"); for (int i = 0; i < MAX_NOTES; ++i) m_notes_on[i] = 0; reset(); } // Destructor. drumkv1widget_elements_model::~drumkv1widget_elements_model (void) { delete m_pixmaps[1]; delete m_pixmaps[0]; } int drumkv1widget_elements_model::rowCount ( const QModelIndex& /*parent*/ ) const { return MAX_NOTES; } int drumkv1widget_elements_model::columnCount ( const QModelIndex& /*parent*/ ) const { return m_headers.count(); } QVariant drumkv1widget_elements_model::headerData ( int section, Qt::Orientation orient, int role ) const { if (orient == Qt::Horizontal) { switch (role) { case Qt::DisplayRole: return m_headers.at(section); case Qt::TextAlignmentRole: return columnAlignment(section); default: break; } } return QVariant(); } QVariant drumkv1widget_elements_model::data ( const QModelIndex& index, int role ) const { switch (role) { case Qt::DecorationRole: if (index.column() == 0) return *m_pixmaps[m_notes_on[index.row()] > 0 ? 1 : 0]; break; case Qt::DisplayRole: return itemDisplay(index); case Qt::TextAlignmentRole: return columnAlignment(index.column()); case Qt::ToolTipRole: return itemToolTip(index); default: break; } return QVariant(); } QModelIndex drumkv1widget_elements_model::index ( int row, int column, const QModelIndex& /*parent*/) const { return createIndex(row, column, (m_pDrumkUi ? 
m_pDrumkUi->element(row) : nullptr)); } QModelIndex drumkv1widget_elements_model::parent ( const QModelIndex& ) const { return QModelIndex(); } drumkv1_element *drumkv1widget_elements_model::elementFromIndex ( const QModelIndex& index ) const { return static_cast<drumkv1_element *> (index.internalPointer()); } drumkv1_ui *drumkv1widget_elements_model::instance (void) const { return m_pDrumkUi; } void drumkv1widget_elements_model::midiInLedNote ( int key, int vel ) { if (vel > 0) { m_notes_on[key] = vel; midiInLedUpdate(key); } else if (m_notes_on[key] > 0) { QTimer::singleShot(200, this, SLOT(midiInLedTimeout())); } } void drumkv1widget_elements_model::midiInLedTimeout (void) { for (int key = 0; key < MAX_NOTES; ++key) { if (m_notes_on[key] > 0) { m_notes_on[key] = 0; midiInLedUpdate(key); } } } void drumkv1widget_elements_model::midiInLedUpdate ( int key ) { const QModelIndex& index = drumkv1widget_elements_model::index(key, 0); #if QT_VERSION >= QT_VERSION_CHECK(5, 1, 0) emit dataChanged(index, index, QVector<int>() << Qt::DecorationRole); #else emit dataChanged(index, index); #endif } void drumkv1widget_elements_model::reset (void) { #if QT_VERSION < QT_VERSION_CHECK(5, 0, 0) QAbstractItemModel::reset(); #else QAbstractItemModel::beginResetModel(); QAbstractItemModel::endResetModel(); #endif } QString drumkv1widget_elements_model::itemDisplay ( const QModelIndex& index ) const { switch (index.column()) { case 0: // Element. return drumkv1widget::completeNoteName(index.row()); case 1: // Sample. 
drumkv1_element *element = elementFromIndex(index); if (element) { const char *pszSampleFile = element->sampleFile(); if (pszSampleFile) return QFileInfo(pszSampleFile).completeBaseName(); else return tr("(None)"); } } return QString('-'); } QString drumkv1widget_elements_model::itemToolTip ( const QModelIndex& index ) const { QString sToolTip = '[' + drumkv1widget::completeNoteName(index.row()) + ']'; drumkv1_element *element = elementFromIndex(index); if (element) { const char *pszSampleFile = element->sampleFile(); if (pszSampleFile) { sToolTip += '\n'; sToolTip += QFileInfo(pszSampleFile).completeBaseName(); } } return sToolTip; } int drumkv1widget_elements_model::columnAlignment( int /*column*/ ) const { return int(Qt::AlignLeft | Qt::AlignVCenter); } //---------------------------------------------------------------------------- // drumkv1widget_elements -- Custom (tree) list view. // Constructor. drumkv1widget_elements::drumkv1widget_elements ( QWidget *pParent ) : QTreeView(pParent), m_pModel(nullptr), m_pDragSample(nullptr), m_iDirectNoteOn(-1), m_iDirectNoteOnVelocity(64) { resetDragState(); } // Destructor. drumkv1widget_elements::~drumkv1widget_elements (void) { if (m_pModel) delete m_pModel; } // Settlers. 
void drumkv1widget_elements::setInstance ( drumkv1_ui *pDrumkUi ) { if (m_pModel) delete m_pModel; m_pModel = new drumkv1widget_elements_model(pDrumkUi); QTreeView::setModel(m_pModel); QTreeView::setSelectionMode(QAbstractItemView::SingleSelection); QTreeView::setRootIsDecorated(false); QTreeView::setUniformRowHeights(true); QTreeView::setItemsExpandable(false); QTreeView::setAllColumnsShowFocus(true); QTreeView::setAlternatingRowColors(true); QTreeView::setMinimumSize(QSize(360, 80)); QTreeView::setSizePolicy( QSizePolicy::Preferred, QSizePolicy::MinimumExpanding); QTreeView::setAcceptDrops(true); QHeaderView *pHeader = QTreeView::header(); pHeader->setDefaultAlignment(Qt::AlignLeft); pHeader->setStretchLastSection(true); // Element selectors QObject::connect(QTreeView::selectionModel(), SIGNAL(currentRowChanged(const QModelIndex&, const QModelIndex&)), SLOT(currentRowChanged(const QModelIndex&, const QModelIndex&))); QObject::connect(this, SIGNAL(doubleClicked(const QModelIndex&)), SLOT(doubleClicked(const QModelIndex&))); } drumkv1_ui *drumkv1widget_elements::instance (void) const { return (m_pModel ? m_pModel->instance() : nullptr); } // Current element accessors. void drumkv1widget_elements::setCurrentIndex ( int row ) { QTreeView::setCurrentIndex(m_pModel->index(row, 0)); } int drumkv1widget_elements::currentIndex (void) const { return QTreeView::currentIndex().row(); } // Internal slot handlers. void drumkv1widget_elements::currentRowChanged ( const QModelIndex& current, const QModelIndex& /*previous*/ ) { emit itemActivated(current.row()); } void drumkv1widget_elements::doubleClicked ( const QModelIndex& index ) { emit itemDoubleClicked(index.row()); } // Mouse interaction. void drumkv1widget_elements::mousePressEvent ( QMouseEvent *pMouseEvent ) { if (pMouseEvent->button() == Qt::LeftButton) { const QPoint& pos = pMouseEvent->pos(); if (pos.x() > 0 && pos.x() < 16) { directNoteOn(QTreeView::indexAt(pos).row()); return; // avoid double-clicks... 
} else { m_dragState = DragStart; m_posDrag = pos; } } QTreeView::mousePressEvent(pMouseEvent); } void drumkv1widget_elements::mouseMoveEvent ( QMouseEvent *pMouseEvent ) { QTreeView::mouseMoveEvent(pMouseEvent); if (m_dragState == DragStart && (m_posDrag - pMouseEvent->pos()).manhattanLength() > QApplication::startDragDistance()) { drumkv1_element *element = static_cast<drumkv1_element *> ( QTreeView::currentIndex().internalPointer()); // Start dragging alright... if (element && element->sample()) { QList<QUrl> urls; m_pDragSample = element->sample(); urls.append(QUrl::fromLocalFile(m_pDragSample->filename())); QMimeData *pMimeData = new QMimeData(); pMimeData->setUrls(urls);; QDrag *pDrag = new QDrag(this); pDrag->setMimeData(pMimeData); pDrag->exec(Qt::CopyAction); } resetDragState(); } } void drumkv1widget_elements::mouseReleaseEvent ( QMouseEvent *pMouseEvent ) { QTreeView::mouseReleaseEvent(pMouseEvent); directNoteOff(); m_pDragSample = nullptr; resetDragState(); } // Drag-n-drop (more of the later) support. void drumkv1widget_elements::dragEnterEvent ( QDragEnterEvent *pDragEnterEvent ) { QTreeView::dragEnterEvent(pDragEnterEvent); if (pDragEnterEvent->mimeData()->hasUrls()) pDragEnterEvent->acceptProposedAction(); } void drumkv1widget_elements::dragMoveEvent ( QDragMoveEvent *pDragMoveEvent ) { QTreeView::dragMoveEvent(pDragMoveEvent); if (pDragMoveEvent->mimeData()->hasUrls()) { const QModelIndex& index #if QT_VERSION >= QT_VERSION_CHECK(6, 0, 0) = QTreeView::indexAt(pDragMoveEvent->position().toPoint()); #else = QTreeView::indexAt(pDragMoveEvent->pos()); #endif if (index.isValid()) { setCurrentIndex(index.row()); if (m_pDragSample) { drumkv1_element *element = static_cast<drumkv1_element *> ( index.internalPointer()); // Start dragging alright... 
if (element && m_pDragSample == element->sample()) return; } pDragMoveEvent->acceptProposedAction(); } } } void drumkv1widget_elements::dropEvent ( QDropEvent *pDropEvent ) { QTreeView::dropEvent(pDropEvent); const QMimeData *pMimeData = pDropEvent->mimeData(); if (pMimeData->hasUrls()) { const QString& sFilename = QListIterator<QUrl>(pMimeData->urls()).peekNext().toLocalFile(); if (!sFilename.isEmpty()) emit itemLoadSampleFile(sFilename, currentIndex()); } } // Reset drag/select state. void drumkv1widget_elements::resetDragState (void) { m_dragState = DragNone; } // Refreshner. void drumkv1widget_elements::refresh (void) { if (m_pModel == nullptr) return; QItemSelectionModel *pSelectionModel = QTreeView::selectionModel(); const QModelIndex& index = pSelectionModel->currentIndex(); m_pModel->reset(); QTreeView::header()->resizeSections(QHeaderView::ResizeToContents); pSelectionModel->setCurrentIndex(index, QItemSelectionModel::NoUpdate); } // Default size hint. QSize drumkv1widget_elements::sizeHint (void) const { return QSize(360, 80); } // MIDI input status update void drumkv1widget_elements::midiInLedNote ( int key, int vel ) { if (m_pModel) m_pModel->midiInLedNote(key, vel); } // Direct note-on/off methods. void drumkv1widget_elements::directNoteOn ( int key ) { if (m_pModel == nullptr || key < 0) return; drumkv1_ui *pDrumkUi = m_pModel->instance(); if (pDrumkUi == nullptr) return; m_iDirectNoteOn = key; pDrumkUi->directNoteOn(m_iDirectNoteOn, m_iDirectNoteOnVelocity); drumkv1_sample *pSample = pDrumkUi->sample(); if (pSample) { const float srate_ms = 0.001f * pSample->sampleRate(); const int timeout_ms = int(float(pSample->length() >> 1) / srate_ms); QTimer::singleShot(timeout_ms, this, SLOT(directNoteOff())); } } void drumkv1widget_elements::directNoteOff (void) { if (m_pModel == nullptr || m_iDirectNoteOn < 0) return; drumkv1_ui *pDrumkUi = m_pModel->instance(); if (pDrumkUi == nullptr) return; <|fim▁hole|> // Direct note-on velocity accessors. 
void drumkv1widget_elements::setDirectNoteOnVelocity ( int vel ) { m_iDirectNoteOnVelocity = vel; } int drumkv1widget_elements::directNoteOnVelocity (void) const { return m_iDirectNoteOnVelocity; } // end of drumkv1widget_elements.cpp<|fim▁end|>
pDrumkUi->directNoteOn(m_iDirectNoteOn, 0); // note-off! m_iDirectNoteOn = -1; }
<|file_name|>construct-the-rectangle.py<|end_file_name|><|fim▁begin|>class Solution(object): def constructRectangle(self, area): """ :type area: int :rtype: List[int] """ ans = None W = 1 while W * W <= area:<|fim▁hole|> if area % W == 0: ans = [area / W, W] W += 1 return ans<|fim▁end|>
<|file_name|>placement.cc<|end_file_name|><|fim▁begin|>/* * This file is part of the demos-linux package. * Copyright (C) 2011-2022 Mark Veltzer <[email protected]> *<|fim▁hole|> * (at your option) any later version. * * demos-linux is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with demos-linux. If not, see <http://www.gnu.org/licenses/>. */ #include <firstinclude.h> #include <stdlib.h> // for malloc(3), free(3), EXIT_SUCCESS #include <iostream> // for std::cout, std::endl /* * This example shows how to use the C++ operator new placement * operator. * * Things we learn: * 1. How to write your own placement function. * 2. Regular constructor gets called after the placement. * 3. Releasing of space could be overridden too. * 4. This could be used for caching and real time considerations for instance. * 5. Even if you allocate an array the delete[] is NOT called so * your regular delete operator needs to know how to do the job * both for arrays and for single elements (if you want arrays * at all that is...). * * TODO: * - show in place construction (calling the constructor on an otherwise * allocated block of ram) */ class A { public: float val; A(void) { val=-7.6; } A(double ival) { val=ival; } void *operator new(size_t size, double val) { std::cout << "in new operator" << std::endl; std::cout << "size is " << size << std::endl; void *pointer=malloc(size); std::cout << "pointer is " << pointer << std::endl; // next two lines have no effect since the constructor // will be called and will override it // A *p=(A *)pointer; // p->val=val; return(pointer); } // this is for allocating arrays, the size that you get // is SizeOfObject*NumOfObjects... 
void *operator new[] (const size_t size) { std::cout << "in new[] operator" << std::endl; std::cout << "size is " << size << std::endl; void *pointer=malloc(size); std::cout << "pointer is " << pointer << std::endl; return(pointer); } // notice that this does NOT get called... void operator delete[] (void *pointer) { std::cout << "in delete[] operator" << std::endl; std::cout << "pointer is " << pointer << std::endl; free(pointer); } void* operator new(size_t size) { std::cout << "in new operator" << std::endl; std::cout << "size is " << size << std::endl; // void *pointer=new char[size]; void *pointer=malloc(size); std::cout << "pointer is " << pointer << std::endl; return(pointer); } void operator delete(void *pointer) { std::cout << "in delete operator" << std::endl; std::cout << "pointer is " << pointer << std::endl; free(pointer); } }; int main(int argc, char** argv, char** envp) { std::cout << "heap no arguments example" << std::endl; A* a=new A(); std::cout << "a->val is " << a->val << std::endl; #pragma GCC diagnostic ignored "-Wmismatched-new-delete" delete a; std::cout << "heap arguments example" << std::endl; A* b=new(5.5)A(); std::cout << "b->val is " << b->val << std::endl; #pragma GCC diagnostic ignored "-Wmismatched-new-delete" delete b; std::cout << "many heap no arguments example" << std::endl; const unsigned int num_objs=5; A* e=new A[num_objs]; for(unsigned int i=0; i<num_objs; i++) { std::cout << i << " " << "e->val is " << e[i].val << std::endl; } delete[] e; // the next two examples are stack examples in which case neither // the new nor the delete operator will be called (memory is stack // memory). // could you write a C++ object which can be used ONLY on the stack // or conversly ONLY on the heap using this property ?!? 
std::cout << "stack no arguments example" << std::endl; A c; std::cout << "c.val is " << c.val << std::endl; std::cout << "stack arguments example" << std::endl; A d(6.7); std::cout << "d.val is " << d.val << std::endl; return EXIT_SUCCESS; }<|fim▁end|>
* demos-linux is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or
<|file_name|>list_documents.rs<|end_file_name|><|fim▁begin|>use crate::domain::model::error::Error as ModelError; use crate::domain::ports::secondary::list::{List, Parameters}; use async_trait::async_trait; use common::document::ContainerDocument; use futures::stream::{Stream, StreamExt}; use std::pin::Pin; use tracing::info_span; use tracing_futures::Instrument; type PinnedStream<T> = Pin<Box<dyn Stream<Item = T> + Send + 'static>>; #[async_trait] pub trait ListDocuments<D> { async fn list_documents(&self) -> Result<PinnedStream<Result<D, ModelError>>, ModelError>; } #[async_trait] impl<D, T> ListDocuments<D> for T where D: ContainerDocument + Send + Sync + 'static, T: List<D> + Send + Sync, { async fn list_documents(&self) -> Result<PinnedStream<Result<D, ModelError>>, ModelError> { let doc_type = D::static_doc_type().to_string(); let documents = self .list_documents(Parameters { doc_type }) .await? .map(|raw| raw.map_err(|err| ModelError::DocumentRetrievalError { source: err.into() })) .instrument(info_span!(<|fim▁hole|> Ok(documents.boxed()) } }<|fim▁end|>
"List documents", doc_type = D::static_doc_type(), ));
<|file_name|>content.js<|end_file_name|><|fim▁begin|>/* global describe, before, it */ require('mocha')<|fim▁hole|>require('should') var async = require('async') var testUtils = require('./testUtils') var errorMessage = require('../errorMessages/errorMessages') var mongo_dcms_core = require('../index') describe('should get file Content by file id', function () { var filepath = './test/testAsset/testTextFile.txt' var savedFileId var document = { filePath: filepath, fileName: 'testTextFile.txt', contentType: 'binary/octet-stream', identityMetaData: { projectId: 10 } } before(function (done) { this.timeout(5000) async.series([ function (callback) { testUtils.clearDb(callback) }, function (callback) { mongo_dcms_core.connect(testUtils.dbUrl) mongo_dcms_core.uploadFile(document, {}, function (err, sucess) { if (err) { callback(err) } else { savedFileId = sucess.fileId callback(null) } }) } ], done) }) it('should get file Content by file id', function (done) { mongo_dcms_core.getFileContentByFileId(savedFileId, function (err, sucess) { if (err) { err.should.equal(null) done() } else { sucess.fileData.should.not.equal(null) sucess.contentType.should.equal('binary/octet-stream') sucess.fileName.should.equal('testTextFile.txt') sucess.fileMetaData.should.not.equal(null) sucess.fileMetaData.should.not.equal(undefined) done() } }) }) it('should return message file not found', function (done) { mongo_dcms_core.getFileContentByFileId('56f0dc0ca80f6cc01929cd1e', function (err, sucess) { if (err) { err.should.equal(errorMessage.fileNotFoundForSpecifiedFileId) done() } else { sucess.should.equal(null) done() } }) }) })<|fim▁end|>
<|file_name|>tracemalloc.py<|end_file_name|><|fim▁begin|>from collections.abc import Sequence, Iterable from functools import total_ordering import fnmatch import linecache import os.path import pickle # Import types and functions implemented in C from _tracemalloc import * from _tracemalloc import _get_object_traceback, _get_traces def _format_size(size, sign): for unit in ('B', 'KiB', 'MiB', 'GiB', 'TiB'): if abs(size) < 100 and unit != 'B': # 3 digits (xx.x UNIT) if sign: return "%+.1f %s" % (size, unit) else: return "%.1f %s" % (size, unit) if abs(size) < 10 * 1024 or unit == 'TiB': # 4 or 5 digits (xxxx UNIT) if sign: return "%+.0f %s" % (size, unit) else: return "%.0f %s" % (size, unit) size /= 1024 class Statistic: """ Statistic difference on memory allocations between two Snapshot instance. """ __slots__ = ('traceback', 'size', 'count') def __init__(self, traceback, size, count): self.traceback = traceback self.size = size self.count = count def __hash__(self): return hash((self.traceback, self.size, self.count)) def __eq__(self, other): return (self.traceback == other.traceback and self.size == other.size and self.count == other.count) def __str__(self): text = ("%s: size=%s, count=%i" % (self.traceback, _format_size(self.size, False), self.count)) if self.count: average = self.size / self.count text += ", average=%s" % _format_size(average, False) return text def __repr__(self): return ('<Statistic traceback=%r size=%i count=%i>' % (self.traceback, self.size, self.count)) def _sort_key(self): return (self.size, self.count, self.traceback) class StatisticDiff: """ Statistic difference on memory allocations between an old and a new Snapshot instance. 
""" __slots__ = ('traceback', 'size', 'size_diff', 'count', 'count_diff') def __init__(self, traceback, size, size_diff, count, count_diff): self.traceback = traceback self.size = size self.size_diff = size_diff self.count = count self.count_diff = count_diff def __hash__(self): return hash((self.traceback, self.size, self.size_diff, self.count, self.count_diff)) def __eq__(self, other): return (self.traceback == other.traceback and self.size == other.size and self.size_diff == other.size_diff and self.count == other.count and self.count_diff == other.count_diff) def __str__(self): text = ("%s: size=%s (%s), count=%i (%+i)" % (self.traceback, _format_size(self.size, False), _format_size(self.size_diff, True), self.count, self.count_diff)) if self.count: average = self.size / self.count text += ", average=%s" % _format_size(average, False) return text def __repr__(self): return ('<StatisticDiff traceback=%r size=%i (%+i) count=%i (%+i)>' % (self.traceback, self.size, self.size_diff, self.count, self.count_diff)) def _sort_key(self): return (abs(self.size_diff), self.size, abs(self.count_diff), self.count, self.traceback) def _compare_grouped_stats(old_group, new_group): statistics = [] for traceback, stat in new_group.items(): previous = old_group.pop(traceback, None) if previous is not None: stat = StatisticDiff(traceback, stat.size, stat.size - previous.size, stat.count, stat.count - previous.count) else: stat = StatisticDiff(traceback, stat.size, stat.size, stat.count, stat.count) statistics.append(stat) for traceback, stat in old_group.items(): stat = StatisticDiff(traceback, 0, -stat.size, 0, -stat.count) statistics.append(stat) return statistics @total_ordering class Frame: """ Frame of a traceback. 
""" __slots__ = ("_frame",) def __init__(self, frame): # frame is a tuple: (filename: str, lineno: int) self._frame = frame @property def filename(self): return self._frame[0] @property def lineno(self): return self._frame[1] def __eq__(self, other): return (self._frame == other._frame) def __lt__(self, other): return (self._frame < other._frame) def __hash__(self): return hash(self._frame) def __str__(self): return "%s:%s" % (self.filename, self.lineno) def __repr__(self): return "<Frame filename=%r lineno=%r>" % (self.filename, self.lineno) @total_ordering class Traceback(Sequence): """ Sequence of Frame instances sorted from the oldest frame to the most recent frame. """ __slots__ = ("_frames",) def __init__(self, frames): Sequence.__init__(self) # frames is a tuple of frame tuples: see Frame constructor for the # format of a frame tuple; it is reversed, because _tracemalloc # returns frames sorted from most recent to oldest, but the # Python API expects oldest to most recent self._frames = tuple(reversed(frames)) def __len__(self): return len(self._frames) def __getitem__(self, index): if isinstance(index, slice): return tuple(Frame(trace) for trace in self._frames[index]) else: return Frame(self._frames[index]) def __contains__(self, frame): return frame._frame in self._frames def __hash__(self): return hash(self._frames) def __eq__(self, other): return (self._frames == other._frames) <|fim▁hole|> return (self._frames < other._frames) def __str__(self): return str(self[0]) def __repr__(self): return "<Traceback %r>" % (tuple(self),) def format(self, limit=None, most_recent_first=False): lines = [] if limit is not None: if limit > 0: frame_slice = self[-limit:] else: frame_slice = self[:limit] else: frame_slice = self if most_recent_first: frame_slice = reversed(frame_slice) for frame in frame_slice: lines.append(' File "%s", line %s' % (frame.filename, frame.lineno)) line = linecache.getline(frame.filename, frame.lineno).strip() if line: lines.append(' %s' % 
line) return lines def get_object_traceback(obj): """ Get the traceback where the Python object *obj* was allocated. Return a Traceback instance. Return None if the tracemalloc module is not tracing memory allocations or did not trace the allocation of the object. """ frames = _get_object_traceback(obj) if frames is not None: return Traceback(frames) else: return None class Trace: """ Trace of a memory block. """ __slots__ = ("_trace",) def __init__(self, trace): # trace is a tuple: (domain: int, size: int, traceback: tuple). # See Traceback constructor for the format of the traceback tuple. self._trace = trace @property def domain(self): return self._trace[0] @property def size(self): return self._trace[1] @property def traceback(self): return Traceback(self._trace[2]) def __eq__(self, other): return (self._trace == other._trace) def __hash__(self): return hash(self._trace) def __str__(self): return "%s: %s" % (self.traceback, _format_size(self.size, False)) def __repr__(self): return ("<Trace domain=%s size=%s, traceback=%r>" % (self.domain, _format_size(self.size, False), self.traceback)) class _Traces(Sequence): def __init__(self, traces): Sequence.__init__(self) # traces is a tuple of trace tuples: see Trace constructor self._traces = traces def __len__(self): return len(self._traces) def __getitem__(self, index): if isinstance(index, slice): return tuple(Trace(trace) for trace in self._traces[index]) else: return Trace(self._traces[index]) def __contains__(self, trace): return trace._trace in self._traces def __eq__(self, other): return (self._traces == other._traces) def __repr__(self): return "<Traces len=%s>" % len(self) def _normalize_filename(filename): filename = os.path.normcase(filename) if filename.endswith('.pyc'): filename = filename[:-1] return filename class BaseFilter: def __init__(self, inclusive): self.inclusive = inclusive def _match(self, trace): raise NotImplementedError class Filter(BaseFilter): def __init__(self, inclusive, 
filename_pattern, lineno=None, all_frames=False, domain=None): super().__init__(inclusive) self.inclusive = inclusive self._filename_pattern = _normalize_filename(filename_pattern) self.lineno = lineno self.all_frames = all_frames self.domain = domain @property def filename_pattern(self): return self._filename_pattern def _match_frame_impl(self, filename, lineno): filename = _normalize_filename(filename) if not fnmatch.fnmatch(filename, self._filename_pattern): return False if self.lineno is None: return True else: return (lineno == self.lineno) def _match_frame(self, filename, lineno): return self._match_frame_impl(filename, lineno) ^ (not self.inclusive) def _match_traceback(self, traceback): if self.all_frames: if any(self._match_frame_impl(filename, lineno) for filename, lineno in traceback): return self.inclusive else: return (not self.inclusive) else: filename, lineno = traceback[0] return self._match_frame(filename, lineno) def _match(self, trace): domain, size, traceback = trace res = self._match_traceback(traceback) if self.domain is not None: if self.inclusive: return res and (domain == self.domain) else: return res or (domain != self.domain) return res class DomainFilter(BaseFilter): def __init__(self, inclusive, domain): super().__init__(inclusive) self._domain = domain @property def domain(self): return self._domain def _match(self, trace): domain, size, traceback = trace return (domain == self.domain) ^ (not self.inclusive) class Snapshot: """ Snapshot of traces of memory blocks allocated by Python. """ def __init__(self, traces, traceback_limit): # traces is a tuple of trace tuples: see _Traces constructor for # the exact format self.traces = _Traces(traces) self.traceback_limit = traceback_limit def dump(self, filename): """ Write the snapshot into a file. """ with open(filename, "wb") as fp: pickle.dump(self, fp, pickle.HIGHEST_PROTOCOL) @staticmethod def load(filename): """ Load a snapshot from a file. 
""" with open(filename, "rb") as fp: return pickle.load(fp) def _filter_trace(self, include_filters, exclude_filters, trace): if include_filters: if not any(trace_filter._match(trace) for trace_filter in include_filters): return False if exclude_filters: if any(not trace_filter._match(trace) for trace_filter in exclude_filters): return False return True def filter_traces(self, filters): """ Create a new Snapshot instance with a filtered traces sequence, filters is a list of Filter or DomainFilter instances. If filters is an empty list, return a new Snapshot instance with a copy of the traces. """ if not isinstance(filters, Iterable): raise TypeError("filters must be a list of filters, not %s" % type(filters).__name__) if filters: include_filters = [] exclude_filters = [] for trace_filter in filters: if trace_filter.inclusive: include_filters.append(trace_filter) else: exclude_filters.append(trace_filter) new_traces = [trace for trace in self.traces._traces if self._filter_trace(include_filters, exclude_filters, trace)] else: new_traces = self.traces._traces.copy() return Snapshot(new_traces, self.traceback_limit) def _group_by(self, key_type, cumulative): if key_type not in ('traceback', 'filename', 'lineno'): raise ValueError("unknown key_type: %r" % (key_type,)) if cumulative and key_type not in ('lineno', 'filename'): raise ValueError("cumulative mode cannot by used " "with key type %r" % key_type) stats = {} tracebacks = {} if not cumulative: for trace in self.traces._traces: domain, size, trace_traceback = trace try: traceback = tracebacks[trace_traceback] except KeyError: if key_type == 'traceback': frames = trace_traceback elif key_type == 'lineno': frames = trace_traceback[:1] else: # key_type == 'filename': frames = ((trace_traceback[0][0], 0),) traceback = Traceback(frames) tracebacks[trace_traceback] = traceback try: stat = stats[traceback] stat.size += size stat.count += 1 except KeyError: stats[traceback] = Statistic(traceback, size, 1) else: # 
cumulative statistics for trace in self.traces._traces: domain, size, trace_traceback = trace for frame in trace_traceback: try: traceback = tracebacks[frame] except KeyError: if key_type == 'lineno': frames = (frame,) else: # key_type == 'filename': frames = ((frame[0], 0),) traceback = Traceback(frames) tracebacks[frame] = traceback try: stat = stats[traceback] stat.size += size stat.count += 1 except KeyError: stats[traceback] = Statistic(traceback, size, 1) return stats def statistics(self, key_type, cumulative=False): """ Group statistics by key_type. Return a sorted list of Statistic instances. """ grouped = self._group_by(key_type, cumulative) statistics = list(grouped.values()) statistics.sort(reverse=True, key=Statistic._sort_key) return statistics def compare_to(self, old_snapshot, key_type, cumulative=False): """ Compute the differences with an old snapshot old_snapshot. Get statistics as a sorted list of StatisticDiff instances, grouped by group_by. """ new_group = self._group_by(key_type, cumulative) old_group = old_snapshot._group_by(key_type, cumulative) statistics = _compare_grouped_stats(old_group, new_group) statistics.sort(reverse=True, key=StatisticDiff._sort_key) return statistics def take_snapshot(): """ Take a snapshot of traces of memory blocks allocated by Python. """ if not is_tracing(): raise RuntimeError("the tracemalloc module must be tracing memory " "allocations to take a snapshot") traces = _get_traces() traceback_limit = get_traceback_limit() return Snapshot(traces, traceback_limit)<|fim▁end|>
def __lt__(self, other):
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#[derive(Debug)] struct Rectangle { width: u32, height: u32, } // Methods are similar to functions: they’re declared with the fn keyword and // their name, they can have parameters and a return value, and they contain // some code that is run when they’re called from somewhere else. However, // methods are different from functions in that they’re defined within the // context of a struct (or an enum or a trait object, which we cover in Chapters // 6 and 17, respectively), and their first parameter is always self, which // represents the instance of the struct the method is being called on. impl Rectangle { fn area(&self) -> u32 { self.width * self.height } fn can_hold($self, other: &Rectangle) -> bool { self.width > other.width && self.height > other.height } } fn main() { let rect1 = Rectangle { width: 30, height: 50 }; println!("The area of the Rectangle is {} square pixels.", rect1.area() ); let rect2 = Rectangle { width: 10, height: 40 }; let rect3 = Rectangle { width: 60, height: 45 };<|fim▁hole|><|fim▁end|>
println!("Can rect1 hold rect2? {}", rect1.can_hold(&rect2)); println!("Can rect1 hold rect3? {}", rect1.can_hold(&rect3)); }
<|file_name|>image.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """Image operations.""" from __future__ import absolute_import as _abs from . import _make def resize(data, size, layout="NCHW", method="bilinear", align_corners=True, out_dtype=None): """Image resize operator. This operator takes data as input and does 2D scaling to the given scale factor. In the default case, where the data_layout is `NCHW` with data of shape (n, c, h, w) out will have a shape (n, c, size[0], size[1]) method indicates the algorithm to be used while calculating ghe out value and method can be one of ("bilinear", "nearest_neighbor", "bicubic") Parameters<|fim▁hole|> ---------- data : relay.Expr The input data to the operator. size: Tuple of Expr The out size to which the image will be resized. layout : str, optional Layout of the input. method : str, optional Scale method to used [nearest_neighbor, bilinear, bicubic]. align_corners : int, optional Should be true to preserve the values at the corner pixels out_dtype : str, optional Type to return. If left None returns the same type as input. Returns ------- result: relay.Expr The resized result. """ return _make.resize(data, size, layout, method, align_corners, out_dtype)<|fim▁end|>
<|file_name|>sync_request_bouncer.hpp<|end_file_name|><|fim▁begin|>/****************************************************************************** * ____ _ _____ * * / ___| / \ | ___| C++ * * | | / _ \ | |_ Actor * * | |___ / ___ \| _| Framework * * \____/_/ \_|_| * * * * Copyright (C) 2011 - 2016 * * Dominik Charousset <dominik.charousset (at) haw-hamburg.de> * * * * Distributed under the terms and conditions of the BSD 3-Clause License or * * (at your option) under the terms and conditions of the Boost Software * * License 1.0. See accompanying files LICENSE and LICENSE_ALTERNATIVE. * * * * If you did not receive a copy of the license files, see * * http://opensource.org/licenses/BSD-3-Clause and * * http://www.boost.org/LICENSE_1_0.txt. * ******************************************************************************/ #ifndef CAF_DETAIL_SYNC_REQUEST_BOUNCER_HPP #define CAF_DETAIL_SYNC_REQUEST_BOUNCER_HPP #include <cstdint> <|fim▁hole|> namespace caf { class actor_addr; class message_id; class local_actor; class mailbox_element; } // namespace caf namespace caf { namespace detail { struct sync_request_bouncer { error rsn; explicit sync_request_bouncer(error r); void operator()(const strong_actor_ptr& sender, const message_id& mid) const; void operator()(const mailbox_element& e) const; }; } // namespace detail } // namespace caf #endif // CAF_DETAIL_SYNC_REQUEST_BOUNCER_HPP<|fim▁end|>
#include "caf/fwd.hpp" #include "caf/exit_reason.hpp"
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>module.exports = function (grunt) { // Project configuration. grunt.initConfig({ // Metadata. pkg : grunt.file.readJSON('storelocator.jquery.json'), banner : '/*! <%= pkg.title || pkg.name %> - v<%= pkg.version %> - ' + '<%= grunt.template.today("yyyy-mm-dd") %>\n' + '<%= pkg.homepage ? "* " + pkg.homepage + "\\n" : "" %>' + '* Copyright (c) <%= grunt.template.today("yyyy") %> <%= pkg.author.name %>;' + ' Licensed <%= _.map(pkg.licenses, "type").join(", ") %> */\n', // Task configuration. clean : { files: ['dist'] }, sass : { dist: { files: { 'dist/assets/css/storelocator.css' : 'src/css/storelocator.scss', 'dist/assets/css/bootstrap-example.css' : 'src/css/bootstrap-example.scss' } } }, concat : { options: { stripBanners: true }, dist : { src : ['src/js/jquery.<%= pkg.name %>.js'], dest: 'dist/assets/js/plugins/storeLocator/jquery.<%= pkg.name %>.js' } }, uglify: { dist: { files: { 'dist/assets/js/plugins/storeLocator/jquery.<%= pkg.name %>.min.js': '<%= concat.dist.dest %>', 'dist/assets/js/libs/handlebars.min.js' : 'libs/handlebars/*.js', 'dist/assets/js/geocode.min.js' : 'src/js/geocode.js', 'dist/assets/js/libs/markerclusterer.min.js' : 'libs/markerclusterer/*.js', } } }, qunit : { files: ['test/**/*.html'] }, jshint : { gruntfile: { options: { jshintrc: '.jshintrc' }, src : 'Gruntfile.js' }, src : { options: {<|fim▁hole|> jQuery: true, google: true }, src : ['src/**/*.js'] }, test : { options: { jshintrc: 'test/.jshintrc' }, src : ['test/**/*.js'] } }, usebanner: { dist: { options: { position: 'top', banner : '<%= banner %>' }, files : { 'dist/assets/js/plugins/storeLocator/jquery.<%= pkg.name %>.js' : 'dist/assets/js/plugins/storeLocator/jquery.<%= pkg.name %>.js', 'dist/assets/js/plugins/storeLocator/jquery.<%= pkg.name %>.min.js': 'dist/assets/js/plugins/storeLocator/jquery.<%= pkg.name %>.min.js' } } }, cssmin : { dist: { files: { 'dist/assets/css/storelocator.min.css': 
'dist/assets/css/storelocator.css', 'dist/assets/css/bootstrap-example.min.css': 'dist/assets/css/bootstrap-example.css' } } }, handlebars : { dist: { files: { 'dist/assets/js/plugins/storeLocator/templates/compiled/standard-templates.js': 'src/templates/standard/*.html', 'dist/assets/js/plugins/storeLocator/templates/compiled/kml-templates.js': 'src/templates/kml/*.html' } } }, watch : { gruntfile: { files: '<%= jshint.gruntfile.src %>', tasks: ['jshint:gruntfile'] }, src : { files : ['src/**/*'], tasks : ['sass', 'concat', 'uglify', 'usebanner', 'cssmin'], options: { spawn : false } }, test : { files: '<%= jshint.test.src %>', tasks: ['jshint:test', 'qunit'] } } }); // These plugins provide necessary tasks. grunt.loadNpmTasks('grunt-contrib-clean'); grunt.loadNpmTasks('grunt-contrib-sass'); grunt.loadNpmTasks('grunt-contrib-concat'); grunt.loadNpmTasks('grunt-contrib-uglify'); grunt.loadNpmTasks('grunt-contrib-qunit'); grunt.loadNpmTasks('grunt-contrib-jshint'); grunt.loadNpmTasks('grunt-contrib-watch'); grunt.loadNpmTasks('grunt-banner'); grunt.loadNpmTasks('grunt-contrib-cssmin'); grunt.loadNpmTasks('grunt-contrib-handlebars'); // Build grunt.registerTask('build', ['sass', 'concat', 'uglify', 'usebanner', 'cssmin']); //Watch src build grunt.registerTask('watchsrc', ['watch:src']); };<|fim▁end|>
jshintrc: 'src/.jshintrc' }, globals: {
<|file_name|>test_hwaf_projects_test.go<|end_file_name|><|fim▁begin|>package main_test import ( "fmt" "io/ioutil" "os" "path/filepath" "strings" "testing" "text/template" ) type pkgdef_t struct { Name string Deps []string } func (p pkgdef_t) BaseName() string { return filepath.Base(p.Name) } func (p pkgdef_t) TestName() string { return "test" + p.BaseName() } func (p pkgdef_t) LibName() string { return "Lib" + p.BaseName() } func (p pkgdef_t) PkgDeps() string { if len(p.Deps) <= 0 { return "" } s := []string{ "deps: {", " public: [", } for _, dep := range p.Deps { s = append( s, " "+dep+",", ) } s = append( s, " ],", "},", ) // reindent for i := range s { s[i] = " " + s[i] } return strings.Join(s, "\n") } func (p pkgdef_t) LibUse() string { if len(p.Deps) <= 0 { return "" } s := []string{ "use: [", } for _, dep := range p.Deps { s = append( s, fmt.Sprintf(` "Lib%s",`, dep), ) } s = append( s, "],", ) // reindent for i := range s { s[i] = " " + s[i] } return strings.Join(s, "\n") } func (p pkgdef_t) HdrIncludes() string { if len(p.Deps) <= 0 { return "" } s := make([]string, 0, len(p.Deps)) for _, dep := range p.Deps { s = append( s, fmt.Sprintf(`#include "%s/Lib%s.hxx"`, dep, dep), ) } return strings.Join(s, "\n") } func (p pkgdef_t) HdrMembers() string { if len(p.Deps) <= 0 { return "" } s := make([]string, 0, len(p.Deps)) for _, dep := range p.Deps { s = append( s, fmt.Sprintf(` CLib%s m_%s;`, dep, dep), ) } return strings.Join(s, "\n") } func (p pkgdef_t) LibMembers() string { if len(p.Deps) <= 0 { return "" } s := make([]string, 0, len(p.Deps)) for _, dep := range p.Deps { s = append( s, fmt.Sprintf(` m_%s.f();`, dep), ) } return strings.Join(s, "\n") } func (p pkgdef_t) TestUse() string { if len(p.Deps) <= 0 { return "use: [" + p.LibName() + "]," } s := []string{ "use: [", } for _, dep := range p.Deps { s = append( s, fmt.Sprintf(` "Lib%s",`, dep), ) } s = append( s, fmt.Sprintf(` "%s",`, p.LibName()), "],", ) // reindent for i := range s { s[i] = " " + s[i] } 
return strings.Join(s, "\n") } func TestMultiProject(t *testing.T) { workdir, err := ioutil.TempDir("", "hwaf-test-") if err != nil { t.Fatalf(err.Error()) } defer os.RemoveAll(workdir) //fmt.Printf(">>> test: %s\n", workdir) err = os.Chdir(workdir) if err != nil { t.Fatalf(err.Error()) } hwaf, err := newlogger("hwaf.log") if err != nil { t.Fatalf(err.Error()) } defer hwaf.Close() const hscript_tmpl = ` ## -*- yaml -*- package: { name: "{{.Name}}", authors: ["me"], {{.PkgDeps}} } configure: { tools: ["compiler_c", "compiler_cxx", "find_python"], env: { PYTHONPATH: "${INSTALL_AREA}/python:${PYTHONPATH}", }, } build: { {{.TestName}}: { features: "cxx cxxprogram hwaf_install_headers hwaf_utest", includes: "includes", export_includes: "includes", cwd: "includes",<|fim▁hole|> }, {{.LibName}}: { features: "cxx cxxshlib hwaf_install_headers hwaf_export_lib", includes: "includes", export_includes: "includes", cwd: "includes", source: "src/{{.LibName}}.cxx", {{.LibUse}} }, } ` const src_lib_tmpl = ` #include <iostream> #include "{{.BaseName}}/{{.LibName}}.hxx" C{{.LibName}}::C{{.LibName}}() { std::cout << "c-tor C{{.LibName}}" << std::endl; } C{{.LibName}}::~C{{.LibName}}() { std::cout << "d-tor C{{.LibName}}" << std::endl; } void C{{.LibName}}::f() { std::cout << "C{{.LibName}}.f" << std::endl; {{.LibMembers}} } ` const hdr_lib_tmpl = ` #ifndef __{{.LibName}}_hxx__ #define __{{.LibName}}_hxx__ 1 // -------------------------------------- {{.HdrIncludes}} #ifdef _MSC_VER #define DllExport __declspec( dllexport ) #else #define DllExport #endif class DllExport C{{.LibName}} { public: C{{.LibName}}(); ~C{{.LibName}}(); void f(); private: {{.HdrMembers}} }; #endif // !__{{.LibName}}_hxx__ ` const src_tst_tmpl = ` #include <iostream> #include "{{.BaseName}}/{{.LibName}}.hxx" int main(int argc, char **argv) { std::cout << "Testing binary for package {{.Name}}\n" << "argc: " << argc << "\n" << "argv: " << argv << "\n"; C{{.LibName}} o; o.f(); return 0; } ` gen_tmpl := func(fname 
string, text string, data interface{}) error { f, err := os.Create(fname) if err != nil { return err } defer f.Close() t := template.New("tmpl") template.Must(t.Parse(text)) err = t.Execute(f, data) if err != nil { return err } return f.Sync() } gen_proj := func(projname string, projdeps []string, pkgdefs []pkgdef_t) error { var err error projdir := filepath.Join(workdir, projname) err = os.MkdirAll(projdir, 0777) if err != nil { return err } pkgdir := filepath.Join(projdir, "src") err = os.MkdirAll(pkgdir, 0777) if err != nil { return err } gen_pkg := func(pkg pkgdef_t) error { var err error // create pkg structure for _, dir := range []string{ filepath.Join(pkg.Name, "includes", pkg.Name), filepath.Join(pkg.Name, "src"), } { err = os.MkdirAll(dir, 0777) if err != nil { return err } } // create hscript fname := filepath.Join(pkg.Name, "hscript.yml") err = gen_tmpl(fname, hscript_tmpl, pkg) if err != nil { return err } // header fname = filepath.Join(pkg.Name, "includes", pkg.Name, fmt.Sprintf("%s.hxx", pkg.LibName())) err = gen_tmpl(fname, hdr_lib_tmpl, pkg) if err != nil { return err } // lib fname = filepath.Join(pkg.Name, "src", fmt.Sprintf("%s.cxx", pkg.LibName())) err = gen_tmpl(fname, src_lib_tmpl, pkg) if err != nil { return err } // test fname = filepath.Join(pkg.Name, "src", fmt.Sprintf("%s.cxx", pkg.TestName())) err = gen_tmpl(fname, src_tst_tmpl, pkg) if err != nil { return err } return err } //fmt.Printf("pkgdir=%q\n", pkgdir) err = os.Chdir(pkgdir) if err != nil { return err } for _, pkg := range pkgdefs { err = gen_pkg(pkg) if err != nil { return err } } //fmt.Printf("projdir=%q\n", projdir) err = os.Chdir(projdir) if err != nil { return err } gen_projdeps := func(projdeps []string) string { if len(projdeps) <= 0 { return "" } projpath := make([]string, 0, len(projdeps)) for _, dep := range projdeps { projpath = append( projpath, filepath.Join(workdir, dep, "install-area"), ) } return strings.Join(projpath, ":") } // build project for _, cmd := range 
[][]string{ {"hwaf", "init", "-v=1", "."}, {"hwaf", "setup", "-v=1", "-p=" + gen_projdeps(projdeps)}, {"hwaf", "configure"}, {"hwaf"}, {"hwaf", "check"}, } { err := hwaf.Run(cmd[0], cmd[1:]...) if err != nil { hwaf.Display() t.Fatalf("cmd %v failed: %v", cmd, err) } } return err } for _, table := range []struct { projname string projdeps []string pkgdefs []pkgdef_t }{ { "project_001", []string{}, []pkgdef_t{ { "project_001_pkg_001", []string{}, }, { "project_001_pkg_002", []string{}, }, { "project_001_pkg_003", []string{}, }, }, }, { "project_002", []string{"project_001"}, []pkgdef_t{ { "project_002_pkg_001", []string{"project_001_pkg_001"}, }, { "project_002_pkg_002", []string{"project_001_pkg_001", "project_001_pkg_002"}, }, { "project_002_pkg_003", []string{"project_001_pkg_001", "project_001_pkg_003"}, }, }, }, { "project_003", []string{"project_001"}, []pkgdef_t{ { "project_003_pkg_001", []string{"project_001_pkg_001"}, }, { "project_003_pkg_002", []string{"project_001_pkg_001", "project_001_pkg_002"}, }, { "project_003_pkg_003", []string{"project_001_pkg_001", "project_001_pkg_003"}, }, }, }, { "project_004", []string{"project_002", "project_003"}, []pkgdef_t{ { "project_004_pkg_001", []string{ "project_001_pkg_001", "project_002_pkg_001", }, }, { "project_004_pkg_002", []string{ "project_001_pkg_001", "project_001_pkg_002", "project_002_pkg_001", "project_003_pkg_002", }, }, { "project_004_pkg_003", []string{ "project_001_pkg_001", "project_001_pkg_003", "project_002_pkg_001", "project_002_pkg_003", "project_003_pkg_001", "project_003_pkg_003", }, }, }, }, } { err = gen_proj(table.projname, table.projdeps, table.pkgdefs) if err != nil { t.Fatalf("project [%v]: %v\n", table.projname, err) } } //hwaf.Display() } // EOF<|fim▁end|>
source: "src/{{.TestName}}.cxx", {{.TestUse}}
<|file_name|>admin.py<|end_file_name|><|fim▁begin|># from .. import Workflow, Stage, Task, TaskFile # # from flask.ext import admin # from flask.ext.admin.contrib import sqla # #<|fim▁hole|># adm.add_view(sqla.ModelView(m, session))<|fim▁end|>
# def add_cosmos_admin(flask_app, session): # adm = admin.Admin(flask_app, 'Flask Admin', base_template="admin_layout.html") # for m in [Workflow, Stage, Task, TaskFile]:
<|file_name|>Nap.java<|end_file_name|><|fim▁begin|>//: move/Nap.java package pokepon.move; import pokepon.enums.*; import pokepon.pony.Pony; import pokepon.battle.*; /** * Heal 50% of user's HP. * * @author silverweed */ <|fim▁hole|> type = Type.NIGHT; moveType = Move.MoveType.STATUS; maxpp = pp = 10; accuracy = -1; priority = 0; briefDesc = "User sleeps 2 turns to refill HP and cure status."; healUser = 1f; healUserStatus = 1f; } public Nap(Pony p) { this(); pony = p; } @Override public BattleEvent[] getBattleEvents() { return new BattleEvent[] { new BattleEvent(1, name) { @Override public void afterMoveUsage(final BattleEngine be) { if(be.getAttacker() == pony && !pony.isFainted()) { pony.setAsleep(true); if(be.getBattleTask() != null) { be.getBattleTask().sendB("|battle|"+pony.getNickname()+" takes a nap and becomes healthy!"); be.getBattleTask().sendB(be.getConnection(be.getSide(pony)),"|addstatus|ally|slp"); be.getBattleTask().sendB(be.getConnection(be.getOppositeSide(pony)),"|addstatus|opp|slp"); } pony.sleepCounter = 3; count = 0; } } } }; } }<|fim▁end|>
public class Nap extends Move { public Nap() { super("Nap");
<|file_name|>abilities.ts<|end_file_name|><|fim▁begin|>/* Ratings and how they work: -1: Detrimental An ability that severely harms the user. ex. Defeatist, Slow Start 0: Useless An ability with no overall benefit in a singles battle. ex. Color Change, Plus 1: Ineffective An ability that has minimal effect or is only useful in niche situations. ex. Light Metal, Suction Cups 2: Useful An ability that can be generally useful. ex. Flame Body, Overcoat 3: Effective An ability with a strong effect on the user or foe. ex. Chlorophyll, Sturdy 4: Very useful One of the more popular abilities. It requires minimal support to be effective. ex. Adaptability, Magic Bounce 5: Essential The sort of ability that defines metagames. ex. Imposter, Shadow Tag */ export const Abilities: {[abilityid: string]: AbilityData} = { noability: { isNonstandard: "Past", name: "No Ability", rating: 0.1, num: 0, }, adaptability: { onModifyMove(move) { move.stab = 2; }, name: "Adaptability", rating: 4, num: 91, }, aerilate: { onModifyTypePriority: -1, onModifyType(move, pokemon) { const noModifyType = [ 'judgment', 'multiattack', 'naturalgift', 'revelationdance', 'technoblast', 'terrainpulse', 'weatherball', ]; if (move.type === 'Normal' && !noModifyType.includes(move.id) && !(move.isZ && move.category !== 'Status')) { move.type = 'Flying'; move.aerilateBoosted = true; } }, onBasePowerPriority: 23, onBasePower(basePower, pokemon, target, move) { if (move.aerilateBoosted) return this.chainModify([4915, 4096]); }, name: "Aerilate", rating: 4, num: 184, }, aftermath: { name: "Aftermath", onDamagingHitOrder: 1, onDamagingHit(damage, target, source, move) { if (!target.hp && this.checkMoveMakesContact(move, source, target, true)) { this.damage(source.baseMaxhp / 4, source, target); } }, rating: 2.5, num: 106, }, airlock: { onSwitchIn(pokemon) { this.effectState.switchingIn = true; }, onStart(pokemon) { // Air Lock does not activate when Skill Swapped or when Neutralizing Gas leaves the field if 
(!this.effectState.switchingIn) return; this.add('-ability', pokemon, 'Air Lock'); this.effectState.switchingIn = false; }, suppressWeather: true, name: "Air Lock", rating: 2, num: 76, }, analytic: { onBasePowerPriority: 21, onBasePower(basePower, pokemon) { let boosted = true; for (const target of this.getAllActive()) { if (target === pokemon) continue; if (this.queue.willMove(target)) { boosted = false; break; } } if (boosted) { this.debug('Analytic boost'); return this.chainModify([5325, 4096]); } }, name: "Analytic", rating: 2.5, num: 148, }, angerpoint: { onHit(target, source, move) { if (!target.hp) return; if (move?.effectType === 'Move' && target.getMoveHitData(move).crit) { target.setBoost({atk: 6}); this.add('-setboost', target, 'atk', 12, '[from] ability: Anger Point'); } }, name: "Anger Point", rating: 1.5, num: 83, }, anticipation: { onStart(pokemon) { for (const target of pokemon.foes()) { for (const moveSlot of target.moveSlots) { const move = this.dex.moves.get(moveSlot.move); if (move.category === 'Status') continue; const moveType = move.id === 'hiddenpower' ? 
target.hpType : move.type; if ( this.dex.getImmunity(moveType, pokemon) && this.dex.getEffectiveness(moveType, pokemon) > 0 || move.ohko ) { this.add('-ability', pokemon, 'Anticipation'); return; } } } }, name: "Anticipation", rating: 0.5, num: 107, }, arenatrap: { onFoeTrapPokemon(pokemon) { if (!pokemon.isAdjacent(this.effectState.target)) return; if (pokemon.isGrounded()) { pokemon.tryTrap(true); } }, onFoeMaybeTrapPokemon(pokemon, source) { if (!source) source = this.effectState.target; if (!source || !pokemon.isAdjacent(source)) return; if (pokemon.isGrounded(!pokemon.knownType)) { // Negate immunity if the type is unknown pokemon.maybeTrapped = true; } }, name: "Arena Trap", rating: 5, num: 71, }, aromaveil: { onAllyTryAddVolatile(status, target, source, effect) { if (['attract', 'disable', 'encore', 'healblock', 'taunt', 'torment'].includes(status.id)) { if (effect.effectType === 'Move') { const effectHolder = this.effectState.target; this.add('-block', target, 'ability: Aroma Veil', '[of] ' + effectHolder); } return null; } }, isBreakable: true, name: "Aroma Veil", rating: 2, num: 165, }, asoneglastrier: { onPreStart(pokemon) { this.add('-ability', pokemon, 'As One'); this.add('-ability', pokemon, 'Unnerve'); this.effectState.unnerved = true; }, onEnd() { this.effectState.unnerved = false; }, onFoeTryEatItem() { return !this.effectState.unnerved; }, onSourceAfterFaint(length, target, source, effect) { if (effect && effect.effectType === 'Move') { this.boost({atk: length}, source, source, this.dex.abilities.get('chillingneigh')); } }, isPermanent: true, name: "As One (Glastrier)", rating: 3.5, num: 266, }, asonespectrier: { onPreStart(pokemon) { this.add('-ability', pokemon, 'As One'); this.add('-ability', pokemon, 'Unnerve'); this.effectState.unnerved = true; }, onEnd() { this.effectState.unnerved = false; }, onFoeTryEatItem() { return !this.effectState.unnerved; }, onSourceAfterFaint(length, target, source, effect) { if (effect && effect.effectType === 
'Move') { this.boost({spa: length}, source, source, this.dex.abilities.get('grimneigh')); } }, isPermanent: true, name: "As One (Spectrier)", rating: 3.5, num: 267, }, aurabreak: { onStart(pokemon) { if (this.suppressingAbility(pokemon)) return; this.add('-ability', pokemon, 'Aura Break'); }, onAnyTryPrimaryHit(target, source, move) { if (target === source || move.category === 'Status') return; move.hasAuraBreak = true; }, isBreakable: true, name: "Aura Break", rating: 1, num: 188, }, baddreams: { onResidualOrder: 28, onResidualSubOrder: 2, onResidual(pokemon) { if (!pokemon.hp) return; for (const target of pokemon.foes()) { if (target.status === 'slp' || target.hasAbility('comatose')) { this.damage(target.baseMaxhp / 8, target, pokemon); } } }, name: "Bad Dreams", rating: 1.5, num: 123, }, ballfetch: { name: "Ball Fetch", rating: 0, num: 237, }, battery: { onAllyBasePowerPriority: 22, onAllyBasePower(basePower, attacker, defender, move) { if (attacker !== this.effectState.target && move.category === 'Special') { this.debug('Battery boost'); return this.chainModify([5325, 4096]); } }, name: "Battery", rating: 0, num: 217, }, battlearmor: { onCriticalHit: false, isBreakable: true, name: "Battle Armor", rating: 1, num: 4, }, battlebond: { onSourceAfterFaint(length, target, source, effect) { if (effect?.effectType !== 'Move') { return; } if (source.species.id === 'greninja' && source.hp && !source.transformed && source.side.foePokemonLeft()) { this.add('-activate', source, 'ability: Battle Bond'); source.formeChange('Greninja-Ash', this.effect, true); } }, onModifyMovePriority: -1, onModifyMove(move, attacker) { if (move.id === 'watershuriken' && attacker.species.name === 'Greninja-Ash' && !attacker.transformed) { move.multihit = 3; } }, isPermanent: true, name: "Battle Bond", rating: 4, num: 210, }, beastboost: { onSourceAfterFaint(length, target, source, effect) { if (effect && effect.effectType === 'Move') { let statName = 'atk'; let bestStat = 0; let s: 
StatIDExceptHP; for (s in source.storedStats) { if (source.storedStats[s] > bestStat) { statName = s; bestStat = source.storedStats[s]; } } this.boost({[statName]: length}, source); } }, name: "Beast Boost", rating: 3.5, num: 224, }, berserk: { onDamage(damage, target, source, effect) { if ( effect.effectType === "Move" && !effect.multihit && (!effect.negateSecondary && !(effect.hasSheerForce && source.hasAbility('sheerforce'))) ) { target.abilityState.checkedBerserk = false; } else { target.abilityState.checkedBerserk = true; } }, onTryEatItem(item, pokemon) { const healingItems = [ 'aguavberry', 'enigmaberry', 'figyberry', 'iapapaberry', 'magoberry', 'sitrusberry', 'wikiberry', 'oranberry', 'berryjuice', ]; if (healingItems.includes(item.id)) { return pokemon.abilityState.checkedBerserk; } return true; }, onAfterMoveSecondary(target, source, move) { target.abilityState.checkedBerserk = true; if (!source || source === target || !target.hp || !move.totalDamage) return; const lastAttackedBy = target.getLastAttackedBy(); if (!lastAttackedBy) return; const damage = move.multihit ? 
move.totalDamage : lastAttackedBy.damage; if (target.hp <= target.maxhp / 2 && target.hp + damage > target.maxhp / 2) { this.boost({spa: 1}); } }, name: "Berserk", rating: 2, num: 201, }, bigpecks: { onBoost(boost, target, source, effect) { if (source && target === source) return; if (boost.def && boost.def < 0) { delete boost.def; if (!(effect as ActiveMove).secondaries && effect.id !== 'octolock') { this.add("-fail", target, "unboost", "Defense", "[from] ability: Big Pecks", "[of] " + target); } } }, isBreakable: true, name: "Big Pecks", rating: 0.5, num: 145, }, blaze: { onModifyAtkPriority: 5, onModifyAtk(atk, attacker, defender, move) { if (move.type === 'Fire' && attacker.hp <= attacker.maxhp / 3) { this.debug('Blaze boost'); return this.chainModify(1.5); } }, onModifySpAPriority: 5, onModifySpA(atk, attacker, defender, move) { if (move.type === 'Fire' && attacker.hp <= attacker.maxhp / 3) { this.debug('Blaze boost'); return this.chainModify(1.5); } }, name: "Blaze", rating: 2, num: 66, }, bulletproof: { onTryHit(pokemon, target, move) { if (move.flags['bullet']) { this.add('-immune', pokemon, '[from] ability: Bulletproof'); return null; } }, isBreakable: true, name: "Bulletproof", rating: 3, num: 171, }, cheekpouch: { onEatItem(item, pokemon) { this.heal(pokemon.baseMaxhp / 3); }, name: "Cheek Pouch", rating: 2, num: 167, }, chillingneigh: { onSourceAfterFaint(length, target, source, effect) { if (effect && effect.effectType === 'Move') { this.boost({atk: length}, source); } }, name: "Chilling Neigh", rating: 3, num: 264, }, chlorophyll: { onModifySpe(spe, pokemon) { if (['sunnyday', 'desolateland'].includes(pokemon.effectiveWeather())) { return this.chainModify(2); } }, name: "Chlorophyll", rating: 3, num: 34, }, clearbody: { onBoost(boost, target, source, effect) { if (source && target === source) return; let showMsg = false; let i: BoostID; for (i in boost) { if (boost[i]! 
< 0) { delete boost[i]; showMsg = true; } } if (showMsg && !(effect as ActiveMove).secondaries && effect.id !== 'octolock') { this.add("-fail", target, "unboost", "[from] ability: Clear Body", "[of] " + target); } }, isBreakable: true, name: "Clear Body", rating: 2, num: 29, }, cloudnine: { onSwitchIn(pokemon) { this.effectState.switchingIn = true; }, onStart(pokemon) { // Cloud Nine does not activate when Skill Swapped or when Neutralizing Gas leaves the field if (!this.effectState.switchingIn) return; this.add('-ability', pokemon, 'Cloud Nine'); this.effectState.switchingIn = false; }, suppressWeather: true, name: "Cloud Nine", rating: 2, num: 13, }, colorchange: { onAfterMoveSecondary(target, source, move) { if (!target.hp) return; const type = move.type; if ( target.isActive && move.effectType === 'Move' && move.category !== 'Status' && type !== '???' && !target.hasType(type) ) { if (!target.setType(type)) return false; this.add('-start', target, 'typechange', type, '[from] ability: Color Change'); if (target.side.active.length === 2 && target.position === 1) { // Curse Glitch const action = this.queue.willMove(target); if (action && action.move.id === 'curse') { action.targetLoc = -1; } } } }, name: "Color Change", rating: 0, num: 16, }, comatose: { onStart(pokemon) { this.add('-ability', pokemon, 'Comatose'); }, onSetStatus(status, target, source, effect) { if ((effect as Move)?.status) { this.add('-immune', target, '[from] ability: Comatose'); } return false; }, // Permanent sleep "status" implemented in the relevant sleep-checking effects isPermanent: true, name: "Comatose", rating: 4, num: 213, }, competitive: { onAfterEachBoost(boost, target, source, effect) { if (!source || target.isAlly(source)) { if (effect.id === 'stickyweb') { this.hint("Court Change Sticky Web counts as lowering your own Speed, and Competitive only affects stats lowered by foes.", true, source.side); } return; } let statsLowered = false; let i: BoostID; for (i in boost) { if 
(boost[i]! < 0) { statsLowered = true; } } if (statsLowered) { this.add('-ability', target, 'Competitive'); this.boost({spa: 2}, target, target, null, true); } }, name: "Competitive", rating: 2.5, num: 172, }, compoundeyes: { onSourceModifyAccuracyPriority: -1, onSourceModifyAccuracy(accuracy) { if (typeof accuracy !== 'number') return; this.debug('compoundeyes - enhancing accuracy'); return this.chainModify([5325, 4096]); }, name: "Compound Eyes", rating: 3, num: 14, }, contrary: { onBoost(boost, target, source, effect) { if (effect && effect.id === 'zpower') return; let i: BoostID; for (i in boost) { boost[i]! *= -1; } }, isBreakable: true, name: "Contrary", rating: 4.5, num: 126, }, corrosion: { // Implemented in sim/pokemon.js:Pokemon#setStatus name: "Corrosion", rating: 2.5, num: 212, }, cottondown: { onDamagingHit(damage, target, source, move) { let activated = false; for (const pokemon of this.getAllActive()) { if (pokemon === target || pokemon.fainted) continue; if (!activated) { this.add('-ability', target, 'Cotton Down'); activated = true; } this.boost({spe: -1}, pokemon, target, null, true); } }, name: "Cotton Down", rating: 2, num: 238, }, curiousmedicine: { onStart(pokemon) { for (const ally of pokemon.adjacentAllies()) { ally.clearBoosts(); this.add('-clearboost', ally, '[from] ability: Curious Medicine', '[of] ' + pokemon); } }, name: "Curious Medicine", rating: 0, num: 261, }, cursedbody: { onDamagingHit(damage, target, source, move) { if (source.volatiles['disable']) return; if (!move.isMax && !move.isFutureMove && move.id !== 'struggle') { if (this.randomChance(3, 10)) { source.addVolatile('disable', this.effectState.target); } } }, name: "Cursed Body", rating: 2, num: 130, }, cutecharm: { onDamagingHit(damage, target, source, move) { if (this.checkMoveMakesContact(move, source, target)) { if (this.randomChance(3, 10)) { source.addVolatile('attract', this.effectState.target); } } }, name: "Cute Charm", rating: 0.5, num: 56, }, damp: { 
onAnyTryMove(target, source, effect) { if (['explosion', 'mindblown', 'mistyexplosion', 'selfdestruct'].includes(effect.id)) { this.attrLastMove('[still]'); this.add('cant', this.effectState.target, 'ability: Damp', effect, '[of] ' + target); return false; } }, onAnyDamage(damage, target, source, effect) { if (effect && effect.id === 'aftermath') { return false; } }, isBreakable: true, name: "Damp", rating: 1, num: 6, }, dancer: { name: "Dancer", // implemented in runMove in scripts.js rating: 1.5, num: 216, }, darkaura: { onStart(pokemon) { if (this.suppressingAbility(pokemon)) return; this.add('-ability', pokemon, 'Dark Aura'); }, onAnyBasePowerPriority: 20, onAnyBasePower(basePower, source, target, move) { if (target === source || move.category === 'Status' || move.type !== 'Dark') return; if (!move.auraBooster) move.auraBooster = this.effectState.target; if (move.auraBooster !== this.effectState.target) return; return this.chainModify([move.hasAuraBreak ? 3072 : 5448, 4096]); }, isBreakable: true, name: "Dark Aura", rating: 3, num: 186, }, dauntlessshield: { onStart(pokemon) { this.boost({def: 1}, pokemon); }, name: "Dauntless Shield", rating: 3.5, num: 235, }, dazzling: { onFoeTryMove(target, source, move) { const targetAllExceptions = ['perishsong', 'flowershield', 'rototiller']; if (move.target === 'foeSide' || (move.target === 'all' && !targetAllExceptions.includes(move.id))) { return; } const dazzlingHolder = this.effectState.target; if ((source.isAlly(dazzlingHolder) || move.target === 'all') && move.priority > 0.1) { this.attrLastMove('[still]'); this.add('cant', dazzlingHolder, 'ability: Dazzling', move, '[of] ' + target); return false; } }, isBreakable: true, name: "Dazzling", rating: 2.5, num: 219, }, defeatist: { onModifyAtkPriority: 5, onModifyAtk(atk, pokemon) { if (pokemon.hp <= pokemon.maxhp / 2) { return this.chainModify(0.5); } }, onModifySpAPriority: 5, onModifySpA(atk, pokemon) { if (pokemon.hp <= pokemon.maxhp / 2) { return 
this.chainModify(0.5); } }, name: "Defeatist", rating: -1, num: 129, }, defiant: { onAfterEachBoost(boost, target, source, effect) { if (!source || target.isAlly(source)) { if (effect.id === 'stickyweb') { this.hint("Court Change Sticky Web counts as lowering your own Speed, and Defiant only affects stats lowered by foes.", true, source.side); } return; } let statsLowered = false; let i: BoostID; for (i in boost) { if (boost[i]! < 0) { statsLowered = true; } } if (statsLowered) { this.add('-ability', target, 'Defiant'); this.boost({atk: 2}, target, target, null, true); } }, name: "Defiant", rating: 2.5, num: 128, }, deltastream: { onStart(source) { this.field.setWeather('deltastream'); }, onAnySetWeather(target, source, weather) { const strongWeathers = ['desolateland', 'primordialsea', 'deltastream']; if (this.field.getWeather().id === 'deltastream' && !strongWeathers.includes(weather.id)) return false; }, onEnd(pokemon) { if (this.field.weatherState.source !== pokemon) return; for (const target of this.getAllActive()) { if (target === pokemon) continue; if (target.hasAbility('deltastream')) { this.field.weatherState.source = target; return; } } this.field.clearWeather(); }, name: "Delta Stream", rating: 4, num: 191, }, desolateland: { onStart(source) { this.field.setWeather('desolateland'); }, onAnySetWeather(target, source, weather) { const strongWeathers = ['desolateland', 'primordialsea', 'deltastream']; if (this.field.getWeather().id === 'desolateland' && !strongWeathers.includes(weather.id)) return false; }, onEnd(pokemon) { if (this.field.weatherState.source !== pokemon) return; for (const target of this.getAllActive()) { if (target === pokemon) continue; if (target.hasAbility('desolateland')) { this.field.weatherState.source = target; return; } } this.field.clearWeather(); }, name: "Desolate Land", rating: 4.5, num: 190, }, disguise: { onDamagePriority: 1, onDamage(damage, target, source, effect) { if ( effect && effect.effectType === 'Move' && ['mimikyu', 
'mimikyutotem'].includes(target.species.id) && !target.transformed ) { this.add('-activate', target, 'ability: Disguise'); this.effectState.busted = true; return 0; } }, onCriticalHit(target, source, move) { if (!target) return; if (!['mimikyu', 'mimikyutotem'].includes(target.species.id) || target.transformed) { return; } const hitSub = target.volatiles['substitute'] && !move.flags['bypasssub'] && !(move.infiltrates && this.gen >= 6); if (hitSub) return; if (!target.runImmunity(move.type)) return; return false; }, onEffectiveness(typeMod, target, type, move) { if (!target || move.category === 'Status') return; if (!['mimikyu', 'mimikyutotem'].includes(target.species.id) || target.transformed) { return; } const hitSub = target.volatiles['substitute'] && !move.flags['bypasssub'] && !(move.infiltrates && this.gen >= 6); if (hitSub) return; if (!target.runImmunity(move.type)) return; return 0; }, onUpdate(pokemon) { if (['mimikyu', 'mimikyutotem'].includes(pokemon.species.id) && this.effectState.busted) { const speciesid = pokemon.species.id === 'mimikyutotem' ? 
'Mimikyu-Busted-Totem' : 'Mimikyu-Busted'; pokemon.formeChange(speciesid, this.effect, true); this.damage(pokemon.baseMaxhp / 8, pokemon, pokemon, this.dex.species.get(speciesid)); } }, isBreakable: true, isPermanent: true, name: "Disguise", rating: 3.5, num: 209, }, download: { onStart(pokemon) { let totaldef = 0; let totalspd = 0; for (const target of pokemon.foes()) { totaldef += target.getStat('def', false, true); totalspd += target.getStat('spd', false, true); } if (totaldef && totaldef >= totalspd) { this.boost({spa: 1}); } else if (totalspd) { this.boost({atk: 1}); } }, name: "Download", rating: 3.5, num: 88, }, dragonsmaw: { onModifyAtkPriority: 5, onModifyAtk(atk, attacker, defender, move) { if (move.type === 'Dragon') { this.debug('Dragon\'s Maw boost'); return this.chainModify(1.5); } }, onModifySpAPriority: 5, onModifySpA(atk, attacker, defender, move) { if (move.type === 'Dragon') { this.debug('Dragon\'s Maw boost'); return this.chainModify(1.5); } }, name: "Dragon's Maw", rating: 3.5, num: 263, }, drizzle: { onStart(source) { for (const action of this.queue) { if (action.choice === 'runPrimal' && action.pokemon === source && source.species.id === 'kyogre') return; if (action.choice !== 'runSwitch' && action.choice !== 'runPrimal') break; } this.field.setWeather('raindance'); }, name: "Drizzle", rating: 4, num: 2, }, drought: { onStart(source) { for (const action of this.queue) { if (action.choice === 'runPrimal' && action.pokemon === source && source.species.id === 'groudon') return; if (action.choice !== 'runSwitch' && action.choice !== 'runPrimal') break; } this.field.setWeather('sunnyday'); }, name: "Drought", rating: 4, num: 70, }, dryskin: { onTryHit(target, source, move) { if (target !== source && move.type === 'Water') { if (!this.heal(target.baseMaxhp / 4)) { this.add('-immune', target, '[from] ability: Dry Skin'); } return null; } }, onSourceBasePowerPriority: 17, onSourceBasePower(basePower, attacker, defender, move) { if (move.type === 
'Fire') {
				// Dry Skin's downside: incoming Fire-type damage is boosted 25%.
				return this.chainModify(1.25);
			}
		},
		onWeather(target, source, effect) {
			// Utility Umbrella suppresses both the rain healing and the sun chip damage.
			if (target.hasItem('utilityumbrella')) return;
			if (effect.id === 'raindance' || effect.id === 'primordialsea') {
				// Heals 1/8 max HP each turn in rain (including Primordial Sea).
				this.heal(target.baseMaxhp / 8);
			} else if (effect.id === 'sunnyday' || effect.id === 'desolateland') {
				// Loses 1/8 max HP each turn in sun (including Desolate Land).
				this.damage(target.baseMaxhp / 8, target, target);
			}
		},
		isBreakable: true,
		name: "Dry Skin",
		rating: 3,
		num: 87,
	},
	earlybird: {
		name: "Early Bird",
		// Sleep-turn halving is implemented in statuses.js, not here.
		rating: 1.5,
		num: 48,
	},
	effectspore: {
		// 30% total chance on contact to inflict sleep (11%), paralysis (10%), or poison (9%).
		onDamagingHit(damage, target, source, move) {
			// Only triggers on contact, against an unstatused attacker that isn't powder-immune
			// (Grass types, Overcoat, Safety Goggles are exempt via runStatusImmunity('powder')).
			if (this.checkMoveMakesContact(move, source, target) && !source.status && source.runStatusImmunity('powder')) {
				const r = this.random(100);
				if (r < 11) {
					source.setStatus('slp', target);
				} else if (r < 21) {
					source.setStatus('par', target);
				} else if (r < 30) {
					source.setStatus('psn', target);
				}
			}
		},
		name: "Effect Spore",
		rating: 2,
		num: 27,
	},
	electricsurge: {
		// Sets Electric Terrain on switch-in.
		onStart(source) {
			this.field.setTerrain('electricterrain');
		},
		name: "Electric Surge",
		rating: 4,
		num: 226,
	},
	emergencyexit: {
		// Forces a switch when the holder's HP crosses below half (the HP check
		// itself lives in the code that fires the EmergencyExit event).
		onEmergencyExit(target) {
			// Bail if the side can't switch or a switch is already pending.
			if (!this.canSwitch(target.side) || target.forceSwitchFlag || target.switchFlag) return;
			// Clear any other pending switch flags so only this Pokemon switches out.
			for (const side of this.sides) {
				for (const active of side.active) {
					active.switchFlag = false;
				}
			}
			target.switchFlag = true;
			this.add('-activate', target, 'ability: Emergency Exit');
		},
		name: "Emergency Exit",
		rating: 1,
		num: 194,
	},
	fairyaura: {
		onStart(pokemon) {
			if (this.suppressingAbility(pokemon)) return;
			this.add('-ability', pokemon, 'Fairy Aura');
		},
		onAnyBasePowerPriority: 20,
		onAnyBasePower(basePower, source, target, move) {
			// Boosts every damaging Fairy-type move on the field (both sides).
			if (target === source || move.category === 'Status' || move.type !== 'Fairy') return;
			// Only one aura holder applies the boost per move; Aura Break inverts it.
			if (!move.auraBooster) move.auraBooster = this.effectState.target;
			if (move.auraBooster !== this.effectState.target) return;
			return this.chainModify([move.hasAuraBreak ? 
3072 : 5448, 4096]); }, isBreakable: true, name: "Fairy Aura", rating: 3, num: 187, }, filter: { onSourceModifyDamage(damage, source, target, move) { if (target.getMoveHitData(move).typeMod > 0) { this.debug('Filter neutralize'); return this.chainModify(0.75); } }, isBreakable: true, name: "Filter", rating: 3, num: 111, }, flamebody: { onDamagingHit(damage, target, source, move) { if (this.checkMoveMakesContact(move, source, target)) { if (this.randomChance(3, 10)) { source.trySetStatus('brn', target); } } }, name: "Flame Body", rating: 2, num: 49, }, flareboost: { onBasePowerPriority: 19, onBasePower(basePower, attacker, defender, move) { if (attacker.status === 'brn' && move.category === 'Special') { return this.chainModify(1.5); } }, name: "Flare Boost", rating: 2, num: 138, }, flashfire: { onTryHit(target, source, move) { if (target !== source && move.type === 'Fire') { move.accuracy = true; if (!target.addVolatile('flashfire')) { this.add('-immune', target, '[from] ability: Flash Fire'); } return null; } }, onEnd(pokemon) { pokemon.removeVolatile('flashfire'); }, condition: { noCopy: true, // doesn't get copied by Baton Pass onStart(target) { this.add('-start', target, 'ability: Flash Fire'); }, onModifyAtkPriority: 5, onModifyAtk(atk, attacker, defender, move) { if (move.type === 'Fire' && attacker.hasAbility('flashfire')) { this.debug('Flash Fire boost'); return this.chainModify(1.5); } }, onModifySpAPriority: 5, onModifySpA(atk, attacker, defender, move) { if (move.type === 'Fire' && attacker.hasAbility('flashfire')) { this.debug('Flash Fire boost'); return this.chainModify(1.5); } }, onEnd(target) { this.add('-end', target, 'ability: Flash Fire', '[silent]'); }, }, isBreakable: true, name: "Flash Fire", rating: 3.5, num: 18, }, flowergift: { onStart(pokemon) { delete this.effectState.forme; }, onUpdate(pokemon) { if (!pokemon.isActive || pokemon.baseSpecies.baseSpecies !== 'Cherrim' || pokemon.transformed) return; if (!pokemon.hp) return; if (['sunnyday', 
'desolateland'].includes(pokemon.effectiveWeather())) { if (pokemon.species.id !== 'cherrimsunshine') { pokemon.formeChange('Cherrim-Sunshine', this.effect, false, '[msg]'); } } else { if (pokemon.species.id === 'cherrimsunshine') { pokemon.formeChange('Cherrim', this.effect, false, '[msg]'); } } }, onAllyModifyAtkPriority: 3, onAllyModifyAtk(atk, pokemon) { if (this.effectState.target.baseSpecies.baseSpecies !== 'Cherrim') return; if (['sunnyday', 'desolateland'].includes(pokemon.effectiveWeather())) { return this.chainModify(1.5); } }, onAllyModifySpDPriority: 4, onAllyModifySpD(spd, pokemon) { if (this.effectState.target.baseSpecies.baseSpecies !== 'Cherrim') return; if (['sunnyday', 'desolateland'].includes(pokemon.effectiveWeather())) { return this.chainModify(1.5); } }, isBreakable: true, name: "Flower Gift", rating: 1, num: 122, }, flowerveil: { onAllyBoost(boost, target, source, effect) { if ((source && target === source) || !target.hasType('Grass')) return; let showMsg = false; let i: BoostID; for (i in boost) { if (boost[i]! 
< 0) { delete boost[i]; showMsg = true; } } if (showMsg && !(effect as ActiveMove).secondaries) { const effectHolder = this.effectState.target; this.add('-block', target, 'ability: Flower Veil', '[of] ' + effectHolder); } }, onAllySetStatus(status, target, source, effect) { if (target.hasType('Grass') && source && target !== source && effect && effect.id !== 'yawn') { this.debug('interrupting setStatus with Flower Veil'); if (effect.id === 'synchronize' || (effect.effectType === 'Move' && !effect.secondaries)) { const effectHolder = this.effectState.target; this.add('-block', target, 'ability: Flower Veil', '[of] ' + effectHolder); } return null; } }, onAllyTryAddVolatile(status, target) { if (target.hasType('Grass') && status.id === 'yawn') { this.debug('Flower Veil blocking yawn'); const effectHolder = this.effectState.target; this.add('-block', target, 'ability: Flower Veil', '[of] ' + effectHolder); return null; } }, isBreakable: true, name: "Flower Veil", rating: 0, num: 166, }, fluffy: { onSourceModifyDamage(damage, source, target, move) { let mod = 1; if (move.type === 'Fire') mod *= 2; if (move.flags['contact']) mod /= 2; return this.chainModify(mod); }, isBreakable: true, name: "Fluffy", rating: 3.5, num: 218, }, forecast: { onUpdate(pokemon) { if (pokemon.baseSpecies.baseSpecies !== 'Castform' || pokemon.transformed) return; let forme = null; switch (pokemon.effectiveWeather()) { case 'sunnyday': case 'desolateland': if (pokemon.species.id !== 'castformsunny') forme = 'Castform-Sunny'; break; case 'raindance': case 'primordialsea': if (pokemon.species.id !== 'castformrainy') forme = 'Castform-Rainy'; break; case 'hail': if (pokemon.species.id !== 'castformsnowy') forme = 'Castform-Snowy'; break; default: if (pokemon.species.id !== 'castform') forme = 'Castform'; break; } if (pokemon.isActive && forme) { pokemon.formeChange(forme, this.effect, false, '[msg]'); } }, name: "Forecast", rating: 2, num: 59, }, forewarn: { onStart(pokemon) { let warnMoves: (Move 
| Pokemon)[][] = []; let warnBp = 1; for (const target of pokemon.foes()) { for (const moveSlot of target.moveSlots) { const move = this.dex.moves.get(moveSlot.move); let bp = move.basePower; if (move.ohko) bp = 150; if (move.id === 'counter' || move.id === 'metalburst' || move.id === 'mirrorcoat') bp = 120; if (bp === 1) bp = 80; if (!bp && move.category !== 'Status') bp = 80; if (bp > warnBp) { warnMoves = [[move, target]]; warnBp = bp; } else if (bp === warnBp) { warnMoves.push([move, target]); } } } if (!warnMoves.length) return; const [warnMoveName, warnTarget] = this.sample(warnMoves); this.add('-activate', pokemon, 'ability: Forewarn', warnMoveName, '[of] ' + warnTarget); }, name: "Forewarn", rating: 0.5, num: 108, }, friendguard: { name: "Friend Guard", onAnyModifyDamage(damage, source, target, move) { if (target !== this.effectState.target && target.isAlly(this.effectState.target)) { this.debug('Friend Guard weaken'); return this.chainModify(0.75); } }, isBreakable: true, rating: 0, num: 132, }, frisk: { onStart(pokemon) { for (const target of pokemon.foes()) { if (target.item) { this.add('-item', target, target.getItem().name, '[from] ability: Frisk', '[of] ' + pokemon, '[identify]'); } } }, name: "Frisk", rating: 1.5, num: 119, }, fullmetalbody: { onBoost(boost, target, source, effect) { if (source && target === source) return; let showMsg = false; let i: BoostID; for (i in boost) { if (boost[i]! 
< 0) { delete boost[i]; showMsg = true; } } if (showMsg && !(effect as ActiveMove).secondaries && effect.id !== 'octolock') { this.add("-fail", target, "unboost", "[from] ability: Full Metal Body", "[of] " + target); } }, name: "Full Metal Body", rating: 2, num: 230, }, furcoat: { onModifyDefPriority: 6, onModifyDef(def) { return this.chainModify(2); }, isBreakable: true, name: "Fur Coat", rating: 4, num: 169, }, galewings: { onModifyPriority(priority, pokemon, target, move) { if (move?.type === 'Flying' && pokemon.hp === pokemon.maxhp) return priority + 1; }, name: "Gale Wings", rating: 3, num: 177, }, galvanize: { onModifyTypePriority: -1, onModifyType(move, pokemon) { const noModifyType = [ 'judgment', 'multiattack', 'naturalgift', 'revelationdance', 'technoblast', 'terrainpulse', 'weatherball', ]; if (move.type === 'Normal' && !noModifyType.includes(move.id) && !(move.isZ && move.category !== 'Status')) { move.type = 'Electric'; move.galvanizeBoosted = true; } }, onBasePowerPriority: 23, onBasePower(basePower, pokemon, target, move) { if (move.galvanizeBoosted) return this.chainModify([4915, 4096]); }, name: "Galvanize", rating: 4, num: 206, }, gluttony: { name: "Gluttony", rating: 1.5, num: 82, }, gooey: { onDamagingHit(damage, target, source, move) { if (this.checkMoveMakesContact(move, source, target, true)) { this.add('-ability', target, 'Gooey'); this.boost({spe: -1}, source, target, null, true); } }, name: "Gooey", rating: 2, num: 183, }, gorillatactics: { onStart(pokemon) { pokemon.abilityState.choiceLock = ""; }, onBeforeMove(pokemon, target, move) { if (move.isZOrMaxPowered || move.id === 'struggle') return; if (pokemon.abilityState.choiceLock && pokemon.abilityState.choiceLock !== move.id) { // Fails unless ability is being ignored (these events will not run), no PP lost. 
this.addMove('move', pokemon, move.name); this.attrLastMove('[still]'); this.debug("Disabled by Gorilla Tactics"); this.add('-fail', pokemon); return false; } }, onModifyMove(move, pokemon) { if (pokemon.abilityState.choiceLock || move.isZOrMaxPowered || move.id === 'struggle') return; pokemon.abilityState.choiceLock = move.id; }, onModifyAtkPriority: 1, onModifyAtk(atk, pokemon) { if (pokemon.volatiles['dynamax']) return; // PLACEHOLDER this.debug('Gorilla Tactics Atk Boost'); return this.chainModify(1.5); }, onDisableMove(pokemon) { if (!pokemon.abilityState.choiceLock) return; if (pokemon.volatiles['dynamax']) return; for (const moveSlot of pokemon.moveSlots) { if (moveSlot.id !== pokemon.abilityState.choiceLock) { pokemon.disableMove(moveSlot.id, false, this.effectState.sourceEffect); } } }, onEnd(pokemon) { pokemon.abilityState.choiceLock = ""; }, name: "Gorilla Tactics", rating: 4.5, num: 255, }, grasspelt: { onModifyDefPriority: 6, onModifyDef(pokemon) { if (this.field.isTerrain('grassyterrain')) return this.chainModify(1.5); }, isBreakable: true, name: "Grass Pelt", rating: 0.5, num: 179, }, grassysurge: { onStart(source) { this.field.setTerrain('grassyterrain'); }, name: "Grassy Surge", rating: 4, num: 229, }, grimneigh: { onSourceAfterFaint(length, target, source, effect) { if (effect && effect.effectType === 'Move') { this.boost({spa: length}, source); } }, name: "Grim Neigh", rating: 3, num: 265, }, gulpmissile: { onDamagingHit(damage, target, source, move) { if (!source.hp || !source.isActive || target.transformed || target.isSemiInvulnerable()) return; if (['cramorantgulping', 'cramorantgorging'].includes(target.species.id)) { this.damage(source.baseMaxhp / 4, source, target); if (target.species.id === 'cramorantgulping') { this.boost({def: -1}, source, target, null, true); } else { source.trySetStatus('par', target, move); } target.formeChange('cramorant', move); } }, // The Dive part of this mechanic is implemented in Dive's `onTryMove` in moves.ts 
onSourceTryPrimaryHit(target, source, effect) {
			// Cramorant catches prey when it uses Surf (the Dive half is in moves.ts):
			// gorging form below half HP, gulping form otherwise.
			if (
				effect && effect.id === 'surf' && source.hasAbility('gulpmissile') &&
				source.species.name === 'Cramorant' && !source.transformed
			) {
				const forme = source.hp <= source.maxhp / 2 ? 'cramorantgorging' : 'cramorantgulping';
				source.formeChange(forme, effect);
			}
		},
		isPermanent: true,
		name: "Gulp Missile",
		rating: 2.5,
		num: 241,
	},
	guts: {
		onModifyAtkPriority: 5,
		onModifyAtk(atk, pokemon) {
			// 1.5x Attack while statused (burn's Attack drop is ignored elsewhere).
			if (pokemon.status) {
				return this.chainModify(1.5);
			}
		},
		name: "Guts",
		rating: 3,
		num: 62,
	},
	harvest: {
		name: "Harvest",
		onResidualOrder: 28,
		onResidualSubOrder: 2,
		onResidual(pokemon) {
			// End of turn: always restores the last-consumed Berry in sun, 50% otherwise.
			if (this.field.isWeather(['sunnyday', 'desolateland']) || this.randomChance(1, 2)) {
				// Only fires while alive, item slot empty, and the last item was a Berry.
				if (pokemon.hp && !pokemon.item && this.dex.items.get(pokemon.lastItem).isBerry) {
					pokemon.setItem(pokemon.lastItem);
					pokemon.lastItem = '';
					this.add('-item', pokemon, pokemon.getItem(), '[from] ability: Harvest');
				}
			}
		},
		rating: 2.5,
		num: 139,
	},
	healer: {
		name: "Healer",
		onResidualOrder: 5,
		onResidualSubOrder: 3,
		onResidual(pokemon) {
			// End of turn: 30% chance per adjacent ally to cure its status condition.
			for (const allyActive of pokemon.adjacentAllies()) {
				if (allyActive.status && this.randomChance(3, 10)) {
					this.add('-activate', pokemon, 'ability: Healer');
					allyActive.cureStatus();
				}
			}
		},
		rating: 0,
		num: 131,
	},
	heatproof: {
		onSourceBasePowerPriority: 18,
		onSourceBasePower(basePower, attacker, defender, move) {
			// Halves the base power of incoming Fire-type moves.
			if (move.type === 'Fire') {
				return this.chainModify(0.5);
			}
		},
		onDamage(damage, target, source, effect) {
			// Also halves burn chip damage.
			if (effect && effect.id === 'brn') {
				return damage / 2;
			}
		},
		isBreakable: true,
		name: "Heatproof",
		rating: 2,
		num: 85,
	},
	heavymetal: {
		onModifyWeightPriority: 1,
		onModifyWeight(weighthg) {
			// Doubles the holder's weight (in hectograms).
			return weighthg * 2;
		},
		isBreakable: true,
		name: "Heavy Metal",
		rating: 0,
		num: 134,
	},
	honeygather: {
		// No battle effect.
		name: "Honey Gather",
		rating: 0,
		num: 118,
	},
	hugepower: {
		onModifyAtkPriority: 5,
		onModifyAtk(atk) {
			// Doubles Attack.
			return this.chainModify(2);
		},
		name: "Huge Power",
		rating: 5,
		num: 37,
	},
	hungerswitch: {
		onResidualOrder: 29,
		onResidual(pokemon) {
if (pokemon.species.baseSpecies !== 'Morpeko' || pokemon.transformed) return; const targetForme = pokemon.species.name === 'Morpeko' ? 'Morpeko-Hangry' : 'Morpeko'; pokemon.formeChange(targetForme); }, name: "Hunger Switch", rating: 1, num: 258, }, hustle: { // This should be applied directly to the stat as opposed to chaining with the others onModifyAtkPriority: 5, onModifyAtk(atk) { return this.modify(atk, 1.5); }, onSourceModifyAccuracyPriority: -1, onSourceModifyAccuracy(accuracy, target, source, move) { if (move.category === 'Physical' && typeof accuracy === 'number') { return this.chainModify([3277, 4096]); } }, name: "Hustle", rating: 3.5, num: 55, }, hydration: { onResidualOrder: 5, onResidualSubOrder: 3, onResidual(pokemon) { if (pokemon.status && ['raindance', 'primordialsea'].includes(pokemon.effectiveWeather())) { this.debug('hydration'); this.add('-activate', pokemon, 'ability: Hydration'); pokemon.cureStatus(); } }, name: "Hydration", rating: 1.5, num: 93, }, hypercutter: { onBoost(boost, target, source, effect) { if (source && target === source) return; if (boost.atk && boost.atk < 0) { delete boost.atk; if (!(effect as ActiveMove).secondaries) { this.add("-fail", target, "unboost", "Attack", "[from] ability: Hyper Cutter", "[of] " + target); } } }, isBreakable: true, name: "Hyper Cutter", rating: 1.5, num: 52, }, icebody: { onWeather(target, source, effect) { if (effect.id === 'hail') { this.heal(target.baseMaxhp / 16); } }, onImmunity(type, pokemon) { if (type === 'hail') return false; }, name: "Ice Body", rating: 1, num: 115, }, iceface: { onStart(pokemon) { if (this.field.isWeather('hail') && pokemon.species.id === 'eiscuenoice' && !pokemon.transformed) { this.add('-activate', pokemon, 'ability: Ice Face'); this.effectState.busted = false; pokemon.formeChange('Eiscue', this.effect, true); } }, onDamagePriority: 1, onDamage(damage, target, source, effect) { if ( effect && effect.effectType === 'Move' && effect.category === 'Physical' && 
target.species.id === 'eiscue' && !target.transformed ) { this.add('-activate', target, 'ability: Ice Face'); this.effectState.busted = true; return 0; } }, onCriticalHit(target, type, move) { if (!target) return; if (move.category !== 'Physical' || target.species.id !== 'eiscue' || target.transformed) return; if (target.volatiles['substitute'] && !(move.flags['bypasssub'] || move.infiltrates)) return; if (!target.runImmunity(move.type)) return; return false; }, onEffectiveness(typeMod, target, type, move) { if (!target) return; if (move.category !== 'Physical' || target.species.id !== 'eiscue' || target.transformed) return; const hitSub = target.volatiles['substitute'] && !move.flags['bypasssub'] && !(move.infiltrates && this.gen >= 6); if (hitSub) return; if (!target.runImmunity(move.type)) return; return 0; }, onUpdate(pokemon) { if (pokemon.species.id === 'eiscue' && this.effectState.busted) { pokemon.formeChange('Eiscue-Noice', this.effect, true); } }, onAnyWeatherStart() { const pokemon = this.effectState.target; if (!pokemon.hp) return; if (this.field.isWeather('hail') && pokemon.species.id === 'eiscuenoice' && !pokemon.transformed) { this.add('-activate', pokemon, 'ability: Ice Face'); this.effectState.busted = false; pokemon.formeChange('Eiscue', this.effect, true); } }, isBreakable: true, isPermanent: true, name: "Ice Face", rating: 3, num: 248, }, icescales: { onSourceModifyDamage(damage, source, target, move) { if (move.category === 'Special') { return this.chainModify(0.5); } }, isBreakable: true, name: "Ice Scales", rating: 4, num: 246, }, illuminate: { name: "Illuminate", rating: 0, num: 35, }, illusion: { onBeforeSwitchIn(pokemon) { pokemon.illusion = null; // yes, you can Illusion an active pokemon but only if it's to your right for (let i = pokemon.side.pokemon.length - 1; i > pokemon.position; i--) { const possibleTarget = pokemon.side.pokemon[i]; if (!possibleTarget.fainted) { pokemon.illusion = possibleTarget; break; } } }, 
onDamagingHit(damage, target, source, move) {
			// Taking a damaging hit breaks the Illusion (fires the End handler below).
			if (target.illusion) {
				this.singleEvent('End', this.dex.abilities.get('Illusion'), target.abilityState, target, source, move);
			}
		},
		onEnd(pokemon) {
			if (pokemon.illusion) {
				this.debug('illusion cleared');
				pokemon.illusion = null;
				// Re-announce the Pokemon's real identity to the client.
				const details = pokemon.species.name + (pokemon.level === 100 ? '' : ', L' + pokemon.level) +
					(pokemon.gender === '' ? '' : ', ' + pokemon.gender) + (pokemon.set.shiny ? ', shiny' : '');
				this.add('replace', pokemon, details);
				this.add('-end', pokemon, 'Illusion');
			}
		},
		onFaint(pokemon) {
			pokemon.illusion = null;
		},
		name: "Illusion",
		rating: 4.5,
		num: 149,
	},
	immunity: {
		// Prevents and cures poison (both regular and toxic).
		onUpdate(pokemon) {
			if (pokemon.status === 'psn' || pokemon.status === 'tox') {
				this.add('-activate', pokemon, 'ability: Immunity');
				pokemon.cureStatus();
			}
		},
		onSetStatus(status, target, source, effect) {
			if (status.id !== 'psn' && status.id !== 'tox') return;
			// Only show the immune message when a move's primary effect tried to poison.
			if ((effect as Move)?.status) {
				this.add('-immune', target, '[from] ability: Immunity');
			}
			return false;
		},
		isBreakable: true,
		name: "Immunity",
		rating: 2,
		num: 17,
	},
	imposter: {
		onSwitchIn(pokemon) {
			this.effectState.switchingIn = true;
		},
		onStart(pokemon) {
			// Imposter does not activate when Skill Swapped or when Neutralizing Gas leaves the field
			if (!this.effectState.switchingIn) return;
			// copies across in doubles/triples
			// (also copies across in multibattle and diagonally in free-for-all,
			// but side.foe already takes care of those)
			const target = pokemon.side.foe.active[pokemon.side.foe.active.length - 1 - pokemon.position];
			if (target) {
				pokemon.transformInto(target, this.dex.abilities.get('imposter'));
			}
			this.effectState.switchingIn = false;
		},
		name: "Imposter",
		rating: 5,
		num: 150,
	},
	infiltrator: {
		onModifyMove(move) {
			// Moves bypass Substitute and screens (handled downstream via this flag).
			move.infiltrates = true;
		},
		name: "Infiltrator",
		rating: 2.5,
		num: 151,
	},
	innardsout: {
		name: "Innards Out",
		onDamagingHitOrder: 1,
		onDamagingHit(damage, target, source, move) {
			// On fainting from a hit, deals damage to the attacker equal to the
			// (un-Dynamaxed) damage taken.
			if (!target.hp) {
				this.damage(target.getUndynamaxedHP(damage), 
source, target);
			}
		},
		rating: 4,
		num: 215,
	},
	innerfocus: {
		// Immune to flinching; in Gen 8, also blocks Intimidate's Attack drop.
		onTryAddVolatile(status, pokemon) {
			if (status.id === 'flinch') return null;
		},
		onBoost(boost, target, source, effect) {
			if (effect.id === 'intimidate') {
				delete boost.atk;
				this.add('-fail', target, 'unboost', 'Attack', '[from] ability: Inner Focus', '[of] ' + target);
			}
		},
		isBreakable: true,
		name: "Inner Focus",
		rating: 1.5,
		num: 39,
	},
	insomnia: {
		// Prevents and cures sleep.
		onUpdate(pokemon) {
			if (pokemon.status === 'slp') {
				this.add('-activate', pokemon, 'ability: Insomnia');
				pokemon.cureStatus();
			}
		},
		onSetStatus(status, target, source, effect) {
			if (status.id !== 'slp') return;
			// Only show the immune message when a move's primary effect tried to sleep.
			if ((effect as Move)?.status) {
				this.add('-immune', target, '[from] ability: Insomnia');
			}
			return false;
		},
		isBreakable: true,
		name: "Insomnia",
		rating: 2,
		num: 15,
	},
	intimidate: {
		onStart(pokemon) {
			// Lowers Attack of each adjacent foe by 1 on switch-in.
			let activated = false;
			for (const target of pokemon.adjacentFoes()) {
				// Announce the ability once, before the first boost attempt.
				if (!activated) {
					this.add('-ability', pokemon, 'Intimidate', 'boost');
					activated = true;
				}
				// Substitute blocks the drop.
				if (target.volatiles['substitute']) {
					this.add('-immune', target);
				} else {
					this.boost({atk: -1}, target, pokemon, null, true);
				}
			}
		},
		name: "Intimidate",
		rating: 3.5,
		num: 22,
	},
	intrepidsword: {
		// Raises the holder's Attack by 1 on switch-in.
		onStart(pokemon) {
			this.boost({atk: 1}, pokemon);
		},
		name: "Intrepid Sword",
		rating: 4,
		num: 234,
	},
	ironbarbs: {
		onDamagingHitOrder: 1,
		onDamagingHit(damage, target, source, move) {
			// Attackers making contact lose 1/8 of their max HP.
			if (this.checkMoveMakesContact(move, source, target, true)) {
				this.damage(source.baseMaxhp / 8, source, target);
			}
		},
		name: "Iron Barbs",
		rating: 2.5,
		num: 160,
	},
	ironfist: {
		onBasePowerPriority: 23,
		onBasePower(basePower, attacker, defender, move) {
			// Punching moves get a 1.2x (4915/4096) power boost.
			if (move.flags['punch']) {
				this.debug('Iron Fist boost');
				return this.chainModify([4915, 4096]);
			}
		},
		name: "Iron Fist",
		rating: 3,
		num: 89,
	},
	justified: {
		onDamagingHit(damage, target, source, move) {
			// Being hit by a Dark-type move raises Attack by 1.
			if (move.type === 'Dark') {
				this.boost({atk: 1});
			}
		},
		name: "Justified",
		rating: 2.5,
		num: 154,
	},
	keeneye: {
		onBoost(boost, target, source, effect) {
			// Self-inflicted drops are allowed; only foes' accuracy drops are blocked.
			if (source && target === 
source) return; if (boost.accuracy && boost.accuracy < 0) { delete boost.accuracy; if (!(effect as ActiveMove).secondaries) { this.add("-fail", target, "unboost", "accuracy", "[from] ability: Keen Eye", "[of] " + target); } } }, onModifyMove(move) { move.ignoreEvasion = true; }, isBreakable: true, name: "Keen Eye", rating: 0.5, num: 51, }, klutz: { // Item suppression implemented in Pokemon.ignoringItem() within sim/pokemon.js name: "Klutz", rating: -1, num: 103, }, leafguard: { onSetStatus(status, target, source, effect) { if (['sunnyday', 'desolateland'].includes(target.effectiveWeather())) { if ((effect as Move)?.status) { this.add('-immune', target, '[from] ability: Leaf Guard'); } return false; } }, onTryAddVolatile(status, target) { if (status.id === 'yawn' && ['sunnyday', 'desolateland'].includes(target.effectiveWeather())) { this.add('-immune', target, '[from] ability: Leaf Guard'); return null; } }, isBreakable: true, name: "Leaf Guard", rating: 0.5, num: 102, }, levitate: { // airborneness implemented in sim/pokemon.js:Pokemon#isGrounded isBreakable: true, name: "Levitate", rating: 3.5, num: 26, }, libero: { onPrepareHit(source, target, move) { if (move.hasBounced || move.sourceEffect === 'snatch') return; const type = move.type; if (type && type !== '???' 
&& source.getTypes().join() !== type) { if (!source.setType(type)) return; this.add('-start', source, 'typechange', type, '[from] ability: Libero'); } }, name: "Libero", rating: 4.5, num: 236, }, lightmetal: { onModifyWeight(weighthg) { return this.trunc(weighthg / 2); }, isBreakable: true, name: "Light Metal", rating: 1, num: 135, }, lightningrod: { onTryHit(target, source, move) { if (target !== source && move.type === 'Electric') { if (!this.boost({spa: 1})) { this.add('-immune', target, '[from] ability: Lightning Rod'); } return null; } }, onAnyRedirectTarget(target, source, source2, move) { if (move.type !== 'Electric' || ['firepledge', 'grasspledge', 'waterpledge'].includes(move.id)) return; const redirectTarget = ['randomNormal', 'adjacentFoe'].includes(move.target) ? 'normal' : move.target; if (this.validTarget(this.effectState.target, source, redirectTarget)) { if (move.smartTarget) move.smartTarget = false; if (this.effectState.target !== target) { this.add('-activate', this.effectState.target, 'ability: Lightning Rod'); } return this.effectState.target; } }, isBreakable: true, name: "Lightning Rod", rating: 3, num: 31, }, limber: { onUpdate(pokemon) { if (pokemon.status === 'par') { this.add('-activate', pokemon, 'ability: Limber'); pokemon.cureStatus(); } }, onSetStatus(status, target, source, effect) { if (status.id !== 'par') return; if ((effect as Move)?.status) { this.add('-immune', target, '[from] ability: Limber'); } return false; }, isBreakable: true, name: "Limber", rating: 2, num: 7, }, liquidooze: { onSourceTryHeal(damage, target, source, effect) { this.debug("Heal is occurring: " + target + " <- " + source + " :: " + effect.id); const canOoze = ['drain', 'leechseed', 'strengthsap']; if (canOoze.includes(effect.id)) { this.damage(damage); return 0; } }, name: "Liquid Ooze", rating: 1.5, num: 64, }, liquidvoice: { onModifyTypePriority: -1, onModifyType(move, pokemon) { if (move.flags['sound'] && !pokemon.volatiles['dynamax']) { // hardcode 
move.type = 'Water'; } }, name: "Liquid Voice", rating: 1.5, num: 204, }, longreach: { onModifyMove(move) { delete move.flags['contact']; }, name: "Long Reach", rating: 1, num: 203, }, magicbounce: { name: "Magic Bounce", onTryHitPriority: 1, onTryHit(target, source, move) { if (target === source || move.hasBounced || !move.flags['reflectable']) { return; } const newMove = this.dex.getActiveMove(move.id); newMove.hasBounced = true; newMove.pranksterBoosted = false; this.actions.useMove(newMove, target, source); return null; }, onAllyTryHitSide(target, source, move) { if (target.isAlly(source) || move.hasBounced || !move.flags['reflectable']) { return; } const newMove = this.dex.getActiveMove(move.id); newMove.hasBounced = true; newMove.pranksterBoosted = false; this.actions.useMove(newMove, this.effectState.target, source); return null; }, condition: { duration: 1, }, isBreakable: true, rating: 4, num: 156, }, magicguard: { onDamage(damage, target, source, effect) { if (effect.effectType !== 'Move') { if (effect.effectType === 'Ability') this.add('-activate', source, 'ability: ' + effect.name); return false; } }, name: "Magic Guard", rating: 4, num: 98, }, magician: { onAfterMoveSecondarySelf(source, target, move) { if (!move || !target) return; if (target !== source && move.category !== 'Status') { if (source.item || source.volatiles['gem'] || move.id === 'fling') return; const yourItem = target.takeItem(source); if (!yourItem) return; if (!source.setItem(yourItem)) { target.item = yourItem.id; // bypass setItem so we don't break choicelock or anything return; } this.add('-item', source, yourItem, '[from] ability: Magician', '[of] ' + target); } }, name: "Magician", rating: 1.5, num: 170, }, magmaarmor: { onUpdate(pokemon) { if (pokemon.status === 'frz') { this.add('-activate', pokemon, 'ability: Magma Armor'); pokemon.cureStatus(); } }, onImmunity(type, pokemon) { if (type === 'frz') return false; }, isBreakable: true, name: "Magma Armor", rating: 1, num: 40, }, 
magnetpull: { onFoeTrapPokemon(pokemon) { if (pokemon.hasType('Steel') && pokemon.isAdjacent(this.effectState.target)) { pokemon.tryTrap(true); } }, onFoeMaybeTrapPokemon(pokemon, source) { if (!source) source = this.effectState.target; if (!source || !pokemon.isAdjacent(source)) return; if (!pokemon.knownType || pokemon.hasType('Steel')) { pokemon.maybeTrapped = true; } }, name: "Magnet Pull", rating: 4, num: 42, }, marvelscale: { onModifyDefPriority: 6, onModifyDef(def, pokemon) { if (pokemon.status) { return this.chainModify(1.5); } }, isBreakable: true, name: "Marvel Scale", rating: 2.5, num: 63, }, megalauncher: { onBasePowerPriority: 19, onBasePower(basePower, attacker, defender, move) { if (move.flags['pulse']) { return this.chainModify(1.5); } }, name: "Mega Launcher", rating: 3, num: 178, }, merciless: { onModifyCritRatio(critRatio, source, target) { if (target && ['psn', 'tox'].includes(target.status)) return 5; }, name: "Merciless", rating: 1.5, num: 196, }, mimicry: { onStart(pokemon) { if (this.field.terrain) { pokemon.addVolatile('mimicry'); } else { const types = pokemon.baseSpecies.types; if (pokemon.getTypes().join() === types.join() || !pokemon.setType(types)) return; this.add('-start', pokemon, 'typechange', types.join('/'), '[from] ability: Mimicry'); this.hint("Transform Mimicry changes you to your original un-transformed types."); } }, onAnyTerrainStart() { const pokemon = this.effectState.target; delete pokemon.volatiles['mimicry']; pokemon.addVolatile('mimicry'); }, onEnd(pokemon) { delete pokemon.volatiles['mimicry']; }, condition: { onStart(pokemon) { let newType; switch (this.field.terrain) { case 'electricterrain': newType = 'Electric'; break; case 'grassyterrain': newType = 'Grass'; break; case 'mistyterrain': newType = 'Fairy'; break; case 'psychicterrain': newType = 'Psychic'; break; } if (!newType || pokemon.getTypes().join() === newType || !pokemon.setType(newType)) return; this.add('-start', pokemon, 'typechange', newType, '[from] 
ability: Mimicry'); }, onUpdate(pokemon) { if (!this.field.terrain) { const types = pokemon.species.types; if (pokemon.getTypes().join() === types.join() || !pokemon.setType(types)) return; this.add('-activate', pokemon, 'ability: Mimicry'); this.add('-end', pokemon, 'typechange', '[silent]'); pokemon.removeVolatile('mimicry'); } }, }, name: "Mimicry", rating: 0.5, num: 250, }, minus: { onModifySpAPriority: 5, onModifySpA(spa, pokemon) { for (const allyActive of pokemon.allies()) { if (allyActive.hasAbility(['minus', 'plus'])) { return this.chainModify(1.5); } } }, name: "Minus", rating: 0, num: 58, }, mirrorarmor: { onBoost(boost, target, source, effect) { // Don't bounce self stat changes, or boosts that have already bounced if (target === source || !boost || effect.id === 'mirrorarmor') return; let b: BoostID; for (b in boost) { if (boost[b]! < 0) { if (target.boosts[b] === -6) continue; const negativeBoost: SparseBoostsTable = {}; negativeBoost[b] = boost[b]; delete boost[b]; this.add('-ability', target, 'Mirror Armor'); this.boost(negativeBoost, source, target, null, true); } } }, isBreakable: true, name: "Mirror Armor", rating: 2, num: 240, }, mistysurge: { onStart(source) { this.field.setTerrain('mistyterrain'); }, name: "Misty Surge", rating: 3.5, num: 228, }, moldbreaker: { onStart(pokemon) { this.add('-ability', pokemon, 'Mold Breaker'); }, onModifyMove(move) { move.ignoreAbility = true; }, name: "Mold Breaker", rating: 3.5, num: 104, }, moody: { onResidualOrder: 28, onResidualSubOrder: 2, onResidual(pokemon) { let stats: BoostID[] = []; const boost: SparseBoostsTable = {}; let statPlus: BoostID; for (statPlus in pokemon.boosts) { if (statPlus === 'accuracy' || statPlus === 'evasion') continue; if (pokemon.boosts[statPlus] < 6) { stats.push(statPlus); } } let randomStat: BoostID | undefined = stats.length ? 
this.sample(stats) : undefined; if (randomStat) boost[randomStat] = 2; stats = []; let statMinus: BoostID; for (statMinus in pokemon.boosts) { if (statMinus === 'accuracy' || statMinus === 'evasion') continue; if (pokemon.boosts[statMinus] > -6 && statMinus !== randomStat) { stats.push(statMinus); } } randomStat = stats.length ? this.sample(stats) : undefined; if (randomStat) boost[randomStat] = -1; this.boost(boost); }, name: "Moody", rating: 5, num: 141, }, motordrive: { onTryHit(target, source, move) { if (target !== source && move.type === 'Electric') { if (!this.boost({spe: 1})) { this.add('-immune', target, '[from] ability: Motor Drive'); } return null; } }, isBreakable: true, name: "Motor Drive", rating: 3, num: 78, }, moxie: { onSourceAfterFaint(length, target, source, effect) { if (effect && effect.effectType === 'Move') { this.boost({atk: length}, source); } }, name: "Moxie", rating: 3, num: 153, }, multiscale: { onSourceModifyDamage(damage, source, target, move) { if (target.hp >= target.maxhp) { this.debug('Multiscale weaken'); return this.chainModify(0.5); } }, isBreakable: true, name: "Multiscale", rating: 3.5, num: 136, }, multitype: { // Multitype's type-changing itself is implemented in statuses.js isPermanent: true, name: "Multitype", rating: 4, num: 121, }, mummy: { name: "Mummy", onDamagingHit(damage, target, source, move) { const sourceAbility = source.getAbility(); if (sourceAbility.isPermanent || sourceAbility.id === 'mummy') { return; } if (this.checkMoveMakesContact(move, source, target, !source.isAlly(target))) { const oldAbility = source.setAbility('mummy', target); if (oldAbility) { this.add('-activate', target, 'ability: Mummy', this.dex.abilities.get(oldAbility).name, '[of] ' + source); } } }, rating: 2, num: 152, }, naturalcure: { onCheckShow(pokemon) { // This is complicated // For the most part, in-game, it's obvious whether or not Natural Cure activated, // since you can see how many of your opponent's pokemon are statused. 
// The only ambiguous situation happens in Doubles/Triples, where multiple pokemon // that could have Natural Cure switch out, but only some of them get cured. if (pokemon.side.active.length === 1) return; if (pokemon.showCure === true || pokemon.showCure === false) return; const cureList = []; let noCureCount = 0; for (const curPoke of pokemon.side.active) { // pokemon not statused if (!curPoke?.status) { // this.add('-message', "" + curPoke + " skipped: not statused or doesn't exist"); continue; } if (curPoke.showCure) { // this.add('-message', "" + curPoke + " skipped: Natural Cure already known"); continue; } const species = curPoke.species; // pokemon can't get Natural Cure if (!Object.values(species.abilities).includes('Natural Cure')) { // this.add('-message', "" + curPoke + " skipped: no Natural Cure"); continue; } // pokemon's ability is known to be Natural Cure if (!species.abilities['1'] && !species.abilities['H']) { // this.add('-message', "" + curPoke + " skipped: only one ability"); continue; } // pokemon isn't switching this turn if (curPoke !== pokemon && !this.queue.willSwitch(curPoke)) { // this.add('-message', "" + curPoke + " skipped: not switching"); continue; } if (curPoke.hasAbility('naturalcure')) { // this.add('-message', "" + curPoke + " confirmed: could be Natural Cure (and is)"); cureList.push(curPoke); } else { // this.add('-message', "" + curPoke + " confirmed: could be Natural Cure (but isn't)"); noCureCount++; } } if (!cureList.length || !noCureCount) { // It's possible to know what pokemon were cured for (const pkmn of cureList) { pkmn.showCure = true; } } else { // It's not possible to know what pokemon were cured // Unlike a -hint, this is real information that battlers need, so we use a -message this.add('-message', "(" + cureList.length + " of " + pokemon.side.name + "'s pokemon " + (cureList.length === 1 ? 
"was" : "were") + " cured by Natural Cure.)"); for (const pkmn of cureList) { pkmn.showCure = false; } } }, onSwitchOut(pokemon) { if (!pokemon.status) return; // if pokemon.showCure is undefined, it was skipped because its ability // is known if (pokemon.showCure === undefined) pokemon.showCure = true; if (pokemon.showCure) this.add('-curestatus', pokemon, pokemon.status, '[from] ability: Natural Cure'); pokemon.setStatus(''); // only reset .showCure if it's false // (once you know a Pokemon has Natural Cure, its cures are always known) if (!pokemon.showCure) pokemon.showCure = undefined; }, name: "Natural Cure", rating: 2.5, num: 30, }, neuroforce: { onModifyDamage(damage, source, target, move) { if (move && target.getMoveHitData(move).typeMod > 0) { return this.chainModify([5120, 4096]); } }, name: "Neuroforce", rating: 2.5, num: 233, }, neutralizinggas: { // Ability suppression implemented in sim/pokemon.ts:Pokemon#ignoringAbility onPreStart(pokemon) { this.add('-ability', pokemon, 'Neutralizing Gas'); pokemon.abilityState.ending = false; for (const target of this.getAllActive()) { if (target.illusion) { this.singleEvent('End', this.dex.abilities.get('Illusion'), target.abilityState, target, pokemon, 'neutralizinggas'); } if (target.volatiles['slowstart']) { delete target.volatiles['slowstart']; this.add('-end', target, 'Slow Start', '[silent]'); } } }, onEnd(source) { for (const pokemon of this.getAllActive()) { if (pokemon !== source && pokemon.hasAbility('Neutralizing Gas')) { return; } } this.add('-end', source, 'ability: Neutralizing Gas'); // FIXME this happens before the pokemon switches out, should be the opposite order. // Not an easy fix since we cant use a supported event. Would need some kind of special event that // gathers events to run after the switch and then runs them when the ability is no longer accessible. 
// (If you're tackling this, do note extreme weathers have the same issue) // Mark this pokemon's ability as ending so Pokemon#ignoringAbility skips it if (source.abilityState.ending) return; source.abilityState.ending = true; const sortedActive = this.getAllActive(); this.speedSort(sortedActive); for (const pokemon of sortedActive) { if (pokemon !== source) { if (pokemon.getAbility().isPermanent) continue; // does not interact with e.g Ice Face, Zen Mode // Will be suppressed by Pokemon#ignoringAbility if needed this.singleEvent('Start', pokemon.getAbility(), pokemon.abilityState, pokemon); } } }, name: "Neutralizing Gas", rating: 4, num: 256, }, noguard: { onAnyInvulnerabilityPriority: 1, onAnyInvulnerability(target, source, move) { if (move && (source === this.effectState.target || target === this.effectState.target)) return 0; }, onAnyAccuracy(accuracy, target, source, move) { if (move && (source === this.effectState.target || target === this.effectState.target)) { return true; } return accuracy; }, name: "No Guard", rating: 4, num: 99, }, normalize: { onModifyTypePriority: 1, onModifyType(move, pokemon) { const noModifyType = [ 'hiddenpower', 'judgment', 'multiattack', 'naturalgift', 'revelationdance', 'struggle', 'technoblast', 'terrainpulse', 'weatherball', ]; if (!(move.isZ && move.category !== 'Status') && !noModifyType.includes(move.id)) { move.type = 'Normal'; move.normalizeBoosted = true; } }, onBasePowerPriority: 23, onBasePower(basePower, pokemon, target, move) { if (move.normalizeBoosted) return this.chainModify([4915, 4096]); }, name: "Normalize", rating: 0, num: 96, }, oblivious: { onUpdate(pokemon) { if (pokemon.volatiles['attract']) { this.add('-activate', pokemon, 'ability: Oblivious'); pokemon.removeVolatile('attract'); this.add('-end', pokemon, 'move: Attract', '[from] ability: Oblivious'); } if (pokemon.volatiles['taunt']) { this.add('-activate', pokemon, 'ability: Oblivious'); pokemon.removeVolatile('taunt'); // Taunt's volatile already 
sends the -end message when removed } }, onImmunity(type, pokemon) { if (type === 'attract') return false; }, onTryHit(pokemon, target, move) { if (move.id === 'attract' || move.id === 'captivate' || move.id === 'taunt') { this.add('-immune', pokemon, '[from] ability: Oblivious'); return null; } }, onBoost(boost, target, source, effect) { if (effect.id === 'intimidate') { delete boost.atk; this.add('-fail', target, 'unboost', 'Attack', '[from] ability: Oblivious', '[of] ' + target); } }, isBreakable: true, name: "Oblivious", rating: 1.5, num: 12, }, overcoat: { onImmunity(type, pokemon) { if (type === 'sandstorm' || type === 'hail' || type === 'powder') return false; }, onTryHitPriority: 1, onTryHit(target, source, move) { if (move.flags['powder'] && target !== source && this.dex.getImmunity('powder', target)) { this.add('-immune', target, '[from] ability: Overcoat'); return null; } }, isBreakable: true, name: "Overcoat", rating: 2, num: 142, }, overgrow: { onModifyAtkPriority: 5, onModifyAtk(atk, attacker, defender, move) { if (move.type === 'Grass' && attacker.hp <= attacker.maxhp / 3) { this.debug('Overgrow boost'); return this.chainModify(1.5); } }, onModifySpAPriority: 5, onModifySpA(atk, attacker, defender, move) { if (move.type === 'Grass' && attacker.hp <= attacker.maxhp / 3) { this.debug('Overgrow boost'); return this.chainModify(1.5); } }, name: "Overgrow", rating: 2, num: 65, }, owntempo: { onUpdate(pokemon) { if (pokemon.volatiles['confusion']) { this.add('-activate', pokemon, 'ability: Own Tempo'); pokemon.removeVolatile('confusion'); } }, onTryAddVolatile(status, pokemon) { if (status.id === 'confusion') return null; }, onHit(target, source, move) { if (move?.volatileStatus === 'confusion') { this.add('-immune', target, 'confusion', '[from] ability: Own Tempo'); } }, onBoost(boost, target, source, effect) { if (effect.id === 'intimidate') { delete boost.atk; this.add('-fail', target, 'unboost', 'Attack', '[from] ability: Own Tempo', '[of] ' + target); 
} }, isBreakable: true, name: "Own Tempo", rating: 1.5, num: 20, }, parentalbond: { onPrepareHit(source, target, move) { if (move.category === 'Status' || move.selfdestruct || move.multihit) return; if (['endeavor', 'fling', 'iceball', 'rollout'].includes(move.id)) return; if (!move.flags['charge'] && !move.spreadHit && !move.isZ && !move.isMax) { move.multihit = 2; move.multihitType = 'parentalbond'; } }, // Damage modifier implemented in BattleActions#modifyDamage() onSourceModifySecondaries(secondaries, target, source, move) { if (move.multihitType === 'parentalbond' && move.id === 'secretpower' && move.hit < 2) { // hack to prevent accidentally suppressing King's Rock/Razor Fang return secondaries.filter(effect => effect.volatileStatus === 'flinch'); } }, name: "Parental Bond", rating: 4.5, num: 185, }, pastelveil: { onStart(pokemon) { for (const ally of pokemon.alliesAndSelf()) { if (['psn', 'tox'].includes(ally.status)) { this.add('-activate', pokemon, 'ability: Pastel Veil'); ally.cureStatus(); } } }, onUpdate(pokemon) { if (['psn', 'tox'].includes(pokemon.status)) { this.add('-activate', pokemon, 'ability: Pastel Veil'); pokemon.cureStatus(); } }, onAllySwitchIn(pokemon) { if (['psn', 'tox'].includes(pokemon.status)) { this.add('-activate', this.effectState.target, 'ability: Pastel Veil'); pokemon.cureStatus(); } }, onSetStatus(status, target, source, effect) { if (!['psn', 'tox'].includes(status.id)) return; if ((effect as Move)?.status) { this.add('-immune', target, '[from] ability: Pastel Veil'); } return false; }, onAllySetStatus(status, target, source, effect) { if (!['psn', 'tox'].includes(status.id)) return; if ((effect as Move)?.status) { const effectHolder = this.effectState.target; this.add('-block', target, 'ability: Pastel Veil', '[of] ' + effectHolder); } return false; }, isBreakable: true, name: "Pastel Veil", rating: 2, num: 257, }, perishbody: { onDamagingHit(damage, target, source, move) { if (!this.checkMoveMakesContact(move, source, 
target)) return; let announced = false; for (const pokemon of [target, source]) { if (pokemon.volatiles['perishsong']) continue; if (!announced) { this.add('-ability', target, 'Perish Body'); announced = true; } pokemon.addVolatile('perishsong'); } }, name: "Perish Body", rating: 1, num: 253, }, pickpocket: { onAfterMoveSecondary(target, source, move) { if (source && source !== target && move?.flags['contact']) { if (target.item || target.switchFlag || target.forceSwitchFlag || source.switchFlag === true) { return; } const yourItem = source.takeItem(target); if (!yourItem) { return; } if (!target.setItem(yourItem)) { source.item = yourItem.id; return; } this.add('-enditem', source, yourItem, '[silent]', '[from] ability: Pickpocket', '[of] ' + source); this.add('-item', target, yourItem, '[from] ability: Pickpocket', '[of] ' + source); } }, name: "Pickpocket", rating: 1, num: 124, }, pickup: { onResidualOrder: 28, onResidualSubOrder: 2, onResidual(pokemon) { if (pokemon.item) return; const pickupTargets = this.getAllActive().filter(target => ( target.lastItem && target.usedItemThisTurn && pokemon.isAdjacent(target) )); if (!pickupTargets.length) return; const randomTarget = this.sample(pickupTargets); const item = randomTarget.lastItem; randomTarget.lastItem = ''; this.add('-item', pokemon, this.dex.items.get(item), '[from] ability: Pickup'); pokemon.setItem(item); }, name: "Pickup", rating: 0.5, num: 53, }, pixilate: { onModifyTypePriority: -1, onModifyType(move, pokemon) { const noModifyType = [ 'judgment', 'multiattack', 'naturalgift', 'revelationdance', 'technoblast', 'terrainpulse', 'weatherball', ]; if (move.type === 'Normal' && !noModifyType.includes(move.id) && !(move.isZ && move.category !== 'Status')) { move.type = 'Fairy'; move.pixilateBoosted = true; } }, onBasePowerPriority: 23, onBasePower(basePower, pokemon, target, move) { if (move.pixilateBoosted) return this.chainModify([4915, 4096]); }, name: "Pixilate", rating: 4, num: 182, }, plus: { 
onModifySpAPriority: 5, onModifySpA(spa, pokemon) { for (const allyActive of pokemon.allies()) { if (allyActive.hasAbility(['minus', 'plus'])) { return this.chainModify(1.5); } } }, name: "Plus", rating: 0, num: 57, }, poisonheal: { onDamagePriority: 1, onDamage(damage, target, source, effect) { if (effect.id === 'psn' || effect.id === 'tox') { this.heal(target.baseMaxhp / 8); return false; } }, name: "Poison Heal", rating: 4, num: 90, }, poisonpoint: { onDamagingHit(damage, target, source, move) { if (this.checkMoveMakesContact(move, source, target)) { if (this.randomChance(3, 10)) { source.trySetStatus('psn', target); } } }, name: "Poison Point", rating: 1.5, num: 38, }, poisontouch: { // upokecenter says this is implemented as an added secondary effect onModifyMove(move) { if (!move?.flags['contact'] || move.target === 'self') return; if (!move.secondaries) { move.secondaries = []; } move.secondaries.push({ chance: 30, status: 'psn', ability: this.dex.abilities.get('poisontouch'), }); }, name: "Poison Touch", rating: 2, num: 143, }, powerconstruct: { onResidualOrder: 29, onResidual(pokemon) { if (pokemon.baseSpecies.baseSpecies !== 'Zygarde' || pokemon.transformed || !pokemon.hp) return; if (pokemon.species.id === 'zygardecomplete' || pokemon.hp > pokemon.maxhp / 2) return; this.add('-activate', pokemon, 'ability: Power Construct'); pokemon.formeChange('Zygarde-Complete', this.effect, true); pokemon.baseMaxhp = Math.floor(Math.floor( 2 * pokemon.species.baseStats['hp'] + pokemon.set.ivs['hp'] + Math.floor(pokemon.set.evs['hp'] / 4) + 100 ) * pokemon.level / 100 + 10); const newMaxHP = pokemon.volatiles['dynamax'] ? 
(2 * pokemon.baseMaxhp) : pokemon.baseMaxhp;
			pokemon.hp = newMaxHP - (pokemon.maxhp - pokemon.hp);
			pokemon.maxhp = newMaxHP;
			this.add('-heal', pokemon, pokemon.getHealth, '[silent]');
		},
		isPermanent: true,
		name: "Power Construct",
		rating: 5,
		num: 211,
	},
	powerofalchemy: {
		// Copies a fainting ally's ability, unless that ability is uncopyable.
		onAllyFaint(target) {
			if (!this.effectState.target.hp) return;
			const ability = target.getAbility();
			const additionalBannedAbilities = [
				'noability', 'flowergift', 'forecast', 'hungerswitch', 'illusion', 'imposter', 'neutralizinggas', 'powerofalchemy', 'receiver', 'trace', 'wonderguard',
			];
			if (ability.isPermanent || additionalBannedAbilities.includes(target.ability)) return;
			this.add('-ability', this.effectState.target, ability, '[from] ability: Power of Alchemy', '[of] ' + target);
			this.effectState.target.setAbility(ability);
		},
		name: "Power of Alchemy",
		rating: 0,
		num: 223,
	},
	powerspot: {
		// Boosts allies' move power by 30% (never this Pokemon's own moves).
		onAllyBasePowerPriority: 22,
		onAllyBasePower(basePower, attacker, defender, move) {
			if (attacker !== this.effectState.target) {
				this.debug('Power Spot boost');
				return this.chainModify([5325, 4096]);
			}
		},
		name: "Power Spot",
		rating: 1,
		num: 249,
	},
	prankster: {
		// Status moves gain +1 priority; the flag also feeds the
		// Dark-type prankster immunity check elsewhere in the sim.
		onModifyPriority(priority, pokemon, target, move) {
			if (move?.category === 'Status') {
				move.pranksterBoosted = true;
				return priority + 1;
			}
		},
		name: "Prankster",
		rating: 4,
		num: 158,
	},
	pressure: {
		onStart(pokemon) {
			this.add('-ability', pokemon, 'Pressure');
		},
		// Opposing Pokemon lose one extra PP per move used.
		onDeductPP(target, source) {
			if (target.isAlly(source)) return;
			return 1;
		},
		name: "Pressure",
		rating: 2.5,
		num: 46,
	},
	primordialsea: {
		onStart(source) {
			this.field.setWeather('primordialsea');
		},
		onAnySetWeather(target, source, weather) {
			// Heavy rain can only be replaced by another strong weather.
			const strongWeathers = ['desolateland', 'primordialsea', 'deltastream'];
			if (this.field.getWeather().id === 'primordialsea' && !strongWeathers.includes(weather.id)) return false;
		},
		onEnd(pokemon) {
			if (this.field.weatherState.source !== pokemon) return;
			// Hand the weather off to another Primordial Sea holder, if any.
			for (const target of this.getAllActive()) {
				if (target === pokemon) continue;
				if (target.hasAbility('primordialsea')) {
					this.field.weatherState.source = target;
					return;
				}
			}
			this.field.clearWeather();
		},
		name: "Primordial Sea",
		rating: 4.5,
		num: 189,
	},
	prismarmor: {
		// 25% less damage from super effective hits.
		// Deliberately NOT isBreakable: Prism Armor ignores Mold Breaker.
		onSourceModifyDamage(damage, source, target, move) {
			if (target.getMoveHitData(move).typeMod > 0) {
				this.debug('Prism Armor neutralize');
				return this.chainModify(0.75);
			}
		},
		name: "Prism Armor",
		rating: 3,
		num: 232,
	},
	propellertail: {
		onModifyMovePriority: 1,
		onModifyMove(move) {
			// most of the implementation is in Battle#getTarget
			move.tracksTarget = move.target !== 'scripted';
		},
		name: "Propeller Tail",
		rating: 0,
		num: 239,
	},
	protean: {
		// Changes the user's type to match the move it is about to use.
		onPrepareHit(source, target, move) {
			if (move.hasBounced || move.sourceEffect === 'snatch') return;
			const type = move.type;
			if (type && type !== '???' && source.getTypes().join() !== type) {
				if (!source.setType(type)) return;
				this.add('-start', source, 'typechange', type, '[from] ability: Protean');
			}
		},
		name: "Protean",
		rating: 4.5,
		num: 168,
	},
	psychicsurge: {
		onStart(source) {
			this.field.setTerrain('psychicterrain');
		},
		name: "Psychic Surge",
		rating: 4,
		num: 227,
	},
	punkrock: {
		// Sound moves: 30% stronger when used, half damage when received.
		onBasePowerPriority: 7,
		onBasePower(basePower, attacker, defender, move) {
			if (move.flags['sound']) {
				this.debug('Punk Rock boost');
				return this.chainModify([5325, 4096]);
			}
		},
		onSourceModifyDamage(damage, source, target, move) {
			if (move.flags['sound']) {
				this.debug('Punk Rock weaken');
				return this.chainModify(0.5);
			}
		},
		isBreakable: true,
		name: "Punk Rock",
		rating: 3.5,
		num: 244,
	},
	purepower: {
		// Doubles the Attack stat.
		onModifyAtkPriority: 5,
		onModifyAtk(atk) {
			return this.chainModify(2);
		},
		name: "Pure Power",
		rating: 5,
		num: 74,
	},
	queenlymajesty: {
		// Blocks opposing priority moves aimed at this Pokemon's side.
		onFoeTryMove(target, source, move) {
			const targetAllExceptions = ['perishsong', 'flowershield', 'rototiller'];
			if (move.target === 'foeSide' || (move.target === 'all' && !targetAllExceptions.includes(move.id))) {
				return;
			}
			const dazzlingHolder = this.effectState.target;
			if ((source.isAlly(dazzlingHolder) || move.target === 'all') &&
move.priority > 0.1) { this.attrLastMove('[still]'); this.add('cant', dazzlingHolder, 'ability: Queenly Majesty', move, '[of] ' + target); return false; } }, isBreakable: true, name: "Queenly Majesty", rating: 2.5, num: 214, }, quickdraw: { onFractionalPriorityPriority: -1, onFractionalPriority(priority, pokemon, target, move) { if (move.category !== "Status" && this.randomChance(3, 10)) { this.add('-activate', pokemon, 'ability: Quick Draw'); return 0.1; } }, name: "Quick Draw", rating: 2.5, num: 259, }, quickfeet: { onModifySpe(spe, pokemon) { if (pokemon.status) { return this.chainModify(1.5); } }, name: "Quick Feet", rating: 2.5, num: 95, }, raindish: { onWeather(target, source, effect) { if (target.hasItem('utilityumbrella')) return; if (effect.id === 'raindance' || effect.id === 'primordialsea') { this.heal(target.baseMaxhp / 16); } }, name: "Rain Dish", rating: 1.5, num: 44, }, rattled: { onDamagingHit(damage, target, source, move) { if (['Dark', 'Bug', 'Ghost'].includes(move.type)) { this.boost({spe: 1}); } }, onAfterBoost(boost, target, source, effect) { if (effect && effect.id === 'intimidate') { this.boost({spe: 1}); } }, name: "Rattled", rating: 1.5, num: 155, }, receiver: { onAllyFaint(target) { if (!this.effectState.target.hp) return; const ability = target.getAbility(); const additionalBannedAbilities = [ 'noability', 'flowergift', 'forecast', 'hungerswitch', 'illusion', 'imposter', 'neutralizinggas', 'powerofalchemy', 'receiver', 'trace', 'wonderguard', ]; if (target.getAbility().isPermanent || additionalBannedAbilities.includes(target.ability)) return; this.add('-ability', this.effectState.target, ability, '[from] ability: Receiver', '[of] ' + target); this.effectState.target.setAbility(ability); }, name: "Receiver", rating: 0, num: 222, }, reckless: { onBasePowerPriority: 23, onBasePower(basePower, attacker, defender, move) { if (move.recoil || move.hasCrashDamage) { this.debug('Reckless boost'); return this.chainModify([4915, 4096]); } }, name: 
"Reckless", rating: 3, num: 120, }, refrigerate: { onModifyTypePriority: -1, onModifyType(move, pokemon) { const noModifyType = [ 'judgment', 'multiattack', 'naturalgift', 'revelationdance', 'technoblast', 'terrainpulse', 'weatherball', ]; if (move.type === 'Normal' && !noModifyType.includes(move.id) && !(move.isZ && move.category !== 'Status')) { move.type = 'Ice'; move.refrigerateBoosted = true; } }, onBasePowerPriority: 23, onBasePower(basePower, pokemon, target, move) { if (move.refrigerateBoosted) return this.chainModify([4915, 4096]); }, name: "Refrigerate", rating: 4, num: 174, }, regenerator: { onSwitchOut(pokemon) { pokemon.heal(pokemon.baseMaxhp / 3); }, name: "Regenerator", rating: 4.5, num: 144, }, ripen: { onTryHeal(damage, target, source, effect) { if (!effect) return; if (effect.id === 'berryjuice' || effect.id === 'leftovers') { this.add('-activate', target, 'ability: Ripen'); } if ((effect as Item).isBerry) return this.chainModify(2); }, onBoost(boost, target, source, effect) { if (effect && (effect as Item).isBerry) { let b: BoostID; for (b in boost) { boost[b]! 
*= 2; } } }, onSourceModifyDamagePriority: -1, onSourceModifyDamage(damage, source, target, move) { if (target.abilityState.berryWeaken) { target.abilityState.berryWeaken = false; return this.chainModify(0.5); } }, onTryEatItemPriority: -1, onTryEatItem(item, pokemon) { this.add('-activate', pokemon, 'ability: Ripen'); }, onEatItem(item, pokemon) { const weakenBerries = [ 'Babiri Berry', 'Charti Berry', 'Chilan Berry', 'Chople Berry', 'Coba Berry', 'Colbur Berry', 'Haban Berry', 'Kasib Berry', 'Kebia Berry', 'Occa Berry', 'Passho Berry', 'Payapa Berry', 'Rindo Berry', 'Roseli Berry', 'Shuca Berry', 'Tanga Berry', 'Wacan Berry', 'Yache Berry', ]; // Record if the pokemon ate a berry to resist the attack pokemon.abilityState.berryWeaken = weakenBerries.includes(item.name); }, name: "Ripen", rating: 2, num: 247, }, rivalry: { onBasePowerPriority: 24, onBasePower(basePower, attacker, defender, move) { if (attacker.gender && defender.gender) { if (attacker.gender === defender.gender) { this.debug('Rivalry boost'); return this.chainModify(1.25); } else { this.debug('Rivalry weaken'); return this.chainModify(0.75); } } }, name: "Rivalry", rating: 0, num: 79, }, rkssystem: { // RKS System's type-changing itself is implemented in statuses.js isPermanent: true, name: "RKS System", rating: 4, num: 225, }, rockhead: { onDamage(damage, target, source, effect) { if (effect.id === 'recoil') { if (!this.activeMove) throw new Error("Battle.activeMove is null"); if (this.activeMove.id !== 'struggle') return null; } }, name: "Rock Head", rating: 3, num: 69, }, roughskin: { onDamagingHitOrder: 1, onDamagingHit(damage, target, source, move) { if (this.checkMoveMakesContact(move, source, target, true)) { this.damage(source.baseMaxhp / 8, source, target); } }, name: "Rough Skin", rating: 2.5, num: 24, }, runaway: { name: "Run Away", rating: 0, num: 50, }, sandforce: { onBasePowerPriority: 21, onBasePower(basePower, attacker, defender, move) { if (this.field.isWeather('sandstorm')) { if 
(move.type === 'Rock' || move.type === 'Ground' || move.type === 'Steel') { this.debug('Sand Force boost'); return this.chainModify([5325, 4096]); } } }, onImmunity(type, pokemon) { if (type === 'sandstorm') return false; }, name: "Sand Force", rating: 2, num: 159, }, sandrush: { onModifySpe(spe, pokemon) { if (this.field.isWeather('sandstorm')) { return this.chainModify(2); } }, onImmunity(type, pokemon) { if (type === 'sandstorm') return false; }, name: "Sand Rush", rating: 3, num: 146, }, sandspit: { onDamagingHit(damage, target, source, move) { if (this.field.getWeather().id !== 'sandstorm') { this.field.setWeather('sandstorm'); } }, name: "Sand Spit", rating: 2, num: 245, }, sandstream: { onStart(source) { this.field.setWeather('sandstorm'); }, name: "Sand Stream", rating: 4, num: 45, }, sandveil: { onImmunity(type, pokemon) { if (type === 'sandstorm') return false; }, onModifyAccuracyPriority: -1, onModifyAccuracy(accuracy) { if (typeof accuracy !== 'number') return; if (this.field.isWeather('sandstorm')) { this.debug('Sand Veil - decreasing accuracy'); return this.chainModify([3277, 4096]); } }, isBreakable: true, name: "Sand Veil", rating: 1.5, num: 8, }, sapsipper: { onTryHitPriority: 1, onTryHit(target, source, move) { if (target !== source && move.type === 'Grass') { if (!this.boost({atk: 1})) { this.add('-immune', target, '[from] ability: Sap Sipper'); } return null; } }, onAllyTryHitSide(target, source, move) { if (source === this.effectState.target || !target.isAlly(source)) return; if (move.type === 'Grass') { this.boost({atk: 1}, this.effectState.target); } }, isBreakable: true, name: "Sap Sipper", rating: 3, num: 157, }, schooling: { onStart(pokemon) { if (pokemon.baseSpecies.baseSpecies !== 'Wishiwashi' || pokemon.level < 20 || pokemon.transformed) return; if (pokemon.hp > pokemon.maxhp / 4) { if (pokemon.species.id === 'wishiwashi') { pokemon.formeChange('Wishiwashi-School'); } } else { if (pokemon.species.id === 'wishiwashischool') { 
pokemon.formeChange('Wishiwashi'); } } }, onResidualOrder: 29, onResidual(pokemon) { if ( pokemon.baseSpecies.baseSpecies !== 'Wishiwashi' || pokemon.level < 20 || pokemon.transformed || !pokemon.hp ) return; if (pokemon.hp > pokemon.maxhp / 4) { if (pokemon.species.id === 'wishiwashi') { pokemon.formeChange('Wishiwashi-School'); } } else { if (pokemon.species.id === 'wishiwashischool') { pokemon.formeChange('Wishiwashi'); } } }, isPermanent: true, name: "Schooling", rating: 3, num: 208, }, scrappy: { onModifyMovePriority: -5, onModifyMove(move) { if (!move.ignoreImmunity) move.ignoreImmunity = {}; if (move.ignoreImmunity !== true) { move.ignoreImmunity['Fighting'] = true; move.ignoreImmunity['Normal'] = true; } }, onBoost(boost, target, source, effect) { if (effect.id === 'intimidate') { delete boost.atk; this.add('-fail', target, 'unboost', 'Attack', '[from] ability: Scrappy', '[of] ' + target); } }, name: "Scrappy", rating: 3, num: 113, }, screencleaner: { onStart(pokemon) { let activated = false; for (const sideCondition of ['reflect', 'lightscreen', 'auroraveil']) { for (const side of [pokemon.side, ...pokemon.side.foeSidesWithConditions()]) { if (side.getSideCondition(sideCondition)) { if (!activated) { this.add('-activate', pokemon, 'ability: Screen Cleaner'); activated = true; } side.removeSideCondition(sideCondition); } } } }, name: "Screen Cleaner", rating: 2, num: 251, }, serenegrace: { onModifyMovePriority: -2, onModifyMove(move) { if (move.secondaries) { this.debug('doubling secondary chance'); for (const secondary of move.secondaries) { if (secondary.chance) secondary.chance *= 2; } } if (move.self?.chance) move.self.chance *= 2; }, name: "Serene Grace", rating: 3.5, num: 32, }, shadowshield: { onSourceModifyDamage(damage, source, target, move) { if (target.hp >= target.maxhp) { this.debug('Shadow Shield weaken'); return this.chainModify(0.5); } }, name: "Shadow Shield", rating: 3.5, num: 231, }, shadowtag: { onFoeTrapPokemon(pokemon) { if 
(!pokemon.hasAbility('shadowtag') && pokemon.isAdjacent(this.effectState.target)) { pokemon.tryTrap(true); } }, onFoeMaybeTrapPokemon(pokemon, source) { if (!source) source = this.effectState.target; if (!source || !pokemon.isAdjacent(source)) return; if (!pokemon.hasAbility('shadowtag')) { pokemon.maybeTrapped = true; } }, name: "Shadow Tag", rating: 5, num: 23, }, shedskin: { onResidualOrder: 5, onResidualSubOrder: 3, onResidual(pokemon) { if (pokemon.hp && pokemon.status && this.randomChance(33, 100)) { this.debug('shed skin'); this.add('-activate', pokemon, 'ability: Shed Skin'); pokemon.cureStatus(); } }, name: "Shed Skin", rating: 3, num: 61, }, sheerforce: { onModifyMove(move, pokemon) { if (move.secondaries) { delete move.secondaries; // Technically not a secondary effect, but it is negated delete move.self;<|fim▁hole|> }, onBasePowerPriority: 21, onBasePower(basePower, pokemon, target, move) { if (move.hasSheerForce) return this.chainModify([5325, 4096]); }, name: "Sheer Force", rating: 3.5, num: 125, }, shellarmor: { onCriticalHit: false, isBreakable: true, name: "Shell Armor", rating: 1, num: 75, }, shielddust: { onModifySecondaries(secondaries) { this.debug('Shield Dust prevent secondary'); return secondaries.filter(effect => !!(effect.self || effect.dustproof)); }, isBreakable: true, name: "Shield Dust", rating: 2, num: 19, }, shieldsdown: { onStart(pokemon) { if (pokemon.baseSpecies.baseSpecies !== 'Minior' || pokemon.transformed) return; if (pokemon.hp > pokemon.maxhp / 2) { if (pokemon.species.forme !== 'Meteor') { pokemon.formeChange('Minior-Meteor'); } } else { if (pokemon.species.forme === 'Meteor') { pokemon.formeChange(pokemon.set.species); } } }, onResidualOrder: 29, onResidual(pokemon) { if (pokemon.baseSpecies.baseSpecies !== 'Minior' || pokemon.transformed || !pokemon.hp) return; if (pokemon.hp > pokemon.maxhp / 2) { if (pokemon.species.forme !== 'Meteor') { pokemon.formeChange('Minior-Meteor'); } } else { if (pokemon.species.forme === 
'Meteor') { pokemon.formeChange(pokemon.set.species); } } }, onSetStatus(status, target, source, effect) { if (target.species.id !== 'miniormeteor' || target.transformed) return; if ((effect as Move)?.status) { this.add('-immune', target, '[from] ability: Shields Down'); } return false; }, onTryAddVolatile(status, target) { if (target.species.id !== 'miniormeteor' || target.transformed) return; if (status.id !== 'yawn') return; this.add('-immune', target, '[from] ability: Shields Down'); return null; }, isPermanent: true, name: "Shields Down", rating: 3, num: 197, }, simple: { onBoost(boost, target, source, effect) { if (effect && effect.id === 'zpower') return; let i: BoostID; for (i in boost) { boost[i]! *= 2; } }, isBreakable: true, name: "Simple", rating: 4, num: 86, }, skilllink: { onModifyMove(move) { if (move.multihit && Array.isArray(move.multihit) && move.multihit.length) { move.multihit = move.multihit[1]; } if (move.multiaccuracy) { delete move.multiaccuracy; } }, name: "Skill Link", rating: 3, num: 92, }, slowstart: { onStart(pokemon) { pokemon.addVolatile('slowstart'); }, onEnd(pokemon) { delete pokemon.volatiles['slowstart']; this.add('-end', pokemon, 'Slow Start', '[silent]'); }, condition: { duration: 5, onResidualOrder: 28, onResidualSubOrder: 2, onStart(target) { this.add('-start', target, 'ability: Slow Start'); }, onModifyAtkPriority: 5, onModifyAtk(atk, pokemon) { return this.chainModify(0.5); }, onModifySpe(spe, pokemon) { return this.chainModify(0.5); }, onEnd(target) { this.add('-end', target, 'Slow Start'); }, }, name: "Slow Start", rating: -1, num: 112, }, slushrush: { onModifySpe(spe, pokemon) { if (this.field.isWeather('hail')) { return this.chainModify(2); } }, name: "Slush Rush", rating: 3, num: 202, }, sniper: { onModifyDamage(damage, source, target, move) { if (target.getMoveHitData(move).crit) { this.debug('Sniper boost'); return this.chainModify(1.5); } }, name: "Sniper", rating: 2, num: 97, }, snowcloak: { onImmunity(type, 
pokemon) { if (type === 'hail') return false; }, onModifyAccuracyPriority: -1, onModifyAccuracy(accuracy) { if (typeof accuracy !== 'number') return; if (this.field.isWeather('hail')) { this.debug('Snow Cloak - decreasing accuracy'); return this.chainModify([3277, 4096]); } }, isBreakable: true, name: "Snow Cloak", rating: 1.5, num: 81, }, snowwarning: { onStart(source) { this.field.setWeather('hail'); }, name: "Snow Warning", rating: 4, num: 117, }, solarpower: { onModifySpAPriority: 5, onModifySpA(spa, pokemon) { if (['sunnyday', 'desolateland'].includes(pokemon.effectiveWeather())) { return this.chainModify(1.5); } }, onWeather(target, source, effect) { if (target.hasItem('utilityumbrella')) return; if (effect.id === 'sunnyday' || effect.id === 'desolateland') { this.damage(target.baseMaxhp / 8, target, target); } }, name: "Solar Power", rating: 2, num: 94, }, solidrock: { onSourceModifyDamage(damage, source, target, move) { if (target.getMoveHitData(move).typeMod > 0) { this.debug('Solid Rock neutralize'); return this.chainModify(0.75); } }, isBreakable: true, name: "Solid Rock", rating: 3, num: 116, }, soulheart: { onAnyFaintPriority: 1, onAnyFaint() { this.boost({spa: 1}, this.effectState.target); }, name: "Soul-Heart", rating: 3.5, num: 220, }, soundproof: { onTryHit(target, source, move) { if (target !== source && move.flags['sound']) { this.add('-immune', target, '[from] ability: Soundproof'); return null; } }, onAllyTryHitSide(target, source, move) { if (move.flags['sound']) { this.add('-immune', this.effectState.target, '[from] ability: Soundproof'); } }, isBreakable: true, name: "Soundproof", rating: 1.5, num: 43, }, speedboost: { onResidualOrder: 28, onResidualSubOrder: 2, onResidual(pokemon) { if (pokemon.activeTurns) { this.boost({spe: 1}); } }, name: "Speed Boost", rating: 4.5, num: 3, }, stakeout: { onModifyAtkPriority: 5, onModifyAtk(atk, attacker, defender) { if (!defender.activeTurns) { this.debug('Stakeout boost'); return this.chainModify(2); } 
}, onModifySpAPriority: 5, onModifySpA(atk, attacker, defender) { if (!defender.activeTurns) { this.debug('Stakeout boost'); return this.chainModify(2); } }, name: "Stakeout", rating: 4.5, num: 198, }, stall: { onFractionalPriority: -0.1, name: "Stall", rating: -1, num: 100, }, stalwart: { onModifyMovePriority: 1, onModifyMove(move) { // most of the implementation is in Battle#getTarget move.tracksTarget = move.target !== 'scripted'; }, name: "Stalwart", rating: 0, num: 242, }, stamina: { onDamagingHit(damage, target, source, effect) { this.boost({def: 1}); }, name: "Stamina", rating: 3.5, num: 192, }, stancechange: { onModifyMovePriority: 1, onModifyMove(move, attacker, defender) { if (attacker.species.baseSpecies !== 'Aegislash' || attacker.transformed) return; if (move.category === 'Status' && move.id !== 'kingsshield') return; const targetForme = (move.id === 'kingsshield' ? 'Aegislash' : 'Aegislash-Blade'); if (attacker.species.name !== targetForme) attacker.formeChange(targetForme); }, isPermanent: true, name: "Stance Change", rating: 4, num: 176, }, static: { onDamagingHit(damage, target, source, move) { if (this.checkMoveMakesContact(move, source, target)) { if (this.randomChance(3, 10)) { source.trySetStatus('par', target); } } }, name: "Static", rating: 2, num: 9, }, steadfast: { onFlinch(pokemon) { this.boost({spe: 1}); }, name: "Steadfast", rating: 1, num: 80, }, steamengine: { onDamagingHit(damage, target, source, move) { if (['Water', 'Fire'].includes(move.type)) { this.boost({spe: 6}); } }, name: "Steam Engine", rating: 2, num: 243, }, steelworker: { onModifyAtkPriority: 5, onModifyAtk(atk, attacker, defender, move) { if (move.type === 'Steel') { this.debug('Steelworker boost'); return this.chainModify(1.5); } }, onModifySpAPriority: 5, onModifySpA(atk, attacker, defender, move) { if (move.type === 'Steel') { this.debug('Steelworker boost'); return this.chainModify(1.5); } }, name: "Steelworker", rating: 3.5, num: 200, }, steelyspirit: { 
onAllyBasePowerPriority: 22, onAllyBasePower(basePower, attacker, defender, move) { if (move.type === 'Steel') { this.debug('Steely Spirit boost'); return this.chainModify(1.5); } }, name: "Steely Spirit", rating: 3.5, num: 252, }, stench: { onModifyMovePriority: -1, onModifyMove(move) { if (move.category !== "Status") { this.debug('Adding Stench flinch'); if (!move.secondaries) move.secondaries = []; for (const secondary of move.secondaries) { if (secondary.volatileStatus === 'flinch') return; } move.secondaries.push({ chance: 10, volatileStatus: 'flinch', }); } }, name: "Stench", rating: 0.5, num: 1, }, stickyhold: { onTakeItem(item, pokemon, source) { if (!this.activeMove) throw new Error("Battle.activeMove is null"); if (!pokemon.hp || pokemon.item === 'stickybarb') return; if ((source && source !== pokemon) || this.activeMove.id === 'knockoff') { this.add('-activate', pokemon, 'ability: Sticky Hold'); return false; } }, isBreakable: true, name: "Sticky Hold", rating: 2, num: 60, }, stormdrain: { onTryHit(target, source, move) { if (target !== source && move.type === 'Water') { if (!this.boost({spa: 1})) { this.add('-immune', target, '[from] ability: Storm Drain'); } return null; } }, onAnyRedirectTarget(target, source, source2, move) { if (move.type !== 'Water' || ['firepledge', 'grasspledge', 'waterpledge'].includes(move.id)) return; const redirectTarget = ['randomNormal', 'adjacentFoe'].includes(move.target) ? 
'normal' : move.target; if (this.validTarget(this.effectState.target, source, redirectTarget)) { if (move.smartTarget) move.smartTarget = false; if (this.effectState.target !== target) { this.add('-activate', this.effectState.target, 'ability: Storm Drain'); } return this.effectState.target; } }, isBreakable: true, name: "Storm Drain", rating: 3, num: 114, }, strongjaw: { onBasePowerPriority: 19, onBasePower(basePower, attacker, defender, move) { if (move.flags['bite']) { return this.chainModify(1.5); } }, name: "Strong Jaw", rating: 3, num: 173, }, sturdy: { onTryHit(pokemon, target, move) { if (move.ohko) { this.add('-immune', pokemon, '[from] ability: Sturdy'); return null; } }, onDamagePriority: -30, onDamage(damage, target, source, effect) { if (target.hp === target.maxhp && damage >= target.hp && effect && effect.effectType === 'Move') { this.add('-ability', target, 'Sturdy'); return target.hp - 1; } }, isBreakable: true, name: "Sturdy", rating: 3, num: 5, }, suctioncups: { onDragOutPriority: 1, onDragOut(pokemon) { this.add('-activate', pokemon, 'ability: Suction Cups'); return null; }, isBreakable: true, name: "Suction Cups", rating: 1, num: 21, }, superluck: { onModifyCritRatio(critRatio) { return critRatio + 1; }, name: "Super Luck", rating: 1.5, num: 105, }, surgesurfer: { onModifySpe(spe) { if (this.field.isTerrain('electricterrain')) { return this.chainModify(2); } }, name: "Surge Surfer", rating: 3, num: 207, }, swarm: { onModifyAtkPriority: 5, onModifyAtk(atk, attacker, defender, move) { if (move.type === 'Bug' && attacker.hp <= attacker.maxhp / 3) { this.debug('Swarm boost'); return this.chainModify(1.5); } }, onModifySpAPriority: 5, onModifySpA(atk, attacker, defender, move) { if (move.type === 'Bug' && attacker.hp <= attacker.maxhp / 3) { this.debug('Swarm boost'); return this.chainModify(1.5); } }, name: "Swarm", rating: 2, num: 68, }, sweetveil: { name: "Sweet Veil", onAllySetStatus(status, target, source, effect) { if (status.id === 'slp') { 
this.debug('Sweet Veil interrupts sleep'); const effectHolder = this.effectState.target; this.add('-block', target, 'ability: Sweet Veil', '[of] ' + effectHolder); return null; } }, onAllyTryAddVolatile(status, target) { if (status.id === 'yawn') { this.debug('Sweet Veil blocking yawn'); const effectHolder = this.effectState.target; this.add('-block', target, 'ability: Sweet Veil', '[of] ' + effectHolder); return null; } }, isBreakable: true, rating: 2, num: 175, }, swiftswim: { onModifySpe(spe, pokemon) { if (['raindance', 'primordialsea'].includes(pokemon.effectiveWeather())) { return this.chainModify(2); } }, name: "Swift Swim", rating: 3, num: 33, }, symbiosis: { onAllyAfterUseItem(item, pokemon) { if (pokemon.switchFlag) return; const source = this.effectState.target; const myItem = source.takeItem(); if (!myItem) return; if ( !this.singleEvent('TakeItem', myItem, source.itemState, pokemon, source, this.effect, myItem) || !pokemon.setItem(myItem) ) { source.item = myItem.id; return; } this.add('-activate', source, 'ability: Symbiosis', myItem, '[of] ' + pokemon); }, name: "Symbiosis", rating: 0, num: 180, }, synchronize: { onAfterSetStatus(status, target, source, effect) { if (!source || source === target) return; if (effect && effect.id === 'toxicspikes') return; if (status.id === 'slp' || status.id === 'frz') return; this.add('-activate', target, 'ability: Synchronize'); // Hack to make status-prevention abilities think Synchronize is a status move // and show messages when activating against it. 
source.trySetStatus(status, target, {status: status.id, id: 'synchronize'} as Effect); }, name: "Synchronize", rating: 2, num: 28, }, tangledfeet: { onModifyAccuracyPriority: -1, onModifyAccuracy(accuracy, target) { if (typeof accuracy !== 'number') return; if (target?.volatiles['confusion']) { this.debug('Tangled Feet - decreasing accuracy'); return this.chainModify(0.5); } }, isBreakable: true, name: "Tangled Feet", rating: 1, num: 77, }, tanglinghair: { onDamagingHit(damage, target, source, move) { if (this.checkMoveMakesContact(move, source, target, true)) { this.add('-ability', target, 'Tangling Hair'); this.boost({spe: -1}, source, target, null, true); } }, name: "Tangling Hair", rating: 2, num: 221, }, technician: { onBasePowerPriority: 30, onBasePower(basePower, attacker, defender, move) { const basePowerAfterMultiplier = this.modify(basePower, this.event.modifier); this.debug('Base Power: ' + basePowerAfterMultiplier); if (basePowerAfterMultiplier <= 60) { this.debug('Technician boost'); return this.chainModify(1.5); } }, name: "Technician", rating: 3.5, num: 101, }, telepathy: { onTryHit(target, source, move) { if (target !== source && target.isAlly(source) && move.category !== 'Status') { this.add('-activate', target, 'ability: Telepathy'); return null; } }, isBreakable: true, name: "Telepathy", rating: 0, num: 140, }, teravolt: { onStart(pokemon) { this.add('-ability', pokemon, 'Teravolt'); }, onModifyMove(move) { move.ignoreAbility = true; }, name: "Teravolt", rating: 3.5, num: 164, }, thickfat: { onSourceModifyAtkPriority: 6, onSourceModifyAtk(atk, attacker, defender, move) { if (move.type === 'Ice' || move.type === 'Fire') { this.debug('Thick Fat weaken'); return this.chainModify(0.5); } }, onSourceModifySpAPriority: 5, onSourceModifySpA(atk, attacker, defender, move) { if (move.type === 'Ice' || move.type === 'Fire') { this.debug('Thick Fat weaken'); return this.chainModify(0.5); } }, isBreakable: true, name: "Thick Fat", rating: 3.5, num: 47, }, 
tintedlens: { onModifyDamage(damage, source, target, move) { if (target.getMoveHitData(move).typeMod < 0) { this.debug('Tinted Lens boost'); return this.chainModify(2); } }, name: "Tinted Lens", rating: 4, num: 110, }, torrent: { onModifyAtkPriority: 5, onModifyAtk(atk, attacker, defender, move) { if (move.type === 'Water' && attacker.hp <= attacker.maxhp / 3) { this.debug('Torrent boost'); return this.chainModify(1.5); } }, onModifySpAPriority: 5, onModifySpA(atk, attacker, defender, move) { if (move.type === 'Water' && attacker.hp <= attacker.maxhp / 3) { this.debug('Torrent boost'); return this.chainModify(1.5); } }, name: "Torrent", rating: 2, num: 67, }, toughclaws: { onBasePowerPriority: 21, onBasePower(basePower, attacker, defender, move) { if (move.flags['contact']) { return this.chainModify([5325, 4096]); } }, name: "Tough Claws", rating: 3.5, num: 181, }, toxicboost: { onBasePowerPriority: 19, onBasePower(basePower, attacker, defender, move) { if ((attacker.status === 'psn' || attacker.status === 'tox') && move.category === 'Physical') { return this.chainModify(1.5); } }, name: "Toxic Boost", rating: 2.5, num: 137, }, trace: { onStart(pokemon) { // n.b. 
only affects Hackmons // interaction with No Ability is complicated: https://www.smogon.com/forums/threads/pokemon-sun-moon-battle-mechanics-research.3586701/page-76#post-7790209 if (pokemon.adjacentFoes().some(foeActive => foeActive.ability === 'noability')) { this.effectState.gaveUp = true; } }, onUpdate(pokemon) { if (!pokemon.isStarted || this.effectState.gaveUp) return; const additionalBannedAbilities = [ // Zen Mode included here for compatability with Gen 5-6 'noability', 'flowergift', 'forecast', 'hungerswitch', 'illusion', 'imposter', 'neutralizinggas', 'powerofalchemy', 'receiver', 'trace', 'zenmode', ]; const possibleTargets = pokemon.adjacentFoes().filter(target => ( !target.getAbility().isPermanent && !additionalBannedAbilities.includes(target.ability) )); if (!possibleTargets.length) return; const target = this.sample(possibleTargets); const ability = target.getAbility(); this.add('-ability', pokemon, ability, '[from] ability: Trace', '[of] ' + target); pokemon.setAbility(ability); }, name: "Trace", rating: 2.5, num: 36, }, transistor: { onModifyAtkPriority: 5, onModifyAtk(atk, attacker, defender, move) { if (move.type === 'Electric') { this.debug('Transistor boost'); return this.chainModify(1.5); } }, onModifySpAPriority: 5, onModifySpA(atk, attacker, defender, move) { if (move.type === 'Electric') { this.debug('Transistor boost'); return this.chainModify(1.5); } }, name: "Transistor", rating: 3.5, num: 262, }, triage: { onModifyPriority(priority, pokemon, target, move) { if (move?.flags['heal']) return priority + 3; }, name: "Triage", rating: 3.5, num: 205, }, truant: { onStart(pokemon) { pokemon.removeVolatile('truant'); if (pokemon.activeTurns && (pokemon.moveThisTurnResult !== undefined || !this.queue.willMove(pokemon))) { pokemon.addVolatile('truant'); } }, onBeforeMovePriority: 9, onBeforeMove(pokemon) { if (pokemon.removeVolatile('truant')) { this.add('cant', pokemon, 'ability: Truant'); return false; } pokemon.addVolatile('truant'); }, 
condition: {}, name: "Truant", rating: -1, num: 54, }, turboblaze: { onStart(pokemon) { this.add('-ability', pokemon, 'Turboblaze'); }, onModifyMove(move) { move.ignoreAbility = true; }, name: "Turboblaze", rating: 3.5, num: 163, }, unaware: { name: "Unaware", onAnyModifyBoost(boosts, pokemon) { const unawareUser = this.effectState.target; if (unawareUser === pokemon) return; if (unawareUser === this.activePokemon && pokemon === this.activeTarget) { boosts['def'] = 0; boosts['spd'] = 0; boosts['evasion'] = 0; } if (pokemon === this.activePokemon && unawareUser === this.activeTarget) { boosts['atk'] = 0; boosts['def'] = 0; boosts['spa'] = 0; boosts['accuracy'] = 0; } }, isBreakable: true, rating: 4, num: 109, }, unburden: { onAfterUseItem(item, pokemon) { if (pokemon !== this.effectState.target) return; pokemon.addVolatile('unburden'); }, onTakeItem(item, pokemon) { pokemon.addVolatile('unburden'); }, onEnd(pokemon) { pokemon.removeVolatile('unburden'); }, condition: { onModifySpe(spe, pokemon) { if (!pokemon.item && !pokemon.ignoringAbility()) { return this.chainModify(2); } }, }, name: "Unburden", rating: 3.5, num: 84, }, unnerve: { onPreStart(pokemon) { this.add('-ability', pokemon, 'Unnerve'); this.effectState.unnerved = true; }, onStart(pokemon) { if (this.effectState.unnerved) return; this.add('-ability', pokemon, 'Unnerve'); this.effectState.unnerved = true; }, onEnd() { this.effectState.unnerved = false; }, onFoeTryEatItem() { return !this.effectState.unnerved; }, name: "Unnerve", rating: 1.5, num: 127, }, unseenfist: { onModifyMove(move) { if (move.flags['contact']) delete move.flags['protect']; }, name: "Unseen Fist", rating: 2, num: 260, }, victorystar: { onAnyModifyAccuracyPriority: -1, onAnyModifyAccuracy(accuracy, target, source) { if (source.isAlly(this.effectState.target) && typeof accuracy === 'number') { return this.chainModify([4506, 4096]); } }, name: "Victory Star", rating: 2, num: 162, }, vitalspirit: { onUpdate(pokemon) { if (pokemon.status 
=== 'slp') { this.add('-activate', pokemon, 'ability: Vital Spirit'); pokemon.cureStatus(); } }, onSetStatus(status, target, source, effect) { if (status.id !== 'slp') return; if ((effect as Move)?.status) { this.add('-immune', target, '[from] ability: Vital Spirit'); } return false; }, isBreakable: true, name: "Vital Spirit", rating: 2, num: 72, }, voltabsorb: { onTryHit(target, source, move) { if (target !== source && move.type === 'Electric') { if (!this.heal(target.baseMaxhp / 4)) { this.add('-immune', target, '[from] ability: Volt Absorb'); } return null; } }, isBreakable: true, name: "Volt Absorb", rating: 3.5, num: 10, }, wanderingspirit: { onDamagingHit(damage, target, source, move) { const additionalBannedAbilities = ['hungerswitch', 'illusion', 'neutralizinggas', 'wonderguard']; if (source.getAbility().isPermanent || additionalBannedAbilities.includes(source.ability) || target.volatiles['dynamax'] ) { return; } if (this.checkMoveMakesContact(move, source, target)) { const sourceAbility = source.setAbility('wanderingspirit', target); if (!sourceAbility) return; if (target.isAlly(source)) { this.add('-activate', target, 'Skill Swap', '', '', '[of] ' + source); } else { this.add('-activate', target, 'ability: Wandering Spirit', this.dex.abilities.get(sourceAbility).name, 'Wandering Spirit', '[of] ' + source); } target.setAbility(sourceAbility); } }, name: "Wandering Spirit", rating: 2.5, num: 254, }, waterabsorb: { onTryHit(target, source, move) { if (target !== source && move.type === 'Water') { if (!this.heal(target.baseMaxhp / 4)) { this.add('-immune', target, '[from] ability: Water Absorb'); } return null; } }, isBreakable: true, name: "Water Absorb", rating: 3.5, num: 11, }, waterbubble: { onSourceModifyAtkPriority: 5, onSourceModifyAtk(atk, attacker, defender, move) { if (move.type === 'Fire') { return this.chainModify(0.5); } }, onSourceModifySpAPriority: 5, onSourceModifySpA(atk, attacker, defender, move) { if (move.type === 'Fire') { return 
this.chainModify(0.5); } }, onModifyAtk(atk, attacker, defender, move) { if (move.type === 'Water') { return this.chainModify(2); } }, onModifySpA(atk, attacker, defender, move) { if (move.type === 'Water') { return this.chainModify(2); } }, onUpdate(pokemon) { if (pokemon.status === 'brn') { this.add('-activate', pokemon, 'ability: Water Bubble'); pokemon.cureStatus(); } }, onSetStatus(status, target, source, effect) { if (status.id !== 'brn') return; if ((effect as Move)?.status) { this.add('-immune', target, '[from] ability: Water Bubble'); } return false; }, isBreakable: true, name: "Water Bubble", rating: 4.5, num: 199, }, watercompaction: { onDamagingHit(damage, target, source, move) { if (move.type === 'Water') { this.boost({def: 2}); } }, name: "Water Compaction", rating: 1.5, num: 195, }, waterveil: { onUpdate(pokemon) { if (pokemon.status === 'brn') { this.add('-activate', pokemon, 'ability: Water Veil'); pokemon.cureStatus(); } }, onSetStatus(status, target, source, effect) { if (status.id !== 'brn') return; if ((effect as Move)?.status) { this.add('-immune', target, '[from] ability: Water Veil'); } return false; }, isBreakable: true, name: "Water Veil", rating: 2, num: 41, }, weakarmor: { onDamagingHit(damage, target, source, move) { if (move.category === 'Physical') { this.boost({def: -1, spe: 2}, target, target); } }, name: "Weak Armor", rating: 1, num: 133, }, whitesmoke: { onBoost(boost, target, source, effect) { if (source && target === source) return; let showMsg = false; let i: BoostID; for (i in boost) { if (boost[i]! 
< 0) { delete boost[i]; showMsg = true; } } if (showMsg && !(effect as ActiveMove).secondaries && effect.id !== 'octolock') { this.add("-fail", target, "unboost", "[from] ability: White Smoke", "[of] " + target); } }, isBreakable: true, name: "White Smoke", rating: 2, num: 73, }, wimpout: { onEmergencyExit(target) { if (!this.canSwitch(target.side) || target.forceSwitchFlag || target.switchFlag) return; for (const side of this.sides) { for (const active of side.active) { active.switchFlag = false; } } target.switchFlag = true; this.add('-activate', target, 'ability: Wimp Out'); }, name: "Wimp Out", rating: 1, num: 193, }, wonderguard: { onTryHit(target, source, move) { if (target === source || move.category === 'Status' || move.type === '???' || move.id === 'struggle') return; if (move.id === 'skydrop' && !source.volatiles['skydrop']) return; this.debug('Wonder Guard immunity: ' + move.id); if (target.runEffectiveness(move) <= 0) { if (move.smartTarget) { move.smartTarget = false; } else { this.add('-immune', target, '[from] ability: Wonder Guard'); } return null; } }, isBreakable: true, name: "Wonder Guard", rating: 5, num: 25, }, wonderskin: { onModifyAccuracyPriority: 10, onModifyAccuracy(accuracy, target, source, move) { if (move.category === 'Status' && typeof accuracy === 'number') { this.debug('Wonder Skin - setting accuracy to 50'); return 50; } }, isBreakable: true, name: "Wonder Skin", rating: 2, num: 147, }, zenmode: { onResidualOrder: 29, onResidual(pokemon) { if (pokemon.baseSpecies.baseSpecies !== 'Darmanitan' || pokemon.transformed) { return; } if (pokemon.hp <= pokemon.maxhp / 2 && !['Zen', 'Galar-Zen'].includes(pokemon.species.forme)) { pokemon.addVolatile('zenmode'); } else if (pokemon.hp > pokemon.maxhp / 2 && ['Zen', 'Galar-Zen'].includes(pokemon.species.forme)) { pokemon.addVolatile('zenmode'); // in case of base Darmanitan-Zen pokemon.removeVolatile('zenmode'); } }, onEnd(pokemon) { if (!pokemon.volatiles['zenmode'] || !pokemon.hp) return; 
pokemon.transformed = false; delete pokemon.volatiles['zenmode']; if (pokemon.species.baseSpecies === 'Darmanitan' && pokemon.species.battleOnly) { pokemon.formeChange(pokemon.species.battleOnly as string, this.effect, false, '[silent]'); } }, condition: { onStart(pokemon) { if (!pokemon.species.name.includes('Galar')) { if (pokemon.species.id !== 'darmanitanzen') pokemon.formeChange('Darmanitan-Zen'); } else { if (pokemon.species.id !== 'darmanitangalarzen') pokemon.formeChange('Darmanitan-Galar-Zen'); } }, onEnd(pokemon) { if (['Zen', 'Galar-Zen'].includes(pokemon.species.forme)) { pokemon.formeChange(pokemon.species.battleOnly as string); } }, }, isPermanent: true, name: "Zen Mode", rating: 0, num: 161, }, // CAP mountaineer: { onDamage(damage, target, source, effect) { if (effect && effect.id === 'stealthrock') { return false; } }, onTryHit(target, source, move) { if (move.type === 'Rock' && !target.activeTurns) { this.add('-immune', target, '[from] ability: Mountaineer'); return null; } }, isNonstandard: "CAP", isBreakable: true, name: "Mountaineer", rating: 3, num: -2, }, rebound: { isNonstandard: "CAP", name: "Rebound", onTryHitPriority: 1, onTryHit(target, source, move) { if (this.effectState.target.activeTurns) return; if (target === source || move.hasBounced || !move.flags['reflectable']) { return; } const newMove = this.dex.getActiveMove(move.id); newMove.hasBounced = true; this.actions.useMove(newMove, target, source); return null; }, onAllyTryHitSide(target, source, move) { if (this.effectState.target.activeTurns) return; if (target.isAlly(source) || move.hasBounced || !move.flags['reflectable']) { return; } const newMove = this.dex.getActiveMove(move.id); newMove.hasBounced = true; this.actions.useMove(newMove, this.effectState.target, source); return null; }, condition: { duration: 1, }, isBreakable: true, rating: 3, num: -3, }, persistent: { isNonstandard: "CAP", name: "Persistent", // implemented in the corresponding move rating: 3, num: -4, }, 
};<|fim▁end|>
if (move.id === 'clangoroussoulblaze') delete move.selfBoost; // Actual negation of `AfterMoveSecondary` effects implemented in scripts.js move.hasSheerForce = true; }