prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>contrived_std_string_find.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python """Do not call std::string::find_first_of or std::string::find with a string of characters to locate that has the size 1. Use the version of std::string::find that takes a single character to locate instead. Same for find_last_of/rfind. """ error_msg = "Do not use find(\"a\"), use find('a')." regexp = r"""(?x) r?find(_(first|last)_of)?\s* \( "([^\\]|(\\[nt\\"]))"[,)]""" forbidden = [ r'find_first_of("a")', r'find_last_of("a")', r'find("a")', r'rfind("a")', r'find_first_of("\n")', r'find_last_of("\n")', r'find("\n")', r'rfind("\n")', r'find_first_of("\t")', r'find_last_of("\t")', r'find("\t")', r'rfind("\t")', r'find_first_of("\\")', r'find_last_of("\\")', r'find("\\")', r'rfind("\\")', r'find_first_of("\"")', r'find_last_of("\"")', r'find("\"")', r'rfind("\"")', r'find_first_of("a", 1)', r'find_last_of("a", 1)', r'find("a", 1)', r'rfind("a", 1)', ] allowed = [ r'find("ab")', r"find('a')", r"rfind('a')", r'rfind("ab")', r"find('\n')", r'find("\nx")', r"rfind('\n')", r'rfind("\nx")', r"find('\t')", r'find("\tx")',<|fim▁hole|> r'rfind("\tx")', r"find('\\')", r'find("\\x")', r"rfind('\\')", r'rfind("\\x")', r"find('\"')", r'find("\"x")', r"rfind('\"')", r'rfind("\"x")', r"find('a', 1)", r'find("ab", 1)', r"rfind('a', 1)", r'rfind("ab", 1)', ]<|fim▁end|>
r"rfind('\t')",
<|file_name|>contact_ball_ball.rs<|end_file_name|><|fim▁begin|>use crate::math::{Point, Vector}; use crate::query::Contact; use crate::shape::Ball; use na::{self, RealField, Unit}; /// Contact between balls.<|fim▁hole|> b1: &Ball<N>, center2: &Point<N>, b2: &Ball<N>, prediction: N, ) -> Option<Contact<N>> { let r1 = b1.radius; let r2 = b2.radius; let delta_pos = *center2 - *center1; let distance_squared = delta_pos.norm_squared(); let sum_radius = r1 + r2; let sum_radius_with_error = sum_radius + prediction; if distance_squared < sum_radius_with_error * sum_radius_with_error { let normal = if !distance_squared.is_zero() { Unit::new_normalize(delta_pos) } else { Vector::x_axis() }; Some(Contact::new( *center1 + *normal * r1, *center2 + *normal * (-r2), normal, sum_radius - distance_squared.sqrt(), )) } else { None } }<|fim▁end|>
#[inline] pub fn contact_ball_ball<N: RealField>( center1: &Point<N>,
<|file_name|>state.rs<|end_file_name|><|fim▁begin|>use std::{ cell::RefCell, rc::Rc, sync::{ atomic::{AtomicBool, Ordering}, Arc, Mutex, }, }; use smithay::{ reexports::{ calloop::{generic::Generic, Interest, LoopHandle, Mode, PostAction}, wayland_server::{protocol::wl_surface::WlSurface, Display}, }, utils::{Logical, Point}, wayland::{ data_device::{default_action_chooser, init_data_device, set_data_device_focus, DataDeviceEvent}, output::xdg::init_xdg_output_manager, seat::{CursorImageStatus, KeyboardHandle, PointerHandle, Seat, XkbConfig}, shm::init_shm_global, tablet_manager::{init_tablet_manager_global, TabletSeatTrait}, xdg_activation::{init_xdg_activation_global, XdgActivationEvent}, }, }; #[cfg(feature = "xwayland")] use smithay::xwayland::{XWayland, XWaylandEvent}; use crate::shell::init_shell; pub struct AnvilState<BackendData> { pub backend_data: BackendData, pub socket_name: Option<String>, pub running: Arc<AtomicBool>, pub display: Rc<RefCell<Display>>, pub handle: LoopHandle<'static, AnvilState<BackendData>>, pub window_map: Rc<RefCell<crate::window_map::WindowMap>>, pub output_map: Rc<RefCell<crate::output_map::OutputMap>>, pub dnd_icon: Arc<Mutex<Option<WlSurface>>>, pub log: slog::Logger, // input-related fields pub pointer: PointerHandle, pub keyboard: KeyboardHandle, pub suppressed_keys: Vec<u32>, pub pointer_location: Point<f64, Logical>, pub cursor_status: Arc<Mutex<CursorImageStatus>>, pub seat_name: String, pub seat: Seat, pub start_time: std::time::Instant, // things we must keep alive #[cfg(feature = "xwayland")] pub xwayland: XWayland<AnvilState<BackendData>>, } impl<BackendData: Backend + 'static> AnvilState<BackendData> { pub fn init( display: Rc<RefCell<Display>>, handle: LoopHandle<'static, AnvilState<BackendData>>, backend_data: BackendData, log: slog::Logger, listen_on_socket: bool, ) -> AnvilState<BackendData> { // init the wayland connection handle .insert_source( Generic::from_fd(display.borrow().get_poll_fd(), Interest::READ, 
Mode::Level), move |_, _, state: &mut AnvilState<BackendData>| { let display = state.display.clone(); let mut display = display.borrow_mut(); match display.dispatch(std::time::Duration::from_millis(0), state) { Ok(_) => Ok(PostAction::Continue), Err(e) => { error!(state.log, "I/O error on the Wayland display: {}", e); state.running.store(false, Ordering::SeqCst); Err(e) } } }, ) .expect("Failed to init the wayland event source."); // Init the basic compositor globals init_shm_global(&mut (*display).borrow_mut(), vec![], log.clone()); let shell_handles = init_shell::<BackendData>(display.clone(), log.clone()); init_xdg_output_manager(&mut display.borrow_mut(), log.clone()); init_xdg_activation_global( &mut display.borrow_mut(), |state, req, mut ddata| { let anvil_state = ddata.get::<AnvilState<BackendData>>().unwrap(); match req { XdgActivationEvent::RequestActivation { token, token_data, surface, } => { if token_data.timestamp.elapsed().as_secs() < 10 { // Just grant the wish anvil_state.window_map.borrow_mut().bring_surface_to_top(&surface); } else { // Discard the request state.lock().unwrap().remove_request(&token); } } XdgActivationEvent::DestroyActivationRequest { .. } => {} } }, log.clone(), ); let socket_name = if listen_on_socket { let socket_name = display .borrow_mut() .add_socket_auto() .unwrap() .into_string() .unwrap(); info!(log, "Listening on wayland socket"; "name" => socket_name.clone()); ::std::env::set_var("WAYLAND_DISPLAY", &socket_name); Some(socket_name) } else { None }; // init data device let dnd_icon = Arc::new(Mutex::new(None)); let dnd_icon2 = dnd_icon.clone(); init_data_device( &mut display.borrow_mut(), move |event| match event { DataDeviceEvent::DnDStarted { icon, .. 
} => { *dnd_icon2.lock().unwrap() = icon; } DataDeviceEvent::DnDDropped => { *dnd_icon2.lock().unwrap() = None; } _ => {} }, default_action_chooser, log.clone(), ); // init input let seat_name = backend_data.seat_name(); let (mut seat, _) = Seat::new(&mut display.borrow_mut(), seat_name.clone(), log.clone()); let cursor_status = Arc::new(Mutex::new(CursorImageStatus::Default)); let cursor_status2 = cursor_status.clone(); let pointer = seat.add_pointer(move |new_status| { // TODO: hide winit system cursor when relevant *cursor_status2.lock().unwrap() = new_status }); init_tablet_manager_global(&mut display.borrow_mut()); let cursor_status3 = cursor_status.clone(); seat.tablet_seat().on_cursor_surface(move |_tool, new_status| { // TODO: tablet tools should have their own cursors *cursor_status3.lock().unwrap() = new_status; }); let keyboard = seat .add_keyboard(XkbConfig::default(), 200, 25, |seat, focus| { set_data_device_focus(seat, focus.and_then(|s| s.as_ref().client())) }) .expect("Failed to initialize the keyboard");<|fim▁hole|> #[cfg(feature = "xwayland")] let xwayland = { let (xwayland, channel) = XWayland::new(handle.clone(), display.clone(), log.clone()); let ret = handle.insert_source(channel, |event, _, anvil_state| match event { XWaylandEvent::Ready { connection, client } => anvil_state.xwayland_ready(connection, client), XWaylandEvent::Exited => anvil_state.xwayland_exited(), }); if let Err(e) = ret { error!( log, "Failed to insert the XWaylandSource into the event loop: {}", e ); } xwayland }; AnvilState { backend_data, running: Arc::new(AtomicBool::new(true)), display, handle, window_map: shell_handles.window_map, output_map: shell_handles.output_map, dnd_icon, log, socket_name, pointer, keyboard, suppressed_keys: Vec::new(), cursor_status, pointer_location: (0.0, 0.0).into(), seat_name, seat, start_time: std::time::Instant::now(), #[cfg(feature = "xwayland")] xwayland, } } } pub trait Backend { fn seat_name(&self) -> String; }<|fim▁end|>
<|file_name|>SprintMB.java<|end_file_name|><|fim▁begin|>/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package edu.mum.ea.mb; import edu.mum.ea.ejb.ProjectEJB; import edu.mum.ea.ejb.ReleaseBacklogEJB; import edu.mum.ea.ejb.SprintEJB; import edu.mum.ea.ejb.TaskEJB; import edu.mum.ea.entity.Project; import edu.mum.ea.entity.ReleaseBacklog; import edu.mum.ea.entity.Sprint; import edu.mum.ea.entity.Task; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.annotation.PostConstruct; import javax.ejb.EJB; import javax.faces.bean.ManagedBean; import javax.faces.bean.ManagedProperty; import javax.faces.bean.RequestScoped; import javax.faces.context.FacesContext; /** * * @author Syed */ @ManagedBean @RequestScoped public class SprintMB { private Sprint sprint; @EJB private SprintEJB sprintEJB; @EJB private ProjectEJB projectEJB; @EJB private ReleaseBacklogEJB releaseBacklogEJB; @EJB private TaskEJB taskEJB; @ManagedProperty(value = "#{sessionMB}") private SessionMB sessionMB; private List<Task> taskList = new ArrayList<Task>(); private List<ReleaseBacklog> releaseBacklogList = new ArrayList<ReleaseBacklog>(); private long relBacklogId; private List<String> selectedTasks = new ArrayList<String>(); private List<Sprint> sprintList; /** * Creates a new instance of ProjectMB */ public SprintMB() { sprint = new Sprint(); sprintList = new ArrayList<Sprint>(); } @PostConstruct public void init() { //Map<String, Object> sessionMap = FacesContext.getCurrentInstance().getExternalContext().getSessionMap(); //Project project = projectEJB.find(sessionMB.getUserSelectedProject().getId()); releaseBacklogList = releaseBacklogEJB.findAllRelBakByProject(sessionMB.getUserSelectedProject().getId());//project.getReleaseBacklogList(); } public SessionMB getSessionMB() { return sessionMB; } public void setSessionMB(SessionMB 
sessionMB) { this.sessionMB = sessionMB; } public Sprint getSprint() { return sprint; } public void setSprint(Sprint sprint) { this.sprint = sprint; } public List<Sprint> getSprintList() { sprintList = sprintEJB.findAllSprintByProject(sessionMB.getUserSelectedProject().getId());//sprintEJB.findAll(); return sprintList; } public void setSprintList(List<Sprint> sprintList) { this.sprintList = sprintList; } public List<ReleaseBacklog> getReleaseBacklogList() { return releaseBacklogList; } public void setReleaseBacklogList(List<ReleaseBacklog> releaseBacklogList) { this.releaseBacklogList = releaseBacklogList; } public long getRelBacklogId() { return relBacklogId; } public void setRelBacklogId(long relBacklogId) { this.relBacklogId = relBacklogId; } public List<Task> getTaskList() { return taskList; } public void setTaskList(List<Task> taskList) { this.taskList = taskList; } <|fim▁hole|> public List<String> getSelectedTasks() { return selectedTasks; } public void setSelectedTasks(List<String> selectedTasks) { this.selectedTasks = selectedTasks; } public String createSprint() { sprint.setReleaseBacklog(releaseBacklogEJB.find(getRelBacklogId())); sprintEJB.save(sprint); return "sprint-list"; } public String gotoUpdatePage(Long id){ sprint = sprintEJB.find(id); try { setRelBacklogId(sprint.getReleaseBacklog().getId()); } catch(Exception e) { //System.out.println("-----" + e.getMessage()); } return "sprint-update"; } public String updateSprint(){ try { sprint.setReleaseBacklog(releaseBacklogEJB.find(getRelBacklogId())); } catch (Exception e) { //System.out.println("-----" + e.getMessage()); } sprintEJB.edit(sprint); return "sprint-list"; } public String deleteSprint(Long sprintId){ sprintEJB.delete(sprintId); return "sprint-list"; } public String sprintDetail(Long id) { sprint = sprintEJB.find(id); taskList = taskEJB.findAll(); for (Task t : sprint.getTasks()) { selectedTasks.add(t.getId().toString()); } return "sprint-view"; } public String addTaskToSprint() { long 
sprintId = sprint.getId(); sprint = sprintEJB.find(sprintId); taskList = sprint.getTasks(); int size = taskList.size(); for (int i = 0; i < size; i++) { taskList.remove(taskList.get(i)); size--; --i; } for (String taskId : selectedTasks) { if (!taskList.contains(taskEJB.find(Long.parseLong(taskId)))) { sprint.getTasks().add(taskEJB.find(Long.parseLong(taskId))); } } sprintEJB.edit(sprint); return "/sprint/sprint-list"; } }<|fim▁end|>
<|file_name|>uoccin_lookup.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals, division, absolute_import import os from flexget import plugin from flexget.event import event from flexget.utils import json def load_uoccin_data(path): udata = {} ufile = os.path.join(path, 'uoccin.json') if os.path.exists(ufile): try: with open(ufile, 'r') as f: udata = json.load(f) except Exception as err: raise plugin.PluginError('error reading %s: %s' % (ufile, err)) udata.setdefault('movies', {}) udata.setdefault('series', {}) return udata class UoccinLookup(object): schema = { 'type': 'string', 'format': 'path' } # Run after metainfo_series / thetvdb_lookup / imdb_lookup @plugin.priority(100) def on_task_metainfo(self, task, config): """Retrieves all the information found in the uoccin.json file for the entries. Example:: uoccin_lookup: /path/to/gdrive/uoccin Resulting fields on entries: on series (requires tvdb_id): - uoccin_watchlist (true|false) - uoccin_rating (integer) - uoccin_tags (list) on episodes (requires tvdb_id, series_season and series_episode): - uoccin_collected (true|false) - uoccin_watched (true|false) - uoccin_subtitles (list of language codes) (plus the 3 series specific fields) on movies (requires imdb_id): - uoccin_watchlist (true|false) - uoccin_collected (true|false) - uoccin_watched (true|false) - uoccin_rating (integer) - uoccin_tags (list) - uoccin_subtitles (list of language codes) """ if not task.entries: return udata = load_uoccin_data(config) movies = udata['movies'] series = udata['series'] for entry in task.entries: entry['uoccin_watchlist'] = False entry['uoccin_collected'] = False entry['uoccin_watched'] = False entry['uoccin_rating'] = None entry['uoccin_tags'] = [] entry['uoccin_subtitles'] = [] if 'tvdb_id' in entry: ser = series.get(str(entry['tvdb_id'])) if ser is None: continue entry['uoccin_watchlist'] = ser.get('watchlist', False) entry['uoccin_rating'] = ser.get('rating') entry['uoccin_tags'] = ser.get('tags', []) 
if all(field in entry for field in ['series_season', 'series_episode']): season = str(entry['series_season']) episode = entry['series_episode'] edata = ser.get('collected', {}).get(season, {}).get(str(episode)) entry['uoccin_collected'] = isinstance(edata, list) entry['uoccin_subtitles'] = edata if entry['uoccin_collected'] else []<|fim▁hole|> try: mov = movies.get(entry['imdb_id']) except plugin.PluginError as e: self.log.trace('entry %s imdb failed (%s)' % (entry['imdb_id'], e.value)) continue if mov is None: continue entry['uoccin_watchlist'] = mov.get('watchlist', False) entry['uoccin_collected'] = mov.get('collected', False) entry['uoccin_watched'] = mov.get('watched', False) entry['uoccin_rating'] = mov.get('rating') entry['uoccin_tags'] = mov.get('tags', []) entry['uoccin_subtitles'] = mov.get('subtitles', []) @event('plugin.register') def register_plugin(): plugin.register(UoccinLookup, 'uoccin_lookup', api_ver=2)<|fim▁end|>
entry['uoccin_watched'] = episode in ser.get('watched', {}).get(season, []) elif 'imdb_id' in entry:
<|file_name|>text_util.py<|end_file_name|><|fim▁begin|>import re from nltk.corpus import stopwords from nltk.corpus import words from nltk.stem.snowball import SnowballStemmer from apiv2.models import QuestionText, Question from apiv2.search.fsearch import formula_extractor as fe cachedStopWords = stopwords.words("english") english_vocab = set(w.lower() for w in words.words()) stemmer = SnowballStemmer("english") # Full text index search def to_lower(text): return ' '.join([word.lower() for word in text.split()])<|fim▁hole|> def remove_stopwords(text): return ' '.join([word for word in text.split() if len(word) > 2 and word not in cachedStopWords]) def english_only(text): return ' '.join([word for word in text.split() if word in english_vocab]) def stem_text(text): return ' '.join([stemmer.stem(word) for word in text.split()]) def preprocess(text, **kwargs): preprocessed_text = text # Recognise and remove LaTeX (detect formula function) preprocessed_text = clean_latex(preprocessed_text) # Remove non alphabetical characters preprocessed_text = remove_non_alphabet(preprocessed_text) # Convert to lower case preprocessed_text = to_lower(preprocessed_text) # Remove stopwords preprocessed_text = remove_stopwords(preprocessed_text) # Filter words if kwargs.get("english", True): preprocessed_text = english_only(preprocessed_text) if kwargs.get("stem", True): preprocessed_text = stem_text(preprocessed_text) return preprocessed_text def preprocess_unique(text, **kwargs): results = preprocess(text, **kwargs).split() return ' '.join(set(results)) def remove_non_alphabet(text): text = re.sub(r'[^a-zA-Z]', " ", text) return text def clean_latex(text): text = re.sub(fe.DOUBLE_DOLLAR_NOTATION, " ", text) text = re.sub(fe.PAREN_NOTATION, " ", text) text = re.sub(fe.BRACKET_NOTATION, " ", text) return text def preprocess_query(text): text = preprocess(text) return text def preprocess_question_text_object(stem=True): QuestionText.objects.all().delete() questions = 
Question.objects.all() for question in questions: preprocessed_text = preprocess(question.content, stem) print(preprocessed_text) question_text = QuestionText( content=preprocessed_text, question=question ) question_text.save()<|fim▁end|>
<|file_name|>nxos_vpc_interface.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: nxos_vpc_interface extends_documentation_fragment: nxos version_added: "2.2" short_description: Manages interface VPC configuration description: - Manages interface VPC configuration author: - Jason Edelman (@jedelman8) - Gabriele Gerbino (@GGabriele) notes: - Either vpc or peer_link param is required, but not both. - C(state=absent) removes whatever VPC config is on a port-channel if one exists. - Re-assigning a vpc or peerlink from one portchannel to another is not supported. The module will force the user to unconfigure an existing vpc/pl before configuring the same value on a new portchannel options: portchannel: description: - Group number of the portchannel that will be configured. required: true vpc: description: - VPC group/id that will be configured on associated portchannel. required: false default: null peer_link: description: - Set to true/false for peer link config on associated portchannel. required: false default: null state: description: - Manages desired state of the resource. 
required: true choices: ['present','absent'] ''' EXAMPLES = ''' - nxos_vpc_portchannel: portchannel: 10 vpc: 100 username: "{{ un }}" password: "{{ pwd }}" host: "{{ inventory_hostname }}" ''' RETURN = ''' proposed: description: k/v pairs of parameters passed into module returned: always type: dict sample: {"portchannel": "100", "vpc": "10"} existing: description: k/v pairs of existing configuration type: dict sample: {} end_state: description: k/v pairs of configuration after module execution returned: always type: dict sample: {"peer-link": false, "portchannel": "100", "vpc": "10"} updates: description: commands sent to the device returned: always type: list sample: ["interface port-channel100", "vpc 10"] changed: description: check to see if a change was made on the device returned: always type: boolean sample: true ''' from ansible.module_utils.nxos import get_config, load_config, run_commands from ansible.module_utils.nxos import nxos_argument_spec, check_args from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.netcfg import CustomNetworkConfig def execute_show_command(command, module, command_type='cli_show'): if module.params['transport'] == 'cli': command += ' | json' cmds = [command] body = run_commands(module, cmds) elif module.params['transport'] == 'nxapi': cmds = [command] body = run_commands(module, cmds) return body def flatten_list(command_lists): flat_command_list = [] for command in command_lists: if isinstance(command, list): flat_command_list.extend(command) else: flat_command_list.append(command) return flat_command_list def get_portchannel_list(module): command = 'show port-channel summary' portchannels = [] pc_list = [] body = execute_show_command(command, module) try: pc_list = body[0]['TABLE_channel']['ROW_channel'] except (KeyError, AttributeError): return portchannels if pc_list: if isinstance(pc_list, dict): pc_list = [pc_list] for pc in pc_list: portchannels.append(pc['group']) return portchannels def 
get_existing_portchannel_to_vpc_mappings(module): command = 'show vpc brief' pc_vpc_mapping = {} body = execute_show_command(command, module) try: vpc_table = body[0]['TABLE_vpc']['ROW_vpc'] except (KeyError, AttributeError, TypeError): vpc_table = None if vpc_table: if isinstance(vpc_table, dict): vpc_table = [vpc_table] for vpc in vpc_table: pc_vpc_mapping[str(vpc['vpc-id'])] = str(vpc['vpc-ifindex']) return pc_vpc_mapping def peer_link_exists(module): found = False run = get_vpc_running_config(module) vpc_list = run.split('\n') for each in vpc_list: if 'peer-link' in each: found = True return found def get_vpc_running_config(module): command = 'show running section vpc' body = execute_show_command(command, module, command_type='cli_show_ascii')[0] return body def get_active_vpc_peer_link(module): command = 'show vpc brief' peer_link = None body = execute_show_command(command, module) try: peer_link = body[0]['TABLE_peerlink']['ROW_peerlink']['peerlink-ifindex'] except (KeyError, AttributeError): return peer_link return peer_link def get_portchannel_vpc_config(module, portchannel): command = 'show vpc brief' peer_link_pc = None peer_link = False vpc = "" pc = "" config = {} body = execute_show_command(command, module) try: table = body[0]['TABLE_peerlink']['ROW_peerlink'] except (KeyError, AttributeError, TypeError): table = {} if table: peer_link_pc = table.get('peerlink-ifindex', None) if peer_link_pc: plpc = str(peer_link_pc[2:]) if portchannel == plpc: config['portchannel'] = portchannel config['peer-link'] = True config['vpc'] = vpc mapping = get_existing_portchannel_to_vpc_mappings(module) for existing_vpc, port_channel in mapping.items(): port_ch = str(port_channel[2:]) if port_ch == portchannel: pc = port_ch vpc = str(existing_vpc) config['portchannel'] = pc config['peer-link'] = peer_link config['vpc'] = vpc return config def get_commands_to_config_vpc_interface(portchannel, delta, config_value, existing): commands = [] if delta.get('peer-link') is False 
and existing.get('peer-link') is True: command = 'no vpc peer-link' commands.append('no vpc peer-link') commands.insert(0, 'interface port-channel{0}'.format(portchannel)) elif delta.get('peer-link') or not existing.get('vpc'): command = 'vpc {0}'.format(config_value) commands.append(command) commands.insert(0, 'interface port-channel{0}'.format(portchannel)) return commands def main(): argument_spec = dict( portchannel=dict(required=True, type='str'), vpc=dict(required=False, type='str'), peer_link=dict(required=False, type='bool'), state=dict(choices=['absent', 'present'], default='present'), include_defaults=dict(default=False), config=dict(), save=dict(type='bool', default=False) ) argument_spec.update(nxos_argument_spec) module = AnsibleModule(argument_spec=argument_spec, mutually_exclusive=[['vpc', 'peer_link']], supports_check_mode=True) warnings = list() check_args(module, warnings) portchannel = module.params['portchannel'] vpc = module.params['vpc'] peer_link = module.params['peer_link'] state = module.params['state'] changed = False args = {'portchannel': portchannel, 'vpc': vpc, 'peer-link': peer_link} active_peer_link = None if portchannel not in get_portchannel_list(module): module.fail_json(msg="The portchannel you are trying to make a" " VPC or PL is not created yet. " "Create it first!") if vpc:<|fim▁hole|> if vpc in mapping and portchannel != mapping[vpc].strip('Po'): module.fail_json(msg="This vpc is already configured on " "another portchannel. Remove it first " "before trying to assign it here. ", existing_portchannel=mapping[vpc]) for vpcid, existing_pc in mapping.items(): if portchannel == existing_pc.strip('Po') and vpcid != vpc: module.fail_json(msg="This portchannel already has another" " VPC configured. 
Remove it first " "before assigning this one", existing_vpc=vpcid) if peer_link_exists(module): active_peer_link = get_active_vpc_peer_link(module) if active_peer_link[-2:] == portchannel: module.fail_json(msg="That port channel is the current " "PEER LINK. Remove it if you want it" " to be a VPC") config_value = vpc elif peer_link is not None: if peer_link_exists(module): active_peer_link = get_active_vpc_peer_link(module)[2::] if active_peer_link != portchannel: if peer_link: module.fail_json(msg="A peer link already exists on" " the device. Remove it first", current_peer_link='Po{0}'.format( active_peer_link)) config_value = 'peer-link' proposed = dict((k, v) for k, v in args.items() if v is not None) existing = get_portchannel_vpc_config(module, portchannel) end_state = existing commands = [] if state == 'present': delta = dict(set(proposed.items()).difference(existing.items())) if delta: command = get_commands_to_config_vpc_interface( portchannel, delta, config_value, existing ) commands.append(command) elif state == 'absent': if existing.get('vpc'): command = ['no vpc'] commands.append(command) elif existing.get('peer-link'): command = ['no vpc peer-link'] commands.append(command) if commands: commands.insert(0, ['interface port-channel{0}'.format(portchannel)]) cmds = flatten_list(commands) if cmds: if module.check_mode: module.exit_json(changed=True, commands=cmds) else: changed = True load_config(module, cmds) if module.params['transport'] == 'cli': output = ' '.join(output) if 'error' in output.lower(): module.fail_json(msg=output.replace('\n', '')) end_state = get_portchannel_vpc_config(module, portchannel) if 'configure' in cmds: cmds.pop(0) results = {} results['proposed'] = proposed results['existing'] = existing results['end_state'] = end_state results['updates'] = cmds results['changed'] = changed results['warnings'] = warnings module.exit_json(**results) if __name__ == '__main__': main()<|fim▁end|>
mapping = get_existing_portchannel_to_vpc_mappings(module)
<|file_name|>skill.py<|end_file_name|><|fim▁begin|># -*- coding: iso-8859-1 -*- # Copyright (C) 2011 Daniele Simonetti # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. class MasteryAbility(object): @staticmethod def build_from_xml(elem):<|fim▁hole|> f = MasteryAbility() f.rank = int(elem.attrib['rank']) f.rule = elem.attrib['rule'] if ('rule' in elem.attrib) else None f.desc = elem.text return f class SkillCateg(object): @staticmethod def build_from_xml(elem): f = SkillCateg() f.id = elem.attrib['id'] f.name = elem.text return f def __str__(self): return self.name def __unicode__(self): return self.name def __eq__(self, obj): return obj and obj.id == self.id def __ne__(self, obj): return not self.__eq__(obj) def __hash__(self): return self.id.__hash__() class Skill(object): @staticmethod def build_from_xml(elem): f = Skill() f.name = elem.attrib['name'] f.id = elem.attrib['id'] f.trait = elem.attrib['trait'] f.type = elem.attrib['type'] f.tags = [f.type] if elem.find('Tags'): for se in elem.find('Tags').iter(): if se.tag == 'Tag': f.tags.append(se.text) f.mastery_abilities = [] if elem.find('MasteryAbilities'): for se in elem.find('MasteryAbilities').iter(): if se.tag == 'MasteryAbility': f.mastery_abilities.append(MasteryAbility.build_from_xml(se)) return f def __str__(self): return self.name or self.id def __unicode__(self): return 
self.name def __eq__(self, obj): return obj and obj.id == self.id def __ne__(self, obj): return not self.__eq__(obj) def __hash__(self): return self.id.__hash__()<|fim▁end|>
<|file_name|>howto.js<|end_file_name|><|fim▁begin|>(function(){ var app = angular.module('howtoApp', []); /*TODO - add custom filters https://scotch.io/tutorials/building-custom-angularjs-filters */ app.filter('facetedNavFilter', function() { return function(input,scope) { /*console.log(scope); var out = []; var tmpArr = []; var isEmptyFilterNav = true; angular.forEach(scope.tmpListing, function(howto) { angular.forEach(howto.metaData, function(metaRaw){ angular.forEach(metaArr,function(meta){ angular.forEach(tmpArr,function(el){ if(el!==meta){ tmpArr.push(meta); isEmptyFilterNav = false; } }); }); }); }); if(!isEmptyFilterNav){ angular.forEach(input, function(facetCollection){ angular.forEach(facetCollection.categories, function(facetCat){ angular.forEach(facetCat.values, function(facet){ //console.log(facet); }); }); }); return out; }else{ return input; }*/ return input; } }); app.filter('facetFilterListings', function() { return function(input,scope) { //scope.tmpListing = []; /*var out = []; var facetSelectionArr = scope.facetSelectionArr; angular.forEach(input, function(howto) { if(facetSelectionArr.length > 0){ var addToOut = false; angular.forEach(howto.metaData, function(metaRaw){ metaArr = metaRaw.split(","); angular.forEach(metaArr,function(meta){ angular.forEach(facetSelectionArr,function(facet){ if(facet === meta){ addToOut = true; } }); }); }); if(!addToOut){ out.push(howto); } }else{ out.push(howto); } }); console.log($scope); return out;*/ return input; } }); app.controller('howToController', ['$scope','$filter','$http',function($scope,$filter,$http){ $scope.orderBy = $filter('orderBy'); $scope.callbackName = 'fbcallback'; <|fim▁hole|> $scope.query = {}; $scope.angularCallback = "&callback=JSON_CALLBACK"; $scope.minInputToSearch = 3; $scope.numRanks = 10; $scope.currentPage = 1; $scope.facetSelectionArr = []; $scope.xhrSource = "search.json?1"; //$scope.xhrSource = 
"//funnelback-dev.ucl.ac.uk/s/search.json?collection=isd-howto&profile=_default_preview&num_ranks=1000"; $scope.log = function(){ console.log($scope.resultModel); } $scope.fbEncodeURI = function(str){ var str = encodeURI(str); //return 'hello world'; return str.replace('+','%20').replace('%257C','%7C');//convert fb use of + and | char } $scope.loadResultsTmp = function(){ return 'js/includes/listings.html'; } $scope.loadFacetsTmp = function(){ return 'js/includes/facets.html'; } $scope.loadPaginationTmp = function(){ return 'js/includes/pagination.html'; } $scope.removeFacet = function(arr,el){ var pos = arr.indexOf(el); if(pos >= 0){ arr.splice(pos,1);//second arg ensures 1 item is removed from array } return arr; } $scope.filterFacets = function(currentElQry,currentElLabel){ var isSelected = false; if($scope.facetSelectionArr){ for(var i in $scope.facetSelectionArr){ var tmpSelectedItem = $scope.facetSelectionArr[i] if(tmpSelectedItem == currentElLabel){ isSelected = true; $scope.facetSelectionArr = $scope.removeFacet($scope.facetSelectionArr,currentElLabel); } if(isSelected == true)break; } if(isSelected == false){ $scope.facetSelectionArr.push(currentElLabel); } } //$scope.filterListings(); } $scope.updateMeta = function(){ console.log($filter); } $scope.isInputChecked = function(el){ var isChecked = false; var facets = $scope.xhrDataSelectedFacets; for(var i in facets){ var tmpArr = facets[i]; for(var j in tmpArr){ var tmpQry = encodeURI(i + '=' + tmpArr[j]); if($scope.fbEncodeURI(el) === tmpQry) isChecked = true; } } return isChecked; } $scope.facetHasCount = function(el){ //console.log($scope.listingModel); if($scope.getCount(el) > 0){ return true; }else{ return false; } } $scope.getCount = function(el){ var count = 0; angular.forEach($scope.xhrDataResults,function(result){ angular.forEach(result.metaData,function(metaArr){ metaArr = metaArr.split(","); angular.forEach(metaArr,function(meta){ if(el==meta){ count+=1; } }); }); }); return count; } 
$scope.showFacetCount = function(facetObj){ var showFacet = false; var tmpCount = 0; if(typeof facetObj.categories[0] !== 'undefined'){ for(var i in facetObj.categories[0].values){ var facet = facetObj.categories[0].values[i]; tmpCount += parseInt(facet.count); } } if(tmpCount > 0)showFacet = true; return showFacet; } $scope.updatePage = function(x) { $scope.currentPage = x; return; } $scope.isCurrentPage = function(x) { if(x === $scope.currentPage) { return true; }else{ return false; } } $scope.showListing = function(x) { var rankStart = $scope.currentPage * $scope.numRanks; var rankEnd = ($scope.currentPage + 1) * $scope.numRanks; if(x >= rankStart && x < rankEnd) { return true; }else{ return false; } } $scope.getData = function(){ var requestUrl = $scope.xhrSource + "&query=" + $scope.defaultQry + $scope.angularCallback; $http.jsonp(requestUrl).success(function(data) { $scope.data = data;//make available to $scope variable //$scope.xhrDataResults = $scope.orderBy(data.response.resultPacket.results,'title',$scope.direction); $scope.xhrDataResults = data.response.resultPacket.results; $scope.xhrDataFacets = $scope.orderBy(data.response.facets,'title',$scope.direction); $scope.xhrDataSelectedFacets = data.question.selectedCategoryValues; $scope.totalPages = Math.ceil(data.response.resultPacket.resultsSummary.fullyMatching/$scope.numRanks); $scope.paginationArr = []; var i=1; for(i=1;i<=$scope.totalPages;i++){ $scope.paginationArr.push(i); } }); } $scope.getData(); }]); })();<|fim▁end|>
$scope.defaultQry = "!padrenullquery";
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>/* * SPKI - Simple Public Key Infrastructure * * Copyright © 2014 Senso-Rezo * All rigths reserved. * * See LICENSE file for licensing information. * See http://github/olemaire/spki for more information. */ fn main() { Usage(); } fn Usage() { let message = "Copyright (C) Senso-Rezo - SPKI (http://github.com/olemaire/spki) Usage: spki <option> (<subject>) Available options are: --initialize Initialize a new Certificate Authority --issue <type> <subject> Issue a certificate of <type> for <subject> server <fqdn> issue a Server certificate user <email> issue an User certificate --verify <email,fqdn> Verify a given certificate --renew <email,fqdn> (reason) Renew a given certificate --revoke <email,fqdn> (reason) Revoke a given certificate --crl Generate a Certificate Revocation List --print <email,fqdn,ca,crl> Will display a raw print of certificate/CRL --info (email,fqdn,ca,crl) Will give human readable information on SPKI certificate/CA/CRL --status Will give SPKI overall operational status<|fim▁hole|> Exemples: spki --issue server www.senso-rezo.org spki --issue user [email protected] spki --info www.senso-rezo.org spki --revoke ldap.senso-rezo.org spki --revoke www.senso-rezo.org keyCompromise spki --renew [email protected] spki --crl spki --print crl "; println(message); } /* This Is The End */<|fim▁end|>
--help Display this short help message --version Will display the SPKI version
<|file_name|>FileDetail.js<|end_file_name|><|fim▁begin|>/** * Wheel, copyright (c) 2019 - present by Arno van der Vegt * Distributed under an MIT license: https://arnovandervegt.github.io/wheel/license.txt **/ const File = require('./File'); exports.FileDetail = class extends File.File { constructor(opts) { opts.className = 'file detail'; super(opts); } initDOM(parentNode) { let file = this._file; this.create( parentNode, { id: this.setElement.bind(this), className: this._className, children: [ File.getIcon(this._getImage, file), { id: this.setLinkElement.bind(this), type: 'a', href: '#', className: 'no-select name', innerHTML: file.name<|fim▁hole|> { type: 'span', href: '#', className: 'no-select size', innerHTML: file.size + ' - ' + this.bytesToSize(file.size) } : null, (file.modified || file.hash) ? { type: 'span', href: '#', className: 'no-select modified', innerHTML: file.modified || file.hash } : null ] } ); } /** * https://stackoverflow.com/questions/15900485/correct-way-to-convert-size-in-bytes-to-kb-mb-gb-in-javascript **/ bytesToSize(bytes) { const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB']; if (parseInt(bytes, 10) === 0) { return '0 Byte'; } let i = parseInt(Math.floor(Math.log(bytes) / Math.log(1024))); return Math.round(bytes / Math.pow(1024, i), 2) + ' ' + sizes[i]; } };<|fim▁end|>
}, !file.directory && file.size ?
<|file_name|>SynIO.hpp<|end_file_name|><|fim▁begin|>/* * File: SynIO.hpp * Author: Matteo Di Carlo * Created on March 17, 2016, 9:05 PM * * Copyright (C) 2016 Matteo Di Carlo - www.gleeno.com * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License<|fim▁hole|> * along with this program. If not, see <http://www.gnu.org/licenses/>. */ #ifndef SYNIO_HPP #define SYNIO_HPP #include <iostream> #include <fstream> #include "Synapsis/Common/Status.hpp" #include <json/json.h> class SynIO { public: status_t getTextFile(std::string* source, std::string* result); //todo: setTextFile status_t getJsonFile (std::string* source, Json::Value* result); static int toJson(void* source, Json::Value* result); }; #endif /* SYNIO_HPP */<|fim▁end|>
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>use super::*; #[test] fn from_vec() { let q = Queue::from(vec![1, 2, 3]); assert_eq!(&vec![1, 2, 3], q.vec()); } #[test] fn from_empty_vec() { let v: Vec<i64> = Vec::new(); let q = Queue::from(v.clone()); assert_eq!(&v, q.vec()); } #[test] fn queue() { let mut q = Queue::new(); assert_eq!(q.queue("hello"), Ok(1)); } #[test] fn dequeue() { let mut q = Queue::new(); q.queue("hello").unwrap(); assert_eq!(q.dequeue(), Some("hello")); } #[test] fn dequeue_empty() { let mut q: Queue<i64> = Queue::new(); assert_eq!(q.dequeue(), None); } #[test] fn no_capacity() { let q: Queue<u8> = Queue::new(); assert_eq!(q.capacity(), None); } #[test] fn some_capacity() { let q: Queue<u8> = Queue::with_capacity(12); assert_eq!(q.capacity(), Some(12)); } #[test] fn queue_full_capacity() { let mut q: Queue<u8> = Queue::with_capacity(0); assert_eq!(q.queue(3), Err(())); } #[test] fn vec_empty() { let q: Queue<i64> = Queue::new(); let v: Vec<i64> = Vec::new(); assert_eq!(&v, q.vec()); } #[test]<|fim▁hole|> q.queue(1).unwrap(); q.queue(2).unwrap(); q.queue(3).unwrap(); let v = vec![1, 2, 3]; assert_eq!(&v, q.vec()); } #[test] fn peek_at_empty() { let q: Queue<u8> = Queue::with_capacity(1); assert_eq!(q.peek(), None); } #[test] fn peek_at_something() { let mut q = Queue::new(); q.queue(1).unwrap(); assert_eq!(q.peek(), Some(1)); } #[test] fn force_queue_with_capacity() { let mut q: Queue<u8> = Queue::with_capacity(1); q.queue(1).unwrap(); let _ = q.force_queue(2); assert_eq!(q.peek(), Some(2)); } #[test] fn force_queue_with_capacity_check_size() { let mut q = Queue::with_capacity(1); q.queue(1).unwrap(); assert_eq!(1, q.force_queue(2)); } #[test] fn force_queue_no_capacity() { let mut q = Queue::new(); q.queue(1).unwrap(); assert_eq!(2, q.force_queue(2)); } #[test] fn no_len() { let q: Queue<u8> = Queue::new(); assert_eq!(q.len(), 0); } #[test] fn some_len() { let mut q = Queue::new(); q.queue(1).unwrap(); q.queue(2).unwrap(); 
assert_eq!(q.len(), 2); } #[test] fn is_empty_empty() { let q: Queue<i64> = Queue::new(); assert_eq!(q.is_empty(), true); } #[test] fn is_empty_some() { let mut q = Queue::new(); q.queue(1).unwrap(); assert_eq!(q.is_empty(), false); }<|fim▁end|>
fn vec_some() { let mut q = Queue::new();
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>declare module 'date-fns/end_of_month' { import {endOfMonth} from 'date-fns'<|fim▁hole|><|fim▁end|>
export = endOfMonth }
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>''' Decomposition ------------- The core of sector decomposition. This module implements the actual decomposition routines. Common ~~~~~~ This module collects routines that are used by multiple decompition modules. .. autoclass:: pySecDec.decomposition.Sector .. autofunction:: pySecDec.decomposition.squash_symmetry_redundant_sectors_sort .. autofunction:: pySecDec.decomposition.squash_symmetry_redundant_sectors_dreadnaut Iterative ~~~~~~~~~<|fim▁hole|>Geometric ~~~~~~~~~ .. automodule:: pySecDec.decomposition.geometric :members: Splitting ~~~~~~~~~ .. automodule:: pySecDec.decomposition.splitting :members: ''' from . import iterative, geometric, splitting from .common import *<|fim▁end|>
.. automodule:: pySecDec.decomposition.iterative :members:
<|file_name|>gradient_check.py<|end_file_name|><|fim▁begin|>import numpy as np from Other_samples.testCases import * from Other_samples.Gradient_check.gc_utils import sigmoid, relu, dictionary_to_vector, vector_to_dictionary, \ gradients_to_vector def forward_propagation(x, theta): """ Implement the linear forward propagation (compute J) presented in Figure 1 (J(theta) = theta * x) Arguments: x -- a real-valued input theta -- our parameter, a real number as well Returns: J -- the value of function J, computed using the formula J(theta) = theta * x """ J = theta * x return J x, theta = 2, 4 J = forward_propagation(x, theta) print("J = " + str(J)) def backward_propagation(x, theta): """ Computes the derivative of J with respect to theta (see Figure 1). Arguments: x -- a real-valued input theta -- our parameter, a real number as well Returns: dtheta -- the gradient of the cost with respect to theta """ dtheta = x return dtheta x, theta = 2, 4 dtheta = backward_propagation(x, theta) print("dtheta = " + str(dtheta)) def gradient_check(x, theta, epsilon=1e-7): """ Implement the backward propagation presented in Figure 1. 
Arguments: x -- a real-valued input theta -- our parameter, a real number as well epsilon -- tiny shift to the input to compute approximated gradient with formula(1) Returns: difference -- difference (2) between the approximated gradient and the backward propagation gradient """ thetaplus = theta + epsilon # Step 1 thetaminus = theta - epsilon # Step 2 J_plus = forward_propagation(x, thetaplus) # Step 3 J_minus = forward_propagation(x, thetaminus) # Step 4 gradapprox = (J_plus - J_minus) / (2 * epsilon) # Step 5 grad = backward_propagation(x, gradapprox) numerator = np.linalg.norm(grad - gradapprox) # Step 1' denominator = np.linalg.norm(grad) + np.linalg.norm(gradapprox) # Step 2' difference = numerator / denominator # Step 3' if difference < 1e-7: print("The gradient is correct!") else: print("The gradient is wrong!") return difference x, theta = 2, 4 difference = gradient_check(x, theta) print("difference = " + str(difference)) def forward_propagation_n(X, Y, parameters): """ Implements the forward propagation (and computes the cost) presented in Figure 3. Arguments: X -- training set for m examples Y -- labels for m examples parameters -- python dictionary containing your parameters "W1", "b1", "W2", "b2", "W3", "b3": W1 -- weight matrix of shape (5, 4) b1 -- bias vector of shape (5, 1) W2 -- weight matrix of shape (3, 5) b2 -- bias vector of shape (3, 1) W3 -- weight matrix of shape (1, 3) b3 -- bias vector of shape (1, 1) Returns: cost -- the cost function (logistic cost for one example) """ # retrieve parameters m = X.shape[1] W1 = parameters["W1"] b1 = parameters["b1"] W2 = parameters["W2"] b2 = parameters["b2"] W3 = parameters["W3"] b3 = parameters["b3"] # LINEAR -> RELU -> LINEAR -> RELU -> LINEAR -> SIGMOID Z1 = np.dot(W1, X) + b1 A1 = relu(Z1) Z2 = np.dot(W2, A1) + b2 A2 = relu(Z2) Z3 = np.dot(W3, A2) + b3 A3 = sigmoid(Z3) # Cost logprobs = np.multiply(-np.log(A3), Y) + np.multiply(-np.log(1 - A3), 1 - Y) cost = 1. 
/ m * np.sum(logprobs) cache = (Z1, A1, W1, b1, Z2, A2, W2, b2, Z3, A3, W3, b3) return cost, cache def backward_propagation_n(X, Y, cache): """ Implement the backward propagation presented in figure 2. Arguments: X -- input datapoint, of shape (input size, 1) Y -- true "label" cache -- cache output from forward_propagation_n() Returns: gradients -- A dictionary with the gradients of the cost with respect to each parameter, activation and pre-activation variables. """ m = X.shape[1] (Z1, A1, W1, b1, Z2, A2, W2, b2, Z3, A3, W3, b3) = cache dZ3 = A3 - Y dW3 = 1. / m * np.dot(dZ3, A2.T) db3 = 1. / m * np.sum(dZ3, axis=1, keepdims=True) dA2 = np.dot(W3.T, dZ3) dZ2 = np.multiply(dA2, np.int64(A2 > 0)) dW2 = 1. / m * np.dot(dZ2, A1.T) db2 = 1. / m * np.sum(dZ2, axis=1, keepdims=True) dA1 = np.dot(W2.T, dZ2) dZ1 = np.multiply(dA1, np.int64(A1 > 0)) dW1 = 1. / m * np.dot(dZ1, X.T) db1 = 1. / m * np.sum(dZ1, axis=1, keepdims=True) gradients = {"dZ3": dZ3, "dW3": dW3, "db3": db3, "dA2": dA2, "dZ2": dZ2, "dW2": dW2, "db2": db2, "dA1": dA1, "dZ1": dZ1, "dW1": dW1, "db1": db1} return gradients def gradient_check_n(parameters, gradients, X, Y, epsilon=1e-7): """ Checks if backward_propagation_n computes correctly the gradient of the cost output by forward_propagation_n Arguments: parameters -- python dictionary containing your parameters "W1", "b1", "W2", "b2", "W3", "b3": grad -- output of backward_propagation_n, contains gradients of the cost with respect to the parameters. 
x -- input datapoint, of shape (input size, 1) y -- true "label" epsilon -- tiny shift to the input to compute approximated gradient with formula(1) Returns: difference -- difference (2) between the approximated gradient and the backward propagation gradient """ # Set-up variables parameters_values, _ = dictionary_to_vector(parameters) grad = gradients_to_vector(gradients) num_parameters = parameters_values.shape[0] J_plus = np.zeros((num_parameters, 1)) J_minus = np.zeros((num_parameters, 1)) gradapprox = np.zeros((num_parameters, 1)) # Compute gradapprox for i in range(num_parameters): thetaplus = np.copy(parameters_values) # Step 1 thetaplus[i][0] = thetaplus[i] + epsilon # Step 2 J_plus[i], _ = forward_propagation_n(X, Y, vector_to_dictionary(thetaplus)) # Step 3 thetaminus = np.copy(parameters_values) # Step 1 thetaminus[i][0] = thetaminus[i] - epsilon # Step 2 J_minus[i], _ = forward_propagation_n(X, Y, vector_to_dictionary(thetaminus)) # Step 3 gradapprox[i] = (J_plus[i] - J_minus[i]) / (2 * epsilon) numerator = np.linalg.norm(grad - gradapprox) # Step 1'<|fim▁hole|> difference = numerator / denominator # Step 3' if difference > 1e-7: print( "\033[93m" + "There is a mistake in the backward propagation! difference = " + str(difference) + "\033[0m") else: print( "\033[92m" + "Your backward propagation works perfectly fine! difference = " + str(difference) + "\033[0m") return difference X, Y, parameters = gradient_check_n_test_case() cost, cache = forward_propagation_n(X, Y, parameters) gradients = backward_propagation_n(X, Y, cache) difference = gradient_check_n(parameters, gradients, X, Y)<|fim▁end|>
denominator = np.linalg.norm(grad) + np.linalg.norm(gradapprox) # Step 2'
<|file_name|>compute_rmsd_pdb_files.py<|end_file_name|><|fim▁begin|>""" Routines to compute RMSD of all PROT_IND_ files These routines were developed by: Rodrigo Antonio Faccioli - [email protected] / [email protected] Leandro Oliveira Bortot - [email protected] / [email protected] """ import os import sys from collections import OrderedDict native = "1VII.pdb" path_gromacs ="/home/faccioli/Programs/gmx-4.6.5/no_mpi/bin/" main_command = "echo C-alpha C-alpha | @PATH_GROMACS@./g_rms -f @PROT@ -s @NATIVE@ -o temporary_rmsd.xvg 2>/dev/null" """ This function obtains all pdb files in mypath """ def get_PROT_IND_files_pdb(mypath): only_pdb_file = [] for root, dirs, files in os.walk(mypath): for file in files: #if file.endswith(".pdb"): if file.find("PROT_IND_") >=0: f_path = os.path.join(root,file) only_pdb_file.append(f_path) return only_pdb_file def main(): pdb_path = sys.argv[1] dict_rmsd = {} all_pdbs = get_PROT_IND_files_pdb(pdb_path) for pdb in all_pdbs: aux_command = main_command.replace("@PATH_GROMACS@", path_gromacs).replace("@PROT@",pdb).replace("@NATIVE@", native) os.system(aux_command) temp_rmsd = open("temporary_rmsd.xvg", "r") for line in temp_rmsd.readlines(): if line.find("@") < 0 and line.find("#") <0: rmsd_value = float(str(line).split()[1]) only_pdb_file_name = os.path.basename(pdb) dict_rmsd[only_pdb_file_name] = rmsd_value temp_rmsd.close() os.remove("temporary_rmsd.xvg") #Saving dictionary rmsd_final = open("all_rmsd.txt", "w") d_sorted_by_value = OrderedDict(sorted(dict_rmsd.items(), key=lambda x: x[1])) for key, value in d_sorted_by_value.items(): rmsd_final.write(str(key) +"\t" + str(value) + "\n") rmsd_final.close()<|fim▁hole|> main()<|fim▁end|>
<|file_name|>funcs.go<|end_file_name|><|fim▁begin|>package runners import ( "encoding/json" "fmt" "reflect" "strings" "text/template" "github.com/davecgh/go-spew/spew" "github.com/kylelemons/godebug/pretty" yaml "gopkg.in/yaml.v2" )<|fim▁hole|> return template.FuncMap{ "pretty": func(i interface{}) string { return pretty.Sprint(i) }, "json": func(i interface{}) string { json, _ := json.MarshalIndent(i, "", "\t") return string(json) }, "yaml": func(i interface{}) string { yaml, _ := yaml.Marshal(i) return string(yaml) }, "spew": func(i interface{}) string { return spew.Sprint(i) }, "describe": func(i interface{}) string { return describeStruct(i, 0) }, } } func describeStruct(t interface{}, depth int) string { prefix := strings.Repeat(" ", depth) var out string s := reflect.Indirect(reflect.ValueOf(t)) typeOfT := s.Type() for i := 0; i < s.NumField(); i++ { f := s.Field(i) out = fmt.Sprintf("%s%s%s %s\n", out, prefix, typeOfT.Field(i).Name, typeOfT.Field(i).Type) switch f.Type().Kind() { case reflect.Struct, reflect.Ptr: out = fmt.Sprintf("%s%s{\n", out, prefix) out = fmt.Sprintf("%s%s", out, describeStruct(f.Interface(), depth+1)) out = fmt.Sprintf("%s%s}\n", out, prefix) } } return out }<|fim▁end|>
func getFuncs() template.FuncMap {
<|file_name|>Interface.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Form implementation generated from reading ui file '11.ui' # # Created by: PyQt5 UI code generator 5.8.2 # # WARNING! All changes made in this file will be lost! from __future__ import print_function from keras.preprocessing import sequence from keras.models import Sequential<|fim▁hole|>from keras.layers import Conv1D, GlobalMaxPooling1D from sklearn.metrics import roc_curve, auc import matplotlib.pyplot as plt import numpy as np from numpy import vstack, row_stack, asarray from nltk.corpus import stopwords from nltk.tokenize import RegexpTokenizer from sklearn.feature_extraction.text import CountVectorizer from sklearn.cross_validation import train_test_split from pandas import read_csv from pymystem3 import Mystem from keras.preprocessing.sequence import pad_sequences from keras.preprocessing.text import Tokenizer from collections import Counter import Twarc import json import codecs import tweepy auth = tweepy.OAuthHandler('DkNRJdzhUnThKJ7G5C9IftdUp', 'C14fr0ews91xJI8AH1I3BNhZrZ2gdlyz9KqnXFPQOnmZqJUmc7') auth.set_access_token('866132837082296320-GRx4gxwbRVffxwXgMhjZhXbxgn4RaM0', 'rhtMycE2gFiJchJVIJtlEIf7qgkvqtCbmue9rPDoXEpkt') api = tweepy.API(auth) from PyQt5 import QtCore, QtGui, QtWidgets # Create a summary of a tweet, only showing relevant fields. def summarize(tweet, extra_fields = None): new_tweet = {} for field, value in tweet.items(): if field in ["text", "id_str", "screen_name", "retweet_count", "favorite_count", "in_reply_to_status_id_str", "in_reply_to_screen_name", "in_reply_to_user_id_str"] and value is not None: new_tweet[field] = value elif extra_fields and field in extra_fields: new_tweet[field] = value elif field in ["retweeted_status", "quoted_status", "user"]: new_tweet[field] = summarize(value) return new_tweet # Print out a tweet, with optional colorizing of selected fields. 
def dump(tweet, colorize_fields=None, summarize_tweet=True): colorize_field_strings = [] for line in json.dumps(summarize(tweet) if summarize_tweet else tweet, indent=4, sort_keys=True).splitlines(): colorize = False for colorize_field in colorize_fields or []: if "\"{}\":".format(colorize_field) in line: print("\x1b" + line + "\x1b") break else: print(line) tweet = list(t.hydrate(['']))[0] dump(summarize(tweet, extra_fields=['in_reply_to_status_id_str', 'in_reply_to_user_id']), colorize_fields=['in_reply_to_status_id', 'in_reply_to_status_id_str', 'in_reply_to_screen_name', 'in_reply_to_user_id', 'in_reply_to_user_id_str'], summarize_tweet=False) def stemconvtext(text): return(''.join(Mystem().lemmatize(text))) model = Sequential() # we start off with an efficient embedding layer which maps # our vocab indices into embedding_dims dimensions model.add(Embedding(max_features, embedding_dims, input_length=maxlen)) model.add(Dropout(0.2)) # we add a Convolution1D, which will learn filters # word group filters of size filter_length: model.add(Conv1D(filters, kernel_size, padding='valid', activation='linear', strides=1)) # we use max pooling: model.add(GlobalMaxPooling1D()) # We add a vanilla hidden layer: model.add(Dense(hidden_dims)) model.add(Dropout(0.2)) model.add(Activation('linear')) # We project onto a single unit output layer, and squash it with a sigmoid: model.add(Dense(1)) model.add(Activation('sigmoid')) class Ui_MainWindow(object): def load_tweet(self): tweet = api.get_status(self.plainTextEdit_2.toPlainText()) self.textBrowser_2.setPlainText(tweet.text) def setupUi(self, MainWindow): MainWindow.setObjectName("MainWindow") MainWindow.resize(911, 597) self.centralwidget = QtWidgets.QWidget(MainWindow) self.centralwidget.setObjectName("centralwidget") self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget) self.verticalLayout.setObjectName("verticalLayout") self.horizontalLayout = QtWidgets.QHBoxLayout() 
self.horizontalLayout.setObjectName("horizontalLayout") self.tabWidget = QtWidgets.QTabWidget(self.centralwidget) self.tabWidget.setObjectName("tabWidget") self.tab = QtWidgets.QWidget() self.tab.setObjectName("tab") self.layoutWidget = QtWidgets.QWidget(self.tab) self.layoutWidget.setGeometry(QtCore.QRect(510, 10, 371, 411)) self.layoutWidget.setObjectName("layoutWidget") self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.layoutWidget) self.verticalLayout_2.setContentsMargins(0, 0, 0, 0) self.verticalLayout_2.setObjectName("verticalLayout_2") self.formLayout = QtWidgets.QFormLayout() self.formLayout.setObjectName("formLayout") self.checkBox = QtWidgets.QCheckBox(self.layoutWidget) self.checkBox.setObjectName("checkBox") self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.checkBox) self.dateEdit_2 = QtWidgets.QDateEdit(self.layoutWidget) self.dateEdit_2.setMinimumDateTime(QtCore.QDateTime(QtCore.QDate(2000, 1, 1), QtCore.QTime(0, 0, 0))) self.dateEdit_2.setMaximumDate(QtCore.QDate(2017, 6, 30)) self.dateEdit_2.setObjectName("dateEdit_2") self.formLayout.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.dateEdit_2) self.dateEdit = QtWidgets.QDateEdit(self.layoutWidget) self.dateEdit.setDateTime(QtCore.QDateTime(QtCore.QDate(2017, 6, 15), QtCore.QTime(0, 0, 0))) self.dateEdit.setObjectName("dateEdit") self.formLayout.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.dateEdit) self.label_2 = QtWidgets.QLabel(self.layoutWidget) self.label_2.setObjectName("label_2") self.formLayout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.label_2) self.spinBox = QtWidgets.QSpinBox(self.layoutWidget) self.spinBox.setMaximum(3) self.spinBox.setObjectName("spinBox") self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.spinBox) self.label = QtWidgets.QLabel(self.layoutWidget) self.label.setObjectName("label") self.formLayout.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.label) self.label_3 = QtWidgets.QLabel(self.layoutWidget) 
self.label_3.setObjectName("label_3") self.formLayout.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.label_3) self.verticalLayout_2.addLayout(self.formLayout) self.label_4 = QtWidgets.QLabel(self.layoutWidget) self.label_4.setObjectName("label_4") self.verticalLayout_2.addWidget(self.label_4) self.plainTextEdit_2 = QtWidgets.QPlainTextEdit(self.layoutWidget) self.plainTextEdit_2.setObjectName("plainTextEdit_2") self.verticalLayout_2.addWidget(self.plainTextEdit_2) self.label_5 = QtWidgets.QLabel(self.layoutWidget) self.label_5.setObjectName("label_5") self.verticalLayout_2.addWidget(self.label_5) self.textBrowser = QtWidgets.QTextBrowser(self.layoutWidget) self.textBrowser.setEnabled(True) self.textBrowser.setObjectName("textBrowser") self.verticalLayout_2.addWidget(self.textBrowser) self.horizontalLayout_2 = QtWidgets.QHBoxLayout() self.horizontalLayout_2.setObjectName("horizontalLayout_2") self.label_7 = QtWidgets.QLabel(self.layoutWidget) self.label_7.setObjectName("label_7") self.horizontalLayout_2.addWidget(self.label_7) self.lcdNumber_5 = QtWidgets.QLCDNumber(self.layoutWidget) self.lcdNumber_5.setProperty("intValue", 0) self.lcdNumber_5.setObjectName("lcdNumber_5") self.horizontalLayout_2.addWidget(self.lcdNumber_5) self.verticalLayout_2.addLayout(self.horizontalLayout_2) self.layoutWidget1 = QtWidgets.QWidget(self.tab) self.layoutWidget1.setGeometry(QtCore.QRect(0, 0, 481, 451)) self.layoutWidget1.setObjectName("layoutWidget1") self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.layoutWidget1) self.verticalLayout_3.setContentsMargins(1, 0, 0, 0) self.verticalLayout_3.setObjectName("verticalLayout_3") self.textBrowser_2 = QtWidgets.QTextBrowser(self.layoutWidget1) self.textBrowser_2.setEnabled(True) self.textBrowser_2.setObjectName("textBrowser_2") self.verticalLayout_3.addWidget(self.textBrowser_2) self.lcdNumber_4 = QtWidgets.QLCDNumber(self.layoutWidget1) self.lcdNumber_4.setProperty("intValue", 0) self.lcdNumber_4.setObjectName("lcdNumber_4") 
self.verticalLayout_3.addWidget(self.lcdNumber_4) self.tabWidget.addTab(self.tab, "") self.tab_2 = QtWidgets.QWidget() self.tab_2.setObjectName("tab_2") self.textBrowser_3 = QtWidgets.QTextBrowser(self.tab_2) self.textBrowser_3.setGeometry(QtCore.QRect(0, 0, 411, 431)) self.textBrowser_3.setObjectName("textBrowser_3") self.lcdNumber = QtWidgets.QLCDNumber(self.tab_2) self.lcdNumber.setEnabled(True) self.lcdNumber.setGeometry(QtCore.QRect(414, 14, 421, 31)) self.lcdNumber.setSmallDecimalPoint(False) self.lcdNumber.setProperty("intValue", 0) self.lcdNumber.setObjectName("lcdNumber") self.tabWidget.addTab(self.tab_2, "") self.tab_3 = QtWidgets.QWidget() self.tab_3.setObjectName("tab_3") self.textBrowser_4 = QtWidgets.QTextBrowser(self.tab_3) self.textBrowser_4.setGeometry(QtCore.QRect(0, 0, 411, 431)) self.textBrowser_4.setObjectName("textBrowser_4") self.lcdNumber_2 = QtWidgets.QLCDNumber(self.tab_3) self.lcdNumber_2.setEnabled(True) self.lcdNumber_2.setGeometry(QtCore.QRect(414, 14, 421, 31)) self.lcdNumber_2.setProperty("intValue", 0) self.lcdNumber_2.setObjectName("lcdNumber_2") self.tabWidget.addTab(self.tab_3, "") self.tab_4 = QtWidgets.QWidget() self.tab_4.setObjectName("tab_4") self.textBrowser_5 = QtWidgets.QTextBrowser(self.tab_4) self.textBrowser_5.setGeometry(QtCore.QRect(0, 0, 411, 431)) self.textBrowser_5.setObjectName("textBrowser_5") self.lcdNumber_3 = QtWidgets.QLCDNumber(self.tab_4) self.lcdNumber_3.setEnabled(True) self.lcdNumber_3.setGeometry(QtCore.QRect(414, 14, 421, 31)) self.lcdNumber_3.setProperty("intValue", 0) self.lcdNumber_3.setObjectName("lcdNumber_3") self.tabWidget.addTab(self.tab_4, "") self.horizontalLayout.addWidget(self.tabWidget) self.verticalLayout.addLayout(self.horizontalLayout) self.label_6 = QtWidgets.QLabel(self.centralwidget) self.label_6.setObjectName("label_6") self.verticalLayout.addWidget(self.label_6) self.pushButton = QtWidgets.QPushButton(self.centralwidget) self.pushButton.setObjectName("pushButton") 
self.verticalLayout.addWidget(self.pushButton) MainWindow.setCentralWidget(self.centralwidget) self.menubar = QtWidgets.QMenuBar(MainWindow) self.menubar.setGeometry(QtCore.QRect(0, 0, 911, 26)) self.menubar.setObjectName("menubar") MainWindow.setMenuBar(self.menubar) self.statusbar = QtWidgets.QStatusBar(MainWindow) self.statusbar.setObjectName("statusbar") MainWindow.setStatusBar(self.statusbar) self.pushButton.clicked.connect(self.load_tweet) self.retranslateUi(MainWindow) self.tabWidget.setCurrentIndex(0) QtCore.QMetaObject.connectSlotsByName(MainWindow) def retranslateUi(self, MainWindow): _translate = QtCore.QCoreApplication.translate MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow")) self.checkBox.setText(_translate("MainWindow", "Анализировать комментарии")) self.label_2.setText(_translate("MainWindow", "Количество комментариев")) self.label.setText(_translate("MainWindow", "верхняя граница даты")) self.label_3.setText(_translate("MainWindow", "нижняя граница даты")) self.label_4.setText(_translate("MainWindow", "Id на пост")) self.plainTextEdit_2.setPlainText(_translate("MainWindow", "")) self.label_5.setText(_translate("MainWindow", "Список первых трех комментариев выбранных по дате")) self.textBrowser.setHtml(_translate("MainWindow", "")) self.label_7.setText(_translate("MainWindow", "Средняя тональность всех комментариев ")) self.textBrowser_2.setHtml(_translate("MainWindow", "")) self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("MainWindow", "Пост")) self.textBrowser_3.setHtml(_translate("MainWindow", "")) self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Первый комментарий")) self.textBrowser_4.setHtml(_translate("MainWindow", "")) self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_3), _translate("MainWindow", "Второй комментарий")) self.textBrowser_5.setHtml(_translate("MainWindow", "")) self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_4), 
_translate("MainWindow", "Третий комментарий")) self.label_6.setText(_translate("MainWindow", "Эмоциональная тональность от 0 - абсолютный негатив, до 100 - абсолютный позитив")) self.pushButton.setText(_translate("MainWindow", "Анализ")) if __name__ == "__main__": import sys app = QtWidgets.QApplication(sys.argv) MainWindow = QtWidgets.QMainWindow() ui = Ui_MainWindow() ui.setupUi(MainWindow) MainWindow.show() sys.exit(app.exec_())<|fim▁end|>
from keras.layers import Dense, SpatialDropout1D,Dropout, Activation from keras.layers import Embedding
<|file_name|>QSETPeriodicHullImpl.java<|end_file_name|><|fim▁begin|>/* The contents of this file are subject to the Health Level-7 Public * License Version 1.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the * License at http://www.hl7.org/HPL/ * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and * limitations under the License. * * The Original Code is all this file. * * The Initial Developer of the Original Code is . * Portions created by Initial Developer are Copyright (C) 2002-2004 * Health Level Seven, Inc. All Rights Reserved. * * Contributor(s): */ package org.hl7.types.impl; import org.hl7.types.ANY; import org.hl7.types.BL; import org.hl7.types.Criterion; import org.hl7.types.INT; import org.hl7.types.IVL; import org.hl7.types.PQ; import org.hl7.types.QSET; import org.hl7.types.QTY; import org.hl7.types.SET; import org.hl7.types.TS; /* * Result of periodic hull operation between two QSETs */ public class QSETPeriodicHullImpl<T extends QTY> extends QSETTermBase<T> implements QSET<T> { /* * The hull is considered the space (inclusive) of occurence intervals of _thisset and _thatset. Note: this is * different from the intervals of _thatset and _thisset. As of now, we assume that the two sets are interleaving */ QSET<T> _thisset; // occurs first QSET<T> _thatset; // occurs second @Override public String toString() { return _thisset.toString() + " .. 
" + _thatset.toString(); } public static QSETPeriodicHullImpl valueOf(final QSET thisset, final QSET thatset) { return new QSETPeriodicHullImpl(thisset, thatset); } private QSETPeriodicHullImpl(final QSET<T> thisset, final QSET<T> thatset) { _thisset = thisset; _thatset = thatset; } public QSET<T> getThisSet() { return _thisset; } public QSET<T> getThatSet() { return _thatset; } public BL contains(final T element) { return this.nextTo(element).low().lessOrEqual(element).and(element.lessOrEqual(this.nextTo(element).high())); } public BL contains(final SET<T> subset) { if (subset instanceof IVL) { final IVL<T> ivl = (IVL) subset; return this.contains(ivl.low()).and(this.contains(ivl.high())).and( this.nextTo(ivl.low()).equal(this.nextTo(ivl.high()))); } else { throw new UnsupportedOperationException(); } } public IVL<T> hull() { throw new UnsupportedOperationException(); } public IVL<T> nextTo(final T element) { IVL<T> thisIVL, thatIVL; thisIVL = _thisset.nextTo(element); thatIVL = _thatset.nextTo(element); if (thisIVL.low().lessOrEqual(element).isTrue()) { return IVLimpl.valueOf(thisIVL.lowClosed(), thisIVL.low(), thatIVL.high(), thatIVL.highClosed()); } else if (thatIVL.high().lessOrEqual(thisIVL.low()).isTrue()) { final PQ diff = (PQ) _thisset.nextAfter(thisIVL.low()).low().minus(thisIVL.low()); return IVLimpl.valueOf(thisIVL.lowClosed(), (T) ((TS) thisIVL.low()).minus(diff), thatIVL.high(), thatIVL .highClosed()); } else { return IVLimpl.valueOf(thisIVL.lowClosed(), thisIVL.low(), thatIVL.high(), thatIVL.highClosed()); } } public IVL<T> nextAfter(final T element) { final IVL<T> ans = this.nextTo(element); if (element.lessOrEqual(ans.low()).isTrue()) { return ans; } else { // we have to get the next ans final IVL<T> thisIVL = _thisset.nextAfter(ans.high()); final IVL<T> thatIVL = _thatset.nextAfter(ans.high()); return IVLimpl.valueOf(thisIVL.lowClosed(), thisIVL.low(), thatIVL.high(), thatIVL.highClosed()); } } public BL interleaves(final QSET<T> otherset) { throw 
new UnsupportedOperationException(); } @Override public BL equal(final ANY that) { throw new UnsupportedOperationException(); } public INT cardinality() { throw new UnsupportedOperationException(); } public BL isEmpty() { return _thisset.isEmpty().and(_thatset.isEmpty()); } public T any() { throw new UnsupportedOperationException(); } public SET<T> select(final Criterion c) {<|fim▁hole|><|fim▁end|>
throw new UnsupportedOperationException(); } }
<|file_name|>debug_endpoint.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import import logging import os import sys import datetime import psutil from six import StringIO from twisted.web import http, resource from Tribler.Core.Utilities.instrumentation import WatchDog import Tribler.Core.Utilities.json_util as json HAS_MELIAE = True try: from meliae import scanner except ImportError: HAS_MELIAE = False class MemoryDumpBuffer(StringIO): """ Meliae expects its file handle to support write(), flush() and __call__(). The StringIO class does not support __call__(), therefore we provide this subclass. """ def __call__(self, s): StringIO.write(self, s) class DebugEndpoint(resource.Resource): """ This endpoint is responsible for handing requests regarding debug information in Tribler. """ def __init__(self, session): resource.Resource.__init__(self) child_handler_dict = {"circuits": DebugCircuitsEndpoint, "open_files": DebugOpenFilesEndpoint, "open_sockets": DebugOpenSocketsEndpoint, "threads": DebugThreadsEndpoint, "cpu": DebugCPUEndpoint, "memory": DebugMemoryEndpoint, "log": DebugLogEndpoint, "profiler": DebugProfilerEndpoint} for path, child_cls in child_handler_dict.iteritems(): self.putChild(path, child_cls(session)) class DebugCircuitsEndpoint(resource.Resource): """ This class handles requests regarding the tunnel community debug information. """ def __init__(self, session): resource.Resource.__init__(self) self.session = session self.putChild("slots", DebugCircuitSlotsEndpoint(session)) def render_GET(self, request): """ .. http:get:: /debug/circuits A GET request to this endpoint returns information about the built circuits in the tunnel community. **Example request**: .. sourcecode:: none curl -X GET http://localhost:8085/debug/circuits **Example response**: .. 
sourcecode:: javascript { "circuits": [{ "id": 1234, "state": "EXTENDING", "goal_hops": 4, "bytes_up": 45, "bytes_down": 49, "created": 1468176257, "hops": [{ "host": "unknown" }, { "host": "39.95.147.20:8965" }], ... }, ...] } """ tunnel_community = self.session.lm.tunnel_community if not tunnel_community: request.setResponseCode(http.NOT_FOUND) return json.dumps({"error": "tunnel community not found"}) circuits_json = [] for circuit_id, circuit in tunnel_community.circuits.iteritems(): item = {'id': circuit_id, 'state': str(circuit.state), 'goal_hops': circuit.goal_hops, 'bytes_up': circuit.bytes_up, 'bytes_down': circuit.bytes_down, 'created': circuit.creation_time} hops_array = [] for hop in circuit.hops: hops_array.append({'host': 'unknown' if 'UNKNOWN HOST' in hop.host else '%s:%s' % (hop.host, hop.port)}) item['hops'] = hops_array circuits_json.append(item) return json.dumps({'circuits': circuits_json}) class DebugCircuitSlotsEndpoint(resource.Resource): """ This class handles requests for information about slots in the tunnel overlay. """ def __init__(self, session): resource.Resource.__init__(self) self.session = session def render_GET(self, request): """ .. http:get:: /debug/circuits/slots A GET request to this endpoint returns information about the slots in the tunnel overlay. **Example request**: .. sourcecode:: none curl -X GET http://localhost:8085/debug/circuits/slots **Example response**: .. sourcecode:: javascript { "open_files": [{ "path": "path/to/open/file.txt", "fd": 33, }, ...] } """ return json.dumps({ "slots": { "random": self.session.lm.tunnel_community.random_slots, "competing": self.session.lm.tunnel_community.competing_slots } }) class DebugOpenFilesEndpoint(resource.Resource): """ This class handles request for information about open files. """ def __init__(self, session): resource.Resource.__init__(self) self.session = session def render_GET(self, request): """ .. 
http:get:: /debug/open_files A GET request to this endpoint returns information about files opened by Tribler. **Example request**: .. sourcecode:: none curl -X GET http://localhost:8085/debug/open_files **Example response**: .. sourcecode:: javascript { "open_files": [{ "path": "path/to/open/file.txt", "fd": 33, }, ...] } """ my_process = psutil.Process() return json.dumps({ "open_files": [{"path": open_file.path, "fd": open_file.fd} for open_file in my_process.open_files()]}) class DebugOpenSocketsEndpoint(resource.Resource): """ This class handles request for information about open sockets. """ def __init__(self, session): resource.Resource.__init__(self) self.session = session def render_GET(self, request): """ .. http:get:: /debug/open_sockets A GET request to this endpoint returns information about open sockets. **Example request**: .. sourcecode:: none curl -X GET http://localhost:8085/debug/openfiles **Example response**: .. sourcecode:: javascript { "open_sockets": [{ "family": 2, "status": "ESTABLISHED", "laddr": "0.0.0.0:0", "raddr": "0.0.0.0:0", "type": 30 }, ...] } """ my_process = psutil.Process() sockets = [] for open_socket in my_process.connections(): sockets.append({ "family": open_socket.family, "status": open_socket.status, "laddr": ("%s:%d" % open_socket.laddr) if open_socket.laddr else "-", "raddr": ("%s:%d" % open_socket.raddr) if open_socket.raddr else "-", "type": open_socket.type }) return json.dumps({"open_sockets": sockets}) class DebugThreadsEndpoint(resource.Resource): """ This class handles request for information about threads. """ def __init__(self, session): resource.Resource.__init__(self) self.session = session def render_GET(self, request): """ .. http:get:: /debug/threads A GET request to this endpoint returns information about running threads. **Example request**: .. sourcecode:: none curl -X GET http://localhost:8085/debug/threads **Example response**: .. 
sourcecode:: javascript { "threads": [{ "thread_id": 123456, "thread_name": "my_thread", "frames": ["my_frame", ...] }, ...] } """ watchdog = WatchDog() return json.dumps({"threads": watchdog.get_threads_info()}) class DebugCPUEndpoint(resource.Resource): """ This class handles request for information about CPU. """ def __init__(self, session): resource.Resource.__init__(self) self.putChild("history", DebugCPUHistoryEndpoint(session)) class DebugCPUHistoryEndpoint(resource.Resource): """ This class handles request for information about CPU usage history. """ def __init__(self, session): resource.Resource.__init__(self) self.session = session def render_GET(self, request): """ .. http:get:: /debug/cpu/history A GET request to this endpoint returns information about CPU usage history in the form of a list. **Example request**: .. sourcecode:: none curl -X GET http://localhost:8085/debug/cpu/history **Example response**: .. sourcecode:: javascript { "cpu_history": [{ "time": 1504015291214, "cpu": 3.4, }, ...] } """ history = self.session.lm.resource_monitor.get_cpu_history_dict() if self.session.lm.resource_monitor else {} return json.dumps({"cpu_history": history}) class DebugMemoryEndpoint(resource.Resource): """ This class handles request for information about memory. """ def __init__(self, session): resource.Resource.__init__(self) self.putChild("history", DebugMemoryHistoryEndpoint(session)) if HAS_MELIAE: self.putChild("dump", DebugMemoryDumpEndpoint(session)) class DebugMemoryHistoryEndpoint(resource.Resource): """ This class handles request for information about memory usage history. """ def __init__(self, session): resource.Resource.__init__(self) self.session = session def render_GET(self, request): """ .. http:get:: /debug/memory/history A GET request to this endpoint returns information about memory usage history in the form of a list. **Example request**: .. sourcecode:: none curl -X GET http://localhost:8085/debug/memory/history **Example response**: .. 
sourcecode:: javascript { "memory_history": [{ "time": 1504015291214, "mem": 324324, }, ...] } """ history = self.session.lm.resource_monitor.get_memory_history_dict() if self.session.lm.resource_monitor else {} return json.dumps({"memory_history": history}) class DebugMemoryDumpEndpoint(resource.Resource): """ This class handles request for dumping memory contents. """ def __init__(self, session): resource.Resource.__init__(self) self.session = session def render_GET(self, request): """ .. http:get:: /debug/memory/dump A GET request to this endpoint returns a Meliae-compatible dump of the memory contents. **Example request**: .. sourcecode:: none curl -X GET http://localhost:8085/debug/memory/dump **Example response**: The content of the memory dump file. """ content = "" if sys.platform == "win32": # On Windows meliae (especially older versions) segfault on writing to file dump_buffer = MemoryDumpBuffer() try: scanner.dump_all_objects(dump_buffer) except OverflowError as e: # https://bugs.launchpad.net/meliae/+bug/569947 logging.error("meliae dump failed (your version may be too old): %s", str(e)) content = dump_buffer.getvalue() dump_buffer.close() else: # On other platforms, simply writing to file is much faster dump_file_path = os.path.join(self.session.config.get_state_dir(), 'memory_dump.json') scanner.dump_all_objects(dump_file_path) with open(dump_file_path, 'r') as dump_file: content = dump_file.read() date_str = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") request.setHeader(b'content-type', 'application/json') request.setHeader(b'Content-Disposition', 'attachment; filename=tribler_memory_dump_%s.json' % date_str) return content class DebugLogEndpoint(resource.Resource): """ This class handles the request for displaying the logs. """ def __init__(self, session): resource.Resource.__init__(self) self.session = session def render_GET(self, request): """ .. 
http:get:: /debug/log?process=<core|gui>&max_lines=<max_lines> A GET request to this endpoint returns a json with content of core or gui log file & max_lines requested **Example request**: .. sourcecode:: none curl -X GET http://localhost:8085/debug/log?process=core&max_lines=5 **Example response**: A JSON with content of the log file & max_lines requested, for eg. { "max_lines" : 5, "content" :"INFO 1506675301.76 sqlitecachedb:181 Reading database version... INFO 1506675301.76 sqlitecachedb:185 Current database version is 29 INFO 1506675301.76 sqlitecachedb:203 Beginning the first transaction... INFO 1506675301.76 upgrade:93 tribler is in the latest version,... INFO 1506675302.08 LaunchManyCore:254 lmc: Starting Dispersy..." } """ # First, flush all the logs to make sure it is written to file for handler in logging.getLogger().handlers: handler.flush() # Get the location of log file param_process = request.args['process'][0] if request.args['process'] else 'core' log_file_name = os.path.join(self.session.config.get_log_dir(), 'tribler-%s-info.log' % param_process) # Default response response = {'content': '', 'max_lines': 0} # Check if log file exists and return last requested 'max_lines' of log if os.path.exists(log_file_name): try: max_lines = int(request.args['max_lines'][0]) with open(log_file_name, 'r') as log_file: response['content'] = self.tail(log_file, max_lines) response['max_lines'] = max_lines except ValueError: with open(log_file_name, 'r') as log_file: response['content'] = self.tail(log_file, 100) # default 100 lines response['max_lines'] = 0 return json.dumps(response) def tail(self, file_handler, lines=1): """Tail a file and get X lines from the end""" # place holder for the lines found lines_found = [] byte_buffer = 1024 # block counter will be multiplied by buffer # to get the block size from the end block_counter = -1 # loop until we find X lines while len(lines_found) < lines: try: file_handler.seek(block_counter * byte_buffer, os.SEEK_END) 
except IOError: # either file is too small, or too many lines requested file_handler.seek(0) lines_found = file_handler.readlines() break lines_found = file_handler.readlines() # we found enough lines, get out if len(lines_found) > lines: break # decrement the block counter to get the # next X bytes block_counter -= 1 return ''.join(lines_found[-lines:]) class DebugProfilerEndpoint(resource.Resource): """ This class handles requests for the profiler. """ def __init__(self, session): resource.Resource.__init__(self) self.session = session def render_GET(self, request): """ .. http:get:: /debug/profiler A GET request to this endpoint returns information about the state of the profiler. This state is either STARTED or STOPPED. **Example request**: .. sourcecode:: none curl -X GET http://localhost:8085/debug/profiler **Example response**: .. sourcecode:: javascript { "state": "STARTED" } """ monitor_enabled = self.session.config.get_resource_monitor_enabled() state = "STARTED" if (monitor_enabled and self.session.lm.resource_monitor.profiler_running) else "STOPPED" return json.dumps({"state": state}) def render_PUT(self, request): """ .. http:put:: /debug/profiler A PUT request to this endpoint starts the profiler. **Example request**: .. sourcecode:: none curl -X PUT http://localhost:8085/debug/profiler **Example response**: .. sourcecode:: javascript { "success": "true" } """ self.session.lm.resource_monitor.start_profiler() return json.dumps({"success": True}) def render_DELETE(self, request): """ .. http:delete:: /debug/profiler A PUT request to this endpoint stops the profiler.<|fim▁hole|> **Example request**: .. sourcecode:: none curl -X DELETE http://localhost:8085/debug/profiler **Example response**: .. sourcecode:: javascript { "success": "true" } """ file_path = self.session.lm.resource_monitor.stop_profiler() return json.dumps({"success": True, "profiler_file": file_path})<|fim▁end|>
<|file_name|>HiveWriteUtils.java<|end_file_name|><|fim▁begin|>/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive; import com.facebook.presto.hive.HdfsEnvironment.HdfsContext; import com.facebook.presto.hive.metastore.Database; import com.facebook.presto.hive.metastore.Partition; import com.facebook.presto.hive.metastore.SemiTransactionalHiveMetastore; import com.facebook.presto.hive.metastore.Storage; import com.facebook.presto.hive.metastore.Table; import com.facebook.presto.hive.s3.PrestoS3FileSystem; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.SchemaNotFoundException; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.StandardErrorCode; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.type.BigintType; import com.facebook.presto.spi.type.BooleanType; import com.facebook.presto.spi.type.CharType; import com.facebook.presto.spi.type.DateType; import com.facebook.presto.spi.type.DecimalType; import com.facebook.presto.spi.type.Decimals; import com.facebook.presto.spi.type.DoubleType; import com.facebook.presto.spi.type.IntegerType; import com.facebook.presto.spi.type.RealType; import com.facebook.presto.spi.type.SmallintType; import com.facebook.presto.spi.type.TimestampType; import com.facebook.presto.spi.type.TinyintType; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.VarbinaryType; import 
com.facebook.presto.spi.type.VarcharType; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.primitives.Shorts; import com.google.common.primitives.SignedBytes; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FilterFileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.ProtectMode; import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; import org.apache.hadoop.hive.ql.io.HiveOutputFormat; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.Serializer; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.io.BooleanWritable; import 
org.apache.hadoop.io.ByteWritable; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.Reporter; import org.joda.time.DateTimeZone; import java.io.IOException; import java.math.BigInteger; import java.sql.Date; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.concurrent.TimeUnit; import static com.facebook.presto.hive.HiveErrorCode.HIVE_DATABASE_LOCATION_ERROR; import static com.facebook.presto.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR; import static com.facebook.presto.hive.HiveErrorCode.HIVE_WRITER_DATA_ERROR; import static com.facebook.presto.hive.HiveUtil.checkCondition; import static com.facebook.presto.hive.HiveUtil.isArrayType; import static com.facebook.presto.hive.HiveUtil.isMapType; import static com.facebook.presto.hive.HiveUtil.isRowType; import static com.facebook.presto.hive.metastore.MetastoreUtil.getProtectMode; import static com.facebook.presto.hive.metastore.MetastoreUtil.verifyOnline; import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED; import static com.facebook.presto.spi.type.Chars.isCharType; import static com.google.common.base.Strings.padEnd; import static java.lang.Float.intBitsToFloat; import static java.lang.Math.toIntExact; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.UUID.randomUUID; import static java.util.stream.Collectors.toList; import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.COMPRESSRESULT; import static org.apache.hadoop.hive.metastore.TableType.MANAGED_TABLE; import static 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaBooleanObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDateObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaFloatObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaIntObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaShortObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBinaryObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBooleanObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableByteObjectInspector; import static 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDoubleObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableFloatObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableIntObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableShortObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableStringObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; import static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getCharTypeInfo; import static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getVarcharTypeInfo; import static org.joda.time.DateTimeZone.UTC; public final class HiveWriteUtils { @SuppressWarnings("OctalInteger") private static final FsPermission ALL_PERMISSIONS = new FsPermission((short) 0777); private HiveWriteUtils() { } public static RecordWriter createRecordWriter(Path target, JobConf conf, Properties properties, String outputFormatName) { try { boolean compress = HiveConf.getBoolVar(conf, COMPRESSRESULT); Object writer = Class.forName(outputFormatName).getConstructor().newInstance(); return ((HiveOutputFormat<?, ?>) writer).getHiveRecordWriter(conf, target, Text.class, compress, properties, Reporter.NULL); } catch (IOException 
| ReflectiveOperationException e) { throw new PrestoException(HIVE_WRITER_DATA_ERROR, e); } } @SuppressWarnings("deprecation") public static Serializer initializeSerializer(Configuration conf, Properties properties, String serializerName) { try { Serializer result = (Serializer) Class.forName(serializerName).getConstructor().newInstance(); result.initialize(conf, properties); return result; } catch (SerDeException | ReflectiveOperationException e) { throw Throwables.propagate(e); } } public static ObjectInspector getJavaObjectInspector(Type type) { if (type.equals(BooleanType.BOOLEAN)) { return javaBooleanObjectInspector; } else if (type.equals(BigintType.BIGINT)) { return javaLongObjectInspector; } else if (type.equals(IntegerType.INTEGER)) { return javaIntObjectInspector; } else if (type.equals(SmallintType.SMALLINT)) { return javaShortObjectInspector; } else if (type.equals(TinyintType.TINYINT)) { return javaByteObjectInspector; } else if (type.equals(RealType.REAL)) { return javaFloatObjectInspector; } else if (type.equals(DoubleType.DOUBLE)) { return javaDoubleObjectInspector; } else if (type instanceof VarcharType) { return writableStringObjectInspector; } else if (type instanceof CharType) { return writableHiveCharObjectInspector; } else if (type.equals(VarbinaryType.VARBINARY)) { return javaByteArrayObjectInspector; } else if (type.equals(DateType.DATE)) { return javaDateObjectInspector; } else if (type.equals(TimestampType.TIMESTAMP)) { return javaTimestampObjectInspector; } else if (type instanceof DecimalType) { DecimalType decimalType = (DecimalType) type; return getPrimitiveJavaObjectInspector(new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale())); } else if (isArrayType(type)) { return ObjectInspectorFactory.getStandardListObjectInspector(getJavaObjectInspector(type.getTypeParameters().get(0))); } else if (isMapType(type)) { ObjectInspector keyObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(0)); ObjectInspector 
valueObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(1)); return ObjectInspectorFactory.getStandardMapObjectInspector(keyObjectInspector, valueObjectInspector); } else if (isRowType(type)) { return ObjectInspectorFactory.getStandardStructObjectInspector( type.getTypeSignature().getParameters().stream() .map(parameter -> parameter.getNamedTypeSignature().getName()) .collect(toList()), type.getTypeParameters().stream() .map(HiveWriteUtils::getJavaObjectInspector) .collect(toList())); } throw new IllegalArgumentException("unsupported type: " + type); } public static Object getField(Type type, Block block, int position) { if (block.isNull(position)) { return null; } if (BooleanType.BOOLEAN.equals(type)) { return type.getBoolean(block, position); } if (BigintType.BIGINT.equals(type)) { return type.getLong(block, position); } if (IntegerType.INTEGER.equals(type)) { return (int) type.getLong(block, position); } if (SmallintType.SMALLINT.equals(type)) { return (short) type.getLong(block, position); } if (TinyintType.TINYINT.equals(type)) { return (byte) type.getLong(block, position); } if (RealType.REAL.equals(type)) { return intBitsToFloat((int) type.getLong(block, position)); } if (DoubleType.DOUBLE.equals(type)) { return type.getDouble(block, position); } if (type instanceof VarcharType) { return new Text(type.getSlice(block, position).getBytes()); } if (type instanceof CharType) { CharType charType = (CharType) type; return new Text(padEnd(type.getSlice(block, position).toStringUtf8(), charType.getLength(), ' ')); } if (VarbinaryType.VARBINARY.equals(type)) { return type.getSlice(block, position).getBytes(); } if (DateType.DATE.equals(type)) { long days = type.getLong(block, position); return new Date(UTC.getMillisKeepLocal(DateTimeZone.getDefault(), TimeUnit.DAYS.toMillis(days))); } if (TimestampType.TIMESTAMP.equals(type)) { long millisUtc = type.getLong(block, position); return new Timestamp(millisUtc); } if (type instanceof DecimalType) { 
DecimalType decimalType = (DecimalType) type; return getHiveDecimal(decimalType, block, position); } if (isArrayType(type)) { Type elementType = type.getTypeParameters().get(0); Block arrayBlock = block.getObject(position, Block.class); List<Object> list = new ArrayList<>(arrayBlock.getPositionCount()); for (int i = 0; i < arrayBlock.getPositionCount(); i++) { Object element = getField(elementType, arrayBlock, i); list.add(element); } return Collections.unmodifiableList(list); } if (isMapType(type)) { Type keyType = type.getTypeParameters().get(0); Type valueType = type.getTypeParameters().get(1); Block mapBlock = block.getObject(position, Block.class); Map<Object, Object> map = new HashMap<>(); for (int i = 0; i < mapBlock.getPositionCount(); i += 2) { Object key = getField(keyType, mapBlock, i); Object value = getField(valueType, mapBlock, i + 1); map.put(key, value); } return Collections.unmodifiableMap(map); } if (isRowType(type)) { Block rowBlock = block.getObject(position, Block.class); List<Type> fieldTypes = type.getTypeParameters(); checkCondition(fieldTypes.size() == rowBlock.getPositionCount(), StandardErrorCode.GENERIC_INTERNAL_ERROR, "Expected row value field count does not match type field count"); List<Object> row = new ArrayList<>(rowBlock.getPositionCount()); for (int i = 0; i < rowBlock.getPositionCount(); i++) { Object element = getField(fieldTypes.get(i), rowBlock, i); row.add(element); } return Collections.unmodifiableList(row); } throw new PrestoException(NOT_SUPPORTED, "unsupported type: " + type); } public static void checkTableIsWritable(Table table, boolean writesToNonManagedTablesEnabled) { if (!writesToNonManagedTablesEnabled && !table.getTableType().equals(MANAGED_TABLE.toString())) { throw new PrestoException(NOT_SUPPORTED, "Cannot write to non-managed Hive table"); } checkWritable( new SchemaTableName(table.getDatabaseName(), table.getTableName()), Optional.empty(), getProtectMode(table), table.getParameters(), table.getStorage()); } 
public static void checkPartitionIsWritable(String partitionName, Partition partition) { checkWritable( new SchemaTableName(partition.getDatabaseName(), partition.getTableName()), Optional.of(partitionName), getProtectMode(partition), partition.getParameters(), partition.getStorage()); } private static void checkWritable( SchemaTableName tableName, Optional<String> partitionName, ProtectMode protectMode, Map<String, String> parameters, Storage storage) { String tablePartitionDescription = "Table '" + tableName + "'"; if (partitionName.isPresent()) { tablePartitionDescription += " partition '" + partitionName.get() + "'"; } // verify online verifyOnline(tableName, partitionName, protectMode, parameters); // verify not read only if (protectMode.readOnly) { throw new HiveReadOnlyException(tableName, partitionName); } // verify sorting if (storage.isSorted()) { throw new PrestoException(NOT_SUPPORTED, format("Inserting into bucketed sorted tables is not supported. %s", tablePartitionDescription)); } // verify skew info if (storage.isSkewed()) { throw new PrestoException(NOT_SUPPORTED, format("Inserting into bucketed tables with skew is not supported. 
%s", tablePartitionDescription)); } } public static Path getTableDefaultLocation(HdfsContext context, SemiTransactionalHiveMetastore metastore, HdfsEnvironment hdfsEnvironment, String schemaName, String tableName) { Optional<String> location = getDatabase(metastore, schemaName).getLocation(); if (!location.isPresent() || location.get().isEmpty()) { throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location is not set", schemaName)); } Path databasePath = new Path(location.get()); if (!isS3FileSystem(context, hdfsEnvironment, databasePath)) { if (!pathExists(context, hdfsEnvironment, databasePath)) { throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location does not exist: %s", schemaName, databasePath)); } if (!isDirectory(context, hdfsEnvironment, databasePath)) { throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location is not a directory: %s", schemaName, databasePath)); } } return new Path(databasePath, tableName); } private static Database getDatabase(SemiTransactionalHiveMetastore metastore, String database) { return metastore.getDatabase(database).orElseThrow(() -> new SchemaNotFoundException(database)); } public static boolean pathExists(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path) { try { return hdfsEnvironment.getFileSystem(context, path).exists(path); } catch (IOException e) { throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e); } } public static boolean isS3FileSystem(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path) { try { return getRawFileSystem(hdfsEnvironment.getFileSystem(context, path)) instanceof PrestoS3FileSystem; } catch (IOException e) { throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e); } } public static boolean isViewFileSystem(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path) { try { // Hadoop 1.x does not have the ViewFileSystem class 
return getRawFileSystem(hdfsEnvironment.getFileSystem(context, path)) .getClass().getName().equals("org.apache.hadoop.fs.viewfs.ViewFileSystem"); } catch (IOException e) { throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e); } } private static FileSystem getRawFileSystem(FileSystem fileSystem) { if (fileSystem instanceof FilterFileSystem) { return getRawFileSystem(((FilterFileSystem) fileSystem).getRawFileSystem()); } return fileSystem; } private static boolean isDirectory(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path) { try { return hdfsEnvironment.getFileSystem(context, path).isDirectory(path); } catch (IOException e) { throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e); } } public static Path createTemporaryPath(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path targetPath) { // use a per-user temporary directory to avoid permission problems String temporaryPrefix = "/tmp/presto-" + context.getIdentity().getUser(); // use relative temporary directory on ViewFS if (isViewFileSystem(context, hdfsEnvironment, targetPath)) { temporaryPrefix = ".hive-staging"; } // create a temporary directory on the same filesystem Path temporaryRoot = new Path(targetPath, temporaryPrefix); Path temporaryPath = new Path(temporaryRoot, randomUUID().toString()); createDirectory(context, hdfsEnvironment, temporaryPath); return temporaryPath; } public static void createDirectory(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path) { try { if (!hdfsEnvironment.getFileSystem(context, path).mkdirs(path, ALL_PERMISSIONS)) { throw new IOException("mkdirs returned false"); } } catch (IOException e) { throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed to create directory: " + path, e); } // explicitly set permission since the default umask overrides it on creation try { hdfsEnvironment.getFileSystem(context, path).setPermission(path, ALL_PERMISSIONS); } catch (IOException e) { throw 
new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed to set permission on directory: " + path, e); } } public static boolean isWritableType(HiveType hiveType) { return isWritableType(hiveType.getTypeInfo()); } private static boolean isWritableType(TypeInfo typeInfo) { switch (typeInfo.getCategory()) { case PRIMITIVE: PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory(); return isWritablePrimitiveType(primitiveCategory); case MAP: MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo; return isWritableType(mapTypeInfo.getMapKeyTypeInfo()) && isWritableType(mapTypeInfo.getMapValueTypeInfo()); case LIST: ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo; return isWritableType(listTypeInfo.getListElementTypeInfo()); case STRUCT: StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo; return structTypeInfo.getAllStructFieldTypeInfos().stream().allMatch(HiveWriteUtils::isWritableType); } return false; } private static boolean isWritablePrimitiveType(PrimitiveCategory primitiveCategory) { switch (primitiveCategory) { case BOOLEAN: case LONG: case INT: case SHORT: case BYTE: case FLOAT: case DOUBLE: case STRING: case DATE: case TIMESTAMP: case BINARY: case DECIMAL: case VARCHAR: case CHAR: return true; } return false; } public static List<ObjectInspector> getRowColumnInspectors(List<Type> types) { return types.stream() .map(HiveWriteUtils::getRowColumnInspector) .collect(toList()); } public static ObjectInspector getRowColumnInspector(Type type) { if (type.equals(BooleanType.BOOLEAN)) { return writableBooleanObjectInspector; } if (type.equals(BigintType.BIGINT)) { return writableLongObjectInspector; } if (type.equals(IntegerType.INTEGER)) { return writableIntObjectInspector; } if (type.equals(SmallintType.SMALLINT)) { return writableShortObjectInspector; } if (type.equals(TinyintType.TINYINT)) { return writableByteObjectInspector; } if (type.equals(RealType.REAL)) { return writableFloatObjectInspector; } if 
(type.equals(DoubleType.DOUBLE)) { return writableDoubleObjectInspector; } if (type instanceof VarcharType) { VarcharType varcharType = (VarcharType) type; int varcharLength = varcharType.getLength(); // VARCHAR columns with the length less than or equal to 65535 are supported natively by Hive if (varcharLength <= HiveVarchar.MAX_VARCHAR_LENGTH) { return getPrimitiveWritableObjectInspector(getVarcharTypeInfo(varcharLength)); } // Unbounded VARCHAR is not supported by Hive. // Values for such columns must be stored as STRING in Hive else if (varcharLength == VarcharType.UNBOUNDED_LENGTH) { return writableStringObjectInspector; } } if (isCharType(type)) { CharType charType = (CharType) type; int charLength = charType.getLength(); return getPrimitiveWritableObjectInspector(getCharTypeInfo(charLength)); } if (type.equals(VarbinaryType.VARBINARY)) { return writableBinaryObjectInspector; } if (type.equals(DateType.DATE)) { return writableDateObjectInspector; } if (type.equals(TimestampType.TIMESTAMP)) { return writableTimestampObjectInspector; } if (type instanceof DecimalType) { DecimalType decimalType = (DecimalType) type; return getPrimitiveWritableObjectInspector(new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale())); } if (isArrayType(type) || isMapType(type) || isRowType(type)) { return getJavaObjectInspector(type); } throw new IllegalArgumentException("unsupported type: " + type); } public static FieldSetter createFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type) { if (type.equals(BooleanType.BOOLEAN)) { return new BooleanFieldSetter(rowInspector, row, field); } if (type.equals(BigintType.BIGINT)) { return new BigintFieldBuilder(rowInspector, row, field); } if (type.equals(IntegerType.INTEGER)) { return new IntFieldSetter(rowInspector, row, field); } if (type.equals(SmallintType.SMALLINT)) { return new SmallintFieldSetter(rowInspector, row, field); } if (type.equals(TinyintType.TINYINT)) { return 
new TinyintFieldSetter(rowInspector, row, field); } if (type.equals(RealType.REAL)) { return new FloatFieldSetter(rowInspector, row, field); } if (type.equals(DoubleType.DOUBLE)) { return new DoubleFieldSetter(rowInspector, row, field); } if (type instanceof VarcharType) { return new VarcharFieldSetter(rowInspector, row, field, type); } if (type instanceof CharType) { return new CharFieldSetter(rowInspector, row, field, type); } if (type.equals(VarbinaryType.VARBINARY)) { return new BinaryFieldSetter(rowInspector, row, field); } if (type.equals(DateType.DATE)) { return new DateFieldSetter(rowInspector, row, field); } if (type.equals(TimestampType.TIMESTAMP)) { return new TimestampFieldSetter(rowInspector, row, field); } if (type instanceof DecimalType) { DecimalType decimalType = (DecimalType) type; return new DecimalFieldSetter(rowInspector, row, field, decimalType); } if (isArrayType(type)) { return new ArrayFieldSetter(rowInspector, row, field, type.getTypeParameters().get(0)); } if (isMapType(type)) { return new MapFieldSetter(rowInspector, row, field, type.getTypeParameters().get(0), type.getTypeParameters().get(1)); } if (isRowType(type)) { return new RowFieldSetter(rowInspector, row, field, type.getTypeParameters()); } throw new IllegalArgumentException("unsupported type: " + type); } public abstract static class FieldSetter { protected final SettableStructObjectInspector rowInspector; protected final Object row; protected final StructField field; protected FieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { this.rowInspector = requireNonNull(rowInspector, "rowInspector is null"); this.row = requireNonNull(row, "row is null"); this.field = requireNonNull(field, "field is null"); } public abstract void setField(Block block, int position); } private static class BooleanFieldSetter extends FieldSetter { private final BooleanWritable value = new BooleanWritable(); public BooleanFieldSetter(SettableStructObjectInspector 
rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { value.set(BooleanType.BOOLEAN.getBoolean(block, position)); rowInspector.setStructFieldData(row, field, value); } } private static class BigintFieldBuilder extends FieldSetter { private final LongWritable value = new LongWritable(); public BigintFieldBuilder(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { value.set(BigintType.BIGINT.getLong(block, position)); rowInspector.setStructFieldData(row, field, value); } } private static class IntFieldSetter extends FieldSetter { private final IntWritable value = new IntWritable(); public IntFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { value.set(toIntExact(IntegerType.INTEGER.getLong(block, position))); rowInspector.setStructFieldData(row, field, value); } } private static class SmallintFieldSetter extends FieldSetter { private final ShortWritable value = new ShortWritable(); public SmallintFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { value.set(Shorts.checkedCast(SmallintType.SMALLINT.getLong(block, position))); rowInspector.setStructFieldData(row, field, value); } } private static class TinyintFieldSetter extends FieldSetter { private final ByteWritable value = new ByteWritable(); public TinyintFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { value.set(SignedBytes.checkedCast(TinyintType.TINYINT.getLong(block, position))); 
rowInspector.setStructFieldData(row, field, value); } } private static class DoubleFieldSetter extends FieldSetter { private final DoubleWritable value = new DoubleWritable(); public DoubleFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { value.set(DoubleType.DOUBLE.getDouble(block, position)); rowInspector.setStructFieldData(row, field, value); } } private static class FloatFieldSetter extends FieldSetter { private final FloatWritable value = new FloatWritable(); public FloatFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { value.set(intBitsToFloat((int) RealType.REAL.getLong(block, position))); rowInspector.setStructFieldData(row, field, value); } } private static class VarcharFieldSetter extends FieldSetter { private final Text value = new Text(); private final Type type; public VarcharFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type) { super(rowInspector, row, field); this.type = type; } @Override public void setField(Block block, int position) { value.set(type.getSlice(block, position).getBytes()); rowInspector.setStructFieldData(row, field, value); } } private static class CharFieldSetter extends FieldSetter { private final Text value = new Text(); private final Type type; public CharFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type) { super(rowInspector, row, field); this.type = type; } @Override public void setField(Block block, int position) { value.set(type.getSlice(block, position).getBytes()); rowInspector.setStructFieldData(row, field, value); } } private static class BinaryFieldSetter extends FieldSetter { private final BytesWritable value = new BytesWritable(); public 
BinaryFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { byte[] bytes = VarbinaryType.VARBINARY.getSlice(block, position).getBytes(); value.set(bytes, 0, bytes.length); rowInspector.setStructFieldData(row, field, value); } } private static class DateFieldSetter extends FieldSetter { private final DateWritable value = new DateWritable(); public DateFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { value.set(toIntExact(DateType.DATE.getLong(block, position))); rowInspector.setStructFieldData(row, field, value); } } private static class TimestampFieldSetter extends FieldSetter { private final TimestampWritable value = new TimestampWritable(); public TimestampFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { long millisUtc = TimestampType.TIMESTAMP.getLong(block, position); value.setTime(millisUtc); rowInspector.setStructFieldData(row, field, value); } } private static class DecimalFieldSetter extends FieldSetter { private final HiveDecimalWritable value = new HiveDecimalWritable(); private final DecimalType decimalType; public DecimalFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, DecimalType decimalType) { super(rowInspector, row, field); this.decimalType = decimalType;<|fim▁hole|> @Override public void setField(Block block, int position) { value.set(getHiveDecimal(decimalType, block, position)); rowInspector.setStructFieldData(row, field, value); } } private static HiveDecimal getHiveDecimal(DecimalType decimalType, Block block, int position) { BigInteger unscaledValue; if (decimalType.isShort()) { unscaledValue = 
BigInteger.valueOf(decimalType.getLong(block, position)); } else { unscaledValue = Decimals.decodeUnscaledValue(decimalType.getSlice(block, position)); } return HiveDecimal.create(unscaledValue, decimalType.getScale()); } private static class ArrayFieldSetter extends FieldSetter { private final Type elementType; public ArrayFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type elementType) { super(rowInspector, row, field); this.elementType = requireNonNull(elementType, "elementType is null"); } @Override public void setField(Block block, int position) { Block arrayBlock = block.getObject(position, Block.class); List<Object> list = new ArrayList<>(arrayBlock.getPositionCount()); for (int i = 0; i < arrayBlock.getPositionCount(); i++) { Object element = getField(elementType, arrayBlock, i); list.add(element); } rowInspector.setStructFieldData(row, field, list); } } private static class MapFieldSetter extends FieldSetter { private final Type keyType; private final Type valueType; public MapFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type keyType, Type valueType) { super(rowInspector, row, field); this.keyType = requireNonNull(keyType, "keyType is null"); this.valueType = requireNonNull(valueType, "valueType is null"); } @Override public void setField(Block block, int position) { Block mapBlock = block.getObject(position, Block.class); Map<Object, Object> map = new HashMap<>(mapBlock.getPositionCount() * 2); for (int i = 0; i < mapBlock.getPositionCount(); i += 2) { Object key = getField(keyType, mapBlock, i); Object value = getField(valueType, mapBlock, i + 1); map.put(key, value); } rowInspector.setStructFieldData(row, field, map); } } private static class RowFieldSetter extends FieldSetter { private final List<Type> fieldTypes; public RowFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, List<Type> fieldTypes) { super(rowInspector, row, field); 
this.fieldTypes = ImmutableList.copyOf(fieldTypes); } @Override public void setField(Block block, int position) { Block rowBlock = block.getObject(position, Block.class); // TODO reuse row object and use FieldSetters, like we do at the top level // Ideally, we'd use the same recursive structure starting from the top, but // this requires modeling row types in the same way we model table rows // (multiple blocks vs all fields packed in a single block) List<Object> value = new ArrayList<>(fieldTypes.size()); for (int i = 0; i < fieldTypes.size(); i++) { Object element = getField(fieldTypes.get(i), rowBlock, i); value.add(element); } rowInspector.setStructFieldData(row, field, value); } } }<|fim▁end|>
}
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>use std::env; use std::fs::{self, File}; use std::path::{MAIN_SEPARATOR, Path}; use j4rs; use j4rs::{Jvm, JvmBuilder, LocalJarArtifact, MavenArtifact}; fn main() { let ui_jar = "rust-keylock-ui-java-0.14.0.jar"; let desktop_ui_jar_in_java_target = format!("../java/target/{}", ui_jar); println!("cargo:rerun-if-changed={}", desktop_ui_jar_in_java_target); // The target os is needed for the classifiers of javafx dependencies let target_os = match env::var("CARGO_CFG_TARGET_OS").unwrap_or("linux".to_string()).as_ref() { "macos" => "mac".to_string(), "windows" => "win".to_string(), _ => "linux".to_string() }; // If the java target directory exists, copy the desktop-ui jar to rust copy_from_java(&desktop_ui_jar_in_java_target); let j4rs_installation_path = match env::var("RKL_J4RS_INST_DIR") { Ok(path) => { path } Err(_) => { let mut j4rs_installation_path_buf = rust_keylock::default_rustkeylock_location(); j4rs_installation_path_buf.push("lib"); j4rs_installation_path_buf.to_str().unwrap().to_owned() } }; fs::create_dir_all(&j4rs_installation_path).unwrap(); let _ = fs_extra::remove_items(vec![&j4rs_installation_path].as_ref()); Jvm::copy_j4rs_libs_under(&j4rs_installation_path).unwrap(); let jvm = JvmBuilder::new() .with_base_path(&j4rs_installation_path) .build() .unwrap(); // Deploy the desktop-ui jar let home = env::var("CARGO_MANIFEST_DIR").unwrap(); let javaassets_path_buf = Path::new(&home).join("javaassets"); let javaassets_path = javaassets_path_buf.to_str().unwrap().to_owned(); let artf1 = LocalJarArtifact::new(&format!("{}{}{}", javaassets_path, MAIN_SEPARATOR, ui_jar)); jvm.deploy_artifact(&artf1).unwrap(); // Deploy from Maven println!("cargo:warning=Downloading Maven dependencies... 
This may take a while the first time you build."); maven("org.openjfx:javafx-base:13.0.2", &jvm); maven(&format!("org.openjfx:javafx-base:13.0.2:{}", target_os), &jvm); maven("org.openjfx:javafx-controls:13.0.2", &jvm); maven(&format!("org.openjfx:javafx-controls:13.0.2:{}", target_os), &jvm); maven("org.openjfx:javafx-fxml:13.0.2", &jvm); maven(&format!("org.openjfx:javafx-fxml:13.0.2:{}", target_os), &jvm); maven("org.openjfx:javafx-graphics:13.0.2", &jvm); maven(&format!("org.openjfx:javafx-graphics:13.0.2:{}", target_os), &jvm); maven("org.openjfx:javafx-media:13.0.2", &jvm); maven(&format!("org.openjfx:javafx-media:13.0.2:{}", target_os), &jvm); println!("cargo:warning=Maven dependencies downloaded!"); } fn maven(s: &str, jvm: &Jvm) { let artifact = MavenArtifact::from(s); let _ = jvm.deploy_artifact(&artifact).map_err(|error| { println!("cargo:warning=Could not download Maven artifact {}: {:?}", s, error); }); } fn copy_from_java(desktop_ui_jar_in_java_target: &str) { if File::open(desktop_ui_jar_in_java_target).is_ok() { let home = env::var("CARGO_MANIFEST_DIR").unwrap();<|fim▁hole|> let _ = fs_extra::remove_items(vec![javaassets_path.clone()].as_ref()); let _ = fs::create_dir_all(javaassets_path_buf.clone()) .map_err(|error| panic!("Cannot create dir '{:?}': {:?}", javaassets_path_buf, error)); let jar_source_path = desktop_ui_jar_in_java_target; let ref options = fs_extra::dir::CopyOptions::new(); let _ = fs_extra::copy_items(vec![jar_source_path].as_ref(), javaassets_path, options); } }<|fim▁end|>
let javaassets_path_buf = Path::new(&home).join("javaassets"); let javaassets_path = javaassets_path_buf.to_str().unwrap().to_owned();
<|file_name|>multibuffer_data_source_unittest.cc<|end_file_name|><|fim▁begin|>// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include <stddef.h> #include <stdint.h> #include "base/bind.h" #include "base/macros.h" #include "base/message_loop/message_loop.h" #include "base/run_loop.h" #include "base/strings/string_number_conversions.h" #include "media/base/media_log.h" #include "media/base/mock_filters.h" #include "media/base/test_helpers.h" #include "media/blink/buffered_data_source.h" #include "media/blink/mock_webframeclient.h" #include "media/blink/mock_weburlloader.h" #include "media/blink/multibuffer_data_source.h" #include "media/blink/multibuffer_reader.h" #include "media/blink/resource_multibuffer_data_provider.h" #include "media/blink/test_response_generator.h" #include "third_party/WebKit/public/platform/WebURLResponse.h" #include "third_party/WebKit/public/web/WebLocalFrame.h" #include "third_party/WebKit/public/web/WebView.h" using ::testing::_; using ::testing::Assign; using ::testing::DoAll; using ::testing::Invoke; using ::testing::InvokeWithoutArgs; using ::testing::InSequence; using ::testing::NiceMock; using ::testing::StrictMock; using blink::WebLocalFrame; using blink::WebString; using blink::WebURLLoader; using blink::WebURLResponse; using blink::WebView; namespace media { class TestResourceMultiBuffer; class TestMultiBufferDataProvider; std::set<TestMultiBufferDataProvider*> test_data_providers; class TestMultiBufferDataProvider : public ResourceMultiBufferDataProvider { public: TestMultiBufferDataProvider(UrlData* url_data, MultiBuffer::BlockId pos) : ResourceMultiBufferDataProvider(url_data, pos), loading_(false) { CHECK(test_data_providers.insert(this).second); } ~TestMultiBufferDataProvider() override { CHECK_EQ(static_cast<size_t>(1), test_data_providers.erase(this)); } void Start() override { // Create a mock active 
loader. // Keep track of active loading state via loadAsynchronously() and cancel(). NiceMock<MockWebURLLoader>* url_loader = new NiceMock<MockWebURLLoader>(); ON_CALL(*url_loader, cancel()) .WillByDefault(Invoke([this]() { // Check that we have not been destroyed first. if (test_data_providers.find(this) != test_data_providers.end()) { this->loading_ = false; } })); loading_ = true; active_loader_.reset( new ActiveLoader(std::unique_ptr<WebURLLoader>(url_loader))); if (!on_start_.is_null()) { on_start_.Run(); } } bool loading() const { return loading_; } void RunOnStart(base::Closure cb) { on_start_ = cb; } private: bool loading_; base::Closure on_start_; }; class TestUrlData; class TestResourceMultiBuffer : public ResourceMultiBuffer { public: explicit TestResourceMultiBuffer(UrlData* url_data, int shift) : ResourceMultiBuffer(url_data, shift) {} std::unique_ptr<MultiBuffer::DataProvider> CreateWriter( const BlockId& pos) override { TestMultiBufferDataProvider* ret = new TestMultiBufferDataProvider(url_data_, pos); ret->Start(); return std::unique_ptr<MultiBuffer::DataProvider>(ret); } // TODO: Make these global TestMultiBufferDataProvider* GetProvider() { EXPECT_EQ(test_data_providers.size(), 1U); if (test_data_providers.size() != 1) return nullptr; return *test_data_providers.begin(); } TestMultiBufferDataProvider* GetProvider_allownull() { EXPECT_LE(test_data_providers.size(), 1U); if (test_data_providers.size() != 1U) return nullptr; return *test_data_providers.begin(); } bool HasProvider() const { return test_data_providers.size() == 1U; } bool loading() { if (test_data_providers.empty()) return false; return GetProvider()->loading(); } }; class TestUrlData : public UrlData { public: TestUrlData(const GURL& url, CORSMode cors_mode, const base::WeakPtr<UrlIndex>& url_index) : UrlData(url, cors_mode, url_index), block_shift_(url_index->block_shift()) {} ResourceMultiBuffer* multibuffer() override { if (!test_multibuffer_.get()) { test_multibuffer_.reset(new 
TestResourceMultiBuffer(this, block_shift_)); } return test_multibuffer_.get(); } TestResourceMultiBuffer* test_multibuffer() { if (!test_multibuffer_.get()) { test_multibuffer_.reset(new TestResourceMultiBuffer(this, block_shift_)); } return test_multibuffer_.get(); } protected: ~TestUrlData() override {} const int block_shift_; std::unique_ptr<TestResourceMultiBuffer> test_multibuffer_; }; class TestUrlIndex : public UrlIndex { public: explicit TestUrlIndex(blink::WebFrame* frame) : UrlIndex(frame) {} scoped_refptr<UrlData> NewUrlData(const GURL& url, UrlData::CORSMode cors_mode) override { last_url_data_ = new TestUrlData(url, cors_mode, weak_factory_.GetWeakPtr()); return last_url_data_; } scoped_refptr<TestUrlData> last_url_data() { EXPECT_TRUE(last_url_data_); return last_url_data_; } private: scoped_refptr<TestUrlData> last_url_data_; }; class MockBufferedDataSourceHost : public BufferedDataSourceHost { public: MockBufferedDataSourceHost() {} virtual ~MockBufferedDataSourceHost() {} MOCK_METHOD1(SetTotalBytes, void(int64_t total_bytes)); MOCK_METHOD2(AddBufferedByteRange, void(int64_t start, int64_t end)); private: DISALLOW_COPY_AND_ASSIGN(MockBufferedDataSourceHost); }; class MockMultibufferDataSource : public MultibufferDataSource { public: MockMultibufferDataSource( const GURL& url, UrlData::CORSMode cors_mode, const scoped_refptr<base::SingleThreadTaskRunner>& task_runner, linked_ptr<UrlIndex> url_index, WebLocalFrame* frame, BufferedDataSourceHost* host) : MultibufferDataSource( url, cors_mode, task_runner, url_index, frame, new media::MediaLog(), host, base::Bind(&MockMultibufferDataSource::set_downloading, base::Unretained(this))), downloading_(false) {} bool downloading() { return downloading_; } void set_downloading(bool downloading) { downloading_ = downloading; } bool range_supported() { return url_data_->range_supported(); } private: // Whether the resource is downloading or deferred. 
bool downloading_; DISALLOW_COPY_AND_ASSIGN(MockMultibufferDataSource); }; static const int64_t kFileSize = 5000000; static const int64_t kFarReadPosition = 3997696; static const int kDataSize = 32 << 10; static const char kHttpUrl[] = "http://localhost/foo.webm"; static const char kFileUrl[] = "file:///tmp/bar.webm"; static const char kHttpDifferentPathUrl[] = "http://localhost/bar.webm"; static const char kHttpDifferentOriginUrl[] = "http://127.0.0.1/foo.webm"; class MultibufferDataSourceTest : public testing::Test { public: MultibufferDataSourceTest() : view_(WebView::create(nullptr, blink::WebPageVisibilityStateVisible)), frame_( WebLocalFrame::create(blink::WebTreeScopeType::Document, &client_)), preload_(MultibufferDataSource::AUTO), url_index_(make_linked_ptr(new TestUrlIndex(frame_))) { view_->setMainFrame(frame_); } virtual ~MultibufferDataSourceTest() { view_->close(); frame_->close(); } MOCK_METHOD1(OnInitialize, void(bool)); void InitializeWithCORS(const char* url, bool expected, UrlData::CORSMode cors_mode) { GURL gurl(url); data_source_.reset(new MockMultibufferDataSource( gurl, cors_mode, message_loop_.task_runner(), url_index_, view_->mainFrame()->toWebLocalFrame(), &host_)); data_source_->SetPreload(preload_); response_generator_.reset(new TestResponseGenerator(gurl, kFileSize)); EXPECT_CALL(*this, OnInitialize(expected)); data_source_->Initialize(base::Bind( &MultibufferDataSourceTest::OnInitialize, base::Unretained(this))); base::RunLoop().RunUntilIdle(); // Not really loading until after OnInitialize is called. EXPECT_EQ(data_source_->downloading(), false); } void Initialize(const char* url, bool expected) { InitializeWithCORS(url, expected, UrlData::CORS_UNSPECIFIED); } // Helper to initialize tests with a valid 200 response. 
void InitializeWith200Response() {
    Initialize(kHttpUrl, true);

    EXPECT_CALL(host_, SetTotalBytes(response_generator_->content_length()));
    Respond(response_generator_->Generate200());

    EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize));
    ReceiveData(kDataSize);
  }

  // Helper to initialize tests with a valid 206 response.
  void InitializeWith206Response() {
    Initialize(kHttpUrl, true);

    EXPECT_CALL(host_, SetTotalBytes(response_generator_->content_length()));
    Respond(response_generator_->Generate206(0));

    EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize));
    ReceiveData(kDataSize);
  }

  // Helper to initialize tests with a valid file:// response.
  void InitializeWithFileResponse() {
    Initialize(kFileUrl, true);

    EXPECT_CALL(host_, SetTotalBytes(kFileSize));
    EXPECT_CALL(host_, AddBufferedByteRange(0, kFileSize));
    Respond(response_generator_->GenerateFileResponse(0));

    ReceiveData(kDataSize);
  }

  // Stops any active loaders and shuts down the data source.
  //
  // This typically happens when the page is closed and for our purposes is
  // appropriate to do when tearing down a test.
  void Stop() {
    if (loading()) {
      data_provider()->didFail(url_loader(),
                               response_generator_->GenerateError());
      base::RunLoop().RunUntilIdle();
    }

    data_source_->Stop();
    base::RunLoop().RunUntilIdle();
  }

  // Delivers |response| to the active data provider, if there is one.
  void Respond(const WebURLResponse& response) {
    EXPECT_TRUE(url_loader());
    if (!active_loader())
      return;
    data_provider()->didReceiveResponse(url_loader(), response);
    base::RunLoop().RunUntilIdle();
  }

  // Feeds |size| bytes of dummy payload into the active loader.
  void ReceiveData(int size) {
    EXPECT_TRUE(url_loader());
    if (!url_loader())
      return;
    std::unique_ptr<char[]> data(new char[size]);
    memset(data.get(), 0xA5, size);  // Arbitrary non-zero value.
data_provider()->didReceiveData(url_loader(), data.get(), size, size, size);
    base::RunLoop().RunUntilIdle();
  }

  // Simulates the network finishing the current request cleanly.
  void FinishLoading() {
    EXPECT_TRUE(url_loader());
    if (!url_loader())
      return;
    data_provider()->didFinishLoading(url_loader(), 0, -1);
    base::RunLoop().RunUntilIdle();
  }

  // Simulates a network error on the current request.
  void FailLoading() {
    data_provider()->didFail(url_loader(),
                             response_generator_->GenerateError());
    base::RunLoop().RunUntilIdle();
  }

  // Restarts the provider after a finished/failed load; the provider must
  // exist and must not already have an active loader.
  void Restart() {
    EXPECT_TRUE(data_provider());
    EXPECT_FALSE(active_loader_allownull());
    if (!data_provider())
      return;
    data_provider()->Start();
  }

  MOCK_METHOD1(ReadCallback, void(int size));

  // Issues an async Read() of |howmuch| bytes at |position| into buffer_,
  // with completion reported through the ReadCallback mock.
  void ReadAt(int64_t position, int64_t howmuch = kDataSize) {
    data_source_->Read(position, howmuch, buffer_,
                       base::Bind(&MultibufferDataSourceTest::ReadCallback,
                                  base::Unretained(this)));
    base::RunLoop().RunUntilIdle();
  }

  // Drives two consecutive partial responses that are expected to be
  // accepted, verifying that reads succeed across both.
  void ExecuteMixedResponseSuccessTest(const WebURLResponse& response1,
                                       const WebURLResponse& response2) {
    EXPECT_CALL(host_, SetTotalBytes(kFileSize));
    EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 2));
    EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize));
    EXPECT_CALL(*this, ReadCallback(kDataSize)).Times(2);

    Respond(response1);
    ReceiveData(kDataSize);
    ReadAt(0);
    EXPECT_TRUE(loading());

    FinishLoading();
    Restart();
    ReadAt(kDataSize);
    Respond(response2);
    ReceiveData(kDataSize);
    FinishLoading();
    Stop();
  }

  // Drives two consecutive partial responses where the second must be
  // rejected, verifying that the pending read fails with kReadError.
  void ExecuteMixedResponseFailureTest(const WebURLResponse& response1,
                                       const WebURLResponse& response2) {
    EXPECT_CALL(host_, SetTotalBytes(kFileSize));
    EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize));
    EXPECT_CALL(*this, ReadCallback(kDataSize));
    // Stop() will also cause the readback to be called with kReadError, but
    // we want to make sure it was called before Stop().
bool failed_ = false;
    EXPECT_CALL(*this, ReadCallback(media::DataSource::kReadError))
        .WillOnce(Assign(&failed_, true));

    Respond(response1);
    ReceiveData(kDataSize);
    ReadAt(0);
    EXPECT_TRUE(loading());

    FinishLoading();
    Restart();
    ReadAt(kDataSize);
    Respond(response2);
    EXPECT_TRUE(failed_);
    Stop();
  }

  // Expect the thresholds used by capacity-based deferring.
  void CheckCapacityDefer() {
    EXPECT_EQ(2 << 20, preload_low());
    EXPECT_EQ(3 << 20, preload_high());
  }

  // Expect the thresholds used when deferring right after the first read.
  void CheckReadThenDefer() {
    EXPECT_EQ(0, preload_low());
    EXPECT_EQ(0, preload_high());
  }

  // Expect the effectively unbounded thresholds used when never deferring.
  void CheckNeverDefer() {
    EXPECT_EQ(1LL << 40, preload_low());
    EXPECT_EQ(1LL << 40, preload_high());
  }

  // Accessors for private variables on |data_source_|.
  MultiBufferReader* loader() { return data_source_->reader_.get(); }

  TestResourceMultiBuffer* multibuffer() {
    return url_index_->last_url_data()->test_multibuffer();
  }

  TestMultiBufferDataProvider* data_provider() {
    return multibuffer()->GetProvider();
  }

  // Returns the active loader; expects a provider to exist.
  ActiveLoader* active_loader() {
    EXPECT_TRUE(data_provider());
    if (!data_provider())
      return nullptr;
    return data_provider()->active_loader_.get();
  }

  // Like active_loader(), but tolerates a missing provider or loader.
  ActiveLoader* active_loader_allownull() {
    TestMultiBufferDataProvider* data_provider =
        multibuffer()->GetProvider_allownull();
    if (!data_provider)
      return nullptr;
    return data_provider->active_loader_.get();
  }

  WebURLLoader* url_loader() {
    EXPECT_TRUE(active_loader());
    if (!active_loader())
      return nullptr;
    return active_loader()->loader_.get();
  }

  bool loading() { return multibuffer()->loading(); }

  MultibufferDataSource::Preload preload() { return data_source_->preload_; }
  void set_preload(MultibufferDataSource::Preload preload) {
    preload_ = preload;
  }

  int64_t preload_high() {
    CHECK(loader());
    return loader()->preload_high();
  }

  int64_t preload_low() {
    CHECK(loader());
    return loader()->preload_low();
  }

  int data_source_bitrate() { return data_source_->bitrate_; }
  double data_source_playback_rate() {
    return data_source_->playback_rate_;
  }
  bool is_local_source() { return data_source_->assume_fully_buffered(); }
  void
set_might_be_reused_from_cache_in_future(bool value) { data_source_->url_data_->set_cacheable(value); } protected: MockWebFrameClient client_; WebView* view_; WebLocalFrame* frame_; MultibufferDataSource::Preload preload_; base::MessageLoop message_loop_; linked_ptr<TestUrlIndex> url_index_; std::unique_ptr<MockMultibufferDataSource> data_source_; std::unique_ptr<TestResponseGenerator> response_generator_; StrictMock<MockBufferedDataSourceHost> host_; // Used for calling MultibufferDataSource::Read(). uint8_t buffer_[kDataSize * 2]; DISALLOW_COPY_AND_ASSIGN(MultibufferDataSourceTest); }; TEST_F(MultibufferDataSourceTest, Range_Supported) { InitializeWith206Response(); EXPECT_TRUE(loading()); EXPECT_FALSE(data_source_->IsStreaming()); Stop(); } TEST_F(MultibufferDataSourceTest, Range_InstanceSizeUnknown) { Initialize(kHttpUrl, true); Respond(response_generator_->Generate206( 0, TestResponseGenerator::kNoContentRangeInstanceSize)); EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize)); ReceiveData(kDataSize);<|fim▁hole|>} TEST_F(MultibufferDataSourceTest, Range_NotFound) { Initialize(kHttpUrl, false); Respond(response_generator_->Generate404()); EXPECT_FALSE(loading()); Stop(); } TEST_F(MultibufferDataSourceTest, Range_NotSupported) { InitializeWith200Response(); EXPECT_TRUE(loading()); EXPECT_TRUE(data_source_->IsStreaming()); Stop(); } TEST_F(MultibufferDataSourceTest, Range_NotSatisfiable) { Initialize(kHttpUrl, true); EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize)); Respond(response_generator_->GenerateResponse(416)); EXPECT_FALSE(loading()); Stop(); } // Special carve-out for Apache versions that choose to return a 200 for // Range:0- ("because it's more efficient" than a 206) TEST_F(MultibufferDataSourceTest, Range_SupportedButReturned200) { Initialize(kHttpUrl, true); EXPECT_CALL(host_, SetTotalBytes(response_generator_->content_length())); WebURLResponse response = response_generator_->Generate200(); 
response.setHTTPHeaderField(WebString::fromUTF8("Accept-Ranges"),
                            WebString::fromUTF8("bytes"));
  Respond(response);

  EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize));
  ReceiveData(kDataSize);

  EXPECT_TRUE(loading());
  EXPECT_FALSE(data_source_->IsStreaming());
  Stop();
}

// A 206 with no Content-Range header is treated as a failure.
TEST_F(MultibufferDataSourceTest, Range_MissingContentRange) {
  Initialize(kHttpUrl, false);
  Respond(response_generator_->Generate206(
      0, TestResponseGenerator::kNoContentRange));
  EXPECT_FALSE(loading());
  Stop();
}

TEST_F(MultibufferDataSourceTest, Range_MissingContentLength) {
  Initialize(kHttpUrl, true);

  // It'll manage without a Content-Length response.
  EXPECT_CALL(host_, SetTotalBytes(response_generator_->content_length()));
  Respond(response_generator_->Generate206(
      0, TestResponseGenerator::kNoContentLength));

  EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize));
  ReceiveData(kDataSize);

  EXPECT_TRUE(loading());
  EXPECT_FALSE(data_source_->IsStreaming());
  Stop();
}

// A 206 whose Content-Range start does not match the request fails.
TEST_F(MultibufferDataSourceTest, Range_WrongContentRange) {
  Initialize(kHttpUrl, false);

  // Now it's done and will fail.
  Respond(response_generator_->Generate206(1337));
  EXPECT_FALSE(loading());
  Stop();
}

// Test the case where the initial response from the server indicates that
// Range requests are supported, but a later request prove otherwise.
TEST_F(MultibufferDataSourceTest, Range_ServerLied) {
  InitializeWith206Response();

  // Read causing a new request to be made -- we'll expect it to error.
  ReadAt(kFarReadPosition);

  // Return a 200 in response to a range request.
  EXPECT_CALL(*this, ReadCallback(media::DataSource::kReadError));
  Respond(response_generator_->Generate200());

  EXPECT_FALSE(loading());
  Stop();
}

TEST_F(MultibufferDataSourceTest, Http_AbortWhileReading) {
  InitializeWith206Response();

  // Make sure there's a pending read -- we'll expect it to error.
  ReadAt(kFileSize);

  // Abort!!!
EXPECT_CALL(*this, ReadCallback(media::DataSource::kReadError));
  data_source_->Abort();
  base::RunLoop().RunUntilIdle();

  EXPECT_FALSE(loading());
  Stop();
}

TEST_F(MultibufferDataSourceTest, File_AbortWhileReading) {
  InitializeWithFileResponse();

  // Make sure there's a pending read -- we'll expect it to error.
  ReadAt(kFileSize);

  // Abort!!!
  EXPECT_CALL(*this, ReadCallback(media::DataSource::kReadError));
  data_source_->Abort();
  base::RunLoop().RunUntilIdle();

  EXPECT_FALSE(loading());
  Stop();
}

TEST_F(MultibufferDataSourceTest, Http_Retry) {
  InitializeWith206Response();

  // Read to advance our position.
  EXPECT_CALL(*this, ReadCallback(kDataSize));
  ReadAt(0);

  // Issue a pending read but terminate the connection to force a retry.
  ReadAt(kDataSize);
  FinishLoading();
  Restart();
  Respond(response_generator_->Generate206(kDataSize));

  // Complete the read.
  EXPECT_CALL(*this, ReadCallback(kDataSize));
  EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 2));
  ReceiveData(kDataSize);

  EXPECT_TRUE(loading());
  Stop();
}

TEST_F(MultibufferDataSourceTest, Http_RetryOnError) {
  InitializeWith206Response();

  // Read to advance our position.
  EXPECT_CALL(*this, ReadCallback(kDataSize));
  ReadAt(0);

  // Issue a pending read but trigger an error to force a retry.
  EXPECT_CALL(*this, ReadCallback(kDataSize));
  EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 2));
  ReadAt(kDataSize);
  base::RunLoop run_loop;
  data_provider()->didFail(url_loader(),
                           response_generator_->GenerateError());
  data_provider()->RunOnStart(run_loop.QuitClosure());
  run_loop.Run();
  Respond(response_generator_->Generate206(kDataSize));
  ReceiveData(kDataSize);
  FinishLoading();
  EXPECT_FALSE(loading());
  Stop();
}

// Make sure that we prefetch across partial responses. (crbug.com/516589)
TEST_F(MultibufferDataSourceTest, Http_PartialResponsePrefetch) {
  Initialize(kHttpUrl, true);
  WebURLResponse response1 =
      response_generator_->GeneratePartial206(0, kDataSize - 1);
  WebURLResponse response2 =
      response_generator_->GeneratePartial206(kDataSize, kDataSize * 3 - 1);

  EXPECT_CALL(host_, SetTotalBytes(kFileSize));
  EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 3));
  EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 2));
  EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize));
  EXPECT_CALL(*this, ReadCallback(kDataSize));

  Respond(response1);
  ReceiveData(kDataSize);
  ReadAt(0);
  EXPECT_TRUE(loading());

  FinishLoading();
  Restart();
  Respond(response2);
  ReceiveData(kDataSize);
  ReceiveData(kDataSize);
  FinishLoading();
  Stop();
}

TEST_F(MultibufferDataSourceTest, Http_PartialResponse) {
  Initialize(kHttpUrl, true);
  WebURLResponse response1 =
      response_generator_->GeneratePartial206(0, kDataSize - 1);
  WebURLResponse response2 =
      response_generator_->GeneratePartial206(kDataSize, kDataSize * 2 - 1);
  // The origin URL of response1 and response2 are same. So no error should
  // occur.
  ExecuteMixedResponseSuccessTest(response1, response2);
}

TEST_F(MultibufferDataSourceTest,
       Http_MixedResponse_RedirectedToDifferentPathResponse) {
  Initialize(kHttpUrl, true);
  WebURLResponse response1 =
      response_generator_->GeneratePartial206(0, kDataSize - 1);
  WebURLResponse response2 =
      response_generator_->GeneratePartial206(kDataSize, kDataSize * 2 - 1);
  response2.setURL(GURL(kHttpDifferentPathUrl));
  // The origin URL of response1 and response2 are same. So no error should
  // occur.
ExecuteMixedResponseSuccessTest(response1, response2);
}

TEST_F(MultibufferDataSourceTest,
       Http_MixedResponse_RedirectedToDifferentOriginResponse) {
  Initialize(kHttpUrl, true);
  WebURLResponse response1 =
      response_generator_->GeneratePartial206(0, kDataSize - 1);
  WebURLResponse response2 =
      response_generator_->GeneratePartial206(kDataSize, kDataSize * 2 - 1);
  response2.setURL(GURL(kHttpDifferentOriginUrl));
  // The origin URL of response1 and response2 are different. So an error
  // should occur.
  ExecuteMixedResponseFailureTest(response1, response2);
}

TEST_F(MultibufferDataSourceTest,
       Http_MixedResponse_ServiceWorkerGeneratedResponseAndNormalResponse) {
  Initialize(kHttpUrl, true);
  WebURLResponse response1 =
      response_generator_->GeneratePartial206(0, kDataSize - 1);
  response1.setWasFetchedViaServiceWorker(true);
  WebURLResponse response2 =
      response_generator_->GeneratePartial206(kDataSize, kDataSize * 2 - 1);
  // response1 is generated in a Service Worker but response2 is from a native
  // server. So an error should occur.
  ExecuteMixedResponseFailureTest(response1, response2);
}

TEST_F(MultibufferDataSourceTest,
       Http_MixedResponse_ServiceWorkerProxiedAndSameURLResponse) {
  Initialize(kHttpUrl, true);
  WebURLResponse response1 =
      response_generator_->GeneratePartial206(0, kDataSize - 1);
  response1.setWasFetchedViaServiceWorker(true);
  response1.setOriginalURLViaServiceWorker(GURL(kHttpUrl));
  WebURLResponse response2 =
      response_generator_->GeneratePartial206(kDataSize, kDataSize * 2 - 1);
  // The origin URL of response1 and response2 are same. So no error should
  // occur.
ExecuteMixedResponseSuccessTest(response1, response2);
}

TEST_F(MultibufferDataSourceTest,
       Http_MixedResponse_ServiceWorkerProxiedAndDifferentPathResponse) {
  Initialize(kHttpUrl, true);
  WebURLResponse response1 =
      response_generator_->GeneratePartial206(0, kDataSize - 1);
  response1.setWasFetchedViaServiceWorker(true);
  response1.setOriginalURLViaServiceWorker(GURL(kHttpDifferentPathUrl));
  WebURLResponse response2 =
      response_generator_->GeneratePartial206(kDataSize, kDataSize * 2 - 1);
  // The origin URL of response1 and response2 are same. So no error should
  // occur.
  ExecuteMixedResponseSuccessTest(response1, response2);
}

TEST_F(MultibufferDataSourceTest,
       Http_MixedResponse_ServiceWorkerProxiedAndDifferentOriginResponse) {
  Initialize(kHttpUrl, true);
  WebURLResponse response1 =
      response_generator_->GeneratePartial206(0, kDataSize - 1);
  response1.setWasFetchedViaServiceWorker(true);
  response1.setOriginalURLViaServiceWorker(GURL(kHttpDifferentOriginUrl));
  WebURLResponse response2 =
      response_generator_->GeneratePartial206(kDataSize, kDataSize * 2 - 1);
  // The origin URL of response1 and response2 are different. So an error
  // should occur.
  ExecuteMixedResponseFailureTest(response1, response2);
}

TEST_F(MultibufferDataSourceTest,
       Http_MixedResponse_ServiceWorkerProxiedAndDifferentOriginResponseCORS) {
  InitializeWithCORS(kHttpUrl, true, UrlData::CORS_ANONYMOUS);
  WebURLResponse response1 =
      response_generator_->GeneratePartial206(0, kDataSize - 1);
  response1.setWasFetchedViaServiceWorker(true);
  response1.setOriginalURLViaServiceWorker(GURL(kHttpDifferentOriginUrl));
  WebURLResponse response2 =
      response_generator_->GeneratePartial206(kDataSize, kDataSize * 2 - 1);
  // The origin URL of response1 and response2 are different, but a CORS check
  // has been passed for each request, so expect success.
  ExecuteMixedResponseSuccessTest(response1, response2);
}

TEST_F(MultibufferDataSourceTest, File_Retry) {
  InitializeWithFileResponse();

  // Read to advance our position.
EXPECT_CALL(*this, ReadCallback(kDataSize));
  ReadAt(0);

  // Issue a pending read but terminate the connection to force a retry.
  ReadAt(kDataSize);
  FinishLoading();
  Restart();
  Respond(response_generator_->GenerateFileResponse(kDataSize));

  // Complete the read.
  EXPECT_CALL(*this, ReadCallback(kDataSize));
  ReceiveData(kDataSize);

  EXPECT_TRUE(loading());
  Stop();
}

TEST_F(MultibufferDataSourceTest, Http_TooManyRetries) {
  InitializeWith206Response();

  // Make sure there's a pending read -- we'll expect it to error.
  ReadAt(kDataSize);

  // Fail and restart up to the provider's retry cap.
  for (int i = 0; i < ResourceMultiBufferDataProvider::kMaxRetries; i++) {
    FailLoading();
    data_provider()->Start();
    Respond(response_generator_->Generate206(kDataSize));
  }

  // Stop() will also cause the readback to be called with kReadError, but
  // we want to make sure it was called during FailLoading().
  bool failed_ = false;
  EXPECT_CALL(*this, ReadCallback(media::DataSource::kReadError))
      .WillOnce(Assign(&failed_, true));
  FailLoading();
  EXPECT_TRUE(failed_);

  EXPECT_FALSE(loading());
  Stop();
}

TEST_F(MultibufferDataSourceTest, File_TooManyRetries) {
  InitializeWithFileResponse();

  // Make sure there's a pending read -- we'll expect it to error.
  ReadAt(kDataSize);

  // Fail and restart up to the provider's retry cap.
  for (int i = 0; i < ResourceMultiBufferDataProvider::kMaxRetries; i++) {
    FailLoading();
    data_provider()->Start();
    Respond(response_generator_->Generate206(kDataSize));
  }

  // Stop() will also cause the readback to be called with kReadError, but
  // we want to make sure it was called during FailLoading().
bool failed_ = false;
  EXPECT_CALL(*this, ReadCallback(media::DataSource::kReadError))
      .WillOnce(Assign(&failed_, true));
  FailLoading();
  EXPECT_TRUE(failed_);

  EXPECT_FALSE(loading());
  Stop();
}

TEST_F(MultibufferDataSourceTest, File_InstanceSizeUnknown) {
  Initialize(kFileUrl, false);

  Respond(
      response_generator_->GenerateFileResponse(media::DataSource::kReadError));
  ReceiveData(kDataSize);

  EXPECT_FALSE(data_source_->downloading());

  EXPECT_FALSE(loading());
  Stop();
}

TEST_F(MultibufferDataSourceTest, File_Successful) {
  InitializeWithFileResponse();

  EXPECT_TRUE(loading());
  EXPECT_FALSE(data_source_->IsStreaming());
  Stop();
}

TEST_F(MultibufferDataSourceTest, StopDuringRead) {
  InitializeWith206Response();

  uint8_t buffer[256];
  data_source_->Read(0, arraysize(buffer), buffer,
                     base::Bind(&MultibufferDataSourceTest::ReadCallback,
                                base::Unretained(this)));

  // The outstanding read should fail before the stop callback runs.
  {
    InSequence s;
    EXPECT_CALL(*this, ReadCallback(media::DataSource::kReadError));
    data_source_->Stop();
  }
  base::RunLoop().RunUntilIdle();
}

TEST_F(MultibufferDataSourceTest, DefaultValues) {
  InitializeWith206Response();

  // Ensure we have sane values for default loading scenario.
  EXPECT_EQ(MultibufferDataSource::AUTO, preload());
  EXPECT_EQ(2 << 20, preload_low());
  EXPECT_EQ(3 << 20, preload_high());

  EXPECT_EQ(0, data_source_bitrate());
  EXPECT_EQ(0.0, data_source_playback_rate());

  EXPECT_TRUE(loading());
  Stop();
}

TEST_F(MultibufferDataSourceTest, SetBitrate) {
  InitializeWith206Response();

  data_source_->SetBitrate(1234);
  base::RunLoop().RunUntilIdle();
  EXPECT_EQ(1234, data_source_bitrate());

  // Read so far ahead to cause the loader to get recreated.
  TestMultiBufferDataProvider* old_loader = data_provider();
  ReadAt(kFarReadPosition);
  Respond(response_generator_->Generate206(kFarReadPosition));

  // Verify loader changed but still has same bitrate.
EXPECT_NE(old_loader, data_provider());

  EXPECT_TRUE(loading());
  EXPECT_CALL(*this, ReadCallback(media::DataSource::kReadError));
  Stop();
}

TEST_F(MultibufferDataSourceTest, MediaPlaybackRateChanged) {
  InitializeWith206Response();

  data_source_->MediaPlaybackRateChanged(2.0);
  base::RunLoop().RunUntilIdle();
  EXPECT_EQ(2.0, data_source_playback_rate());

  // Read so far ahead to cause the loader to get recreated.
  TestMultiBufferDataProvider* old_loader = data_provider();
  ReadAt(kFarReadPosition);
  Respond(response_generator_->Generate206(kFarReadPosition));

  // Verify loader changed but still has same playback rate.
  EXPECT_NE(old_loader, data_provider());

  EXPECT_TRUE(loading());
  EXPECT_CALL(*this, ReadCallback(media::DataSource::kReadError));
  Stop();
}

TEST_F(MultibufferDataSourceTest, Http_Read) {
  InitializeWith206Response();

  EXPECT_CALL(*this, ReadCallback(kDataSize));
  ReadAt(0, kDataSize * 2);

  ReadAt(kDataSize, kDataSize);
  EXPECT_CALL(*this, ReadCallback(kDataSize));
  EXPECT_CALL(host_,
              AddBufferedByteRange(kDataSize, kDataSize + kDataSize / 2));
  ReceiveData(kDataSize / 2);
  EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 2));
  ReceiveData(kDataSize / 2);

  EXPECT_TRUE(data_source_->downloading());
  Stop();
}

TEST_F(MultibufferDataSourceTest, Http_ShareData) {
  InitializeWith206Response();

  EXPECT_CALL(*this, ReadCallback(kDataSize));
  ReadAt(0, kDataSize * 2);

  ReadAt(kDataSize, kDataSize);
  EXPECT_CALL(*this, ReadCallback(kDataSize));
  EXPECT_CALL(host_,
              AddBufferedByteRange(kDataSize, kDataSize + kDataSize / 2));
  ReceiveData(kDataSize / 2);
  EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 2));
  ReceiveData(kDataSize / 2);

  EXPECT_TRUE(data_source_->downloading());

  // Second data source for the same URL; it should reuse the shared data.
  StrictMock<MockBufferedDataSourceHost> host2;
  MockMultibufferDataSource source2(
      GURL(kHttpUrl), UrlData::CORS_UNSPECIFIED, message_loop_.task_runner(),
      url_index_, view_->mainFrame()->toWebLocalFrame(), &host2);
  source2.SetPreload(preload_);

  EXPECT_CALL(*this, OnInitialize(true));

  // This call would not be expected if we were not sharing data.
  EXPECT_CALL(host2, SetTotalBytes(response_generator_->content_length()));
  source2.Initialize(base::Bind(&MultibufferDataSourceTest::OnInitialize,
                                base::Unretained(this)));
  base::RunLoop().RunUntilIdle();

  // Always loading after initialize.
  EXPECT_EQ(source2.downloading(), true);

  Stop();
}

TEST_F(MultibufferDataSourceTest, Http_Read_Seek) {
  InitializeWith206Response();

  // Read a bit from the beginning.
  EXPECT_CALL(*this, ReadCallback(kDataSize));
  ReadAt(0);

  // Simulate a seek by reading a bit beyond kDataSize.
  ReadAt(kDataSize * 2);

  // We receive data leading up to but not including our read.
  // No notification will happen, since it's progress outside
  // of our current range.
  ReceiveData(kDataSize);

  // We now receive the rest of the data for our read.
  EXPECT_CALL(*this, ReadCallback(kDataSize));
  EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 3));
  ReceiveData(kDataSize);

  EXPECT_TRUE(data_source_->downloading());
  Stop();
}

TEST_F(MultibufferDataSourceTest, File_Read) {
  InitializeWithFileResponse();

  EXPECT_CALL(*this, ReadCallback(kDataSize));
  ReadAt(0, kDataSize * 2);

  ReadAt(kDataSize, kDataSize);
  EXPECT_CALL(*this, ReadCallback(kDataSize));
  ReceiveData(kDataSize);

  Stop();
}

TEST_F(MultibufferDataSourceTest, Http_FinishLoading) {
  InitializeWith206Response();

  EXPECT_TRUE(data_source_->downloading());
  // premature didFinishLoading() will cause a retry.
  FinishLoading();
  EXPECT_TRUE(data_source_->downloading());

  Stop();
}

TEST_F(MultibufferDataSourceTest, File_FinishLoading) {
  InitializeWithFileResponse();

  ReceiveData(kDataSize);
  EXPECT_FALSE(data_source_->downloading());
  // premature didFinishLoading() will cause a retry.
FinishLoading();
  EXPECT_FALSE(data_source_->downloading());

  Stop();
}

TEST_F(MultibufferDataSourceTest, LocalResource_DeferStrategy) {
  InitializeWithFileResponse();

  EXPECT_EQ(MultibufferDataSource::AUTO, preload());
  EXPECT_TRUE(is_local_source());
  CheckCapacityDefer();

  data_source_->MediaIsPlaying();
  CheckCapacityDefer();

  data_source_->SetBufferingStrategy(
      BufferedDataSourceInterface::BUFFERING_STRATEGY_AGGRESSIVE);
  CheckCapacityDefer();

  Stop();
}

TEST_F(MultibufferDataSourceTest, LocalResource_PreloadMetadata_DeferStrategy) {
  set_preload(MultibufferDataSource::METADATA);
  InitializeWithFileResponse();

  EXPECT_EQ(MultibufferDataSource::METADATA, preload());
  EXPECT_TRUE(is_local_source());
  CheckReadThenDefer();

  data_source_->MediaIsPlaying();
  CheckCapacityDefer();

  data_source_->SetBufferingStrategy(
      BufferedDataSourceInterface::BUFFERING_STRATEGY_AGGRESSIVE);
  CheckCapacityDefer();

  Stop();
}

TEST_F(MultibufferDataSourceTest, ExternalResource_Reponse200_DeferStrategy) {
  InitializeWith200Response();

  EXPECT_EQ(MultibufferDataSource::AUTO, preload());
  EXPECT_FALSE(is_local_source());
  EXPECT_FALSE(data_source_->range_supported());
  CheckCapacityDefer();

  data_source_->MediaIsPlaying();
  CheckCapacityDefer();

  data_source_->SetBufferingStrategy(
      BufferedDataSourceInterface::BUFFERING_STRATEGY_AGGRESSIVE);
  CheckCapacityDefer();

  Stop();
}

TEST_F(MultibufferDataSourceTest,
       ExternalResource_Response200_PreloadMetadata_DeferStrategy) {
  set_preload(MultibufferDataSource::METADATA);
  InitializeWith200Response();

  EXPECT_EQ(MultibufferDataSource::METADATA, preload());
  EXPECT_FALSE(is_local_source());
  EXPECT_FALSE(data_source_->range_supported());
  CheckReadThenDefer();

  data_source_->MediaIsPlaying();
  CheckCapacityDefer();

  data_source_->SetBufferingStrategy(
      BufferedDataSourceInterface::BUFFERING_STRATEGY_AGGRESSIVE);
  CheckCapacityDefer();

  Stop();
}

TEST_F(MultibufferDataSourceTest, ExternalResource_Reponse206_DeferStrategy) {
  InitializeWith206Response();
EXPECT_EQ(MultibufferDataSource::AUTO, preload());
  EXPECT_FALSE(is_local_source());
  EXPECT_TRUE(data_source_->range_supported());
  CheckCapacityDefer();

  data_source_->MediaIsPlaying();
  CheckCapacityDefer();
  // Cacheable + aggressive buffering means the source should never defer.
  set_might_be_reused_from_cache_in_future(true);
  data_source_->SetBufferingStrategy(
      BufferedDataSourceInterface::BUFFERING_STRATEGY_AGGRESSIVE);
  CheckNeverDefer();

  data_source_->SetBufferingStrategy(
      BufferedDataSourceInterface::BUFFERING_STRATEGY_NORMAL);
  data_source_->MediaIsPlaying();
  CheckCapacityDefer();
  // Non-cacheable resources fall back to capacity-based deferring even with
  // an aggressive buffering strategy.
  set_might_be_reused_from_cache_in_future(false);
  data_source_->SetBufferingStrategy(
      BufferedDataSourceInterface::BUFFERING_STRATEGY_AGGRESSIVE);
  CheckCapacityDefer();

  Stop();
}

TEST_F(MultibufferDataSourceTest,
       ExternalResource_Response206_PreloadMetadata_DeferStrategy) {
  set_preload(MultibufferDataSource::METADATA);
  InitializeWith206Response();

  EXPECT_EQ(MultibufferDataSource::METADATA, preload());
  EXPECT_FALSE(is_local_source());
  EXPECT_TRUE(data_source_->range_supported());
  CheckReadThenDefer();

  data_source_->MediaIsPlaying();
  CheckCapacityDefer();
  set_might_be_reused_from_cache_in_future(true);
  data_source_->SetBufferingStrategy(
      BufferedDataSourceInterface::BUFFERING_STRATEGY_AGGRESSIVE);
  CheckNeverDefer();

  data_source_->SetBufferingStrategy(
      BufferedDataSourceInterface::BUFFERING_STRATEGY_NORMAL);
  data_source_->MediaIsPlaying();
  CheckCapacityDefer();
  set_might_be_reused_from_cache_in_future(false);
  data_source_->SetBufferingStrategy(
      BufferedDataSourceInterface::BUFFERING_STRATEGY_AGGRESSIVE);
  CheckCapacityDefer();

  Stop();
}

TEST_F(MultibufferDataSourceTest, ExternalResource_Response206_VerifyDefer) {
  set_preload(MultibufferDataSource::METADATA);
  InitializeWith206Response();

  EXPECT_EQ(MultibufferDataSource::METADATA, preload());
  EXPECT_FALSE(is_local_source());
  EXPECT_TRUE(data_source_->range_supported());
  CheckReadThenDefer();

  // Read a bit from the beginning.
EXPECT_CALL(*this, ReadCallback(kDataSize));
  ReadAt(0);

  ASSERT_TRUE(active_loader());
  EXPECT_TRUE(active_loader()->deferred());
}

TEST_F(MultibufferDataSourceTest,
       ExternalResource_Response206_CancelAfterDefer) {
  set_preload(MultibufferDataSource::METADATA);
  InitializeWith206Response();

  EXPECT_EQ(MultibufferDataSource::METADATA, preload());
  EXPECT_FALSE(is_local_source());
  EXPECT_TRUE(data_source_->range_supported());
  CheckReadThenDefer();

  ReadAt(kDataSize);

  data_source_->OnBufferingHaveEnough(false);
  ASSERT_TRUE(active_loader());

  EXPECT_CALL(*this, ReadCallback(kDataSize));
  EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 2));
  ReceiveData(kDataSize);

  // Once the read is satisfied, the loader should have been cancelled.
  EXPECT_FALSE(active_loader_allownull());
}

TEST_F(MultibufferDataSourceTest,
       ExternalResource_Response206_CancelAfterPlay) {
  set_preload(BufferedDataSource::METADATA);
  InitializeWith206Response();

  EXPECT_EQ(MultibufferDataSource::METADATA, preload());
  EXPECT_FALSE(is_local_source());
  EXPECT_TRUE(data_source_->range_supported());
  CheckReadThenDefer();

  ReadAt(kDataSize);

  // Marking the media as playing should prevent deferral. It also tells the
  // data source to start buffering beyond the initial load.
  data_source_->MediaIsPlaying();
  data_source_->OnBufferingHaveEnough(false);
  CheckCapacityDefer();
  ASSERT_TRUE(active_loader());

  // Read a bit from the beginning and ensure deferral hasn't happened yet.
  EXPECT_CALL(*this, ReadCallback(kDataSize));
  EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 2));
  ReceiveData(kDataSize);
  ASSERT_TRUE(active_loader());
  data_source_->OnBufferingHaveEnough(true);
  ASSERT_TRUE(active_loader());
  ASSERT_FALSE(active_loader()->deferred());

  // Deliver data until capacity is reached and verify deferral.
int bytes_received = 0; EXPECT_CALL(host_, AddBufferedByteRange(_, _)).Times(testing::AtLeast(1)); while (active_loader_allownull() && !active_loader()->deferred()) { ReceiveData(kDataSize); bytes_received += kDataSize; } EXPECT_GT(bytes_received, 0); EXPECT_LT(bytes_received + kDataSize, kFileSize); EXPECT_FALSE(active_loader_allownull()); } TEST_F(MultibufferDataSourceTest, SeekPastEOF) { GURL gurl(kHttpUrl); data_source_.reset(new MockMultibufferDataSource( gurl, UrlData::CORS_UNSPECIFIED, message_loop_.task_runner(), url_index_, view_->mainFrame()->toWebLocalFrame(), &host_)); data_source_->SetPreload(preload_); response_generator_.reset(new TestResponseGenerator(gurl, kDataSize + 1)); EXPECT_CALL(*this, OnInitialize(true)); data_source_->Initialize(base::Bind(&MultibufferDataSourceTest::OnInitialize, base::Unretained(this))); base::RunLoop().RunUntilIdle(); // Not really loading until after OnInitialize is called. EXPECT_EQ(data_source_->downloading(), false); EXPECT_CALL(host_, SetTotalBytes(response_generator_->content_length())); Respond(response_generator_->Generate206(0)); EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize)); ReceiveData(kDataSize); // Read a bit from the beginning. EXPECT_CALL(*this, ReadCallback(kDataSize)); ReadAt(0); EXPECT_CALL(host_, AddBufferedByteRange(kDataSize, kDataSize + 1)); ReceiveData(1); EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 3)); FinishLoading(); EXPECT_CALL(*this, ReadCallback(0)); ReadAt(kDataSize + 5, kDataSize * 2); Stop(); } TEST_F(MultibufferDataSourceTest, Http_RetryThenRedirect) { InitializeWith206Response(); // Read to advance our position. EXPECT_CALL(*this, ReadCallback(kDataSize)); ReadAt(0); // Issue a pending read but trigger an error to force a retry. 
EXPECT_CALL(*this, ReadCallback(kDataSize - 10)); EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 2)); ReadAt(kDataSize + 10, kDataSize - 10); base::RunLoop run_loop; data_provider()->didFail(url_loader(), response_generator_->GenerateError()); data_provider()->RunOnStart(run_loop.QuitClosure()); run_loop.Run(); // Server responds with a redirect. blink::WebURLRequest request((GURL(kHttpDifferentPathUrl))); blink::WebURLResponse response((GURL(kHttpUrl))); response.setHTTPStatusCode(307); data_provider()->willFollowRedirect(url_loader(), request, response, 0); Respond(response_generator_->Generate206(kDataSize)); ReceiveData(kDataSize); EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 3)); FinishLoading(); EXPECT_FALSE(loading()); Stop(); } TEST_F(MultibufferDataSourceTest, Http_NotStreamingAfterRedirect) { Initialize(kHttpUrl, true); // Server responds with a redirect. blink::WebURLRequest request((GURL(kHttpDifferentPathUrl))); blink::WebURLResponse response((GURL(kHttpUrl))); response.setHTTPStatusCode(307); data_provider()->willFollowRedirect(url_loader(), request, response, 0); EXPECT_CALL(host_, SetTotalBytes(response_generator_->content_length())); Respond(response_generator_->Generate206(0)); EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize)); ReceiveData(kDataSize); EXPECT_FALSE(data_source_->IsStreaming()); FinishLoading(); EXPECT_FALSE(loading()); Stop(); } TEST_F(MultibufferDataSourceTest, Http_RangeNotSatisfiableAfterRedirect) { Initialize(kHttpUrl, true); // Server responds with a redirect. 
blink::WebURLRequest request((GURL(kHttpDifferentPathUrl))); blink::WebURLResponse response((GURL(kHttpUrl))); response.setHTTPStatusCode(307); data_provider()->willFollowRedirect(url_loader(), request, response, 0); EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize)); Respond(response_generator_->GenerateResponse(416)); Stop(); } TEST_F(MultibufferDataSourceTest, LengthKnownAtEOF) { Initialize(kHttpUrl, true); // Server responds without content-length. WebURLResponse response = response_generator_->Generate200(); response.clearHTTPHeaderField(WebString::fromUTF8("Content-Length")); response.setExpectedContentLength(kPositionNotSpecified); Respond(response); EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize)); ReceiveData(kDataSize); int64_t len; EXPECT_FALSE(data_source_->GetSize(&len)); EXPECT_TRUE(data_source_->IsStreaming()); EXPECT_CALL(*this, ReadCallback(kDataSize)); ReadAt(0); ReadAt(kDataSize); EXPECT_CALL(host_, SetTotalBytes(kDataSize)); EXPECT_CALL(*this, ReadCallback(0)); EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 2)); FinishLoading(); // Done loading, now we should know the length. EXPECT_TRUE(data_source_->GetSize(&len)); EXPECT_EQ(kDataSize, len); Stop(); } TEST_F(MultibufferDataSourceTest, FileSizeLessThanBlockSize) { Initialize(kHttpUrl, true); GURL gurl(kHttpUrl); blink::WebURLResponse response(gurl); response.setHTTPStatusCode(200); response.setHTTPHeaderField( WebString::fromUTF8("Content-Length"), WebString::fromUTF8(base::Int64ToString(kDataSize / 2))); response.setExpectedContentLength(kDataSize / 2); Respond(response); EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize / 2)); EXPECT_CALL(host_, SetTotalBytes(kDataSize / 2)); EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 2)); ReceiveData(kDataSize / 2); FinishLoading(); int64_t len = 0; EXPECT_TRUE(data_source_->GetSize(&len)); EXPECT_EQ(kDataSize / 2, len); Stop(); } } // namespace media<|fim▁end|>
EXPECT_TRUE(loading()); EXPECT_TRUE(data_source_->IsStreaming()); Stop();
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/** * @license Apache-2.0 * * Copyright (c) 2022 The Stdlib Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; /** * Compute the hyperbolic cosecant of a number. * * @module @stdlib/math/base/special/csch * * @example * var csch = require( '@stdlib/math/base/special/csch' ); * * var v = csch( 0.0 ); * // returns Infinity * * v = csch( 2.0 ); * // returns ~0.2757 * * v = csch( -2.0 ); * // returns ~-0.2757 * * v = csch( NaN ); * // returns NaN */ // MODULES // var main = require( './main.js' ); <|fim▁hole|><|fim▁end|>
// EXPORTS // module.exports = main;
<|file_name|>LoginController.java<|end_file_name|><|fim▁begin|>package com.ues21.ferreteria.login; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.servlet.http.HttpSession; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import com.ues21.ferreteria.productos.Productos; import com.ues21.ferreteria.productos.ProductosDAO; import com.ues21.ferreteria.usuarios.Usuarios; import com.ues21.ferreteria.usuarios.UsuariosDAO; @Controller public class LoginController { @Autowired private LoginDAO loginDAO; @Autowired private UsuariosDAO usuariosDAO; /* @RequestMapping(value = "/login", method = RequestMethod.GET) public String listaHome(Model model) {<|fim▁hole|> model.addAttribute("login", null); return "login"; } */ @RequestMapping(value = "/login", method = RequestMethod.GET) public String viewRegistration(Map<String, Object> model) { Login userForm = new Login(); model.put("userForm", userForm); return "login"; } @RequestMapping(value = "/login", method = RequestMethod.POST) public String processRegistration(@ModelAttribute("userForm") Login user, Model model, HttpSession session) { // implement your own registration logic here... Login login = loginDAO.verificarUsuario(user); // for testing purpose: System.out.println("username: " + user.getDni()); System.out.println("password: " + user.getContrasena()); if (login==null){ model.addAttribute("loginError", "Error logging in. 
Please try again"); return "index"; } else { Usuarios usuario = usuariosDAO.getUsuario(user.getDni()); session.setAttribute("loggedInUser", usuario); return "home"; } } @RequestMapping(value = "/logout", method = RequestMethod.GET) public String logout(HttpSession session){ session.removeAttribute("loggedInUser"); return "index"; } }<|fim▁end|>
<|file_name|>RedisFactory.py<|end_file_name|><|fim▁begin|>from JumpScale9 import j redisFound = False try: from .Redis import Redis from .RedisQueue import RedisQueue from redis._compat import nativestr # import itertools import socket redisFound = True except: pass import os import time # import sys from JumpScale9 import tcpPortConnectionTest from JumpScale9.core.JSBase import JSBase as JSBASE class RedisFactory(JSBASE): """ """ def __init__(self): self.__jslocation__ = "j.clients.redis" JSBASE.__init__(self)<|fim▁hole|> def cache_clear(self): """ clear the cache formed by the functions get() and getQueue() """ self._redis = {} self._redisq = {} self._config = {} def get( self, ipaddr="localhost", port=6379, password="", fromcache=True, unixsocket=None, ardb_patch=False, set_patch=False, ssl=False, ssl_certfile=None, ssl_keyfile=None, **args): """ get an instance of redis client, store it in cache so we could easily retrieve it later :param ipaddr: used to form the key when no unixsocket :param port: used to form the key when no unixsocket :param fromcache: if False, will create a new one instead of checking cache :param unixsocket: path of unixsocket to be used while creating Redis other arguments to redis: ssl_cert_reqs=None, ssl_ca_certs=None set_patch is needed when using the client for gedis """ if redisFound == False: raise RuntimeError("redis libraries are not installed, please pip3 install them.") if unixsocket is None: key = "%s_%s" % (ipaddr, port) else: key = unixsocket if key not in self._redis or not fromcache: if unixsocket is None: self._redis[key] = Redis(ipaddr, port, password=password, ssl=ssl, ssl_certfile=ssl_certfile, ssl_keyfile=ssl_keyfile, **args) else: self._redis[key] = Redis(unix_socket_path=unixsocket, password=password, ssl=ssl, ssl_certfile=ssl_certfile, ssl_keyfile=ssl_keyfile, **args) if ardb_patch: self._ardb_patch(self._redis[key]) if set_patch: self._set_patch(self._redis[key]) return self._redis[key] def _ardb_patch(self, 
client): client.response_callbacks['HDEL'] = lambda r: r and nativestr(r) == 'OK' def _set_patch(self, client): client.response_callbacks['SET'] = lambda r: r def getQueue(self, ipaddr, port, name, namespace="queues", fromcache=True): """ get an instance of redis queue, store it in cache so we can easily retrieve it later :param ipaddr: used to form the key when no unixsocket :param port: used to form the key when no unixsocket :param name: name of the queue :param namespace: value of namespace for the queue :param fromcache: if False, will create a new one instead of checking cache """ if not fromcache: return RedisQueue(self.get(ipaddr, port, fromcache=False), name, namespace=namespace) key = "%s_%s_%s_%s" % (ipaddr, port, name, namespace) if key not in self._redisq: self._redisq[key] = RedisQueue( self.get(ipaddr, port), name, namespace=namespace) return self._redisq[key] def core_get(self): """ will try to create redis connection to $tmpdir/redis.sock if that doesn't work then will look for std redis port if that does not work then will return None j.clients.redis.core_get() """ unix_socket_path = '%s/redis.sock' % j.dirs.TMPDIR db = None if os.path.exists(path=unix_socket_path): db = Redis(unix_socket_path=unix_socket_path) else: self.core_start() db = Redis(unix_socket_path=unix_socket_path) return db def kill(self): """ kill all running redis instances """ j.sal.process.execute("redis-cli -s %s/redis.sock shutdown" % j.dirs.TMPDIR, die=False, showout=False) j.sal.process.execute("redis-cli shutdown", die=False, showout=False) j.sal.process.killall("redis") j.sal.process.killall("redis-server") def core_running(self): if self._running==None: self._running=j.sal.nettools.tcpPortConnectionTest("localhost",6379) return self._running def core_check(self): if not self.core_running(): self.core_start() return self._running def core_start(self, timeout=20): """ starts a redis instance in separate ProcessLookupError standard on $tmpdir/redis.sock """ if 
j.core.platformtype.myplatform.isMac: if not j.sal.process.checkInstalled("redis-server"): # prefab.system.package.install('redis') j.sal.process.execute("brew unlink redis", die=False) j.sal.process.execute("brew install redis;brew link redis") if not j.sal.process.checkInstalled("redis-server"): raise RuntimeError("Cannot find redis-server even after install") j.sal.process.execute("redis-cli -s %s/redis.sock shutdown" % j.dirs.TMPDIR, die=False, showout=False) j.sal.process.execute("redis-cli shutdown", die=False, showout=False) elif j.core.platformtype.myplatform.isLinux: if j.core.platformtype.myplatform.isAlpine: os.system("apk add redis") elif j.core.platformtype.myplatform.isUbuntu: os.system("apt install redis-server -y") else: raise RuntimeError("platform not supported for start redis") # cmd = "redis-server --port 6379 --unixsocket %s/redis.sock --maxmemory 100000000 --daemonize yes" % tmpdir # 100MB # self.logger.info("start redis in background (osx)") # os.system(cmd) # self.logger.info("started") # time.sleep(1) # elif j.core.platformtype.myplatform.isCygwin: # cmd = "redis-server --maxmemory 100000000 & " # self.logger.info("start redis in background (win)") # os.system(cmd) cmd = "echo never > /sys/kernel/mm/transparent_hugepage/enabled" os.system(cmd) if not j.core.platformtype.myplatform.isMac: cmd = "sysctl vm.overcommit_memory=1" os.system(cmd) # redis_bin = "redis-server" if "TMPDIR" in os.environ: tmpdir = os.environ["TMPDIR"] else: tmpdir = "/tmp" cmd = "redis-server --port 6379 --unixsocket %s/redis.sock --maxmemory 100000000 --daemonize yes" % tmpdir self.logger.info(cmd) j.sal.process.execute(cmd) limit_timeout = time.time() + timeout while time.time() < limit_timeout: if tcpPortConnectionTest("localhost", 6379): break time.sleep(2) else: raise j.exceptions.Timeout("Couldn't start redis server")<|fim▁end|>
self.cache_clear() self._running = None
<|file_name|>HeaderTransformer.ts<|end_file_name|><|fim▁begin|>import isUndefined from 'lodash.isundefined'; import isFunction from 'lodash.isfunction'; import uniq from 'lodash.uniq'; import groupBy from 'lodash.groupby'; import { ParserOptions } from '../ParserOptions'; import { HeaderArray, HeaderTransformFunction, Row, RowArray, RowMap, RowValidationResult, RowValidatorCallback, } from '../types'; export class HeaderTransformer<O extends Row> { private readonly parserOptions: ParserOptions; headers: HeaderArray | null = null; private receivedHeaders = false; private readonly shouldUseFirstRow: boolean = false;<|fim▁hole|> private headersLength = 0; private readonly headersTransform?: HeaderTransformFunction; public constructor(parserOptions: ParserOptions) { this.parserOptions = parserOptions; if (parserOptions.headers === true) { this.shouldUseFirstRow = true; } else if (Array.isArray(parserOptions.headers)) { this.setHeaders(parserOptions.headers); } else if (isFunction(parserOptions.headers)) { this.headersTransform = parserOptions.headers; } } public transform(row: RowArray, cb: RowValidatorCallback<O>): void { if (!this.shouldMapRow(row)) { return cb(null, { row: null, isValid: true }); } return cb(null, this.processRow(row)); } private shouldMapRow(row: Row): boolean { const { parserOptions } = this; if (!this.headersTransform && parserOptions.renameHeaders && !this.processedFirstRow) { if (!this.receivedHeaders) { throw new Error('Error renaming headers: new headers must be provided in an array'); } this.processedFirstRow = true; return false; } if (!this.receivedHeaders && Array.isArray(row)) { if (this.headersTransform) { this.setHeaders(this.headersTransform(row)); } else if (this.shouldUseFirstRow) { this.setHeaders(row); } else { // dont do anything with the headers if we didnt receive a transform or shouldnt use the first row. 
return true; } return false; } return true; } private processRow(row: RowArray<string>): RowValidationResult<O> { if (!this.headers) { return { row: (row as never) as O, isValid: true }; } const { parserOptions } = this; if (!parserOptions.discardUnmappedColumns && row.length > this.headersLength) { if (!parserOptions.strictColumnHandling) { throw new Error( `Unexpected Error: column header mismatch expected: ${this.headersLength} columns got: ${row.length}`, ); } return { row: (row as never) as O, isValid: false, reason: `Column header mismatch expected: ${this.headersLength} columns got: ${row.length}`, }; } if (parserOptions.strictColumnHandling && row.length < this.headersLength) { return { row: (row as never) as O, isValid: false, reason: `Column header mismatch expected: ${this.headersLength} columns got: ${row.length}`, }; } return { row: this.mapHeaders(row), isValid: true }; } private mapHeaders(row: RowArray<string>): O { const rowMap: RowMap = {}; const { headers, headersLength } = this; for (let i = 0; i < headersLength; i += 1) { const header = (headers as string[])[i]; if (!isUndefined(header)) { const val = row[i]; // eslint-disable-next-line no-param-reassign if (isUndefined(val)) { rowMap[header] = ''; } else { rowMap[header] = val; } } } return rowMap as O; } private setHeaders(headers: HeaderArray): void { const filteredHeaders = headers.filter((h) => !!h); if (uniq(filteredHeaders).length !== filteredHeaders.length) { const grouped = groupBy(filteredHeaders); const duplicates = Object.keys(grouped).filter((dup) => grouped[dup].length > 1); throw new Error(`Duplicate headers found ${JSON.stringify(duplicates)}`); } this.headers = headers; this.receivedHeaders = true; this.headersLength = this.headers?.length || 0; } }<|fim▁end|>
private processedFirstRow = false;
<|file_name|>ComparatorLambda.java<|end_file_name|><|fim▁begin|>package at.ltd.tools.fw.peer2peerFirewall.backend.entities.comparator; <|fim▁hole|> @FunctionalInterface public interface ComparatorLambda<T> { int comp(T t1, T t2); }<|fim▁end|>
<|file_name|>collection_artifact.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import import json from changes.config import db from changes.constants import Result from changes.models.jobplan import JobPlan from changes.utils.http import build_web_uri from .base import ArtifactHandler, ArtifactParseError class CollectionArtifactHandler(ArtifactHandler): """ Base class artifact handler for collection (jobs.json and tests.json) files. Does the required job expansion. Subclasses are expected to set cls.FILENAMES to the handleable files in question.<|fim▁hole|> except ValueError: uri = build_web_uri('/find_build/{0}/'.format(self.step.job.build_id.hex)) self.logger.warning('Failed to parse json; (step=%s, build=%s)', self.step.id.hex, uri, exc_info=True) self.report_malformed() else: _, implementation = JobPlan.get_build_step_for_job(job_id=self.step.job_id) try: implementation.expand_jobs(self.step, phase_config) except ArtifactParseError: uri = build_web_uri('/find_build/{0}/'.format(self.step.job.build_id.hex)) self.logger.warning('malformed %s artifact (step=%s, build=%s)', self.FILENAMES[0], self.step.id.hex, uri, exc_info=True) self.report_malformed() except Exception: uri = build_web_uri('/find_build/{0}/'.format(self.step.job.build_id.hex)) self.logger.warning('expand_jobs failed (step=%s, build=%s)', self.step.id.hex, uri, exc_info=True) self.step.result = Result.infra_failed db.session.add(self.step) db.session.commit() class TestsJsonHandler(CollectionArtifactHandler): # only match in the root directory FILENAMES = ('/tests.json',)<|fim▁end|>
""" def process(self, fp, artifact): try: phase_config = json.load(fp)
<|file_name|>TableViewEditPart.java<|end_file_name|><|fim▁begin|>// LICENSE package com.forgedui.editor.edit; import java.beans.PropertyChangeEvent; import java.util.ArrayList; import java.util.List; import org.eclipse.draw2d.geometry.Rectangle; import org.eclipse.gef.commands.Command; import org.eclipse.gef.requests.CreateRequest; import com.forgedui.editor.GUIEditorPlugin; import com.forgedui.editor.edit.command.AddToTableViewElementCommand; import com.forgedui.editor.edit.policy.ContainerEditPolicy; import com.forgedui.editor.figures.TableViewFigure; import com.forgedui.model.titanium.SearchBar; import com.forgedui.model.titanium.TableView; import com.forgedui.model.titanium.TableViewRow; import com.forgedui.model.titanium.TableViewSection; import com.forgedui.model.titanium.TitaniumUIBoundedElement; import com.forgedui.model.titanium.TitaniumUIElement; /** * @author Dmitry {[email protected]} * */ public class TableViewEditPart extends TitaniumContainerEditPart<TableView> { @SuppressWarnings({ "rawtypes", "unchecked" }) @Override public List<?> getModelChildren_() {<|fim▁hole|> list.add(model.getHeaderView()); } if (model.getFooterView() != null){ list.add(model.getFooterView()); } if ((model.getSearchHidden() == null || !model.getSearchHidden()) && model.getSearch() != null){ list.add(model.getSearch()); } return list; } /** * Making sure to refresh things visual. 
*/ @Override public void propertyChange(PropertyChangeEvent evt) { final String propName = evt.getPropertyName(); if (TableView.PROP_HEADER_VIEW.equals(propName) || TableView.PROP_FOOTER_VIEW.equals(propName) || TableView.PROP_SEARCH_VIEW.equals(propName) || TableView.PROP_SEARCH_VIEW_HIDDEN.equals(propName) || TableView.PROP_MIN_ROW_HEIGHT.equals(propName) || TableView.PROP_MAX_ROW_HEIGHT.equals(propName) ) { refresh(); } else { super.propertyChange(evt); } } @Override protected void createEditPolicies() { super.createEditPolicies(); installEditPolicy(ContainerEditPolicy.KEY, new TableViewEditPolicy()); } @Override protected void refreshVisuals() { TableView model = (TableView)getModel(); TableViewFigure figure = (TableViewFigure)getFigure(); figure.setHeaderTitle(model.getHeaderTitle()); figure.setFooterTitle(model.getFooterTitle()); figure.setHasHeaderView(model.getHeaderView() != null); figure.setHasFooterView(model.getFooterView() != null); super.refreshVisuals(); } } class TableViewEditPolicy extends ContainerEditPolicy { protected Command getCreateCommand(CreateRequest request) { // And then passed those to the validate facility. 
Object newObject = request.getNewObject(); Object container = getHost().getModel(); if (!GUIEditorPlugin.getComponentValidator().validate(newObject, container)) return null; if (!(newObject instanceof TableViewRow) && !(newObject instanceof TableViewSection) && newObject instanceof TitaniumUIElement){ Rectangle r = (Rectangle)getConstraintFor(request); if (r != null){ TitaniumUIBoundedElement child = (TitaniumUIBoundedElement) newObject; if (container instanceof TableView){ TableView view = (TableView) getHost().getModel(); if (child instanceof SearchBar && view.getSearch() == null){ return new AddToTableViewElementCommand(view, child, r, true); } else if (GUIEditorPlugin.getComponentValidator().isView(child)){ if (r.y <= view.getDimension().height / 2){ if (view.getHeaderView() == null){ return new AddToTableViewElementCommand(view, child, r, true); } } else if (view.getFooterView() == null){ return new AddToTableViewElementCommand(view, child, r, false); } } return null;//Can't drop } } } return super.getCreateCommand(request); } /*@Override protected Object getConstraintFor(CreateRequest request) { Rectangle r = (Rectangle) super.getConstraintFor(request); r.x = 0; return r; }*/ }<|fim▁end|>
List list = new ArrayList(super.getModelChildren_()); TableView model = (TableView)getModel(); if (model.getHeaderView() != null){
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! IO related functions <|fim▁hole|>pub mod arguments; pub mod constants; pub mod events; pub mod graphics; pub mod sound;<|fim▁end|>
pub mod applicationsettings; pub mod applicationstate;
<|file_name|>checkout.js<|end_file_name|><|fim▁begin|>var express = require('express') var braintree = require('braintree') var router = express.Router() // eslint-disable-line new-cap var gateway = require('../lib/gateway') var TRANSACTION_SUCCESS_STATUSES = [ braintree.Transaction.Status.Authorizing, braintree.Transaction.Status.Authorized, braintree.Transaction.Status.Settled, braintree.Transaction.Status.Settling, braintree.Transaction.Status.SettlementConfirmed, braintree.Transaction.Status.SettlementPending,<|fim▁hole|>] function formatErrors(errors) { var formattedErrors = '' for (var i in errors) { // eslint-disable-line no-inner-declarations, vars-on-top if (errors.hasOwnProperty(i)) { formattedErrors += 'Error: ' + errors[i].code + ': ' + errors[i].message + '\n' } } return formattedErrors } function createResultObject(transaction) { var result var status = transaction.status if (TRANSACTION_SUCCESS_STATUSES.indexOf(status) !== -1) { result = { header: 'Sweet Success!', icon: 'success', message: 'Your test transaction has been successfully processed. See the Braintree API response and try again.' } } else { result = { header: 'Transaction Failed', icon: 'fail', message: 'Your test transaction has a status of ' + status + '. See the Braintree API response and try again.' 
} } return result } router.get('/', function (req, res) { res.redirect('/checkouts/new') }) router.get('/checkouts/new', function (req, res) { gateway.clientToken.generate({}, function (err, response) { res.render('checkouts/new', {clientToken: response.clientToken, messages: req.flash('error')}) }) }) router.get('/checkouts/:id', function (req, res) { var result var transactionId = req.params.id gateway.transaction.find(transactionId, function (err, transaction) { result = createResultObject(transaction) res.render('checkouts/show', {transaction: transaction, result: result}) }) }) router.post('/checkouts', function (req, res) { var transactionErrors var amount = req.body.amount // In production you should not take amounts directly from clients var nonce = req.body.payment_method_nonce gateway.transaction.sale({ amount: amount, paymentMethodNonce: nonce, customer: { firstName: req.body.firstName, lastName: req.body.lastName, email: req.body.email }, options: { submitForSettlement: true, storeInVaultOnSuccess: true } }, function (err, result) { if (result.success || result.transaction) { res.redirect('checkouts/' + result.transaction.id) } else { transactionErrors = result.errors.deepErrors() req.flash('error', {msg: formatErrors(transactionErrors)}) res.redirect('checkouts/new') } }) }) module.exports = router<|fim▁end|>
braintree.Transaction.Status.SubmittedForSettlement
<|file_name|>sin.rs<|end_file_name|><|fim▁begin|>//! Implements vertical (lane-wise) floating-point `sin`. macro_rules! impl_math_float_sin { ([$elem_ty:ident; $elem_count:expr]: $id:ident | $test_tt:tt) => { impl $id { /// Sine. #[inline] pub fn sin(self) -> Self { use crate::codegen::math::float::sin::Sin; Sin::sin(self) } /// Sine of `self * PI`. #[inline] pub fn sin_pi(self) -> Self { use crate::codegen::math::float::sin_pi::SinPi; SinPi::sin_pi(self) } <|fim▁hole|> pub fn sin_cos_pi(self) -> (Self, Self) { use crate::codegen::math::float::sin_cos_pi::SinCosPi; SinCosPi::sin_cos_pi(self) } } test_if!{ $test_tt: paste::item! { pub mod [<$id _math_sin>] { use super::*; #[cfg_attr(not(target_arch = "wasm32"), test)] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)] fn sin() { use crate::$elem_ty::consts::PI; let z = $id::splat(0 as $elem_ty); let p = $id::splat(PI as $elem_ty); let ph = $id::splat(PI as $elem_ty / 2.); let o_r = $id::splat((PI as $elem_ty / 2.).sin()); let z_r = $id::splat((PI as $elem_ty).sin()); assert_eq!(z, z.sin()); assert_eq!(o_r, ph.sin()); assert_eq!(z_r, p.sin()); } } } } }; }<|fim▁end|>
/// Sine and cosine of `self * PI`. #[inline]
<|file_name|>write.ts<|end_file_name|><|fim▁begin|>import { Nibble, UInt4 } from '../types' /** * Returns a Nibble (0-15) which equals the given bits. * * @example * byte.write([1,0,1,0]) => 10 * * @param {Array} nibble 4-bit unsigned integer * @return {Number} */ export default (nibble: Nibble): UInt4 => { if (!Array.isArray(nibble) || nibble.length !== 4) throw new RangeError('invalid array length') let result: UInt4 = 0 for (let i: number = 0; i < 4; i++) if (nibble[3 - i]) result |= 1 << i <|fim▁hole|><|fim▁end|>
return <UInt4>result }
<|file_name|>trait-inheritance-auto.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Testing that this impl turns A into a Quux, because // A is already a Foo Bar Baz impl<T:Foo + Bar + Baz> Quux for T { } trait Foo { fn f(&self) -> int; } trait Bar { fn g(&self) -> int; } trait Baz { fn h(&self) -> int; }<|fim▁hole|>trait Quux: Foo + Bar + Baz { } struct A { x: int } impl Foo for A { fn f(&self) -> int { 10 } } impl Bar for A { fn g(&self) -> int { 20 } } impl Baz for A { fn h(&self) -> int { 30 } } fn f<T:Quux>(a: &T) { assert!(a.f() == 10); assert!(a.g() == 20); assert!(a.h() == 30); } pub fn main() { let a = &A { x: 3 }; f(a); }<|fim▁end|>
<|file_name|>traversal.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use context::{SharedStyleContext, StyleContext}; use dom::{OpaqueNode, TNode, TRestyleDamage, UnsafeNode}; use matching::{ApplicableDeclarations, ElementMatchMethods, MatchMethods, StyleSharingResult}; use selector_impl::SelectorImplExt; use selectors::Element; use selectors::bloom::BloomFilter; use std::cell::RefCell; use util::opts; use util::tid::tid; /// Every time we do another layout, the old bloom filters are invalid. This is /// detected by ticking a generation number every layout. pub type Generation = u32; /// A pair of the bloom filter used for css selector matching, and the node to /// which it applies. This is used to efficiently do `Descendant` selector /// matches. Thanks to the bloom filter, we can avoid walking up the tree /// looking for ancestors that aren't there in the majority of cases. /// /// As we walk down the DOM tree a thread-local bloom filter is built of all the /// CSS `SimpleSelector`s which are part of a `Descendant` compound selector /// (i.e. paired with a `Descendant` combinator, in the `next` field of a /// `CompoundSelector`. /// /// Before a `Descendant` selector match is tried, it's compared against the /// bloom filter. If the bloom filter can exclude it, the selector is quickly /// rejected. /// /// When done styling a node, all selectors previously inserted into the filter /// are removed. /// /// Since a work-stealing queue is used for styling, sometimes, the bloom filter /// will no longer be the for the parent of the node we're currently on. When /// this happens, the thread local bloom filter will be thrown away and rebuilt. 
thread_local!( pub static STYLE_BLOOM: RefCell<Option<(Box<BloomFilter>, UnsafeNode, Generation)>> = RefCell::new(None)); /// Returns the thread local bloom filter. /// /// If one does not exist, a new one will be made for you. If it is out of date, /// it will be cleared and reused. fn take_thread_local_bloom_filter<N, Impl: SelectorImplExt>(parent_node: Option<N>, root: OpaqueNode, context: &SharedStyleContext<Impl>) -> Box<BloomFilter> where N: TNode { STYLE_BLOOM.with(|style_bloom| { match (parent_node, style_bloom.borrow_mut().take()) { // Root node. Needs new bloom filter. (None, _ ) => { debug!("[{}] No parent, but new bloom filter!", tid()); Box::new(BloomFilter::new()) } // No bloom filter for this thread yet. (Some(parent), None) => { let mut bloom_filter = Box::new(BloomFilter::new()); insert_ancestors_into_bloom_filter(&mut bloom_filter, parent, root); bloom_filter } // Found cached bloom filter. (Some(parent), Some((mut bloom_filter, old_node, old_generation))) => { if old_node == parent.to_unsafe() && old_generation == context.generation { // Hey, the cached parent is our parent! We can reuse the bloom filter. debug!("[{}] Parent matches (={}). Reusing bloom filter.", tid(), old_node.0); } else { // Oh no. the cached parent is stale. I guess we need a new one. Reuse the existing // allocation to avoid malloc churn. bloom_filter.clear(); insert_ancestors_into_bloom_filter(&mut bloom_filter, parent, root); } bloom_filter }, } }) } pub fn put_thread_local_bloom_filter<Impl: SelectorImplExt>(bf: Box<BloomFilter>, unsafe_node: &UnsafeNode, context: &SharedStyleContext<Impl>) { STYLE_BLOOM.with(move |style_bloom| { assert!(style_bloom.borrow().is_none(),<|fim▁hole|> /// "Ancestors" in this context is inclusive of ourselves. 
fn insert_ancestors_into_bloom_filter<N>(bf: &mut Box<BloomFilter>, mut n: N, root: OpaqueNode) where N: TNode { debug!("[{}] Inserting ancestors.", tid()); let mut ancestors = 0; loop { ancestors += 1; n.insert_into_bloom_filter(&mut **bf); n = match n.layout_parent_node(root) { None => break, Some(p) => p, }; } debug!("[{}] Inserted {} ancestors.", tid(), ancestors); } pub trait DomTraversalContext<N: TNode> { type SharedContext: Sync + 'static; fn new<'a>(&'a Self::SharedContext, OpaqueNode) -> Self; fn process_preorder(&self, node: N); fn process_postorder(&self, node: N); } /// The recalc-style-for-node traversal, which styles each node and must run before /// layout computation. This computes the styles applied to each node. #[inline] #[allow(unsafe_code)] pub fn recalc_style_at<'a, N, C>(context: &'a C, root: OpaqueNode, node: N) where N: TNode, C: StyleContext<'a, <N::ConcreteElement as Element>::Impl>, <N::ConcreteElement as Element>::Impl: SelectorImplExt<ComputedValues=N::ConcreteComputedValues> + 'a { // Get the parent node. let parent_opt = match node.parent_node() { Some(parent) if parent.is_element() => Some(parent), _ => None, }; // Get the style bloom filter. let mut bf = take_thread_local_bloom_filter(parent_opt, root, context.shared_context()); let nonincremental_layout = opts::get().nonincremental_layout; if nonincremental_layout || node.is_dirty() { // Remove existing CSS styles from nodes whose content has changed (e.g. text changed), // to force non-incremental reflow. if node.has_changed() { node.unstyle(); } // Check to see whether we can share a style with someone. let style_sharing_candidate_cache = &mut context.local_context().style_sharing_candidate_cache.borrow_mut(); let sharing_result = match node.as_element() { Some(element) => { unsafe { element.share_style_if_possible(style_sharing_candidate_cache, parent_opt.clone()) } }, None => StyleSharingResult::CannotShare, }; // Otherwise, match and cascade selectors. 
match sharing_result { StyleSharingResult::CannotShare => { let mut applicable_declarations = ApplicableDeclarations::new(); let shareable_element = match node.as_element() { Some(element) => { // Perform the CSS selector matching. let stylist = &context.shared_context().stylist; if element.match_element(&**stylist, Some(&*bf), &mut applicable_declarations) { Some(element) } else { None } }, None => { if node.has_changed() { node.set_restyle_damage(N::ConcreteRestyleDamage::rebuild_and_reflow()) } None }, }; // Perform the CSS cascade. unsafe { node.cascade_node(&context.shared_context(), parent_opt, &applicable_declarations, &mut context.local_context().applicable_declarations_cache.borrow_mut(), &context.shared_context().new_animations_sender); } // Add ourselves to the LRU cache. if let Some(element) = shareable_element { style_sharing_candidate_cache.insert_if_possible::<'ln, N>(&element); } } StyleSharingResult::StyleWasShared(index, damage) => { style_sharing_candidate_cache.touch(index); node.set_restyle_damage(damage); } } } let unsafe_layout_node = node.to_unsafe(); // Before running the children, we need to insert our nodes into the bloom // filter. debug!("[{}] + {:X}", tid(), unsafe_layout_node.0); node.insert_into_bloom_filter(&mut *bf); // NB: flow construction updates the bloom filter on the way up. put_thread_local_bloom_filter(bf, &unsafe_layout_node, context.shared_context()); }<|fim▁end|>
"Putting into a never-taken thread-local bloom filter"); *style_bloom.borrow_mut() = Some((bf, *unsafe_node, context.generation)); }) }
<|file_name|>getcbsperiod.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2009 Lund University * * Written by Anton Cervin, Dan Henriksson and Martin Ohlin, * Department of Automatic Control LTH, Lund University, Sweden. * * This file is part of Truetime 2.0 beta. * * Truetime 2.0 beta is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. *<|fim▁hole|> * without any warranty; without even the implied warranty of * merchantability or fitness for a particular purpose. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Truetime 2.0 beta. If not, see <http://www.gnu.org/licenses/> */ #ifndef GET_CBS_PERIOD #define GET_CBS_PERIOD #include "getnode.cpp" double ttGetCBSPeriod(const char *name) { DataNode *dn; dn = (DataNode*) getNode(name, rtsys->cbsList); if (dn == NULL) { char buf[MAXERRBUF]; sprintf(buf, "ttGetCBSPeriod: Non-existent task '%s'", name); TT_MEX_ERROR(buf); return 0.0; } CBS* cbs = (CBS*) dn->data; return cbs->Ts; } #endif<|fim▁end|>
* Truetime 2.0 beta is distributed in the hope that it will be useful, but
<|file_name|>0013_show2.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations<|fim▁hole|> class Migration(migrations.Migration): dependencies = [ ('setlist', '0012_remove_show_leg'), ] operations = [ migrations.CreateModel( name='Show2', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('venue', models.ForeignKey(to='setlist.Venue', to_field='id')), ('tour', models.ForeignKey(to='setlist.Tour', to_field='id')), ('date', models.DateField(db_index=True)), ('setlist', models.TextField(default=b'', blank=True)), ('notes', models.TextField(default=b'', blank=True)), ('source', models.TextField(default=b'', blank=True)), ], options={ }, bases=(models.Model,), ), ]<|fim▁end|>
<|file_name|>style.rs<|end_file_name|><|fim▁begin|>use std::borrow::Cow; use std::collections::HashMap; use std::fmt::{self, Write}; use std::mem; use console::{measure_text_width, Style}; #[cfg(feature = "improved_unicode")] use unicode_segmentation::UnicodeSegmentation; use crate::format::{ BinaryBytes, DecimalBytes, FormattedDuration, HumanBytes, HumanCount, HumanDuration, }; use crate::state::ProgressState; /// Controls the rendering style of progress bars #[derive(Clone)] pub struct ProgressStyle { tick_strings: Vec<Box<str>>, progress_chars: Vec<Box<str>>, template: Template, on_finish: ProgressFinish, // how unicode-big each char in progress_chars is char_width: usize, format_map: FormatMap, } #[cfg(feature = "improved_unicode")] fn segment(s: &str) -> Vec<Box<str>> { UnicodeSegmentation::graphemes(s, true) .map(|s| s.into()) .collect() } #[cfg(not(feature = "improved_unicode"))] fn segment(s: &str) -> Vec<Box<str>> { s.chars().map(|x| x.to_string().into()).collect() } #[cfg(feature = "improved_unicode")] fn measure(s: &str) -> usize { unicode_width::UnicodeWidthStr::width(s) } #[cfg(not(feature = "improved_unicode"))] fn measure(s: &str) -> usize { s.chars().count() } /// finds the unicode-aware width of the passed grapheme cluters /// panics on an empty parameter, or if the characters are not equal-width fn width(c: &[Box<str>]) -> usize { c.iter() .map(|s| measure(s.as_ref())) .fold(None, |acc, new| { match acc { None => return Some(new), Some(old) => assert_eq!(old, new, "got passed un-equal width progress characters"), } acc }) .unwrap() } impl ProgressStyle { /// Returns the default progress bar style for bars pub fn default_bar() -> ProgressStyle { Self::new("{wide_bar} {pos}/{len}") } /// Returns the default progress bar style for spinners pub fn default_spinner() -> Self { Self::new("{spinner} {msg}") } fn new(template: &str) -> Self { let progress_chars = segment("█░"); let char_width = width(&progress_chars); ProgressStyle { tick_strings: 
"⠁⠁⠉⠙⠚⠒⠂⠂⠒⠲⠴⠤⠄⠄⠤⠠⠠⠤⠦⠖⠒⠐⠐⠒⠓⠋⠉⠈⠈ " .chars() .map(|c| c.to_string().into()) .collect(), progress_chars, char_width, template: Template::from_str(template), on_finish: ProgressFinish::default(), format_map: FormatMap::default(), } } /// Sets the tick character sequence for spinners pub fn tick_chars(mut self, s: &str) -> ProgressStyle { self.tick_strings = s.chars().map(|c| c.to_string().into()).collect(); // Format bar will panic with some potentially confusing message, better to panic here // with a message explicitly informing of the problem assert!( self.tick_strings.len() >= 2, "at least 2 tick chars required" ); self } /// Sets the tick string sequence for spinners pub fn tick_strings(mut self, s: &[&str]) -> ProgressStyle { self.tick_strings = s.iter().map(|s| s.to_string().into()).collect(); // Format bar will panic with some potentially confusing message, better to panic here // with a message explicitly informing of the problem assert!( self.progress_chars.len() >= 2, "at least 2 tick strings required" ); self } /// Sets the progress characters `(filled, current, to do)` /// /// You can pass more than three for a more detailed display. /// All passed grapheme clusters need to be of equal width. pub fn progress_chars(mut self, s: &str) -> ProgressStyle { self.progress_chars = segment(s); // Format bar will panic with some potentially confusing message, better to panic here // with a message explicitly informing of the problem assert!( self.progress_chars.len() >= 2, "at least 2 progress chars required" ); self.char_width = width(&self.progress_chars); self } /// Adds a custom key that references a `&ProgressState` to the template pub fn with_key(mut self, key: &'static str, f: Format) -> ProgressStyle { self.format_map.0.insert(key, f); self } /// Sets the template string for the progress bar /// /// Review the [list of template keys](./index.html#templates) for more information. 
pub fn template(mut self, s: &str) -> ProgressStyle { self.template = Template::from_str(s); self } /// Sets the finish behavior for the progress bar /// /// This behavior is invoked when [`ProgressBar`] or /// [`ProgressBarIter`] completes and /// [`ProgressBar::is_finished()`] is false. /// If you don't want the progress bar to be automatically finished then /// call `on_finish(None)`. /// /// [`ProgressBar`]: crate::ProgressBar /// [`ProgressBarIter`]: crate::ProgressBarIter /// [`ProgressBar::is_finished()`]: crate::ProgressBar::is_finished pub fn on_finish(mut self, finish: ProgressFinish) -> ProgressStyle { self.on_finish = finish; self } pub(crate) fn current_tick_str(&self, state: &ProgressState) -> &str { match state.is_finished() { true => self.get_final_tick_str(), false => self.get_tick_str(state.tick), } } /// Returns the tick string for a given number pub fn get_tick_str(&self, idx: u64) -> &str { &self.tick_strings[(idx as usize) % (self.tick_strings.len() - 1)] } /// Returns the tick string for the finished state pub fn get_final_tick_str(&self) -> &str { &self.tick_strings[self.tick_strings.len() - 1] } /// Returns the finish behavior pub fn get_on_finish(&self) -> &ProgressFinish { &self.on_finish } fn format_bar(&self, fract: f32, width: usize, alt_style: Option<&Style>) -> BarDisplay<'_> { // The number of clusters from progress_chars to write (rounding down). let width = width / self.char_width; // The number of full clusters (including a fractional component for a partially-full one). let fill = fract * width as f32; // The number of entirely full clusters (by truncating `fill`). let entirely_filled = fill as usize; // 1 if the bar is not entirely empty or full (meaning we need to draw the "current" // character between the filled and "to do" segment), 0 otherwise. let head = if fill > 0.0 && entirely_filled < width { 1 } else { 0 }; let cur = if head == 1 { // Number of fine-grained progress entries in progress_chars. 
let n = self.progress_chars.len().saturating_sub(2); let cur_char = if n <= 1 { // No fine-grained entries. 1 is the single "current" entry if we have one, the "to // do" entry if not. 1 } else { // Pick a fine-grained entry, ranging from the last one (n) if the fractional part // of fill is 0 to the first one (1) if the fractional part of fill is almost 1. n.saturating_sub((fill.fract() * n as f32) as usize) }; Some(cur_char) } else { None }; // Number of entirely empty clusters needed to fill the bar up to `width`. let bg = width.saturating_sub(entirely_filled).saturating_sub(head); let rest = RepeatedStringDisplay { str: &self.progress_chars[self.progress_chars.len() - 1], num: bg, }; BarDisplay { chars: &self.progress_chars, filled: entirely_filled, cur, rest: alt_style.unwrap_or(&Style::new()).apply_to(rest), } } pub(crate) fn format_state(&self, state: &ProgressState) -> Vec<String> { let mut cur = String::new(); let mut buf = String::new(); let mut rv = vec![]; let mut wide = None; for part in &self.template.parts { match part { TemplatePart::Placeholder { key, align, width, truncate, style, alt_style, } => { buf.clear(); if let Some(formatter) = self.format_map.0.get(key.as_str()) { buf.push_str(&formatter(state)); } else { match key.as_str() { "wide_bar" => { wide = Some(WideElement::Bar { alt_style }); buf.push('\x00'); } "bar" => buf .write_fmt(format_args!( "{}", self.format_bar( state.fraction(), width.unwrap_or(20) as usize, alt_style.as_ref(), ) )) .unwrap(), "spinner" => buf.push_str(state.current_tick_str()), "wide_msg" => { wide = Some(WideElement::Message { align }); buf.push('\x00'); } "msg" => buf.push_str(state.message()), "prefix" => buf.push_str(state.prefix()), "pos" => buf.write_fmt(format_args!("{}", state.pos)).unwrap(), "human_pos" => buf .write_fmt(format_args!("{}", HumanCount(state.pos))) .unwrap(), "len" => buf.write_fmt(format_args!("{}", state.len)).unwrap(), "human_len" => buf .write_fmt(format_args!("{}", HumanCount(state.len))) 
.unwrap(), "percent" => buf .write_fmt(format_args!("{:.*}", 0, state.fraction() * 100f32)) .unwrap(), "bytes" => buf .write_fmt(format_args!("{}", HumanBytes(state.pos))) .unwrap(), "total_bytes" => buf .write_fmt(format_args!("{}", HumanBytes(state.len))) .unwrap(), "decimal_bytes" => buf .write_fmt(format_args!("{}", DecimalBytes(state.pos))) .unwrap(), "decimal_total_bytes" => buf .write_fmt(format_args!("{}", DecimalBytes(state.len))) .unwrap(), "binary_bytes" => buf .write_fmt(format_args!("{}", BinaryBytes(state.pos))) .unwrap(), "binary_total_bytes" => buf .write_fmt(format_args!("{}", BinaryBytes(state.len))) .unwrap(), "elapsed_precise" => buf .write_fmt(format_args!( "{}", FormattedDuration(state.started.elapsed()) )) .unwrap(), "elapsed" => buf .write_fmt(format_args!( "{:#}", HumanDuration(state.started.elapsed()) )) .unwrap(), "per_sec" => buf .write_fmt(format_args!("{:.4}/s", state.per_sec())) .unwrap(), "bytes_per_sec" => buf .write_fmt(format_args!("{}/s", HumanBytes(state.per_sec() as u64))) .unwrap(), "binary_bytes_per_sec" => buf .write_fmt(format_args!( "{}/s", BinaryBytes(state.per_sec() as u64) )) .unwrap(), "eta_precise" => buf .write_fmt(format_args!("{}", FormattedDuration(state.eta()))) .unwrap(), "eta" => buf .write_fmt(format_args!("{:#}", HumanDuration(state.eta()))) .unwrap(), "duration_precise" => buf .write_fmt(format_args!("{}", FormattedDuration(state.duration()))) .unwrap(), "duration" => buf .write_fmt(format_args!("{:#}", HumanDuration(state.duration()))) .unwrap(), _ => (), } }; match width { Some(width) => { let padded = PaddedStringDisplay { str: &buf, width: *width as usize, align: *align, truncate: *truncate, }; match style { Some(s) => cur .write_fmt(format_args!("{}", s.apply_to(padded))) .unwrap(), None => cur.write_fmt(format_args!("{}", padded)).unwrap(), } } None => match style { Some(s) => cur.write_fmt(format_args!("{}", s.apply_to(&buf))).unwrap(), None => cur.push_str(&buf), }, } } TemplatePart::Literal(s) => 
cur.push_str(s), TemplatePart::NewLine => rv.push(match wide { Some(inner) => inner.expand(mem::take(&mut cur), self, state, &mut buf), None => mem::take(&mut cur), }), } } if !cur.is_empty() { rv.push(match wide { Some(inner) => inner.expand(mem::take(&mut cur), self, state, &mut buf), None => mem::take(&mut cur), }) } rv } } #[derive(Clone, Copy)] enum WideElement<'a> { Bar { alt_style: &'a Option<Style> }, Message { align: &'a Alignment }, } impl<'a> WideElement<'a> { fn expand( self, cur: String, style: &ProgressStyle, state: &ProgressState, buf: &mut String, ) -> String { let left = state.width().saturating_sub(measure_text_width(&cur)); match self { Self::Bar { alt_style } => cur.replace( "\x00", &format!( "{}", style.format_bar(state.fraction(), left, alt_style.as_ref()) ), ), WideElement::Message { align } => { buf.clear(); buf.write_fmt(format_args!( "{}", PaddedStringDisplay { str: state.message(), width: left, align: *align, truncate: true, } )) .unwrap(); let trimmed = match cur.as_bytes().last() == Some(&b'\x00') { true => buf.trim_end(), false => buf, }; cur.replace("\x00", trimmed) } } } } #[derive(Clone, Debug)] struct Template { parts: Vec<TemplatePart>, } impl Template { fn from_str(s: &str) -> Self { use State::*; let (mut state, mut parts, mut buf) = (Literal, vec![], String::new()); for c in s.chars() { let new = match (state, c) { (Literal, '{') => (MaybeOpen, None), (Literal, '\n') => { if !buf.is_empty() { parts.push(TemplatePart::Literal(mem::take(&mut buf))); } parts.push(TemplatePart::NewLine); (Literal, None) } (Literal, '}') => (DoubleClose, Some('}')), (Literal, c) => (Literal, Some(c)), (DoubleClose, '}') => (Literal, None), (MaybeOpen, '{') => (Literal, Some('{')), (MaybeOpen, c) | (Key, c) if c.is_ascii_whitespace() => { // If we find whitespace where the variable key is supposed to go, // backtrack and act as if this was a literal. 
buf.push(c); let mut new = String::from("{"); new.push_str(&buf); buf.clear(); parts.push(TemplatePart::Literal(new)); (Literal, None) } (MaybeOpen, c) if c != '}' && c != ':' => (Key, Some(c)), (Key, c) if c != '}' && c != ':' => (Key, Some(c)), (Key, ':') => (Align, None), (Key, '}') => (Literal, None), (Key, '!') if !buf.is_empty() => { parts.push(TemplatePart::Placeholder { key: mem::take(&mut buf), align: Alignment::Left, width: None, truncate: true, style: None, alt_style: None, }); (Width, None) } (Align, c) if c == '<' || c == '^' || c == '>' => { if let Some(TemplatePart::Placeholder { align, .. }) = parts.last_mut() { match c { '<' => *align = Alignment::Left, '^' => *align = Alignment::Center, '>' => *align = Alignment::Right, _ => (), } } (Width, None) } (Align, c @ '0'..='9') => (Width, Some(c)), (Align, '!') | (Width, '!') => { if let Some(TemplatePart::Placeholder { truncate, .. }) = parts.last_mut() { *truncate = true; } (Width, None) } (Align, '.') => (FirstStyle, None), (Align, '}') => (Literal, None), (Width, c @ '0'..='9') => (Width, Some(c)), (Width, '.') => (FirstStyle, None), (Width, '}') => (Literal, None), (FirstStyle, '/') => (AltStyle, None), (FirstStyle, '}') => (Literal, None), (FirstStyle, c) => (FirstStyle, Some(c)), (AltStyle, '}') => (Literal, None), (AltStyle, c) => (AltStyle, Some(c)), (st, c) => panic!("unreachable state: {:?} @ {:?}", c, st), }; match (state, new.0) { (MaybeOpen, Key) if !buf.is_empty() => { parts.push(TemplatePart::Literal(mem::take(&mut buf))) } (Key, Align) | (Key, Literal) if !buf.is_empty() => { parts.push(TemplatePart::Placeholder { key: mem::take(&mut buf), align: Alignment::Left, width: None, truncate: false, style: None, alt_style: None, }) } (Width, FirstStyle) | (Width, Literal) if !buf.is_empty() => { if let Some(TemplatePart::Placeholder { width, .. 
}) = parts.last_mut() { *width = Some(buf.parse().unwrap()); buf.clear(); } } (FirstStyle, AltStyle) | (FirstStyle, Literal) if !buf.is_empty() => { if let Some(TemplatePart::Placeholder { style, .. }) = parts.last_mut() { *style = Some(Style::from_dotted_str(&buf)); buf.clear(); } } (AltStyle, Literal) if !buf.is_empty() => { if let Some(TemplatePart::Placeholder { alt_style, .. }) = parts.last_mut() { *alt_style = Some(Style::from_dotted_str(&buf)); buf.clear(); } } (_, _) => (), } state = new.0; if let Some(c) = new.1 { buf.push(c); } } if matches!(state, Literal | DoubleClose) && !buf.is_empty() { parts.push(TemplatePart::Literal(buf)); } Self { parts } } } #[derive(Clone, Debug, PartialEq)] enum TemplatePart { Literal(String), Placeholder { key: String, align: Alignment, width: Option<u16>, truncate: bool, style: Option<Style>, alt_style: Option<Style>, }, NewLine, }<|fim▁hole|>enum State { Literal, MaybeOpen, DoubleClose, Key, Align, Width, FirstStyle, AltStyle, } struct BarDisplay<'a> { chars: &'a [Box<str>], filled: usize, cur: Option<usize>, rest: console::StyledObject<RepeatedStringDisplay<'a>>, } impl<'a> fmt::Display for BarDisplay<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { for _ in 0..self.filled { f.write_str(&self.chars[0])?; } if let Some(cur) = self.cur { f.write_str(&self.chars[cur])?; } self.rest.fmt(f) } } struct RepeatedStringDisplay<'a> { str: &'a str, num: usize, } impl<'a> fmt::Display for RepeatedStringDisplay<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { for _ in 0..self.num { f.write_str(self.str)?; } Ok(()) } } struct PaddedStringDisplay<'a> { str: &'a str, width: usize, align: Alignment, truncate: bool, } impl<'a> fmt::Display for PaddedStringDisplay<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let cols = measure_text_width(self.str); if cols >= self.width { return match self.truncate { true => f.write_str(self.str.get(..self.width).unwrap_or(self.str)), false => 
f.write_str(self.str), }; } let diff = self.width.saturating_sub(cols); let (left_pad, right_pad) = match self.align { Alignment::Left => (0, diff), Alignment::Right => (diff, 0), Alignment::Center => (diff / 2, diff.saturating_sub(diff / 2)), }; for _ in 0..left_pad { f.write_char(' ')?; } f.write_str(self.str)?; for _ in 0..right_pad { f.write_char(' ')?; } Ok(()) } } #[derive(Clone, Default)] struct FormatMap(HashMap<&'static str, Format>); pub type Format = fn(&ProgressState) -> String; /// Behavior of a progress bar when it is finished /// /// This is invoked when a [`ProgressBar`] or [`ProgressBarIter`] completes and /// [`ProgressBar::is_finished`] is false. /// /// [`ProgressBar`]: crate::ProgressBar /// [`ProgressBarIter`]: crate::ProgressBarIter /// [`ProgressBar::is_finished`]: crate::ProgressBar::is_finished #[derive(Clone, Debug)] pub enum ProgressFinish { /// Finishes the progress bar and leaves the current message /// /// Same behavior as calling [`ProgressBar::finish()`](crate::ProgressBar::finish). AndLeave, /// Finishes the progress bar at current position and leaves the current message /// /// Same behavior as calling [`ProgressBar::finish_at_current_pos()`](crate::ProgressBar::finish_at_current_pos). AtCurrentPos, /// Finishes the progress bar and sets a message /// /// Same behavior as calling [`ProgressBar::finish_with_message()`](crate::ProgressBar::finish_with_message). WithMessage(Cow<'static, str>), /// Finishes the progress bar and completely clears it (this is the default) /// /// Same behavior as calling [`ProgressBar::finish_and_clear()`](crate::ProgressBar::finish_and_clear). AndClear, /// Finishes the progress bar and leaves the current message and progress /// /// Same behavior as calling [`ProgressBar::abandon()`](crate::ProgressBar::abandon). 
Abandon, /// Finishes the progress bar and sets a message, and leaves the current progress /// /// Same behavior as calling [`ProgressBar::abandon_with_message()`](crate::ProgressBar::abandon_with_message). AbandonWithMessage(Cow<'static, str>), } impl Default for ProgressFinish { fn default() -> Self { Self::AndClear } } #[derive(PartialEq, Eq, Debug, Copy, Clone)] enum Alignment { Left, Center, Right, } #[cfg(test)] mod tests { use super::*; use crate::draw_target::ProgressDrawTarget; use crate::state::ProgressState; #[test] fn test_expand_template() { let mut style = ProgressStyle::default_bar(); style.format_map.0.insert("foo", |_| "FOO".into()); style.format_map.0.insert("bar", |_| "BAR".into()); let state = ProgressState::new(10, ProgressDrawTarget::stdout()); style.template = Template::from_str("{{ {foo} {bar} }}"); let rv = style.format_state(&state); assert_eq!(&rv[0], "{ FOO BAR }"); style.template = Template::from_str(r#"{ "foo": "{foo}", "bar": {bar} }"#); let rv = style.format_state(&state); assert_eq!(&rv[0], r#"{ "foo": "FOO", "bar": BAR }"#); } #[test] fn test_expand_template_flags() { use console::set_colors_enabled; set_colors_enabled(true); let mut style = ProgressStyle::default_bar(); style.format_map.0.insert("foo", |_| "XXX".into()); let state = ProgressState::new(10, ProgressDrawTarget::stdout()); style.template = Template::from_str("{foo:5}"); let rv = style.format_state(&state); assert_eq!(&rv[0], "XXX "); style.template = Template::from_str("{foo:.red.on_blue}"); let rv = style.format_state(&state); assert_eq!(&rv[0], "\u{1b}[31m\u{1b}[44mXXX\u{1b}[0m"); style.template = Template::from_str("{foo:^5.red.on_blue}"); let rv = style.format_state(&state); assert_eq!(&rv[0], "\u{1b}[31m\u{1b}[44m XXX \u{1b}[0m"); style.template = Template::from_str("{foo:^5.red.on_blue/green.on_cyan}"); let rv = style.format_state(&state); assert_eq!(&rv[0], "\u{1b}[31m\u{1b}[44m XXX \u{1b}[0m"); } }<|fim▁end|>
#[derive(Copy, Clone, Debug, PartialEq)]
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,<|fim▁hole|># software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from setuptools import setup DESC = """Installer for Apache Bloodhound Adds the bloodhound_setup cli command. """ versions = [ (0, 8, 0), (0, 9, 0), ] latest = '.'.join(str(x) for x in versions[-1]) setup( name="bloodhound_installer", version=latest, description=DESC.split('\n', 1)[0], author="Apache Bloodhound", license="Apache License v2", url="https://bloodhound.apache.org/", requires=['trac', 'BloodhoundMultiProduct'], packages=['bhsetup'], entry_points=""" [console_scripts] bloodhound_setup = bhsetup.bloodhound_setup:run """, long_description=DESC, )<|fim▁end|>
<|file_name|>RegionEnteredEvent.java<|end_file_name|><|fim▁begin|>package net.senmori.customtextures.events; import net.senmori.customtextures.util.MovementType; import org.bukkit.entity.Player; import org.bukkit.event.player.PlayerEvent; import com.sk89q.worldguard.protection.regions.ProtectedRegion; /** * event that is triggered after a player entered a WorldGuard region * @author mewin<[email protected]> */ public class RegionEnteredEvent extends RegionEvent { <|fim▁hole|> * @param player the player who triggered the event * @param movement the type of movement how the player entered the region */ public RegionEnteredEvent(ProtectedRegion region, Player player, MovementType movement, PlayerEvent parent) { super(player, region, parent, movement); } }<|fim▁end|>
/** * creates a new RegionEnteredEvent * @param region the region the player entered
<|file_name|>predicates.py<|end_file_name|><|fim▁begin|>######################################################################## # amara/xpath/locationpaths/predicates.py """ A parsed token that represents a predicate list. """ from __future__ import absolute_import from itertools import count, izip from amara.xpath import datatypes from amara.xpath.expressions.basics import literal, variable_reference from amara.xpath.expressions.booleans import equality_expr, relational_expr from amara.xpath.functions import position_function from ._nodetests import positionfilter from ._paths import pathiter __all__ = ['predicates', 'predicate'] class predicates(tuple): def __init__(self, *args): self.select = pathiter(pred.select for pred in self).select return def filter(self, nodes, context, reverse): if self: state = context.node, context.position, context.size for predicate in self: nodes = datatypes.nodeset(predicate.select(context, nodes)) context.node, context.position, context.size = state else: nodes = datatypes.nodeset(nodes) if reverse: nodes.reverse() return nodes def pprint(self, indent='', stream=None): print >> stream, indent + repr(self) for pred in self: pred.pprint(indent + ' ', stream) def __str__(self): return self.__unicode__().encode('utf-8') def __repr__(self): ptr = id(self) if ptr < 0: ptr += 0x100000000L return '<%s at 0x%x: %s>' % (self.__class__.__name__, ptr, self) def __unicode__(self): return u''.join(map(unicode, self)) #FIXME: should this derive from boolean_expression? 
class predicate: def __init__(self, expression): self._expr = expression self._provide_context_size = False #See http://trac.xml3k.org/ticket/62 #FIXME: There are probably many code paths which need self._provide_context_size set # Check for just "Number" if isinstance(expression, literal): const = datatypes.number(expression._literal) index = int(const) if index == const and index >= 1: self.select = positionfilter(index) else: # FIXME: add warning that expression will not select anything self.select = izip() return # Check for "position() = Expr" elif isinstance(expression, equality_expr) and expression._op == '=': if isinstance(expression._left, position_function): expression = expression._right if isinstance(expression, literal): const = datatypes.number(expression._literal) index = int(const) if index == const and index >= 1: self.select = positionfilter(index) else:<|fim▁hole|> else: #FIXME: This will kick in the non-lazy behavior too broadly, e.g. in the case of [position = 1+1] #See: http://trac.xml3k.org/ticket/62 self._provide_context_size = True self._expr = expression self.select = self._number return elif isinstance(expression._right, position_function): expression = expression._left if isinstance(expression, literal): const = datatypes.number(expression._literal) index = int(const) if index == const and index >= 1: self.select = positionfilter(index) else: self.select = izip() else: self._expr = expression self.select = self._number return # Check for "position() [>,>=] Expr" or "Expr [<,<=] position()" # FIXME - do full slice-type notation elif isinstance(expression, relational_expr): op = expression._op if (isinstance(expression._left, position_function) and isinstance(expression._right, (literal, variable_reference)) and op in ('>', '>=')): self._start = expression._right self._position = (op == '>') self.select = self._slice return elif (isinstance(expression._left, (literal, variable_reference)) and isinstance(expression._right, Position) and op 
in ('<', '<=')): self._start = expression._left self._position = (op == '<') self.select = self._slice return if issubclass(expression.return_type, datatypes.number): self.select = self._number elif expression.return_type is not datatypes.xpathobject: assert issubclass(expression.return_type, datatypes.xpathobject) self.select = self._boolean return def _slice(self, context, nodes): start = self._start.evaluate_as_number(context) position = self._position if position > start: return nodes position += 1 nodes = iter(nodes) for node in nodes: if position > start: break position += 1 return nodes def _number(self, context, nodes): expr = self._expr position = 1 if self._provide_context_size: nodes = list(nodes) context.size = len(nodes) context.current_node = context.node for node in nodes: context.node, context.position = node, position if expr.evaluate_as_number(context) == position: yield node position += 1 return def _boolean(self, context, nodes): expr = self._expr position = 1 context.current_node = context.node for node in nodes: context.node, context.position = node, position if expr.evaluate_as_boolean(context): yield node position += 1 return def select(self, context, nodes): expr = self._expr position = 1 context.current_node = context.node for node in nodes: context.node, context.position = node, position result = expr.evaluate(context) if isinstance(result, datatypes.number): # This must be separate to prevent falling into # the boolean check. 
if result == position: yield node elif result: yield node position += 1 return def pprint(self, indent='', stream=None): print >> stream, indent + repr(self) self._expr.pprint(indent + ' ', stream) def __str__(self): return self.__unicode__().encode('utf-8') def __repr__(self): ptr = id(self) if ptr < 0: ptr += 0x100000000L return '<%s at 0x%x: %s>' % (self.__class__.__name__, ptr, self) def __unicode__(self): return u'[%s]' % self._expr @property def children(self): 'Child of the parse tree of a predicate is its expression' return (self._expr,)<|fim▁end|>
self.select = izip()
<|file_name|>calendar-base_ru.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1<|fim▁hole|>size 394<|fim▁end|>
oid sha256:7678f6e4188a6066c45fd9a295882aea8e986bbc11eea3dbeabf24eca190b774
<|file_name|>arnoldi.rs<|end_file_name|><|fim▁begin|>use ndarray::*; use ndarray_linalg::{krylov::*, *}; #[test] fn aq_qh_mgs() { let a: Array2<f64> = random((5, 5)); let v: Array1<f64> = random(5); let (q, h) = arnoldi_mgs(a.clone(), v, 1e-9); println!("A = \n{:?}", &a); println!("Q = \n{:?}", &q); println!("H = \n{:?}", &h); let aq = a.dot(&q); let qh = q.dot(&h); println!("AQ = \n{:?}", &aq); println!("QH = \n{:?}", &qh); close_l2(&aq, &qh, 1e-9); } #[test] fn aq_qh_householder() { let a: Array2<f64> = random((5, 5)); let v: Array1<f64> = random(5); let (q, h) = arnoldi_mgs(a.clone(), v, 1e-9); println!("A = \n{:?}", &a); println!("Q = \n{:?}", &q); println!("H = \n{:?}", &h); let aq = a.dot(&q); let qh = q.dot(&h); println!("AQ = \n{:?}", &aq); println!("QH = \n{:?}", &qh); close_l2(&aq, &qh, 1e-9); } #[test] fn aq_qh_mgs_complex() { let a: Array2<c64> = random((5, 5)); let v: Array1<c64> = random(5); let (q, h) = arnoldi_mgs(a.clone(), v, 1e-9); println!("A = \n{:?}", &a); println!("Q = \n{:?}", &q); println!("H = \n{:?}", &h); let aq = a.dot(&q); let qh = q.dot(&h); println!("AQ = \n{:?}", &aq); println!("QH = \n{:?}", &qh); close_l2(&aq, &qh, 1e-9); } #[test] fn aq_qh_householder_complex() { let a: Array2<c64> = random((5, 5)); let v: Array1<c64> = random(5); let (q, h) = arnoldi_mgs(a.clone(), v, 1e-9); println!("A = \n{:?}", &a);<|fim▁hole|> println!("H = \n{:?}", &h); let aq = a.dot(&q); let qh = q.dot(&h); println!("AQ = \n{:?}", &aq); println!("QH = \n{:?}", &qh); close_l2(&aq, &qh, 1e-9); }<|fim▁end|>
println!("Q = \n{:?}", &q);
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! This crate defines [structs for the on-disk minidump format](format/index.html) as well as<|fim▁hole|>//! the actual functionality of reading minidumps using the structs defined in this crate. //! //! [minidump]: https://crates.io/crates/minidump #![warn(missing_debug_implementations)] pub mod format; pub mod traits;<|fim▁end|>
//! [some common traits](traits/index.html) used by related crates. //! //! You probably don't want to use this crate directly, the [minidump][minidump] crate provides
<|file_name|>attendances.js<|end_file_name|><|fim▁begin|>'use strict'; /**<|fim▁hole|> Schedule = mongoose.model('Schedule'), Group = mongoose.model('Group'), _ = require('lodash'); exports.attendance = function(req, res, next, id) { CurrentModel.load(id, function(err, item) { if (err) return next(err); if (!item) return next(new Error('Failed to load item ' + id)); req.attendance = item; next(); }); }; exports.schedule = function(req, res, next, id) { Schedule.load(id, function(err, item) { if (err) return next(err); if (!item) return next(new Error('Failed to load item ' + id)); req.schedule = item; next(); }); }; exports.group = function(req, res, next, id) { Group.load(id, function(err, item) { if (err) return next(err); if (!item) return next(new Error('Failed to load item ' + id)); req.group = item; next(); }); }; exports.create = function(req, res) { var value = new CurrentModel(req.body); value.group = req.group; value.schedule = req.schedule; value.save(function(err) { if (err) { return res.send('users/signup', { errors: err.errors, object: value }); } else { res.jsonp(value); } }); }; exports.update = function(req, res) { var item = req.attendance; item = _.extend(item, req.body); item.save(function(err) { if (err) { return res.send('users/signup', { errors: err.errors, object: item }); } else { res.jsonp(item); } }); }; exports.destroy = function(req, res) { var item = req.attendance; item.remove(function(err) { if (err) { return res.send('users/signup', { errors: err.errors, object: item }); } else { res.jsonp(item); } }); }; exports.show = function(req, res) { res.jsonp(req.attendance); }; exports.all = function(req, res) { CurrentModel.find({ group: req.group, schedule: req.schedule }).populate('participant', 'name email').exec(function(err, items) { if (err) { res.render('error', { status: 500 }); } else { res.jsonp(items); } }); };<|fim▁end|>
* Module dependencies. */ var mongoose = require('mongoose'), CurrentModel = mongoose.model('Attendance'),
<|file_name|>set.go<|end_file_name|><|fim▁begin|>package suffix import ( "bufio" "fmt" "io" "sort" "strings" ) type matchType uint8 const (<|fim▁hole|> // matchMore doesn't match anything directly, // but indicates there are more matching suffixes. matchMore = 1 << iota matchExact = 1 << iota // match exact names matchSub = 1 << iota // match sub-names matchAll = matchExact | matchSub ) func (m matchType) has(f matchType) bool { return m&f == f } // decodeSuffix return suffix and match type based on the pattern. // If trailing dot is present then matchExact is set, // leading dot yields matchSub, otherwise matchAll. func decodeSuffix(suffix string) (string, matchType) { var match matchType if suffix[len(suffix)-1] == '.' { match |= matchExact } if suffix[0] == '.' { match |= matchSub } if match == matchNone { match = matchAll } return strings.Trim(suffix, "."), match } // encodeSuffix is opposite to decodeSuffix, appending dot if necessary. func encodeSuffix(suffix string, match matchType) string { switch true { case match.has(matchAll): return suffix case match.has(matchExact): return suffix + "." case match.has(matchSub): return "." + suffix } return "" } // Set defines set of suffixes type Set struct { names map[string]matchType size int } // Len returns number of suffixes in Set func (set *Set) Len() int { return set.size } // Match contains matching suffix and way it matches type Match struct { Suffix string // raw suffix (domain) Exact bool // if true exact (full) match, otherwise subdomain match } // Add suffix to the set. If suffix starts with a dot, only values ending, // but not equal will be matched; if suffix ends with a dot, only exact // values will be matched. E.g.: // "golang.org" will match golang.org and blog.golang.org // ".golang.org" will match blog.golang.org, but not golang.org // "golang.org." will match golang.org only // ".golang.org." 
is equivalent to "golang.org" func (set *Set) Add(suffix string) []Match { if len(suffix) == 0 { return nil } if set.names == nil { set.names = make(map[string]matchType) } var match matchType suffix, match = decodeSuffix(suffix) // Prepare resulting matches res := make([]Match, 0, 2) if match&matchExact != 0 { res = append(res, Match{suffix, true}) } if match&matchSub != 0 { res = append(res, Match{suffix, false}) } // Increase size if suffix didn't match anything before if set.names[suffix]&matchAll == 0 { set.size++ } set.names[suffix] |= match // Add all parent names to build a tree. // Don't need to do this for matchExact as we check them directly. if match != matchExact { for len(suffix) > 0 { dot := strings.IndexByte(suffix, '.') if dot < 0 { break } suffix = suffix[dot+1:] set.names[suffix] |= matchMore } } return res } // MatchAll calls callback for each matching suffix. func (set *Set) MatchAll(name string, callback func(m Match) bool) { if len(set.names) == 0 { return } // Check exact match first, so we only care about parent suffixes later. // Also means we don't always need to track all parent suffixes in Add(). if set.MatchesExact(name) { if !callback(Match{name, true}) { return } } // Check sub-matches by starting with the last label dot := len(name) for { dot = strings.LastIndexByte(name[:dot], '.') if dot < 0 { break } s := name[dot+1:] // extract current suffix m := set.names[s] // check match if m.has(matchSub) { if !callback(Match{s, false}) { break } } if !m.has(matchMore) { break } } } // Match returns the longest matching suffix. // If nothing matches empty string is returned. func (set *Set) Match(name string) string { var res string set.MatchAll(name, func(m Match) bool { res = m.Suffix return !m.Exact // stop on exact match, otherwise keep matching }) return res } // Matches checks if passed name matches any suffix. // This is potentially quicker than using Match(name) != "" as we stop // searching after the first match. 
func (set *Set) Matches(name string) bool { var res bool set.MatchAll(name, func(Match) bool { res = true return false // stop on first match }) return res } // MatchesExact returns true if name matches exactly. // Similar to Match(name) == name, but requires only a single lookup. func (set *Set) MatchesExact(name string) bool { return len(set.names) > 0 && set.names[name]&matchExact != 0 } // Split splits name into prefix and suffix where suffix is longest matching // suffix from the set. If no suffix matches empty strings are returned. func (set *Set) Split(name string) (pre string, suf string) { suf = set.Match(name) if suf != "" && len(name) > len(suf) { pre = name[:len(name)-len(suf)-1] } return } // ReadFrom reads set from the stream. Each non-empty line of stream is // considered a suffix, except from lines beginning with '#' or '//', which // are treated as comments and skipped. func (set *Set) ReadFrom(r io.Reader) (n int64, err error) { cnt := &counter{} scanner := bufio.NewScanner(io.TeeReader(r, cnt)) for scanner.Scan() { line := strings.Trim(scanner.Text(), " \t") if strings.HasPrefix(line, "#") || strings.HasPrefix(line, "//") { continue } set.Add(line) } return cnt.N, scanner.Err() } // WriteTo serialises set into the writer. // Data is serialised in plain text, each suffix in a separate line. // Suffixes are written in lexicographical order. func (set *Set) WriteTo(w io.Writer) (n int64, err error) { suffs := make([]string, 0, set.Len()) for s, m := range set.names { s = encodeSuffix(s, m) if s != "" { suffs = append(suffs, s) } } sort.Strings(suffs) c := &counter{W: w} for n := range suffs { _, err = fmt.Fprintln(c, suffs[n]) if err != nil { break } } return c.N, err } type counter struct { W io.Writer N int64 } func (c *counter) Write(p []byte) (n int, err error) { if c.W != nil { n, err = c.W.Write(p) } else { n = len(p) } c.N += int64(n) return } // PlusOne returns matching suffix plus one label from the name. 
// For example if set containt 'com' and name is 'www.blog.com', // this function would return 'blog.com'. Returned string is empty if there // is no matching suffix in the set or an additional label is missing. func PlusOne(set *Set, name string) string { pre, suf := set.Split(name) if suf == "" || pre == "" { return "" } return pre[strings.LastIndexByte(pre, '.')+1:] + "." + suf }<|fim▁end|>
matchNone matchType = iota
<|file_name|>boss_koralon.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2014-2017 StormCore * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the * Free Software Foundation; either version 2 of the License, or (at your * option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program. If not, see <http://www.gnu.org/licenses/>. */ #include "ScriptMgr.h" #include "ScriptedCreature.h" #include "SpellAuraEffects.h" #include "SpellScript.h" #include "vault_of_archavon.h" enum Events { // Koralon EVENT_BURNING_BREATH = 1, EVENT_BURNING_FURY = 2, EVENT_FLAME_CINDER = 3, EVENT_METEOR_FISTS = 4, // Flame Warder EVENT_FW_LAVA_BIRST = 5, EVENT_FW_METEOR_FISTS = 6 }; enum Spells { // Spells Koralon SPELL_BURNING_BREATH = 66665, SPELL_BURNING_FURY = 66721, SPELL_FLAME_CINDER_A = 66684, SPELL_FLAME_CINDER_B = 66681, // don't know the real relation to SPELL_FLAME_CINDER_A atm. 
SPELL_METEOR_FISTS = 66725, SPELL_METEOR_FISTS_DAMAGE = 66765, // Spells Flame Warder SPELL_FW_LAVA_BIRST = 66813, SPELL_FW_METEOR_FISTS = 66808, SPELL_FW_METEOR_FISTS_DAMAGE = 66809 }; class boss_koralon : public CreatureScript { public: boss_koralon() : CreatureScript("boss_koralon") { } struct boss_koralonAI : public BossAI { boss_koralonAI(Creature* creature) : BossAI(creature, DATA_KORALON) { } void EnterCombat(Unit* /*who*/) override { DoCast(me, SPELL_BURNING_FURY); events.ScheduleEvent(EVENT_BURNING_FURY, 20000); /// @todo check timer events.ScheduleEvent(EVENT_BURNING_BREATH, 15000); // 1st after 15sec, then every 45sec events.ScheduleEvent(EVENT_METEOR_FISTS, 75000); // 1st after 75sec, then every 45sec events.ScheduleEvent(EVENT_FLAME_CINDER, 30000); /// @todo check timer _EnterCombat(); } void UpdateAI(uint32 diff) override { if (!UpdateVictim()) return; events.Update(diff); if (me->HasUnitState(UNIT_STATE_CASTING)) return; while (uint32 eventId = events.ExecuteEvent()) { switch (eventId) { case EVENT_BURNING_FURY: DoCast(me, SPELL_BURNING_FURY); events.ScheduleEvent(EVENT_BURNING_FURY, 20000);<|fim▁hole|> DoCast(me, SPELL_BURNING_BREATH); events.ScheduleEvent(EVENT_BURNING_BREATH, 45000); break; case EVENT_METEOR_FISTS: DoCast(me, SPELL_METEOR_FISTS); events.ScheduleEvent(EVENT_METEOR_FISTS, 45000); break; case EVENT_FLAME_CINDER: DoCast(me, SPELL_FLAME_CINDER_A); events.ScheduleEvent(EVENT_FLAME_CINDER, 30000); break; default: break; } } DoMeleeAttackIfReady(); } }; CreatureAI* GetAI(Creature* creature) const override { return new boss_koralonAI(creature); } }; /*###### ## Npc Flame Warder ######*/ class npc_flame_warder : public CreatureScript { public: npc_flame_warder() : CreatureScript("npc_flame_warder") { } struct npc_flame_warderAI : public ScriptedAI { npc_flame_warderAI(Creature* creature) : ScriptedAI(creature) { } void Reset() override { events.Reset(); } void EnterCombat(Unit* /*who*/) override { DoZoneInCombat(); 
events.ScheduleEvent(EVENT_FW_LAVA_BIRST, 5000); events.ScheduleEvent(EVENT_FW_METEOR_FISTS, 10000); } void UpdateAI(uint32 diff) override { if (!UpdateVictim()) return; events.Update(diff); while (uint32 eventId = events.ExecuteEvent()) { switch (eventId) { case EVENT_FW_LAVA_BIRST: DoCastVictim(SPELL_FW_LAVA_BIRST); events.ScheduleEvent(EVENT_FW_LAVA_BIRST, 15000); break; case EVENT_FW_METEOR_FISTS: DoCast(me, SPELL_FW_METEOR_FISTS); events.ScheduleEvent(EVENT_FW_METEOR_FISTS, 20000); break; default: break; } } DoMeleeAttackIfReady(); } private: EventMap events; }; CreatureAI* GetAI(Creature* creature) const override { return new npc_flame_warderAI(creature); } }; class spell_koralon_meteor_fists : public SpellScriptLoader { public: spell_koralon_meteor_fists() : SpellScriptLoader("spell_koralon_meteor_fists") { } class spell_koralon_meteor_fists_AuraScript : public AuraScript { PrepareAuraScript(spell_koralon_meteor_fists_AuraScript); bool Validate(SpellInfo const* /*spellInfo*/) override { if (!sSpellMgr->GetSpellInfo(SPELL_METEOR_FISTS_DAMAGE)) return false; return true; } void TriggerFists(AuraEffect const* aurEff, ProcEventInfo& eventInfo) { PreventDefaultAction(); GetTarget()->CastSpell(eventInfo.GetProcTarget(), SPELL_METEOR_FISTS_DAMAGE, true, NULL, aurEff); } void Register() override { OnEffectProc += AuraEffectProcFn(spell_koralon_meteor_fists_AuraScript::TriggerFists, EFFECT_0, SPELL_AURA_DUMMY); } }; AuraScript* GetAuraScript() const override { return new spell_koralon_meteor_fists_AuraScript(); } }; class spell_koralon_meteor_fists_damage : public SpellScriptLoader { public: spell_koralon_meteor_fists_damage() : SpellScriptLoader("spell_koralon_meteor_fists_damage") { } class spell_koralon_meteor_fists_damage_SpellScript : public SpellScript { PrepareSpellScript(spell_koralon_meteor_fists_damage_SpellScript); public: spell_koralon_meteor_fists_damage_SpellScript() { _chainTargets = 0; } private: void FilterTargets(std::list<WorldObject*>& targets) { 
_chainTargets = targets.size(); } void CalculateSplitDamage() { if (_chainTargets) SetHitDamage(GetHitDamage() / (_chainTargets + 1)); } void Register() override { OnObjectAreaTargetSelect += SpellObjectAreaTargetSelectFn(spell_koralon_meteor_fists_damage_SpellScript::FilterTargets, EFFECT_0, TARGET_UNIT_TARGET_ENEMY); OnHit += SpellHitFn(spell_koralon_meteor_fists_damage_SpellScript::CalculateSplitDamage); } private: uint8 _chainTargets; }; SpellScript* GetSpellScript() const override { return new spell_koralon_meteor_fists_damage_SpellScript(); } }; class spell_flame_warder_meteor_fists : public SpellScriptLoader { public: spell_flame_warder_meteor_fists() : SpellScriptLoader("spell_flame_warder_meteor_fists") { } class spell_flame_warder_meteor_fists_AuraScript : public AuraScript { PrepareAuraScript(spell_flame_warder_meteor_fists_AuraScript); bool Validate(SpellInfo const* /*spellInfo*/) override { if (!sSpellMgr->GetSpellInfo(SPELL_FW_METEOR_FISTS_DAMAGE)) return false; return true; } void TriggerFists(AuraEffect const* aurEff, ProcEventInfo& eventInfo) { PreventDefaultAction(); GetTarget()->CastSpell(eventInfo.GetProcTarget(), SPELL_FW_METEOR_FISTS_DAMAGE, true, NULL, aurEff); } void Register() override { OnEffectProc += AuraEffectProcFn(spell_flame_warder_meteor_fists_AuraScript::TriggerFists, EFFECT_0, SPELL_AURA_DUMMY); } }; AuraScript* GetAuraScript() const override { return new spell_flame_warder_meteor_fists_AuraScript(); } }; void AddSC_boss_koralon() { new boss_koralon(); new npc_flame_warder(); new spell_koralon_meteor_fists(); new spell_koralon_meteor_fists_damage(); new spell_flame_warder_meteor_fists(); }<|fim▁end|>
break; case EVENT_BURNING_BREATH:
<|file_name|>hello_coroutine.py<|end_file_name|><|fim▁begin|>"""Print 'Hello World' every two seconds, using a coroutine.""" import asyncio @asyncio.coroutine def greet_every_two_seconds(): while True: print('Hello World') yield from asyncio.sleep(2) if __name__ == '__main__': loop = asyncio.get_event_loop() try: loop.run_until_complete(greet_every_two_seconds()) finally:<|fim▁hole|><|fim▁end|>
loop.close()
<|file_name|>dependency.py<|end_file_name|><|fim▁begin|># orm/dependency.py # Copyright (C) 2005, 2006, 2007, 2008 Michael Bayer [email protected] # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Bridge the ``PropertyLoader`` (i.e. a ``relation()``) and the ``UOWTransaction`` together to allow processing of relation()-based dependencies at flush time. """ from sqlalchemy.orm import sync from sqlalchemy import sql, util, exceptions from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE, MANYTOMANY def create_dependency_processor(prop): types = { ONETOMANY : OneToManyDP, MANYTOONE: ManyToOneDP, MANYTOMANY : ManyToManyDP, } if prop.association is not None: return AssociationDP(prop) else: return types[prop.direction](prop) class DependencyProcessor(object): no_dependencies = False def __init__(self, prop): self.prop = prop self.cascade = prop.cascade self.mapper = prop.mapper self.parent = prop.parent self.secondary = prop.secondary self.direction = prop.direction self.is_backref = prop.is_backref self.post_update = prop.post_update self.foreign_keys = prop.foreign_keys self.passive_deletes = prop.passive_deletes self.passive_updates = prop.passive_updates self.enable_typechecks = prop.enable_typechecks self.key = prop.key if not self.prop.synchronize_pairs: raise exceptions.ArgumentError("Can't build a DependencyProcessor for relation %s. No target attributes to populate between parent and child are present" % self.prop) def _get_instrumented_attribute(self): """Return the ``InstrumentedAttribute`` handled by this ``DependencyProecssor``. 
""" return getattr(self.parent.class_, self.key) def hasparent(self, state): """return True if the given object instance has a parent, according to the ``InstrumentedAttribute`` handled by this ``DependencyProcessor``.""" # TODO: use correct API for this return self._get_instrumented_attribute().impl.hasparent(state) def register_dependencies(self, uowcommit): """Tell a ``UOWTransaction`` what mappers are dependent on which, with regards to the two or three mappers handled by this ``PropertyLoader``. Also register itself as a *processor* for one of its mappers, which will be executed after that mapper's objects have been saved or before they've been deleted. The process operation manages attributes and dependent operations upon the objects of one of the involved mappers. """ raise NotImplementedError() def whose_dependent_on_who(self, state1, state2): """Given an object pair assuming `obj2` is a child of `obj1`, return a tuple with the dependent object second, or None if there is no dependency. """ if state1 is state2: return None elif self.direction == ONETOMANY: return (state1, state2) else: return (state2, state1) def process_dependencies(self, task, deplist, uowcommit, delete = False): """This method is called during a flush operation to synchronize data between a parent and child object. It is called within the context of the various mappers and sometimes individual objects sorted according to their insert/update/delete order (topological sort). """ raise NotImplementedError() def preprocess_dependencies(self, task, deplist, uowcommit, delete = False): """Used before the flushes' topological sort to traverse through related objects and ensure every instance which will require save/update/delete is properly added to the UOWTransaction. 
""" raise NotImplementedError() def _verify_canload(self, state): if not self.enable_typechecks: return if state is not None and not self.mapper._canload(state): raise exceptions.FlushError("Attempting to flush an item of type %s on collection '%s', which is handled by mapper '%s' and does not load items of that type. Did you mean to use a polymorphic mapper for this relationship ? Set 'enable_typechecks=False' on the relation() to disable this exception. Mismatched typeloading may cause bi-directional relationships (backrefs) to not function properly." % (state.class_, self.prop, self.mapper)) def _synchronize(self, state, child, associationrow, clearkeys, uowcommit): """Called during a flush to synchronize primary key identifier values between a parent/child object, as well as to an associationrow in the case of many-to-many. """ raise NotImplementedError() def _conditional_post_update(self, state, uowcommit, related): """Execute a post_update call. For relations that contain the post_update flag, an additional ``UPDATE`` statement may be associated after an ``INSERT`` or before a ``DELETE`` in order to resolve circular row dependencies. This method will check for the post_update flag being set on a particular relationship, and given a target object and list of one or more related objects, and execute the ``UPDATE`` if the given related object list contains ``INSERT``s or ``DELETE``s. 
""" if state is not None and self.post_update: for x in related: if x is not None: uowcommit.register_object(state, postupdate=True, post_update_cols=[r for l, r in self.prop.synchronize_pairs]) break def _pks_changed(self, uowcommit, state): raise NotImplementedError() def __str__(self): return "%s(%s)" % (self.__class__.__name__, str(self.prop)) class OneToManyDP(DependencyProcessor): def register_dependencies(self, uowcommit): if self.post_update: if not self.is_backref: stub = MapperStub(self.parent, self.mapper, self.key) uowcommit.register_dependency(self.mapper, stub) uowcommit.register_dependency(self.parent, stub) uowcommit.register_processor(stub, self, self.parent) else: uowcommit.register_dependency(self.parent, self.mapper) uowcommit.register_processor(self.parent, self, self.parent) def process_dependencies(self, task, deplist, uowcommit, delete = False): #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " process_dep isdelete " + repr(delete) + " direction " + repr(self.direction) if delete: # head object is being deleted, and we manage its list of child objects # the child objects have to have their foreign key to the parent set to NULL # this phase can be called safely for any cascade but is unnecessary if delete cascade # is on. 
if self.post_update or not self.passive_deletes=='all': for state in deplist: (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes) if unchanged or deleted: for child in deleted: if child is not None and self.hasparent(child) is False: self._synchronize(state, child, None, True, uowcommit) self._conditional_post_update(child, uowcommit, [state]) if self.post_update or not self.cascade.delete: for child in unchanged: if child is not None: self._synchronize(state, child, None, True, uowcommit) self._conditional_post_update(child, uowcommit, [state]) else: for state in deplist: (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key, passive=True) if added or deleted: for child in added: self._synchronize(state, child, None, False, uowcommit) if child is not None: self._conditional_post_update(child, uowcommit, [state]) for child in deleted: if not self.cascade.delete_orphan and not self.hasparent(child): self._synchronize(state, child, None, True, uowcommit) if self._pks_changed(uowcommit, state): if unchanged: for child in unchanged: self._synchronize(state, child, None, False, uowcommit) <|fim▁hole|> # head object is being deleted, and we manage its list of child objects # the child objects have to have their foreign key to the parent set to NULL if not self.post_update: should_null_fks = not self.cascade.delete and not self.passive_deletes=='all' for state in deplist: (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes) if unchanged or deleted: for child in deleted: if child is not None and self.hasparent(child) is False: if self.cascade.delete_orphan: uowcommit.register_object(child, isdelete=True) else: uowcommit.register_object(child) if should_null_fks: for child in unchanged: if child is not None: uowcommit.register_object(child) else: for state in deplist: (added, unchanged, deleted) = uowcommit.get_attribute_history(state, 
self.key,passive=True) if added or deleted: for child in added: if child is not None: uowcommit.register_object(child) for child in deleted: if not self.cascade.delete_orphan: uowcommit.register_object(child, isdelete=False) elif self.hasparent(child) is False: uowcommit.register_object(child, isdelete=True) for c, m in self.mapper.cascade_iterator('delete', child): uowcommit.register_object(c._state, isdelete=True) if not self.passive_updates and self._pks_changed(uowcommit, state): if not unchanged: (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key, passive=False) if unchanged: for child in unchanged: uowcommit.register_object(child) def _synchronize(self, state, child, associationrow, clearkeys, uowcommit): source = state dest = child if dest is None or (not self.post_update and uowcommit.is_deleted(dest)): return self._verify_canload(child) if clearkeys: sync.clear(dest, self.mapper, self.prop.synchronize_pairs) else: sync.populate(source, self.parent, dest, self.mapper, self.prop.synchronize_pairs) def _pks_changed(self, uowcommit, state): return sync.source_changes(uowcommit, state, self.parent, self.prop.synchronize_pairs) class DetectKeySwitch(DependencyProcessor): """a special DP that works for many-to-one relations, fires off for child items who have changed their referenced key.""" no_dependencies = True def register_dependencies(self, uowcommit): uowcommit.register_processor(self.parent, self, self.mapper) def preprocess_dependencies(self, task, deplist, uowcommit, delete=False): # for non-passive updates, register in the preprocess stage # so that mapper save_obj() gets a hold of changes if not delete and not self.passive_updates: self._process_key_switches(deplist, uowcommit) def process_dependencies(self, task, deplist, uowcommit, delete=False): # for passive updates, register objects in the process stage # so that we avoid ManyToOneDP's registering the object without # the listonly flag in its own preprocess stage 
(results in UPDATE) # statements being emitted if not delete and self.passive_updates: self._process_key_switches(deplist, uowcommit) def _process_key_switches(self, deplist, uowcommit): switchers = util.Set([s for s in deplist if self._pks_changed(uowcommit, s)]) if switchers: # yes, we're doing a linear search right now through the UOW. only # takes effect when primary key values have actually changed. # a possible optimization might be to enhance the "hasparents" capability of # attributes to actually store all parent references, but this introduces # more complicated attribute accounting. for s in [elem for elem in uowcommit.session.identity_map.all_states() if issubclass(elem.class_, self.parent.class_) and self.key in elem.dict and elem.dict[self.key]._state in switchers ]: uowcommit.register_object(s, listonly=self.passive_updates) sync.populate(s.dict[self.key]._state, self.mapper, s, self.parent, self.prop.synchronize_pairs) #self.syncrules.execute(s.dict[self.key]._state, s, None, None, False) def _pks_changed(self, uowcommit, state): return sync.source_changes(uowcommit, state, self.mapper, self.prop.synchronize_pairs) class ManyToOneDP(DependencyProcessor): def __init__(self, prop): DependencyProcessor.__init__(self, prop) self.mapper._dependency_processors.append(DetectKeySwitch(prop)) def register_dependencies(self, uowcommit): if self.post_update: if not self.is_backref: stub = MapperStub(self.parent, self.mapper, self.key) uowcommit.register_dependency(self.mapper, stub) uowcommit.register_dependency(self.parent, stub) uowcommit.register_processor(stub, self, self.parent) else: uowcommit.register_dependency(self.mapper, self.parent) uowcommit.register_processor(self.mapper, self, self.parent) def process_dependencies(self, task, deplist, uowcommit, delete = False): #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " process_dep isdelete " + repr(delete) + " direction " + repr(self.direction) if delete: if 
self.post_update and not self.cascade.delete_orphan and not self.passive_deletes=='all': # post_update means we have to update our row to not reference the child object # before we can DELETE the row for state in deplist: self._synchronize(state, None, None, True, uowcommit) (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes) if added or unchanged or deleted: self._conditional_post_update(state, uowcommit, deleted + unchanged + added) else: for state in deplist: (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=True) if added or deleted or unchanged: for child in added: self._synchronize(state, child, None, False, uowcommit) self._conditional_post_update(state, uowcommit, deleted + unchanged + added) def preprocess_dependencies(self, task, deplist, uowcommit, delete = False): #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " PRE process_dep isdelete " + repr(delete) + " direction " + repr(self.direction) if self.post_update: return if delete: if self.cascade.delete or self.cascade.delete_orphan: for state in deplist: (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes) if self.cascade.delete_orphan: todelete = added + unchanged + deleted else: todelete = added + unchanged for child in todelete: if child is None: continue uowcommit.register_object(child, isdelete=True) for c, m in self.mapper.cascade_iterator('delete', child): uowcommit.register_object(c._state, isdelete=True) else: for state in deplist: uowcommit.register_object(state) if self.cascade.delete_orphan: (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes) if deleted: for child in deleted: if self.hasparent(child) is False: uowcommit.register_object(child, isdelete=True) for c, m in self.mapper.cascade_iterator('delete', child): uowcommit.register_object(c._state, 
isdelete=True) def _synchronize(self, state, child, associationrow, clearkeys, uowcommit): if state is None or (not self.post_update and uowcommit.is_deleted(state)): return if clearkeys or child is None: sync.clear(state, self.parent, self.prop.synchronize_pairs) else: self._verify_canload(child) sync.populate(child, self.mapper, state, self.parent, self.prop.synchronize_pairs) class ManyToManyDP(DependencyProcessor): def register_dependencies(self, uowcommit): # many-to-many. create a "Stub" mapper to represent the # "middle table" in the relationship. This stub mapper doesnt save # or delete any objects, but just marks a dependency on the two # related mappers. its dependency processor then populates the # association table. stub = MapperStub(self.parent, self.mapper, self.key) uowcommit.register_dependency(self.parent, stub) uowcommit.register_dependency(self.mapper, stub) uowcommit.register_processor(stub, self, self.parent) def process_dependencies(self, task, deplist, uowcommit, delete = False): #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " process_dep isdelete " + repr(delete) + " direction " + repr(self.direction) connection = uowcommit.transaction.connection(self.mapper) secondary_delete = [] secondary_insert = [] secondary_update = [] if self.prop._reverse_property: reverse_dep = getattr(self.prop._reverse_property, '_dependency_processor', None) else: reverse_dep = None if delete: for state in deplist: (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes) if deleted or unchanged: for child in deleted + unchanged: if child is None or (reverse_dep and (reverse_dep, "manytomany", child, state) in uowcommit.attributes): continue associationrow = {} self._synchronize(state, child, associationrow, False, uowcommit) secondary_delete.append(associationrow) uowcommit.attributes[(self, "manytomany", state, child)] = True else: for state in deplist: (added, unchanged, 
deleted) = uowcommit.get_attribute_history(state, self.key) if added or deleted: for child in added: if child is None or (reverse_dep and (reverse_dep, "manytomany", child, state) in uowcommit.attributes): continue associationrow = {} self._synchronize(state, child, associationrow, False, uowcommit) uowcommit.attributes[(self, "manytomany", state, child)] = True secondary_insert.append(associationrow) for child in deleted: if child is None or (reverse_dep and (reverse_dep, "manytomany", child, state) in uowcommit.attributes): continue associationrow = {} self._synchronize(state, child, associationrow, False, uowcommit) uowcommit.attributes[(self, "manytomany", state, child)] = True secondary_delete.append(associationrow) if not self.passive_updates and unchanged and self._pks_changed(uowcommit, state): for child in unchanged: associationrow = {} sync.update(state, self.parent, associationrow, "old_", self.prop.synchronize_pairs) sync.update(child, self.mapper, associationrow, "old_", self.prop.secondary_synchronize_pairs) #self.syncrules.update(associationrow, state, child, "old_") secondary_update.append(associationrow) if secondary_delete: secondary_delete.sort() # TODO: precompile the delete/insert queries? 
statement = self.secondary.delete(sql.and_(*[c == sql.bindparam(c.key, type_=c.type) for c in self.secondary.c if c.key in associationrow])) result = connection.execute(statement, secondary_delete) if result.supports_sane_multi_rowcount() and result.rowcount != len(secondary_delete): raise exceptions.ConcurrentModificationError("Deleted rowcount %d does not match number of secondary table rows deleted from table '%s': %d" % (result.rowcount, self.secondary.description, len(secondary_delete))) if secondary_update: statement = self.secondary.update(sql.and_(*[c == sql.bindparam("old_" + c.key, type_=c.type) for c in self.secondary.c if c.key in associationrow])) result = connection.execute(statement, secondary_update) if result.supports_sane_multi_rowcount() and result.rowcount != len(secondary_update): raise exceptions.ConcurrentModificationError("Updated rowcount %d does not match number of secondary table rows updated from table '%s': %d" % (result.rowcount, self.secondary.description, len(secondary_update))) if secondary_insert: statement = self.secondary.insert() connection.execute(statement, secondary_insert) def preprocess_dependencies(self, task, deplist, uowcommit, delete = False): #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " preprocess_dep isdelete " + repr(delete) + " direction " + repr(self.direction) if not delete: for state in deplist: (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=True) if deleted: for child in deleted: if self.cascade.delete_orphan and self.hasparent(child) is False: uowcommit.register_object(child, isdelete=True) for c, m in self.mapper.cascade_iterator('delete', child): uowcommit.register_object(c._state, isdelete=True) def _synchronize(self, state, child, associationrow, clearkeys, uowcommit): if associationrow is None: return self._verify_canload(child) sync.populate_dict(state, self.parent, associationrow, self.prop.synchronize_pairs) 
sync.populate_dict(child, self.mapper, associationrow, self.prop.secondary_synchronize_pairs) def _pks_changed(self, uowcommit, state): return sync.source_changes(uowcommit, state, self.parent, self.prop.synchronize_pairs) class AssociationDP(OneToManyDP): def __init__(self, *args, **kwargs): super(AssociationDP, self).__init__(*args, **kwargs) self.cascade.delete = True self.cascade.delete_orphan = True class MapperStub(object): """Pose as a Mapper representing the association table in a many-to-many join, when performing a ``flush()``. The ``Task`` objects in the objectstore module treat it just like any other ``Mapper``, but in fact it only serves as a dependency placeholder for the many-to-many update task. """ __metaclass__ = util.ArgSingleton def __init__(self, parent, mapper, key): self.mapper = mapper self.base_mapper = self self.class_ = mapper.class_ self._inheriting_mappers = [] def polymorphic_iterator(self): return iter([self]) def _register_dependencies(self, uowcommit): pass def _save_obj(self, *args, **kwargs): pass def _delete_obj(self, *args, **kwargs): pass def primary_mapper(self): return self<|fim▁end|>
def preprocess_dependencies(self, task, deplist, uowcommit, delete = False): #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " preprocess_dep isdelete " + repr(delete) + " direction " + repr(self.direction) if delete:
<|file_name|>ADM_muxerUtils.cpp<|end_file_name|><|fim▁begin|>/*************************************************************************** ADM_muxerUtils.cpp - description ------------------- copyright : (C) 2008 by mean email : [email protected] ***************************************************************************/ /*************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ #include "ADM_default.h" #include "ADM_muxerInternal.h" #include "ADM_muxerUtils.h" /** \fn rescaleFps \brief Rescale fps to be accurate (i.e. 23.976 become 24000/1001) */ void rescaleFps(uint32_t fps1000, AVRational *rational) { switch(fps1000) { case 23976 : { rational->num=1001; rational->den=24000; break; } case 29970 : { rational->num=1001; rational->den=30000; break; } default: rational->num=1000; rational->den=fps1000; } printf("[MP3] TimeBase for video %d/%d\n",rational->num,rational->den); } /** \fn rescaleLavPts \brief Rescale PTS/DTS the lavformat way, i.e. relative to the scale. */ uint64_t rescaleLavPts(uint64_t us, AVRational *scale) { if(us==ADM_NO_PTS) return 0x8000000000000000LL; // AV_NOPTS_VALUE double db=(double)us; double s=scale->den; db*=s; db=(db)/1000000.; // in seconds uint64_t i=(uint64_t)db; // round up to the closer num value<|fim▁hole|> i*=scale->num; return i; }<|fim▁end|>
i=(i+scale->num-1)/scale->num;
<|file_name|>RayWarper.java<|end_file_name|><|fim▁begin|>package au.com.codeka.planetrender; import java.util.Random; import au.com.codeka.common.PerlinNoise; import au.com.codeka.common.Vector2; import au.com.codeka.common.Vector3; /** * This class takes a ray that's going in a certain direction and warps it based on a noise pattern. This is used * to generate misshapen asteroid images, for example. */ public class RayWarper { private NoiseGenerator mNoiseGenerator; private double mWarpFactor; public RayWarper(Template.WarpTemplate tmpl, Random rand) { if (tmpl.getNoiseGenerator() == Template.WarpTemplate.NoiseGenerator.Perlin) { mNoiseGenerator = new PerlinGenerator(tmpl, rand); } else if (tmpl.getNoiseGenerator() == Template.WarpTemplate.NoiseGenerator.Spiral) { mNoiseGenerator = new SpiralGenerator(tmpl, rand); } mWarpFactor = tmpl.getWarpFactor(); } public void warp(Vector3 vec, double u, double v) { mNoiseGenerator.warp(vec, u, v, mWarpFactor); } static abstract class NoiseGenerator { protected double getNoise(double u, double v) { return 0.0; } protected Vector3 getValue(double u, double v) { double x = getNoise(u * 0.25, v * 0.25); double y = getNoise(0.25 + u * 0.25, v * 0.25); double z = getNoise(u * 0.25, 0.25 + v * 0.25); return Vector3.pool.borrow().reset(x, y, z); } protected void warp(Vector3 vec, double u, double v, double factor) { Vector3 warpVector = getValue(u, v); warpVector.reset(warpVector.x * factor + (1.0 - factor), warpVector.y * factor + (1.0 - factor), warpVector.z * factor + (1.0 - factor)); vec.reset(vec.x * warpVector.x, vec.y * warpVector.y, vec.z * warpVector.z); Vector3.pool.release(warpVector); } } static class PerlinGenerator extends NoiseGenerator { private PerlinNoise mNoise; public PerlinGenerator(Template.WarpTemplate tmpl, Random rand) { mNoise = new TemplatedPerlinNoise(tmpl.getParameter(Template.PerlinNoiseTemplate.class), rand); } @Override public double getNoise(double u, double v) { return mNoise.getNoise(u, v); } } 
static class SpiralGenerator extends NoiseGenerator { public SpiralGenerator(Template.WarpTemplate tmpl, Random rand) { }<|fim▁hole|> @Override protected void warp(Vector3 vec, double u, double v, double factor) { Vector2 uv = Vector2.pool.borrow().reset(u, v); uv.rotate(factor * uv.length() * 2.0 * Math.PI * 2.0 / 360.0); vec.reset(uv.x, -uv.y, 1.0); Vector2.pool.release(uv); } } }<|fim▁end|>
<|file_name|>lexer.rs<|end_file_name|><|fim▁begin|>use lexeme::Lexeme; /*** * Structure Lexer * -> Lexical Analyzer structure used to analyze given source code */ #[allow(dead_code)] pub struct Lexer<'a>{ index: usize, errors: u32, code: String, whitespaces: String, terminals: Vec<&'a str>, lexemes: Vec<Lexeme> } #[allow(dead_code)] impl<'a> Lexer<'a> { // trait new for Lexer structure pub fn new(code: String) -> Lexer<'a> { Lexer { index: 0,<|fim▁hole|> errors: 0, code: code, whitespaces: String::from(" \t"), terminals: vec![ "let","for","while","do_while","if", "else_if","else","continue","break","return", "true","false","fn", ">",">=","<","<=", "==","!=","+","-","*","/","%", "=",",",";","{","}","(",")","[", "]","\n","&&", "||", "!" ], lexemes: vec![ Lexeme::Let, Lexeme::For, Lexeme::While, Lexeme::DoWhile, Lexeme::If, Lexeme::ElseIf, Lexeme::Else, Lexeme::Continue, Lexeme::Break, Lexeme::Return, Lexeme::True, Lexeme::False, Lexeme::Fn, Lexeme::Greater, Lexeme::GreaterEqual, Lexeme::Less, Lexeme::LessEqual, Lexeme::IsEqual, Lexeme::IsNotEqual, Lexeme::Plus, Lexeme::Minus, Lexeme::Multiply, Lexeme::Divide, Lexeme::Modulo, Lexeme::Equals, Lexeme::Comma, Lexeme::SemiColon, Lexeme::OpenBrace, Lexeme::CloseBrace, Lexeme::OpenParenthesis, Lexeme::CloseParenthesis, Lexeme::OpenBracket, Lexeme::CloseBracket, Lexeme::Newline, Lexeme::And, Lexeme::Or, Lexeme::Not ], } } // returns lexemes for each token scanned from the source code pub fn analyze(&mut self) -> Vec<Lexeme> { let mut lexemes = Vec::<Lexeme>::new(); while self.index < self.code.len() - 1{ let mut construct = false; self.skip(); for i in 0..self.terminals.len() { let is_fit = self.terminals[i].len() + self.index < self.code.len(); if is_fit && self.peek(self.terminals[i].len()) == self.terminals[i] { lexemes.push(self.lexemes[i].clone()); self.index += self.terminals[i].len(); construct = true; break; } } if !construct { if self.getc().is_numeric() { lexemes.push(self.number()); } else if self.getc() == '\"' { 
lexemes.push(self.string()); } else if self.getc().is_alphabetic() { lexemes.push(self.identifier()); } else { self.errors += 1; println!("Syntax Error at {} : {}.", self.index, self.code.len()); } } } if self.errors > 0 { println!("Total Errors : {}", self.errors); } else { println!("Build Successful!"); println!("_________________"); } lexemes } // returns number of errors pub fn count_errors(&mut self) -> u32 { self.errors } // returns current character fn getc(&self) -> char { self.code.chars().nth(self.index).unwrap() } // skips whitespaces fn skip(&mut self) { while self.whitespaces.contains(self.getc()) { self.index += 1; } } // returns part of remaining slice fn peek(&self, length: usize) -> &str { &self.code[self.index..self.index+length] } // returns identifier token and its value fn identifier(&mut self) -> Lexeme { let mut varname = String::new(); if self.getc().is_alphabetic() { varname.push(self.getc()); self.index += 1; while self.getc().is_alphanumeric() { varname.push(self.getc()); self.index += 1; } } Lexeme::Identifier(varname) } // returns number token and its value fn number(&mut self) -> Lexeme { let mut n = String::new(); while self.getc().is_numeric() { n.push(self.getc()); self.index += 1; } Lexeme::Number(n) } // returns string token and its value fn string(&mut self) -> Lexeme { let mut s = String::new(); self.index += 1; while self.getc() != '\"' { s.push(self.getc()); self.index += 1; } self.index += 1; Lexeme::StringLiteral(s) } }<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![crate_type = "lib"] #![crate_name = "comm"] #![feature(box_syntax, core, alloc, oom, heap_api, unsafe_no_drop_flag, filling_drop, wait_timeout, wait_timeout_with, static_mutex, raw, nonzero, drain, num_bits_bytes)] #![cfg_attr(test, feature(test, scoped))] #![cfg_attr(test, allow(deprecated))] #![allow(dead_code, trivial_casts, trivial_numeric_casts, drop_with_repr_extern)] //! Communication primitives. //! //! This library provides types for message passing between threads and polling. //! Concretely, it provides //! //! - Single-producer single-consumer (SPSC), //! - Single-producer multiple-consumers (SPMC), //! - Multiple-producers single-consumer (MPSC), and //! - Multiple-producers multiple-consumers (MPMC) //! //! channels of different flavors and a `Select` object which can poll the consuming ends //! of these channels for readiness. //! //! ### Examples //! //! Simple usage: //! //! ``` //! use std::{thread}; //! use comm::{spsc}; //! //! // Create a bounded SPSC channel. //! let (send, recv) = spsc::bounded::new(10); //! thread::spawn(move || { //! send.send_sync(10).unwrap(); //! }); //! assert_eq!(recv.recv_sync().unwrap(), 10); //! ``` //!<|fim▁hole|>//! ``` //! use std::{thread}; //! use comm::{mpsc}; //! //! // Create an unbounded MPSC channel. //! let (send, recv) = mpsc::unbounded::new(); //! for i in 0..10 { //! let send = send.clone(); //! thread::spawn(move || { //! send.send(i).unwrap(); //! }); //! } //! drop(send); //! while let Ok(n) = recv.recv_sync() { //! println!("{}", n); //! } //! ``` //! //! Selecting: //! //! ``` //! #![feature(std_misc, thread_sleep)] //! //! use std::thread::{self, sleep_ms}; //! use comm::{spsc}; //! use comm::select::{Select, Selectable}; //! //! let mut channels = vec!(); //! for i in 0..10 { //! let (send, recv) = spsc::one_space::new(); //! channels.push(recv); //! thread::spawn(move || { //! sleep_ms(100); //! send.send(i).ok(); //! }); //! } //! 
let select = Select::new(); //! for recv in &channels { //! select.add(recv); //! } //! let first_ready = select.wait(&mut [0])[0]; //! for recv in &channels { //! if first_ready == recv.id() { //! println!("First ready: {}", recv.recv_sync().unwrap()); //! return; //! } //! } //! ``` extern crate core; extern crate alloc; #[cfg(test)] extern crate test; pub use marker::{Sendable}; mod sortedvec; mod marker; pub mod arc; pub mod select; pub mod spsc; pub mod spmc; pub mod mpsc; pub mod mpmc; /// Errors that can happen during receiving and sending. /// /// See the individual functions for a list of errors they can return and the specific /// meaning. #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum Error { Disconnected, Full, Empty, Deadlock, }<|fim▁end|>
//! Shared usage: //!
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> * @license * Copyright Mauricio Gemelli Vigolo. * * Use of this source code is governed by a MIT-style license that can be * found in the LICENSE file at https://github.com/orchejs/rest/LICENSE */ export * from './interceptor.loader'; export * from './parameter.loader'; export * from './router.loader';<|fim▁end|>
/**
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from flask import Flask from flask.ext.bootstrap import Bootstrap from flask.ext.mail import Mail from flask.ext.moment import Moment from flask.ext.sqlalchemy import SQLAlchemy from flask.ext.login import LoginManager from flask.ext.pagedown import PageDown from config import config bootstrap = Bootstrap() mail = Mail() moment = Moment() db = SQLAlchemy() pagedown = PageDown() login_manager = LoginManager() login_manager.session_protection = 'strong' login_manager.login_view = 'auth.login' def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) bootstrap.init_app(app) mail.init_app(app) moment.init_app(app) db.init_app(app) login_manager.init_app(app) pagedown.init_app(app) from .main import main as main_blueprint app.register_blueprint(main_blueprint) from .auth import auth as auth_blueprint app.register_blueprint(auth_blueprint, url_prefix='/auth') <|fim▁hole|><|fim▁end|>
return app
<|file_name|>tag_test.go<|end_file_name|><|fim▁begin|>package dao import ( "context" "go-common/app/admin/main/growup/model" "testing" "github.com/smartystreets/goconvey/convey" ) func TestDaoUpdateTagState(t *testing.T) { convey.Convey("UpdateTagState", t, func(ctx convey.C) { var ( c = context.Background() tagID = int(100) isDeleted = int(0) ) ctx.Convey("When everything goes positive", func(ctx convey.C) { d.Exec(c, "INSERT INTO tag_info(id, is_deleted) VALUES(100, 1) ON DUPLICATE KEY UPDATE is_deleted = 1") rows, err := d.UpdateTagState(c, tagID, isDeleted) ctx.Convey("Then err should be nil.rows should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(rows, convey.ShouldNotBeNil) }) }) }) } func TestDaoTxInsertTagUpInfo(t *testing.T) { convey.Convey("TxInsertTagUpInfo", t, func(ctx convey.C) { var ( tx, _ = d.BeginTran(context.Background()) tagID = int64(100) mid = int64(1000) isDeleted = int(0) ) ctx.Convey("When everything goes positive", func(ctx convey.C) { defer tx.Commit() d.Exec(context.Background(), "DELETE FROM tag_up_info WHERE tag_id = 100") rows, err := d.TxInsertTagUpInfo(tx, tagID, mid, isDeleted) ctx.Convey("Then err should be nil.rows should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(rows, convey.ShouldNotBeNil) }) }) }) } func TestDaoInsertTagUpInfo(t *testing.T) { convey.Convey("InsertTagUpInfo", t, func(ctx convey.C) { var ( c = context.Background() tagID = int64(100) mid = int64(1000) isDeleted = int(0) ) ctx.Convey("When everything goes positive", func(ctx convey.C) { d.Exec(c, "DELETE FROM tag_up_info WHERE tag_id = 100") rows, err := d.InsertTagUpInfo(c, tagID, mid, isDeleted) ctx.Convey("Then err should be nil.rows should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(rows, convey.ShouldNotBeNil) }) }) }) } func TestDaoUpdateTagCom(t *testing.T) { convey.Convey("UpdateTagCom", t, func(ctx convey.C) { var ( c = context.Background() tagID = int(100) isCommon 
= int(1) ) ctx.Convey("When everything goes positive", func(ctx convey.C) { d.Exec(c, "INSERT INTO tag_info(id, is_common) VALUES(100, 0) ON DUPLICATE KEY UPDATE is_common = 1") rows, err := d.UpdateTagCom(c, tagID, isCommon) ctx.Convey("Then err should be nil.rows should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(rows, convey.ShouldNotBeNil) }) }) }) } func TestDaoInsertTag(t *testing.T) { convey.Convey("InsertTag", t, func(ctx convey.C) { var ( c = context.Background() tag = &model.TagInfo{ Tag: "tt", Category: 1, Business: 3, } ) ctx.Convey("When everything goes positive", func(ctx convey.C) { d.Exec(c, "DELETE FROM tag_info WHERE tag = 'tt'") rows, err := d.InsertTag(c, tag) ctx.Convey("Then err should be nil.rows should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(rows, convey.ShouldNotBeNil) }) }) }) } func TestDaoTxInsertTag(t *testing.T) { convey.Convey("TxInsertTag", t, func(ctx convey.C) { var ( tx, _ = d.BeginTran(context.Background()) tag = &model.TagInfo{ Tag: "tt", Category: 1, Business: 3, } ) ctx.Convey("When everything goes positive", func(ctx convey.C) { defer tx.Commit() d.Exec(context.Background(), "DELETE FROM tag_info WHERE tag = 'tt'") rows, err := d.TxInsertTag(tx, tag) ctx.Convey("Then err should be nil.rows should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(rows, convey.ShouldNotBeNil) }) }) }) } func TestDaoUpdateTagInfo(t *testing.T) { convey.Convey("UpdateTagInfo", t, func(ctx convey.C) { var ( c = context.Background() no = &model.TagInfo{ Tag: "tt", Category: 1, Business: 3,<|fim▁hole|> ctx.Convey("When everything goes positive", func(ctx convey.C) { rows, err := d.UpdateTagInfo(c, no) ctx.Convey("Then err should be nil.rows should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(rows, convey.ShouldNotBeNil) }) }) }) } func TestDaoGetTagInfo(t *testing.T) { convey.Convey("GetTagInfo", t, func(ctx convey.C) { var ( c = 
context.Background() tagID = int(101) ) ctx.Convey("When everything goes positive", func(ctx convey.C) { d.Exec(c, "INSERT INTO tag_info(id, tag) VALUES(101, 'kkkkk')") info, err := d.GetTagInfo(c, tagID) ctx.Convey("Then err should be nil.info should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(info, convey.ShouldNotBeNil) }) }) }) } func TestDaoGetTagInfoByName(t *testing.T) { convey.Convey("GetTagInfoByName", t, func(ctx convey.C) { var ( c = context.Background() tag = "ppp" dimension = int(0) category = int(0) business = int(0) ) ctx.Convey("When everything goes positive", func(ctx convey.C) { d.Exec(c, "INSERT INTO tag_info(id, tag) VALUES(102, 'ppp')") id, err := d.GetTagInfoByName(c, tag, dimension, category, business) ctx.Convey("Then err should be nil.id should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(id, convey.ShouldNotBeNil) }) }) }) } func TestDaoTxGetTagInfoByName(t *testing.T) { convey.Convey("TxGetTagInfoByName", t, func(ctx convey.C) { var ( tx, _ = d.BeginTran(context.Background()) tag = "ppp" dimension = int(0) category = int(0) business = int(0) ) ctx.Convey("When everything goes positive", func(ctx convey.C) { defer tx.Commit() id, err := d.TxGetTagInfoByName(tx, tag, dimension, category, business) ctx.Convey("Then err should be nil.id should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(id, convey.ShouldNotBeNil) }) }) }) } func TestDaoTagsCount(t *testing.T) { convey.Convey("TagsCount", t, func(ctx convey.C) { var ( c = context.Background() query = "" ) ctx.Convey("When everything goes positive", func(ctx convey.C) { count, err := d.TagsCount(c, query) ctx.Convey("Then err should be nil.count should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(count, convey.ShouldNotBeNil) }) }) }) } func TestDaoGetTagInfos(t *testing.T) { convey.Convey("GetTagInfos", t, func(ctx convey.C) { var ( c = context.Background() query = "" from = 
int(0) limit = int(10) ) ctx.Convey("When everything goes positive", func(ctx convey.C) { tagInfos, err := d.GetTagInfos(c, query, from, limit) ctx.Convey("Then err should be nil.tagInfos should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(tagInfos, convey.ShouldNotBeNil) }) }) }) } func TestDaoGetNickname(t *testing.T) { convey.Convey("GetNickname", t, func(ctx convey.C) { var ( c = context.Background() mid = int64(100) ) ctx.Convey("When everything goes positive", func(ctx convey.C) { d.Exec(c, "INSERT INTO up_category_info(mid, nick_name) VALUES(100, 'tt')") nickname, err := d.GetNickname(c, mid) ctx.Convey("Then err should be nil.nickname should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(nickname, convey.ShouldNotBeNil) }) }) }) } func TestDaoGetTagUpInfoMID(t *testing.T) { convey.Convey("GetTagUpInfoMID", t, func(ctx convey.C) { var ( c = context.Background() tagID = int64(100) isDeleted = int(0) ) ctx.Convey("When everything goes positive", func(ctx convey.C) { mids, err := d.GetTagUpInfoMID(c, tagID, isDeleted) ctx.Convey("Then err should be nil.mids should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(mids, convey.ShouldNotBeNil) }) }) }) } func TestDaoUpdateTagActivity(t *testing.T) { convey.Convey("UpdateTagActivity", t, func(ctx convey.C) { var ( c = context.Background() tagID = int64(102) activityID = int64(100) ) ctx.Convey("When everything goes positive", func(ctx convey.C) { d.Exec(c, "INSERT INTO tag_info(id, activity_id) VALUES(102, 100) ON DUPLICATE KEY UPDATE activity_id = 100") rows, err := d.UpdateTagActivity(c, tagID, activityID) ctx.Convey("Then err should be nil.rows should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(rows, convey.ShouldNotBeNil) }) }) }) }<|fim▁end|>
} )
<|file_name|>test_transfer.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # test_transfer.py # # Copyright (C) 2012 Bro <[email protected]> # # Deluge is free software. # # You may redistribute it and/or modify it under the terms of the # GNU General Public License, as published by the Free Software # Foundation; either version 3 of the License, or (at your option) # any later version. # # deluge is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with deluge. If not, write to: # The Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor # Boston, MA 02110-1301, USA. # # In addition, as a special exception, the copyright holders give # permission to link the code of portions of this program with the OpenSSL # library. # You must obey the GNU General Public License in all respects for all of # the code used other than OpenSSL. If you modify file(s) with this # exception, you may extend this exception to your version of the file(s), # but you are not obligated to do so. If you do not wish to do so, delete # this exception statement from your version. If you delete this exception # statement from all source files in the program, then also delete it here. # from twisted.trial import unittest from deluge.transfer import DelugeTransferProtocol import base64 import deluge.rencode as rencode class TransferTestClass(DelugeTransferProtocol): def __init__(self): DelugeTransferProtocol.__init__(self) self.transport = self self.messages_out = [] self.messages_in = [] self.packet_count = 0 def write(self, message): """ Called by DelugeTransferProtocol class This simulates the write method of the self.transport in DelugeTransferProtocol. 
""" self.messages_out.append(message) def message_received(self, message): """ This method overrides message_received is DelugeTransferProtocol and is called with the complete message as it was sent by DelugeRPCProtocol """ self.messages_in.append(message) def get_messages_out_joined(self): return b"".join(self.messages_out) def get_messages_in(self): return self.messages_in def dataReceived_old_protocol(self, data): """ This is the original method logic (as close as possible) for handling data receival on the client :param data: a zlib compressed string encoded with rencode. """ from datetime import timedelta import zlib print "\n=== New Data Received ===\nBytes received:", len(data) if self._buffer: # We have some data from the last dataReceived() so lets prepend it print "Current buffer:", len(self._buffer) if self._buffer else "0" data = self._buffer + data self._buffer = None self.packet_count += 1 self._bytes_received += len(data) while data: print "\n-- Handle packet data --" print "Bytes received:", self._bytes_received print "Current data:", len(data) if self._message_length == 0: # handle_new_message uses _buffer so set data to _buffer. 
self._buffer = data self._handle_new_message() data = self._buffer self._buffer = None self.packet_count = 1 print "New message of length:", self._message_length dobj = zlib.decompressobj() try: request = rencode.loads(dobj.decompress(data)) print "Successfully loaded message", print " - Buffer length: %d, data length: %d, unused length: %d" % (len(data), \ len(data) - len(dobj.unused_data), len(dobj.unused_data)) print "Packet count:", self.packet_count except Exception, e: #log.debug("Received possible invalid message (%r): %s", data, e) # This could be cut-off data, so we'll save this in the buffer # and try to prepend it on the next dataReceived() self._buffer = data print "Failed to load buffer (size %d): %s" % (len(self._buffer), str(e)) return else: data = dobj.unused_data self._message_length = 0 self.message_received(request) class DelugeTransferProtocolTestCase(unittest.TestCase): def setUp(self): """ The expected messages corresponds to the test messages (msg1, msg2) after they've been processed by DelugeTransferProtocol.send, which means that they've first been encoded with pickle, and then compressed with zlib. The expected messages are encoded in base64 to easily including it here in the source. So before comparing the results with the expected messages, the expected messages must be decoded, or the result message be encoded in base64. 
""" self.transfer = TransferTestClass() self.msg1 = (0, 1, {"key_int": 1242429423}, {"key_str": "some string"}, {"key_bool": True}) self.msg2 = (2, 3, {"key_float": 12424.29423}, {"key_unicode": u"some string"}, {"key_dict_with_tuple": {"key_tuple": (1, 2, 3)}}, {"keylist": [4, "5", 6.7]}) self.msg1_expected_compressed_base64 = "RAAAADF4nDvKwJjenp1aGZ+ZV+Lgxfv9PYRXXFLU"\ "XZyfm6oAZGTmpad3gAST8vNznAEAJhSQ" self.msg2_expected_compressed_base64 = "RAAAAF14nDvGxJzemZ1aGZ+Wk59Y4uTmpKib3g3i"\ "l+ZlJuenpHYX5+emKhSXFGXmpadPBkmkZCaXxJdn"\ "lmTEl5QW5KRCdIOZhxmBhrUDuTmZxSWHWRpNnRyu"\ "paUBAHYlJxI=" def test_send_one_message(self): """ Send one message and test that it has been sent correctoly to the method 'write' in self.transport. """ self.transfer.transfer_message(self.msg1) # Get the data as sent by DelugeTransferProtocol messages = self.transfer.get_messages_out_joined() base64_encoded = base64.b64encode(messages) self.assertEquals(base64_encoded, self.msg1_expected_compressed_base64)<|fim▁hole|> def test_receive_one_message(self): """ Receive one message and test that it has been sent to the method 'message_received'. """ self.transfer.dataReceived(base64.b64decode(self.msg1_expected_compressed_base64)) # Get the data as sent by DelugeTransferProtocol messages = self.transfer.get_messages_in().pop(0) self.assertEquals(rencode.dumps(self.msg1), rencode.dumps(messages)) def test_receive_old_message(self): """ Receive an old message (with no header) and verify that the data is discarded. """ self.transfer.dataReceived(rencode.dumps(self.msg1)) self.assertEquals(len(self.transfer.get_messages_in()), 0) self.assertEquals(self.transfer._message_length, 0) self.assertEquals(len(self.transfer._buffer), 0) def test_receive_two_concatenated_messages(self): """ This test simply concatenates two messsages (as they're sent over the network), and lets DelugeTransferProtocol receive the data as one string. 
""" two_concatenated = base64.b64decode(self.msg1_expected_compressed_base64) + base64.b64decode(self.msg2_expected_compressed_base64) self.transfer.dataReceived(two_concatenated) # Get the data as sent by DelugeTransferProtocol message1 = self.transfer.get_messages_in().pop(0) self.assertEquals(rencode.dumps(self.msg1), rencode.dumps(message1)) message2 = self.transfer.get_messages_in().pop(0) self.assertEquals(rencode.dumps(self.msg2), rencode.dumps(message2)) def test_receive_three_messages_in_parts(self): """ This test concatenates three messsages (as they're sent over the network), and lets DelugeTransferProtocol receive the data in multiple parts. """ msg_bytes = base64.b64decode(self.msg1_expected_compressed_base64) + \ base64.b64decode(self.msg2_expected_compressed_base64) + \ base64.b64decode(self.msg1_expected_compressed_base64) packet_size = 40 one_message_byte_count = len(base64.b64decode(self.msg1_expected_compressed_base64)) two_messages_byte_count = one_message_byte_count + len(base64.b64decode(self.msg2_expected_compressed_base64)) three_messages_byte_count = two_messages_byte_count + len(base64.b64decode(self.msg1_expected_compressed_base64)) for d in self.receive_parts_helper(msg_bytes, packet_size): bytes_received = self.transfer.get_bytes_recv() if bytes_received >= three_messages_byte_count: expected_msgs_received_count = 3 elif bytes_received >= two_messages_byte_count: expected_msgs_received_count = 2 elif bytes_received >= one_message_byte_count: expected_msgs_received_count = 1 else: expected_msgs_received_count = 0 # Verify that the expected number of complete messages has arrived self.assertEquals(expected_msgs_received_count, len(self.transfer.get_messages_in())) # Get the data as received by DelugeTransferProtocol message1 = self.transfer.get_messages_in().pop(0) self.assertEquals(rencode.dumps(self.msg1), rencode.dumps(message1)) message2 = self.transfer.get_messages_in().pop(0) self.assertEquals(rencode.dumps(self.msg2), 
rencode.dumps(message2)) message3 = self.transfer.get_messages_in().pop(0) self.assertEquals(rencode.dumps(self.msg1), rencode.dumps(message3)) # Remove underscore to enable test, or run the test directly: # tests $ trial test_transfer.DelugeTransferProtocolTestCase._test_rencode_fail_protocol def _test_rencode_fail_protocol(self): """ This test tries to test the protocol that relies on errors from rencode. """ msg_bytes = base64.b64decode(self.msg1_expected_compressed_base64) + \ base64.b64decode(self.msg2_expected_compressed_base64) + \ base64.b64decode(self.msg1_expected_compressed_base64) packet_size = 149 one_message_byte_count = len(base64.b64decode(self.msg1_expected_compressed_base64)) two_messages_byte_count = one_message_byte_count + len(base64.b64decode(self.msg2_expected_compressed_base64)) three_messages_byte_count = two_messages_byte_count + len(base64.b64decode(self.msg1_expected_compressed_base64)) print print "Msg1 size:", len(base64.b64decode(self.msg1_expected_compressed_base64)) - 4 print "Msg2 size:", len(base64.b64decode(self.msg2_expected_compressed_base64)) - 4 print "Msg3 size:", len(base64.b64decode(self.msg1_expected_compressed_base64)) - 4 print "one_message_byte_count:", one_message_byte_count print "two_messages_byte_count:", two_messages_byte_count print "three_messages_byte_count:", three_messages_byte_count for d in self.receive_parts_helper(msg_bytes, packet_size, self.transfer.dataReceived_old_protocol): bytes_received = self.transfer.get_bytes_recv() if bytes_received >= three_messages_byte_count: expected_msgs_received_count = 3 elif bytes_received >= two_messages_byte_count: expected_msgs_received_count = 2 elif bytes_received >= one_message_byte_count: expected_msgs_received_count = 1 else: expected_msgs_received_count = 0 # Verify that the expected number of complete messages has arrived if expected_msgs_received_count != len(self.transfer.get_messages_in()): print "Expected number of messages received is %d, but %d have been 
received."\ % (expected_msgs_received_count, len(self.transfer.get_messages_in())) # Get the data as received by DelugeTransferProtocol message1 = self.transfer.get_messages_in().pop(0) self.assertEquals(rencode.dumps(self.msg1), rencode.dumps(message1)) message2 = self.transfer.get_messages_in().pop(0) self.assertEquals(rencode.dumps(self.msg2), rencode.dumps(message2)) message3 = self.transfer.get_messages_in().pop(0) self.assertEquals(rencode.dumps(self.msg1), rencode.dumps(message3)) def test_receive_middle_of_header(self): """ This test concatenates two messsages (as they're sent over the network), and lets DelugeTransferProtocol receive the data in two parts. The first part contains the first message, plus two bytes of the next message. The next part contains the rest of the message. This is a special case, as DelugeTransferProtocol can't start parsing a message until it has at least 4 bytes (the size of the header) to be able to read and parse the size of the payload. """ two_concatenated = base64.b64decode(self.msg1_expected_compressed_base64) + base64.b64decode(self.msg2_expected_compressed_base64) first_len = len(base64.b64decode(self.msg1_expected_compressed_base64)) # Now found the entire first message, and half the header of the next message (2 bytes into the header) self.transfer.dataReceived(two_concatenated[:first_len+2]) # Should be 1 message in the list self.assertEquals(1, len(self.transfer.get_messages_in())) # Send the rest self.transfer.dataReceived(two_concatenated[first_len+2:]) # Should be 2 messages in the list self.assertEquals(2, len(self.transfer.get_messages_in())) # Get the data as sent by DelugeTransferProtocol message1 = self.transfer.get_messages_in().pop(0) self.assertEquals(rencode.dumps(self.msg1), rencode.dumps(message1)) message2 = self.transfer.get_messages_in().pop(0) self.assertEquals(rencode.dumps(self.msg2), rencode.dumps(message2)) # Needs file containing big data structure e.g. 
like thetorrent list as it is transfered by the daemon #def test_simulate_big_transfer(self): # filename = "../deluge.torrentlist" # # f = open(filename, "r") # data = f.read() # message_to_send = eval(data) # self.transfer.transfer_message(message_to_send) # # # Get the data as sent to the network by DelugeTransferProtocol # compressed_data = self.transfer.get_messages_out_joined() # packet_size = 16000 # Or something smaller... # # for d in self.receive_parts_helper(compressed_data, packet_size): # bytes_recv = self.transfer.get_bytes_recv() # if bytes_recv < len(compressed_data): # self.assertEquals(len(self.transfer.get_messages_in()), 0) # else: # self.assertEquals(len(self.transfer.get_messages_in()), 1) # # Get the data as received by DelugeTransferProtocol # transfered_message = self.transfer.get_messages_in().pop(0) # # Test that the data structures are equal # #self.assertEquals(transfered_message, message_to_send) # #self.assertTrue(transfered_message == message_to_send) # # #f.close() # #f = open("rencode.torrentlist", "w") # #f.write(str(transfered_message)) # #f.close() def receive_parts_helper(self, data, packet_size, receive_func=None): byte_count = len(data) sent_bytes = 0 while byte_count > 0: to_receive = packet_size if byte_count > packet_size else byte_count sent_bytes += to_receive byte_count -= to_receive if receive_func: receive_func(data[:to_receive]) else: self.transfer.dataReceived(data[:to_receive]) data = data[to_receive:] yield<|fim▁end|>
<|file_name|>template.py<|end_file_name|><|fim▁begin|># -*- python -*- # Package : omniidl # template.py Created on: 2000/01/18 # Author : David Scott (djs) # # Copyright (C) 2003-2008 Apasphere Ltd # Copyright (C) 1999 AT&T Laboratories Cambridge # # This file is part of omniidl. # # omniidl is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA # 02111-1307, USA. # # Description: # # C++ templates for the .hh file # $Id: template.py 5867 2009-05-06 16:16:18Z dgrisby $ # $Log$ # Revision 1.8.2.20 2008/12/29 18:44:38 dgrisby # Globally scope array functions to avoid ambiguities. # # Revision 1.8.2.19 2008/12/03 10:53:58 dgrisby # Tweaks leading to Python 3 support; other minor clean-ups. # # Revision 1.8.2.18 2007/09/19 14:16:07 dgrisby # Avoid namespace clashes if IDL defines modules named CORBA. # # Revision 1.8.2.17 2007/05/11 09:52:27 dgrisby # New -Wbguard_prefix option. Thanks Austin Bingham. # # Revision 1.8.2.16 2006/10/23 15:36:25 dgrisby # Undefine USE_stub_in_nt_dll at the end of header if it was not defined # at the start. # # Revision 1.8.2.15 2006/09/04 11:40:06 dgrisby # Remove crazy switch code in enum marshalling. # # Revision 1.8.2.14 2006/01/10 12:24:03 dgrisby # Merge from omni4_0_develop pre 4.0.7 release. # # Revision 1.8.2.13 2005/11/14 11:02:16 dgrisby # Local interface fixes. 
# # Revision 1.8.2.12 2005/11/09 12:22:17 dgrisby # Local interfaces support. # # Revision 1.8.2.11 2005/08/16 13:51:21 dgrisby # Problems with valuetype / abstract interface C++ mapping. # # Revision 1.8.2.10 2005/07/22 17:18:37 dgrisby # Another merge from omni4_0_develop. # # Revision 1.8.2.9 2005/01/06 23:10:06 dgrisby # Big merge from omni4_0_develop. # # Revision 1.8.2.8 2005/01/06 16:35:18 dgrisby # Narrowing for abstract interfaces. # # Revision 1.8.2.7 2004/10/13 17:58:24 dgrisby # Abstract interfaces support; values support interfaces; value bug fixes. # # Revision 1.8.2.6 2004/07/31 23:46:27 dgrisby # Correct constness of exception Any insertion operator. # # Revision 1.8.2.5 2004/07/23 10:29:59 dgrisby # Completely new, much simpler Any implementation. # # Revision 1.8.2.4 2004/07/04 23:53:39 dgrisby # More ValueType TypeCode and Any support. # # Revision 1.8.2.3 2004/02/16 10:10:32 dgrisby # More valuetype, including value boxes. C++ mapping updates. # # Revision 1.8.2.2 2003/10/23 11:25:55 dgrisby # More valuetype support. # # Revision 1.8.2.1 2003/03/23 21:02:36 dgrisby # Start of omniORB 4.1.x development branch. # # Revision 1.5.2.19 2001/11/12 13:46:07 dpg1 # _unchecked_narrow, improved _narrow. # # Revision 1.5.2.18 2001/11/08 16:33:51 dpg1 # Local servant POA shortcut policy. # # Revision 1.5.2.17 2001/10/29 17:42:41 dpg1 # Support forward-declared structs/unions, ORB::create_recursive_tc(). # # Revision 1.5.2.16 2001/10/18 12:45:28 dpg1 # IDL compiler tweaks. # # Revision 1.5.2.15 2001/10/17 16:44:05 dpg1 # Update DynAny to CORBA 2.5 spec, const Any exception extraction. # # Revision 1.5.2.14 2001/09/19 17:29:04 dpg1 # Cosmetic changes. # # Revision 1.5.2.13 2001/08/17 13:45:56 dpg1 # C++ mapping fixes. # # Revision 1.5.2.12 2001/08/15 10:26:10 dpg1 # New object table behaviour, correct POA semantics. # # Revision 1.5.2.11 2001/08/03 17:41:17 sll # System exception minor code overhaul. 
When a system exeception is raised, # a meaning minor code is provided. # # Revision 1.5.2.10 2001/07/31 19:25:11 sll # Array _var should be separated into fixed and variable size ones. # # Revision 1.5.2.9 2001/06/18 20:30:51 sll # Only define 1 conversion operator from T_var to T* if the compiler is # gcc. Previously, this is only done for gcc 2.7.2. It seems that gcc 3.0 # requires this to be the case. This is the default for all versions of # gcc. # # Revision 1.5.2.8 2001/05/29 17:03:50 dpg1 # In process identity. # # Revision 1.5.2.7 2001/04/19 09:30:12 sll # Big checkin with the brand new internal APIs. # Scoped where appropriate with the omni namespace. # # Revision 1.5.2.6 2001/03/13 10:32:09 dpg1 # Fixed point support. # # Revision 1.5.2.5 2000/11/20 14:43:25 sll # Added support for wchar and wstring. # # Revision 1.5.2.4 2000/11/09 12:27:55 dpg1 # Huge merge from omni3_develop, plus full long long from omni3_1_develop. # # Revision 1.5.2.3 2000/11/03 19:20:41 sll # Replaced old marshal operators with a unified operator for cdrStream. # # Revision 1.5.2.2 2000/10/12 15:37:51 sll # Updated from omni3_1_develop. # # Revision 1.6.2.2 2000/08/21 11:35:18 djs # Lots of tidying # # Revision 1.6.2.1 2000/08/02 10:52:02 dpg1 # New omni3_1_develop branch, merged from omni3_develop. # # Revision 1.6 2000/07/13 15:26:00 dpg1 # Merge from omni3_develop for 3.0 release. # # Revision 1.3.2.15 2000/07/26 15:29:11 djs # Missing typedef and forward when generating BOA skeletons # # Revision 1.3.2.14 2000/07/24 09:35:20 dpg1 # Adding the missing constructor meant that there was no longer a # default constructor. # # Revision 1.3.2.13 2000/07/24 10:17:31 djs # Added missing BOA skeleton constructor # # Revision 1.3.2.12 2000/07/04 12:57:55 djs # Fixed Any insertion/extraction operators for unions and exceptions # # Revision 1.3.2.11 2000/06/26 16:24:00 djs # Better handling of #include'd files (via new commandline options) # Refactoring of configuration state mechanism. 
# # Revision 1.3.2.10 2000/06/19 18:19:50 djs # Implemented union discriminant setting function _d(_value) with checks for # illegal uses (setting to a label corresponding to a non-current member and # setting before initialisation) # # Revision 1.3.2.9 2000/06/05 13:03:57 djs # Removed union member name clash (x & pd_x, pd__default, pd__d) # Removed name clash when a sequence is called "pd_seq" # Nested union within union fix # Actually generates BOA non-flattened tie templates # # Revision 1.3.2.8 2000/05/31 18:02:58 djs # Better output indenting (and preprocessor directives now correctly output at # the beginning of lines) # # Revision 1.3.2.7 2000/05/30 15:59:25 djs # Removed inheritance ambiguity in generated BOA _sk_ and POA_ classes # # Revision 1.3.2.6 2000/05/18 15:57:33 djs # Added missing T* data constructor for bounded sequence types # # Revision 1.3.2.5 2000/03/20 11:50:20 djs # Removed excess buffering- output templates have code attached which is # lazily evaluated when required. # # Revision 1.3.2.4 2000/03/10 12:01:03 djr # Re-fixed omniidl (make exception _NP_duplicate() public). # # Revision 1.3.2.3 2000/03/09 15:22:42 djs # Changing the protection status of an exception method, mirroring a change # in omniidl3 # # Revision 1.3.2.2 2000/03/07 18:07:33 djr # Fixed user-exceptions when can't catch by base class. # # Revision 1.3.2.1 2000/03/03 14:29:17 djr # Improvement to BOA skeletons (less generated code). # # Revision 1.3 2000/02/01 09:26:45 djs # Tracking fixes in old compiler: powerpc-aix scoped identifier workarounds # # Revision 1.2 2000/01/19 11:23:29 djs # Moved most C++ code to template file # # Revision 1.1 2000/01/18 18:05:53 djs # Extracted most C++ from header/defs and put in a template file. # General refactoring. # """C++ templates for the .hh file""" ## ## File header ## header = """\ // This file is generated by @program@- @library@. Do not edit. 
#ifndef @guard_prefix@__@guard@_hh__ #define @guard_prefix@__@guard@_hh__ """ footer = """\ #endif """ ## ## Main file ## main = """\ #ifndef __CORBA_H_EXTERNAL_GUARD__ #include <omniORB4/CORBA.h> #endif #ifndef USE_stub_in_nt_dll # define USE_stub_in_nt_dll_NOT_DEFINED_@guard@ #endif #ifndef USE_core_stub_in_nt_dll # define USE_core_stub_in_nt_dll_NOT_DEFINED_@guard@ #endif #ifndef USE_dyn_stub_in_nt_dll # define USE_dyn_stub_in_nt_dll_NOT_DEFINED_@guard@ #endif @sub_include_pre@ @cxx_direct_include@ @includes@ @sub_include_post@ #ifdef USE_stub_in_nt_dll # ifndef USE_core_stub_in_nt_dll # define USE_core_stub_in_nt_dll # endif # ifndef USE_dyn_stub_in_nt_dll # define USE_dyn_stub_in_nt_dll # endif #endif #ifdef _core_attr # error "A local CPP macro _core_attr has already been defined." #else # ifdef USE_core_stub_in_nt_dll # define _core_attr _OMNIORB_NTDLL_IMPORT # else # define _core_attr # endif #endif #ifdef _dyn_attr # error "A local CPP macro _dyn_attr has already been defined." #else # ifdef USE_dyn_stub_in_nt_dll # define _dyn_attr _OMNIORB_NTDLL_IMPORT # else # define _dyn_attr # endif #endif @forward_declarations@ @string_tcParser_declarations@ @defs@ @poa@ @obv@ @other_tie@ #undef _core_attr #undef _dyn_attr @operators@ @marshalling@ #ifdef USE_stub_in_nt_dll_NOT_DEFINED_@guard@ # undef USE_stub_in_nt_dll # undef USE_stub_in_nt_dll_NOT_DEFINED_@guard@ #endif #ifdef USE_core_stub_in_nt_dll_NOT_DEFINED_@guard@ # undef USE_core_stub_in_nt_dll # undef USE_core_stub_in_nt_dll_NOT_DEFINED_@guard@ #endif #ifdef USE_dyn_stub_in_nt_dll_NOT_DEFINED_@guard@ # undef USE_dyn_stub_in_nt_dll # undef USE_dyn_stub_in_nt_dll_NOT_DEFINED_@guard@ #endif #endif // __@guard@_hh__ """ sub_include_pre = """\ #ifdef INCLUDED_stub_in_nt_dll # ifdef USE_stub_in_nt_dll # error "cannot use both INCLUDED_stub_in_nt_dll and USE_stub_in_nt_dll." 
# else # define USE_stub_in_nt_dll # endif # define INCLUDED_stub_in_nt_dll_DEFINED_@guard@ # undef INCLUDED_stub_in_nt_dll #endif """ sub_include_post = """\ #ifdef INCLUDED_stub_in_nt_dll_DEFINED_@guard@ # undef USE_stub_in_nt_dll # define INCLUDED_stub_in_nt_dll # undef INCLUDED_stub_in_nt_dll_DEFINED_@guard@ #endif """ main_include = """\ #ifndef @guard_prefix@__@guardname@_EXTERNAL_GUARD__ #define @guard_prefix@__@guardname@_EXTERNAL_GUARD__ #include @filename@ #endif""" ## ## Modules ## # name => C++ form of the module identifier module_begin = """\ _CORBA_MODULE @name@ _CORBA_MODULE_BEG """ module_end = """\ _CORBA_MODULE_END """ POA_module_begin = """\ _CORBA_MODULE @POA_prefix@@name@ _CORBA_MODULE_BEG """ POA_module_end = """\ _CORBA_MODULE_END """ OBV_module_begin = """\ _CORBA_MODULE @OBV_prefix@@name@ _CORBA_MODULE_BEG """ OBV_module_end = """\ _CORBA_MODULE_END """ POA_interface = """\ class @POA_name@ : public virtual @impl_scopedID@, @inherits@ { public: virtual ~@POA_name@(); inline ::@scopedID@_ptr _this() { return (::@scopedID@_ptr) _do_this(::@scopedID@::_PD_repoId); } };<|fim▁hole|> ## ## Interfaces ## interface_Helper = """\ #ifndef __@guard@__ #define __@guard@__ class @name@; class _objref_@name@; class _impl_@name@; @class_sk_name@ typedef _objref_@name@* @name@_ptr; typedef @name@_ptr @name@Ref; class @name@_Helper { public: typedef @name@_ptr _ptr_type; static _ptr_type _nil(); static _CORBA_Boolean is_nil(_ptr_type); static void release(_ptr_type); static void duplicate(_ptr_type); static void marshalObjRef(_ptr_type, cdrStream&); static _ptr_type unmarshalObjRef(cdrStream&); }; typedef _CORBA_ObjRef_Var<_objref_@name@, @name@_Helper> @name@_var; typedef _CORBA_ObjRef_OUT_arg<_objref_@name@,@name@_Helper > @name@_out; #endif """ interface_type = """\ // interface @name@ class @name@ { public: // Declarations for this interface type. 
typedef @name@_ptr _ptr_type; typedef @name@_var _var_type; static _ptr_type _duplicate(_ptr_type); static _ptr_type _narrow(::CORBA::Object_ptr); static _ptr_type _unchecked_narrow(::CORBA::Object_ptr); @abstract_narrows@ static _ptr_type _nil(); static inline void _marshalObjRef(_ptr_type, cdrStream&); static inline _ptr_type _unmarshalObjRef(cdrStream& s) { omniObjRef* o = omniObjRef::_unMarshal(_PD_repoId,s); if (o) return (_ptr_type) o->_ptrToObjRef(_PD_repoId); else return _nil(); } static _core_attr const char* _PD_repoId; // Other IDL defined within this scope. @Other_IDL@ }; """ interface_abstract_narrows = """\ static _ptr_type _narrow(::CORBA::AbstractBase_ptr); static _ptr_type _unchecked_narrow(::CORBA::AbstractBase_ptr); """ ## ## Abstract Interfaces ## abstract_interface_Helper = """\ #ifndef __@guard@__ #define __@guard@__ class @name@; class _objref_@name@; typedef @name@* @name@_ptr; typedef @name@_ptr @name@Ref; class @name@_Helper { public: typedef @name@_ptr _ptr_type; static _ptr_type _nil(); static _CORBA_Boolean is_nil(_ptr_type); static void release(_ptr_type); static void duplicate(_ptr_type); static void marshalObjRef(_ptr_type, cdrStream&); static _ptr_type unmarshalObjRef(cdrStream&); }; typedef _CORBA_ObjRef_Var<@name@, @name@_Helper> @name@_var; typedef _CORBA_ObjRef_OUT_arg<@name@,@name@_Helper > @name@_out; #endif """ abstract_interface_type = """\ // abstract interface @name@ class @name@ : @inherits@ { public: // Declarations for this interface type. 
typedef @name@_ptr _ptr_type; typedef @name@_var _var_type; static _ptr_type _duplicate(_ptr_type); static _ptr_type _narrow(::CORBA::AbstractBase_ptr); static _ptr_type _unchecked_narrow(::CORBA::AbstractBase_ptr); static _ptr_type _nil(); static inline void _marshalObjRef(_ptr_type, cdrStream&); static inline _ptr_type _unmarshalObjRef(cdrStream& s) { _CORBA_Boolean b = s.unmarshalBoolean(); if (b) { omniObjRef* o = omniObjRef::_unMarshal(_PD_repoId,s); if (o) return (_ptr_type) o->_ptrToObjRef(_PD_repoId); else return _nil(); } else { ::CORBA::ValueBase* v = ::CORBA::ValueBase::_NP_unmarshal(s); if (v) return (_ptr_type) v->_ptrToValue(_PD_repoId); else return 0; } } static _core_attr const char* _PD_repoId; // Other IDL defined within this scope. @Other_IDL@ // Operations declared in this abstract interface @operations@ }; """ ## ## Local Interfaces ## local_interface_Helper = """\ #ifndef __@guard@__ #define __@guard@__ class @name@; typedef @name@* @name@_ptr; typedef @name@_ptr @name@Ref; class @name@_Helper { public: typedef @name@_ptr _ptr_type; static _ptr_type _nil(); static _CORBA_Boolean is_nil(_ptr_type); static void release(_ptr_type); static void duplicate(_ptr_type); static void marshalObjRef(_ptr_type, cdrStream&); static _ptr_type unmarshalObjRef(cdrStream&); }; typedef _CORBA_ObjRef_Var<@name@, @name@_Helper> @name@_var; typedef _CORBA_ObjRef_OUT_arg<@name@,@name@_Helper > @name@_out; #endif """ local_interface_type = """\ // local interface @name@ class @name@ : @inherits@ { public: // Declarations for this interface type. 
typedef @name@_ptr _ptr_type; typedef @name@_var _var_type; static _ptr_type _duplicate(_ptr_type); static _ptr_type _narrow(::CORBA::Object_ptr); static _ptr_type _unchecked_narrow(::CORBA::Object_ptr); @abstract_narrows@ static _ptr_type _nil(); static inline void _marshalObjRef(_ptr_type, cdrStream& s) { OMNIORB_THROW(MARSHAL, _OMNI_NS(MARSHAL_LocalObject), (::CORBA::CompletionStatus)s.completion()); } static inline _ptr_type _unmarshalObjRef(cdrStream& s) { OMNIORB_THROW(MARSHAL, _OMNI_NS(MARSHAL_LocalObject), (::CORBA::CompletionStatus)s.completion()); #ifdef NEED_DUMMY_RETURN return 0; #endif } static _core_attr const char* _PD_repoId; // Other IDL defined within this scope. @Other_IDL@ // Operations declared in this local interface @operations@ private: virtual void* _ptrToObjRef(const char*); protected: @name@(); virtual ~@name@(); }; class _nil_@name@ : @nil_inherits@ public virtual @name@ { public: @nil_operations@ inline _nil_@name@() { _PR_setobj(0); } protected: virtual ~_nil_@name@(); }; """ ## ## Object reference ## interface_objref = """\ class _objref_@name@ : @inherits@ { public: @operations@ inline _objref_@name@() @init_shortcut@ { _PR_setobj(0); } // nil _objref_@name@(omniIOR*, omniIdentity*); protected: virtual ~_objref_@name@(); @shortcut@ private: virtual void* _ptrToObjRef(const char*); _objref_@name@(const _objref_@name@&); _objref_@name@& operator = (const _objref_@name@&); // not implemented friend class @name@; }; """ interface_shortcut = """\ virtual void _enableShortcut(omniServant*, const _CORBA_Boolean*); _impl_@name@* _shortcut; const _CORBA_Boolean* _invalid;\ """ ## ## Proxy Object Factory ## interface_pof = """\ class _pof_@name@ : public _OMNI_NS(proxyObjectFactory) { public: inline _pof_@name@() : _OMNI_NS(proxyObjectFactory)(@name@::_PD_repoId) {} virtual ~_pof_@name@(); virtual omniObjRef* newObjRef(omniIOR*,omniIdentity*); virtual _CORBA_Boolean is_a(const char*) const; }; """ ## ## Interface Impl class ## interface_impl = 
"""\ class _impl_@name@ : @inherits@ { public: virtual ~_impl_@name@(); @operations@ public: // Really protected, workaround for xlC virtual _CORBA_Boolean _dispatch(omniCallHandle&); private: virtual void* _ptrToInterface(const char*); virtual const char* _mostDerivedRepoId(); @abstract@ }; """ interface_impl_abstract = """\ virtual void _interface_is_abstract() = 0;""" interface_impl_not_abstract = """\ virtual void _interface_is_abstract();""" ## ## Old BOA skeleton class ## interface_sk = """\ class _sk_@name@ : public virtual _impl_@name@, @inherits@ { public: _sk_@name@() {} _sk_@name@(const omniOrbBoaKey&); virtual ~_sk_@name@(); inline @name@::_ptr_type _this() { return (@name@::_ptr_type) omniOrbBoaServant::_this(@name@::_PD_repoId); } }; """ ## ## Objref marshal function ## interface_marshal_forward = """\ inline void @name@::_marshalObjRef(::@name@_ptr obj, cdrStream& s) { omniObjRef::_marshal(obj->_PR_getobj(),s); } """ abstract_interface_marshal_forward = """\ inline void @name@::_marshalObjRef(::@name@_ptr obj, cdrStream& s) { if (obj) { ::CORBA::ValueBase* v = obj->_NP_to_value(); if (v) { s.marshalBoolean(0); ::CORBA::ValueBase::_NP_marshal(v,s); return; } ::CORBA::Object_ptr o = obj->_NP_to_object(); if (o) { s.marshalBoolean(1); omniObjRef::_marshal(o->_PR_getobj(),s); return; } } s.marshalBoolean(0); ::CORBA::ValueBase::_NP_marshal(0, s); } """ ## ## Typedefs ## typedef_simple_to_array = """\ typedef @base@ @derived@; typedef @base@_slice @derived@_slice; typedef @base@_copyHelper @derived@_copyHelper; typedef @base@_var @derived@_var; typedef @base@_out @derived@_out; typedef @base@_forany @derived@_forany; @inline_qualifier@ @derived@_slice* @derived@_alloc() { return @base@_alloc(); } @inline_qualifier@ @derived@_slice* @derived@_dup(const @derived@_slice* p) { return @base@_dup(p); } @inline_qualifier@ void @derived@_copy( @derived@_slice* _to, const @derived@_slice* _from ) { @base@_copy(_to, _from); } @inline_qualifier@ void @derived@_free( 
@derived@_slice* p) { @base@_free(p); } """ typedef_simple_string = """\ typedef char* @name@; typedef ::CORBA::String_var @name@_var; typedef ::CORBA::String_out @name@_out; """ typedef_simple_wstring = """\ typedef ::CORBA::WChar* @name@; typedef ::CORBA::WString_var @name@_var; typedef ::CORBA::WString_out @name@_out; """ typedef_simple_typecode = """\ typedef ::CORBA::TypeCode_ptr @name@_ptr; typedef ::CORBA::TypeCode_var @name@_var; """ typedef_simple_any = """\ typedef ::CORBA::Any @name@; typedef ::CORBA::Any_var @name@_var; typedef ::CORBA::Any_out @name@_out; """ typedef_simple_fixed = """\ typedef _omni_Fixed<@digits@,@scale@> @name@; typedef @name@& @name@_out; """ typedef_simple_basic = """\ typedef @base@ @derived@; typedef @base@_out @derived@_out; """ typedef_simple_constructed = """\ typedef @base@ @name@; typedef @base@_var @name@_var; typedef @base@_out @name@_out; """ typedef_simple_objref = """\ typedef @base@ @name@; typedef @base@_ptr @name@_ptr; typedef @base@Ref @name@Ref; @impl_base@ typedef @base@_Helper @name@_Helper; @objref_base@ typedef @base@_var @name@_var; typedef @base@_out @name@_out; """ typedef_enum_oper_friend = """\ // Need to declare <<= for elem type, as GCC expands templates early #if defined(__GNUG__) && __GNUG__ == 2 && __GNUC_MINOR__ == 7 @friend@ inline void operator >>= (@element@, cdrStream&); @friend@ inline void operator <<= (@element@&, cdrStream&); #endif """ # Arrays typedef_array = """\ typedef @type@ @name@@dims@; typedef @type@ @name@_slice@taildims@; @inline_qualifier@ @name@_slice* @name@_alloc() { return new @name@_slice[@firstdim@]; } @inline_qualifier@ @name@_slice* @name@_dup(const @name@_slice* _s) { if (!_s) return 0; @name@_slice* _data = @name@_alloc(); if (_data) { @dup_loop@ } return _data; } @inline_qualifier@ void @name@_copy(@name@_slice* _to, const @name@_slice* _from){ @copy_loop@ } @inline_qualifier@ void @name@_free(@name@_slice* _s) { delete [] _s; } """ typedef_array_copyHelper = """\ 
class @name@_copyHelper { public: static inline @name@_slice* alloc() { return ::@fqname@_alloc(); } static inline @name@_slice* dup(const @name@_slice* p) { return ::@fqname@_dup(p); } static inline void free(@name@_slice* p) { ::@fqname@_free(p); } }; typedef _CORBA_Array_@var_or_fix@_Var<@name@_copyHelper,@name@_slice> @name@_var; typedef _CORBA_Array_@var_or_fix@_Forany<@name@_copyHelper,@name@_slice> @name@_forany; """ typedef_array_fix_out_type = """\ typedef @name@_slice* @name@_out; """ typedef_array_variable_out_type = """\ typedef _CORBA_Array_Variable_OUT_arg<@name@_slice,@name@_var > @name@_out; """ ## ## Sequences ## sequence_type = """\ class @name@_var; class @name@ : public @derived@ { public: typedef @name@_var _var_type; inline @name@() {} inline @name@(const @name@& _s) : @derived@(_s) {} @bounds@ inline @name@& operator = (const @name@& _s) { @derived@::operator=(_s); return *this; } }; """ sequence_forward_type = """\ class @name@_var; class @name@ : public @derived@ { public: typedef @name@_var _var_type; inline @name@() {} @name@(const @name@& _s); @name@& operator=(const @name@& _s); @bounds@ virtual ~@name@(); @element@& operator[] (_CORBA_ULong _index); const @element@& operator[] (_CORBA_ULong _index) const; static @element@* allocbuf(_CORBA_ULong _nelems); static void freebuf(@element@* _b); void operator>>= (cdrStream &_s) const; void operator<<= (cdrStream &_s); protected: void NP_copybuffer(_CORBA_ULong _newmax); void NP_freebuf(); }; """ sequence_unbounded_ctors = """\ inline @name@(_CORBA_ULong _max) : @derived@(_max) {} inline @name@(_CORBA_ULong _max, _CORBA_ULong _len, @element@* _val, _CORBA_Boolean _rel=0) : @derived@(_max, _len, _val, _rel) {} """ sequence_bounded_ctors = """\ inline @name@(_CORBA_ULong _len, @element@* _val, _CORBA_Boolean _rel=0) : @derived@(_len, _val, _rel) {} """ sequence_var_array_subscript = """\ inline @element@_slice* operator [] (_CORBA_ULong _s) { return (@element@_slice*) 
((_pd_seq->NP_data())[_s]); } """ sequence_var_subscript = """\ inline @element@ operator [] (_CORBA_ULong _s) { return (*_pd_seq)[_s]; } """ sequence_var = """\ class @name@_out; class @name@_var { public: inline @name@_var() : _pd_seq(0) {} inline @name@_var(@name@* _s) : _pd_seq(_s) {} inline @name@_var(const @name@_var& _s) { if( _s._pd_seq ) _pd_seq = new @name@(*_s._pd_seq); else _pd_seq = 0; } inline ~@name@_var() { if( _pd_seq ) delete _pd_seq; } inline @name@_var& operator = (@name@* _s) { if( _pd_seq ) delete _pd_seq; _pd_seq = _s; return *this; } inline @name@_var& operator = (const @name@_var& _s) { if( _s._pd_seq ) { if( !_pd_seq ) _pd_seq = new @name@; *_pd_seq = *_s._pd_seq; } else if( _pd_seq ) { delete _pd_seq; _pd_seq = 0; } return *this; } @subscript_operator@ inline @name@* operator -> () { return _pd_seq; } inline const @name@* operator -> () const { return _pd_seq; } #if defined(__GNUG__) inline operator @name@& () const { return *_pd_seq; } #else inline operator const @name@& () const { return *_pd_seq; } inline operator @name@& () { return *_pd_seq; } #endif inline const @name@& in() const { return *_pd_seq; } inline @name@& inout() { return *_pd_seq; } inline @name@*& out() { if( _pd_seq ) { delete _pd_seq; _pd_seq = 0; } return _pd_seq; } inline @name@* _retn() { @name@* tmp = _pd_seq; _pd_seq = 0; return tmp; } friend class @name@_out; private: @name@* _pd_seq; }; """ sequence_out_array_subscript = """\ inline @element@_slice* operator [] (_CORBA_ULong _i) { return (@element@_slice*) ((_data->NP_data())[_i]); } """ sequence_out_subscript = """\ inline @element@ operator [] (_CORBA_ULong _i) { return (*_data)[_i]; } """ sequence_out = """\ class @name@_out { public: inline @name@_out(@name@*& _s) : _data(_s) { _data = 0; } inline @name@_out(@name@_var& _s) : _data(_s._pd_seq) { _s = (@name@*) 0; } inline @name@_out(const @name@_out& _s) : _data(_s._data) {} inline @name@_out& operator = (const @name@_out& _s) { _data = _s._data; return 
*this; } inline @name@_out& operator = (@name@* _s) { _data = _s; return *this; } inline operator @name@*&() { return _data; } inline @name@*& ptr() { return _data; } inline @name@* operator->() { return _data; } @subscript_operator@ @name@*& _data; private: @name@_out(); @name@_out& operator=(const @name@_var&); }; """ ## ## Structs ## struct = """\ struct @name@ { typedef _CORBA_ConstrType_@fix_or_var@_Var<@name@> _var_type; @Other_IDL@ @members@ void operator>>= (cdrStream &) const; void operator<<= (cdrStream &); }; typedef @name@::_var_type @name@_var; """ struct_fix_out_type = """\ typedef @name@& @name@_out; """ struct_variable_out_type = """\ typedef _CORBA_ConstrType_Variable_OUT_arg< @name@,@name@_var > @name@_out; """ struct_array_declarator = """\ typedef @memtype@ @prefix@_@cxx_id@@dims@; typedef @memtype@ _@cxx_id@_slice@tail_dims@; """ struct_nonarray_sequence = """\ typedef @memtype@ _@cxx_id@_seq; _@cxx_id@_seq @cxx_id@; """ struct_normal_member = """\ @memtype@ @cxx_id@@dims@; """ struct_forward = """\ struct @name@; """ ## ## Exceptions ## exception = """\ class @name@ : public ::CORBA::UserException { public: @Other_IDL@ @members@ inline @name@() { pd_insertToAnyFn = insertToAnyFn; pd_insertToAnyFnNCP = insertToAnyFnNCP; } @name@(const @name@&); @constructor@ @name@& operator=(const @name@&); virtual ~@name@(); virtual void _raise() const; static @name@* _downcast(::CORBA::Exception*); static const @name@* _downcast(const ::CORBA::Exception*); static inline @name@* _narrow(::CORBA::Exception* _e) { return _downcast(_e); } @inline@void operator>>=(cdrStream&) const @body@ @inline@void operator<<=(cdrStream&) @body@ static _core_attr insertExceptionToAny insertToAnyFn; static _core_attr insertExceptionToAnyNCP insertToAnyFnNCP; virtual ::CORBA::Exception* _NP_duplicate() const; static _core_attr const char* _PD_repoId; static _core_attr const char* _PD_typeId; private: virtual const char* _NP_typeId() const; virtual const char* _NP_repoId(int*) 
const; virtual void _NP_marshal(cdrStream&) const; }; """ exception_array_declarator = """\ typedef @memtype@ @private_prefix@_@cxx_id@@dims@; typedef @memtype@ _@cxx_id@_slice@tail_dims@; """ exception_member = """\ @memtype@ @cxx_id@@dims@; """ ## ## Unions ## union_ctor_nonexhaustive = """\ if ((_pd__default = _value._pd__default)) { @default@ } else { switch(_value._pd__d) { @cases@ } } _pd__d = _value._pd__d; """ union_ctor_exhaustive = """\ switch(_value._pd__d) { @cases@ } _pd__d = _value._pd__d;""" union_ctor_case = """\ case @discrimvalue@: @name@(_value._pd_@name@); break; """ union_ctor_bool_default = """\ #ifndef HAS_Cplusplus_Bool default: break; #endif """ union_ctor_default = """\ default: break; """ union = """\ class @unionname@ { public: typedef _CORBA_ConstrType_@fixed@_Var<@unionname@> _var_type; @Other_IDL@ @unionname@(): _pd__initialised(0) { @default_constructor@ } @unionname@(const @unionname@& _value) { _pd__initialised = _value._pd__initialised; @copy_constructor@ } ~@unionname@() {} @unionname@& operator=(const @unionname@& _value) { _pd__initialised = _value._pd__initialised; @copy_constructor@ return *this; } @discrimtype@ _d() const { return _pd__d;} void _d(@discrimtype@ _value){ @_d_body@ } @implicit_default@ @members@ void operator>>= (cdrStream&) const; void operator<<= (cdrStream&); private: @discrimtype@ _pd__d; _CORBA_Boolean _pd__default; _CORBA_Boolean _pd__initialised; @union@ @outsideUnion@ }; typedef @unionname@::_var_type @unionname@_var; """ union_fix_out_type = """\ typedef @unionname@& @unionname@_out; """ union_variable_out_type = """\ typedef _CORBA_ConstrType_Variable_OUT_arg< @unionname@,@unionname@_var > @unionname@_out; """ union_union = """\ union { @members@ }; """ union_d_fn_body = """\ // illegal to set discriminator before making a member active if (!_pd__initialised) OMNIORB_THROW(BAD_PARAM,_OMNI_NS(BAD_PARAM_InvalidUnionDiscValue),::CORBA::COMPLETED_NO); if (_value == _pd__d) return; // no change @switch@ 
fail: OMNIORB_THROW(BAD_PARAM,_OMNI_NS(BAD_PARAM_InvalidUnionDiscValue),::CORBA::COMPLETED_NO); """ union_constructor_implicit = """\ _default(); """ union_constructor_default = """\ _pd__default = 1; _pd__d = @default@; """ union_implicit_default = """\ void _default() { _pd__initialised = 1; _pd__d = @arbitraryDefault@; _pd__default = 1; } """ union_proxy_float = """ #ifdef USING_PROXY_FLOAT @type@ _pd_@name@@dims@; #endif """ union_noproxy_float = """ #ifndef USING_PROXY_FLOAT @type@ _pd_@name@@dims@; #endif """ union_array_declarator = """\ typedef @memtype@ @prefix@_@name@@dims@; typedef @memtype@ _@name@_slice@tail_dims@; """ union_array = """\ const @memtype@_slice *@name@ () const { return _pd_@name@; } void @name@ (const @const_type@ _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; @loop@ } """ union_any = """\ const @type@ &@name@ () const { return _pd_@name@; } @type@ &@name@ () { return _pd_@name@; } void @name@ (const @type@& _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = _value; } """ union_typecode = """\ ::CORBA::TypeCode_ptr @name@ () const { return _pd_@name@._ptr; } void @name@(::CORBA::TypeCode_ptr _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = ::CORBA::TypeCode::_duplicate(_value); } void @name@(const ::CORBA::TypeCode_member& _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = _value; } void @name@(const ::CORBA::TypeCode_var& _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = _value; } """ union_basic = """\ @type@ @name@ () const { return _pd_@name@; } void @name@ (@type@ _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = _value; } """ union_string = """\ const char * @name@ () const { return (const char*) _pd_@name@; } void @name@(char* _value) { _pd__initialised = 1; 
_pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = _value; } void @name@(const char* _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = _value; } void @name@(const ::CORBA::String_var& _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = _value; } void @name@(const ::CORBA::String_member& _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = _value; } """ union_wstring = """\ const ::CORBA::WChar * @name@ () const { return (const ::CORBA::WChar*) _pd_@name@; } void @name@(::CORBA::WChar* _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = _value; } void @name@(const ::CORBA::WChar* _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = _value; } void @name@(const ::CORBA::WString_var& _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = _value; } void @name@(const ::CORBA::WString_member& _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = _value; } """ union_objref = """\ @ptr_name@ @member@ () const { return _pd_@member@._ptr; } void @member@(@ptr_name@ _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; @Helper_name@::duplicate(_value); _pd_@member@ = _value; } void @member@(const @memtype@& _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@member@ = _value; } void @member@(const @var_name@& _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@member@ = _value; } """ union_constructed = """\ const @type@ &@name@ () const { return _pd_@name@; } @type@ &@name@ () { return _pd_@name@; } void @name@ (const @type@& _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@name@ = _value; } """ 
union_sequence = """\ typedef @sequence_template@ _@member@_seq; const _@member@_seq& @member@ () const { return _pd_@member@; } _@member@_seq& @member@ () { return _pd_@member@; } void @member@ (const _@member@_seq& _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; _pd_@member@ = _value; } """ union_value = """\ @type@* @member@() const { return _pd_@[email protected](); } void @member@(@type@* _value) { _pd__initialised = 1; _pd__d = @discrimvalue@; _pd__default = @isDefault@; ::CORBA::add_ref(_value); _pd_@member@ = _value; } """ union_member = """\ @type@ _pd_@name@@dims@; """ union_forward = """\ class @name@; """ ## ## Enum ## enum = """\ enum @name@ { @memberlist@ /*, __max_@name@=0xffffffff */ }; typedef @name@& @name@_out; """ ## ## Const ## const_inclass_isinteger = """\ static _core_attr const @type@ @name@ _init_in_cldecl_( = @val@ ); """ const_inclass_notinteger = """\ static _core_attr const @type@ @name@; """ const_outsideclass_isinteger = """\ _CORBA_@where@_VARINT const @type@ @name@ _init_in_decl_( = @val@ ); """ const_outsideclass_notinteger = """\ _CORBA_@where@_VAR _core_attr const @type@ @name@; """ ## ## Typecode_ptr ## typecode = """\ @qualifier@ _dyn_attr const ::CORBA::TypeCode_ptr _tc_@name@; """ ## ## Operators ## any_struct = """\ extern void operator<<=(::CORBA::Any& _a, const @fqname@& _s); extern void operator<<=(::CORBA::Any& _a, @fqname@* _sp); extern _CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @fqname@*& _sp); extern _CORBA_Boolean operator>>=(const ::CORBA::Any& _a, const @fqname@*& _sp); """ any_exception = """\ void operator<<=(::CORBA::Any& _a, const @fqname@& _s); void operator<<=(::CORBA::Any& _a, const @fqname@* _sp); _CORBA_Boolean operator>>=(const ::CORBA::Any& _a, const @fqname@*& _sp); """ any_union = """\ void operator<<=(::CORBA::Any& _a, const @fqname@& _s); void operator<<=(::CORBA::Any& _a, @fqname@* _sp); _CORBA_Boolean operator>>=(const ::CORBA::Any& _a, const @fqname@*& 
_sp); _CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @fqname@*& _sp); """ any_enum = """\ void operator<<=(::CORBA::Any& _a, @name@ _s); _CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @name@& _s); """ any_interface = """\ void operator<<=(::CORBA::Any& _a, @fqname@_ptr _s); void operator<<=(::CORBA::Any& _a, @fqname@_ptr* _s); _CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @fqname@_ptr& _s); """ any_array_declarator = """\ void operator<<=(::CORBA::Any& _a, const @fqname@_forany& _s); _CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @fqname@_forany& _s); """ any_sequence = """\ void operator<<=(::CORBA::Any& _a, const @fqname@& _s); void operator<<=(::CORBA::Any& _a, @fqname@* _sp); _CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @fqname@*& _sp); _CORBA_Boolean operator>>=(const ::CORBA::Any& _a, const @fqname@*& _sp); """ any_value = """\ void operator<<=(::CORBA::Any& _a, @fqname@* _s); void operator<<=(::CORBA::Any& _a, @fqname@** _s); _CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @fqname@*& _s); """ enum_operators = """\ inline void operator >>=(@name@ _e, cdrStream& s) { ::operator>>=((::CORBA::ULong)_e, s); } inline void operator <<= (@name@& _e, cdrStream& s) { ::CORBA::ULong @private_prefix@_e; ::operator<<=(@private_prefix@_e,s); if (@private_prefix@_e <= @last_item@) { _e = (@name@) @private_prefix@_e; } else { OMNIORB_THROW(MARSHAL,_OMNI_NS(MARSHAL_InvalidEnumValue), (::CORBA::CompletionStatus)s.completion()); } } """ ## ## tie template ## tie_template = """\ template <class _omniT> class @tie_name@ : public virtual @inherits@ { public: @tie_name@(_omniT& t) : pd_obj(&t), pd_poa(0), pd_rel(0) {} @tie_name@(_omniT& t, ::PortableServer::POA_ptr p) : pd_obj(&t), pd_poa(p), pd_rel(0) {} @tie_name@(_omniT* t, _CORBA_Boolean r=1) : pd_obj(t), pd_poa(0), pd_rel(r) {} @tie_name@(_omniT* t, ::PortableServer::POA_ptr p,_CORBA_Boolean r=1) : pd_obj(t), pd_poa(p), pd_rel(r) {} ~@tie_name@() { if( pd_poa ) ::CORBA::release(pd_poa); if( pd_rel ) 
delete pd_obj; } _omniT* _tied_object() { return pd_obj; } void _tied_object(_omniT& t) { if( pd_rel ) delete pd_obj; pd_obj = &t; pd_rel = 0; } void _tied_object(_omniT* t, _CORBA_Boolean r=1) { if( pd_rel ) delete pd_obj; pd_obj = t; pd_rel = r; } _CORBA_Boolean _is_owner() { return pd_rel; } void _is_owner(_CORBA_Boolean io) { pd_rel = io; } ::PortableServer::POA_ptr _default_POA() { if( !pd_poa ) return ::PortableServer::POA::_the_root_poa(); else return ::PortableServer::POA::_duplicate(pd_poa); } @callables@ private: _omniT* pd_obj; ::PortableServer::POA_ptr pd_poa; _CORBA_Boolean pd_rel; }; """ tie_template_old = """\ template <class _omniT, _CORBA_Boolean release> class @tie_name@ : public virtual @inherits@ { public: @tie_name@(_omniT& t) : pd_obj(&t), pd_rel(release) {} @tie_name@(_omniT* t) : pd_obj(t), pd_rel(release) {} ~@tie_name@() { if( pd_rel ) delete pd_obj; } @callables@ private: _omniT* pd_obj; _CORBA_Boolean pd_rel; }; """ ## ## tc_string ## tcstring = """\ #if !defined(___tc_string_@n@__) && !defined(DISABLE_Unnamed_Bounded_String_TC) #define ___tc_string_@n@__ _CORBA_GLOBAL_VAR _dyn_attr const ::CORBA::TypeCode_ptr _tc_string_@n@; #endif """ ## ## tc_wstring ## tcwstring = """\ #if !defined(___tc_wstring_@n@__) && !defined(DISABLE_Unnamed_Bounded_WString_TC) #define ___tc_wstring_@n@__ _CORBA_GLOBAL_VAR _dyn_attr const ::CORBA::TypeCode_ptr _tc_wstring_@n@; #endif """<|fim▁end|>
"""
<|file_name|>FNV.ts<|end_file_name|><|fim▁begin|>namespace ZincDB { export namespace Hashing { export namespace FNV {<|fim▁hole|> export const fnv1a = function(bytes: Uint8Array | Buffer): number { let hash = 0x811c9dc5; for (let i = 0, len = bytes.length; i < len; i++) { hash ^= bytes[i]; hash += (hash << 24) + (hash << 8) + (hash << 7) + (hash << 4) + (hash << 1); } return hash & 0xffffffff; } } } }<|fim▁end|>
// Based on http://isthe.com/chongo/tech/comp/fnv/
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>use super::rocket; use rocket::testing::MockRequest; use rocket::http::Method::*; #[test] fn hello_world() { let rocket = rocket::ignite().mount("/", routes![super::hello]); let mut req = MockRequest::new(Get, "/"); let mut response = req.dispatch_with(&rocket);<|fim▁hole|> assert_eq!(response.body_string(), Some("Hello, world!".into())); }<|fim▁end|>
<|file_name|>unattended_install.py<|end_file_name|><|fim▁begin|>from __future__ import division
import logging
import time
import re
import os
import tempfile
import threading
import shutil
import stat
import xml.dom.minidom

# ConfigParser was renamed to configparser in Python 3; support both.
try:
    import configparser as ConfigParser
except ImportError:
    import ConfigParser

from avocado.core import exceptions
from avocado.utils import iso9660
from avocado.utils import process
from avocado.utils import crypto
from avocado.utils import download
from virttest import virt_vm
from virttest import asset
from virttest import utils_disk
from virttest import qemu_monitor
from virttest import remote
from virttest import syslog_server
from virttest import http_server
from virttest import data_dir
from virttest import utils_net
from virttest import utils_test
from virttest import utils_misc
from virttest import funcatexit
from virttest import storage
from virttest import error_context
from virttest import qemu_storage
from virttest.compat_52lts import decode_to_text

# Whether to print all shell commands called
DEBUG = False

# Module-level singletons: one background HTTP server thread serving the
# auto-content (install tree), one serving the unattended (answer) file,
# and one embedded syslog server.  Each thread is paired with an Event
# that, once set, tells the corresponding server loop to terminate.
_url_auto_content_server_thread = None
_url_auto_content_server_thread_event = None

_unattended_server_thread = None
_unattended_server_thread_event = None

_syslog_server_thread = None
_syslog_server_thread_event = None


def start_auto_content_server_thread(port, path):
    """
    Start (at most once per process) the HTTP server thread exporting
    the auto-content directory.

    :param port: TCP port the HTTP server listens on.
    :param path: Directory tree exported by the server.
    """
    global _url_auto_content_server_thread
    global _url_auto_content_server_thread_event

    # Only one auto-content server may run per test process.
    if _url_auto_content_server_thread is None:

        _url_auto_content_server_thread_event = threading.Event()
        _url_auto_content_server_thread = threading.Thread(
            target=http_server.http_server,
            args=(port, path, terminate_auto_content_server_thread))
        _url_auto_content_server_thread.start()


def start_unattended_server_thread(port, path):
    """
    Start (at most once per process) the HTTP server thread exporting
    the unattended/answer file directory.

    :param port: TCP port the HTTP server listens on.
    :param path: Directory tree exported by the server.
    """
    global _unattended_server_thread
    global _unattended_server_thread_event

    if _unattended_server_thread is None:

        _unattended_server_thread_event = threading.Event()
        _unattended_server_thread = threading.Thread(
            target=http_server.http_server,
            args=(port, path, terminate_unattended_server_thread))
        _unattended_server_thread.start()


def terminate_auto_content_server_thread():
    """
    Callback polled by the HTTP server loop: return True once the
    auto-content server should shut down (its event has been set).
    """
    global _url_auto_content_server_thread
    global _url_auto_content_server_thread_event

    if _url_auto_content_server_thread is None:
        return False
    if _url_auto_content_server_thread_event is None:
        return False

    # isSet() is the pre-Python-3.9 spelling of Event.is_set().
    if _url_auto_content_server_thread_event.isSet():
        return True

    return False


def terminate_unattended_server_thread():
    """
    Callback polled by the HTTP server loop: return True once the
    unattended-file server should shut down (its event has been set).
    """
    global _unattended_server_thread, _unattended_server_thread_event

    if _unattended_server_thread is None:
        return False
    if _unattended_server_thread_event is None:
        return False

    if _unattended_server_thread_event.isSet():
        return True

    return False


class RemoteInstall(object):

    """
    Represents an install http server that we can master according to our
    needs.
    """

    def __init__(self, path, ip, port, filename):
        """
        Wipe and recreate the exported directory, then start the
        (singleton) unattended HTTP server thread on it.

        :param path: Directory that will back the served content.
        :param ip: IP address clients will use to reach the server.
        :param port: TCP port the server listens on.
        :param filename: Name of the answer file to be served.
        """
        self.path = path
        utils_disk.cleanup(self.path)
        os.makedirs(self.path)
        self.ip = ip
        self.port = port
        self.filename = filename

        start_unattended_server_thread(self.port, self.path)

    def get_url(self):
        # URL the guest uses to fetch the answer file.
        return 'http://%s:%s/%s' % (self.ip, self.port, self.filename)

    def get_answer_file_path(self, filename):
        # Local filesystem path backing a served answer file.
        return os.path.join(self.path, filename)

    def close(self):
        # Make the exported tree readable/traversable by everyone so the
        # HTTP server (and thus the guest) can actually fetch it.
        # NOTE(review): despite its name this logs "successfully created",
        # not a shutdown — it finalizes permissions after setup.
        os.chmod(self.path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP |
                 stat.S_IROTH | stat.S_IXOTH)
        logging.debug("unattended http server %s successfully created",
                      self.get_url())


class UnattendedInstallConfig(object):

    """
    Creates a floppy disk image that will contain a config file for unattended
    OS install. The parameters to the script are retrieved from environment
    variables.
    """

    def __init__(self, test, params, vm):
        """
        Sets class attributes from test parameters.

        :param test: QEMU test object.
        :param params: Dictionary with test parameters.
        :param vm: Virtual machine object the install is performed on.
""" root_dir = data_dir.get_data_dir() self.deps_dir = os.path.join(test.virtdir, 'deps') self.unattended_dir = os.path.join(test.virtdir, 'unattended') self.results_dir = test.debugdir self.params = params self.attributes = ['kernel_args', 'finish_program', 'cdrom_cd1', 'unattended_file', 'medium', 'url', 'kernel', 'initrd', 'nfs_server', 'nfs_dir', 'install_virtio', 'floppy_name', 'cdrom_unattended', 'boot_path', 'kernel_params', 'extra_params', 'qemu_img_binary', 'cdkey', 'finish_program', 'vm_type', 'process_check', 'vfd_size', 'cdrom_mount_point', 'floppy_mount_point', 'cdrom_virtio', 'virtio_floppy', 're_driver_match', 're_hardware_id', 'driver_in_floppy', 'vga', 'unattended_file_kernel_param_name'] for a in self.attributes: setattr(self, a, params.get(a, '')) # Make finish.bat work well with positional arguments if not self.process_check.strip(): # pylint: disable=E0203 self.process_check = '""' # pylint: disable=E0203 # Will setup the virtio attributes v_attributes = ['virtio_floppy', 'virtio_scsi_path', 'virtio_storage_path', 'virtio_network_path', 'virtio_balloon_path', 'virtio_viorng_path', 'virtio_vioser_path', 'virtio_pvpanic_path', 'virtio_vioinput_path', 'virtio_oemsetup_id', 'virtio_network_installer_path', 'virtio_balloon_installer_path', 'virtio_qxl_installer_path'] for va in v_attributes: setattr(self, va, params.get(va, '')) self.tmpdir = test.tmpdir self.qemu_img_binary = utils_misc.get_qemu_img_binary(params) def get_unattended_file(backend): providers = asset.get_test_provider_names(backend) if not providers: return for provider_name in providers: provider_info = asset.get_test_provider_info(provider_name) if backend not in provider_info["backends"]: continue if "path" not in provider_info["backends"][backend]: continue path = provider_info["backends"][backend]["path"] tp_unattended_file = os.path.join(path, self.unattended_file) if os.path.exists(tp_unattended_file): # Using unattended_file from test-provider unattended_file = 
tp_unattended_file # Take the first matched return unattended_file if getattr(self, 'unattended_file'): # Fail-back to general unattended_file unattended_file = os.path.join(test.virtdir, self.unattended_file) for backend in asset.get_known_backends(): found_file = get_unattended_file(backend) if found_file: unattended_file = found_file break self.unattended_file = unattended_file if getattr(self, 'finish_program'): self.finish_program = os.path.join(test.virtdir, self.finish_program) if getattr(self, 'cdrom_cd1'): self.cdrom_cd1 = os.path.join(root_dir, self.cdrom_cd1) self.cdrom_cd1_mount = tempfile.mkdtemp(prefix='cdrom_cd1_', dir=self.tmpdir) if getattr(self, 'cdrom_unattended'): self.cdrom_unattended = os.path.join(root_dir, self.cdrom_unattended) if getattr(self, 'virtio_floppy'): self.virtio_floppy = os.path.join(root_dir, self.virtio_floppy) if getattr(self, 'cdrom_virtio'): self.cdrom_virtio = os.path.join(root_dir, self.cdrom_virtio) if getattr(self, 'kernel'): self.kernel = os.path.join(root_dir, self.kernel) if getattr(self, 'initrd'): self.initrd = os.path.join(root_dir, self.initrd) if self.medium == 'nfs': self.nfs_mount = tempfile.mkdtemp(prefix='nfs_', dir=self.tmpdir) setattr(self, 'floppy', self.floppy_name) if getattr(self, 'floppy'): self.floppy = os.path.join(root_dir, self.floppy) if not os.path.isdir(os.path.dirname(self.floppy)): os.makedirs(os.path.dirname(self.floppy)) self.image_path = os.path.dirname(self.kernel) # Content server params # lookup host ip address for first nic by interface name try: netdst = vm.virtnet[0].netdst # 'netdst' parameter is taken from cartesian config. Sometimes # netdst=<empty>. Call get_ip_address_by_interface() only for case # when netdst= is defined to something. 
            if netdst:
                auto_ip = utils_net.get_ip_address_by_interface(netdst)
            else:
                auto_ip = utils_net.get_host_ip_address(params)
        except utils_net.NetError:
            # Could not determine a host IP; leave it unset and fall back to
            # whatever the explicit parameters provide.
            auto_ip = None

        # Explicit 'url_auto_ip' parameter overrides the detected host IP.
        params_auto_ip = params.get('url_auto_ip', None)
        if params_auto_ip:
            self.url_auto_content_ip = params_auto_ip
        else:
            self.url_auto_content_ip = auto_ip
        self.url_auto_content_port = None

        # Kickstart server params
        # use the same IP as url_auto_content_ip, but a different port
        self.unattended_server_port = None

        # Embedded Syslog Server
        self.syslog_server_enabled = params.get('syslog_server_enabled', 'no')
        self.syslog_server_ip = params.get('syslog_server_ip', auto_ip)
        self.syslog_server_port = int(params.get('syslog_server_port', 5140))
        self.syslog_server_tcp = params.get('syslog_server_proto',
                                            'tcp') == 'tcp'

        self.vm = vm

    @error_context.context_aware
    def get_driver_hardware_id(self, driver, run_cmd=True):
        """
        Get windows driver's hardware id from inf files.

        :param driver: Configurable driver name.
        :param run_cmd: Use hardware id in windows cmd command or not.
        :return: Windows driver's hardware id, or None if it could not be
                 extracted (the failure is logged).
        """
        # Loop-mount the virtio cdrom and floppy images so the .inf files
        # can be read directly from the host.
        if not os.path.exists(self.cdrom_mount_point):
            os.mkdir(self.cdrom_mount_point)
        if not os.path.exists(self.floppy_mount_point):
            os.mkdir(self.floppy_mount_point)
        if not os.path.ismount(self.cdrom_mount_point):
            process.system("mount %s %s -o loop" % (self.cdrom_virtio,
                                                    self.cdrom_mount_point),
                           timeout=60)
        if not os.path.ismount(self.floppy_mount_point):
            process.system("mount %s %s -o loop" % (self.virtio_floppy,
                                                    self.floppy_mount_point),
                           timeout=60)
        drivers_d = []
        driver_link = None
        # Decide whether this driver lives on the floppy or the cdrom:
        # 'driver_in_floppy' lists floppy-resident drivers; default to
        # treating qxl.inf as floppy-resident.
        if self.driver_in_floppy is not None:
            driver_in_floppy = self.driver_in_floppy
            drivers_d = driver_in_floppy.split()
        else:
            drivers_d.append('qxl.inf')
        for driver_d in drivers_d:
            if driver_d in driver:
                driver_link = os.path.join(self.floppy_mount_point, driver)
        if driver_link is None:
            driver_link = os.path.join(self.cdrom_mount_point, driver)
        try:
            txt = open(driver_link, "r").read()
            # Last match of the configured hardware-id regex wins.
            hwid = re.findall(self.re_hardware_id, txt)[-1].rstrip()
            if run_cmd:
                # Escape '&' for the Windows cmd shell.
                hwid = '^&'.join(hwid.split('&'))
            return hwid
        except Exception as e:
            logging.error("Fail to get hardware id with exception: %s" % e)

    @error_context.context_aware
    def update_driver_hardware_id(self, driver):
        """
        Update driver string with the hardware id got from inf files.

        :param driver: Driver string (may contain 'hwid'/'hwidcmd' markers).
        :return: New driver string.
        """
        if 'hwid' in driver:
            # 'hwidcmd' means the id will be embedded in a cmd command, so
            # shell escaping is required.
            if 'hwidcmd' in driver:
                run_cmd = True
            else:
                run_cmd = False
            if self.re_driver_match is not None:
                d_str = self.re_driver_match
            else:
                d_str = "(\S+)\s*hwid"

            drivers_in_floppy = []
            if self.driver_in_floppy is not None:
                drivers_in_floppy = self.driver_in_floppy.split()

            # Default to the cdrom image; switch to the floppy image when the
            # driver is listed as floppy-resident.
            mount_point = self.cdrom_mount_point
            storage_path = self.cdrom_virtio
            for driver_in_floppy in drivers_in_floppy:
                if driver_in_floppy in driver:
                    mount_point = self.floppy_mount_point
                    storage_path = self.virtio_floppy
                    break

            # Extract the inf path (after the drive letter) and normalize
            # backslashes to forward slashes.
            d_link = re.findall(d_str, driver)[0].split(":")[1]
            d_link = "/".join(d_link.split("\\\\")[1:])
            hwid = utils_test.get_driver_hardware_id(d_link, mount_point,
                                                     storage_path,
                                                     run_cmd=run_cmd)
            if hwid:
                driver = driver.replace("hwidcmd", hwid.strip())
            else:
                raise exceptions.TestError("Can not find hwid from the driver"
                                           " inf file")
        return driver

    def answer_kickstart(self, answer_path):
        """
        Replace KVM_TEST_CDKEY (in the unattended file) with the cdkey
        provided for this test and replace the KVM_TEST_MEDIUM with
        the tree url or nfs address provided for this test.

        The rendered contents are written to ``answer_path``.

        :param answer_path: Destination path of the rendered kickstart file.
        :return: None.
        """
        contents = open(self.unattended_file).read()

        # KVM_TEST_CDKEY -> product key (only when one was provided).
        dummy_cdkey_re = r'\bKVM_TEST_CDKEY\b'
        if re.search(dummy_cdkey_re, contents):
            if self.cdkey:
                contents = re.sub(dummy_cdkey_re, self.cdkey, contents)

        # KVM_TEST_MEDIUM -> kickstart install-source directive matching the
        # configured installation medium.
        dummy_medium_re = r'\bKVM_TEST_MEDIUM\b'
        if self.medium in ["cdrom", "kernel_initrd"]:
            content = "cdrom"
        elif self.medium == "url":
            content = "url --url %s" % self.url
        elif self.medium == "nfs":
            content = "nfs --server=%s --dir=%s" % (self.nfs_server,
                                                    self.nfs_dir)
        else:
            raise ValueError("Unexpected installation medium %s" % self.url)

        contents = re.sub(dummy_medium_re, content, contents)

        # RH_SYSTEM_STREAM_ID -> optional stream id parameter.
        dummy_rh_system_stream_id_re = r'\bRH_SYSTEM_STREAM_ID\b'
        if re.search(dummy_rh_system_stream_id_re, contents):
            rh_system_stream_id = self.params.get("rh_system_stream_id", "")
            contents = re.sub(dummy_rh_system_stream_id_re,
                              rh_system_stream_id, contents)

        # KVM_TEST_REPOS -> one 'repo' line per extra repository URL.
        dummy_repos_re = r'\bKVM_TEST_REPOS\b'
        if re.search(dummy_repos_re, contents):
            repo_list = self.params.get("kickstart_extra_repos", "").split()
            lines = ["# Extra repositories"]
            for index, repo_url in enumerate(repo_list, 1):
                line = ("repo --name=extra_repo%d --baseurl=%s --install "
                        "--noverifyssl" % (index, repo_url))
                lines.append(line)
            content = "\n".join(lines)
            contents = re.sub(dummy_repos_re, content, contents)

        # KVM_TEST_LOGGING -> remote syslog directive when the embedded
        # syslog server is enabled, empty otherwise.
        dummy_logging_re = r'\bKVM_TEST_LOGGING\b'
        if re.search(dummy_logging_re, contents):
            if self.syslog_server_enabled == 'yes':
                log = 'logging --host=%s --port=%s --level=debug'
                log = log % (self.syslog_server_ip, self.syslog_server_port)
            else:
                log = ''
            contents = re.sub(dummy_logging_re, log, contents)

        # GRAPHICAL_OR_TEXT -> text install when no VGA device is set.
        dummy_graphical_re = re.compile('GRAPHICAL_OR_TEXT')
        if dummy_graphical_re.search(contents):
            if not self.vga or self.vga.lower() == "none":
                contents = dummy_graphical_re.sub('text', contents)
            else:
                contents = dummy_graphical_re.sub('graphical', contents)

        """
        cmd_only_use_disk is used for specifying disk which will be used during installation.
        """
        if self.params.get("cmd_only_use_disk"):
            insert_info = self.params.get("cmd_only_use_disk") + '\n'
            contents += insert_info

        logging.debug("Unattended install contents:")
        for line in contents.splitlines():
            logging.debug(line)
        with open(answer_path, 'w') as answer_file:
            answer_file.write(contents)

    def answer_windows_ini(self, answer_path):
        """
        Render a Windows winnt.sif/unattend.txt style INI answer file:
        fill in the product key, virtio PnP driver path, installer
        commands and process check, then write it to ``answer_path``.

        :param answer_path: Destination path of the rendered INI file.
        """
        parser = ConfigParser.ConfigParser()
        parser.read(self.unattended_file)
        # First, replacing the CDKEY
        if self.cdkey:
            parser.set('UserData', 'ProductKey', self.cdkey)
        else:
            logging.error("Param 'cdkey' required but not specified for "
                          "this unattended installation")

        # Now, replacing the virtio network driver path, under double quotes
        if self.install_virtio == 'yes':
            parser.set('Unattended', 'OemPnPDriversPath',
                       '"%s"' % self.virtio_network_path)
        else:
            parser.remove_option('Unattended', 'OemPnPDriversPath')

        # Maps placeholder tokens in the answer file to the attribute holding
        # the corresponding installer path.
        dummy_re_dirver = {'KVM_TEST_VIRTIO_NETWORK_INSTALLER':
                           'virtio_network_installer_path',
                           'KVM_TEST_VIRTIO_BALLOON_INSTALLER':
                           'virtio_balloon_installer_path',
                           'KVM_TEST_VIRTIO_QXL_INSTALLER':
                           'virtio_qxl_installer_path'}
        # Build a single alternation regex out of all placeholder tokens.
        dummy_re = ""
        for dummy in dummy_re_dirver:
            if dummy_re:
                dummy_re += "|%s" % dummy
            else:
                dummy_re = dummy

        # Replace the process check in finish command
        dummy_process_re = r'\bPROCESS_CHECK\b'
        for opt in parser.options('GuiRunOnce'):
            check = parser.get('GuiRunOnce', opt)
            if re.search(dummy_process_re, check):
                process_check = re.sub(dummy_process_re,
                                       "%s" % self.process_check,
                                       check)
                parser.set('GuiRunOnce', opt, process_check)
            elif re.findall(dummy_re, check):
                dummy = re.findall(dummy_re, check)[0]
                driver = getattr(self, dummy_re_dirver[dummy])
                if driver.endswith("msi"):
driver = 'msiexec /passive /package ' + driver elif 'INSTALLER' in dummy: driver = self.update_driver_hardware_id(driver) elif driver is None: driver = 'dir' check = re.sub(dummy, driver, check) parser.set('GuiRunOnce', opt, check) # Now, writing the in memory config state to the unattended file fp = open(answer_path, 'w') parser.write(fp) fp.close() # Let's read it so we can debug print the contents fp = open(answer_path, 'r') contents = fp.read() fp.close() logging.debug("Unattended install contents:") for line in contents.splitlines(): logging.debug(line) def answer_windows_xml(self, answer_path): doc = xml.dom.minidom.parse(self.unattended_file) if self.cdkey: # First, replacing the CDKEY product_key = doc.getElementsByTagName('ProductKey')[0] if product_key.getElementsByTagName('Key'): key = product_key.getElementsByTagName('Key')[0] key_text = key.childNodes[0] else: key_text = product_key.childNodes[0] assert key_text.nodeType == doc.TEXT_NODE key_text.data = self.cdkey else: logging.error("Param 'cdkey' required but not specified for " "this unattended installation") # Now, replacing the virtio driver paths or removing the entire # component PnpCustomizationsWinPE Element Node if self.install_virtio == 'yes': paths = doc.getElementsByTagName("Path") values = [self.virtio_scsi_path, self.virtio_storage_path, self.virtio_network_path, self.virtio_balloon_path, self.virtio_viorng_path, self.virtio_vioser_path, self.virtio_pvpanic_path, self.virtio_vioinput_path] # XXX: Force to replace the drive letter which loaded the # virtio driver by the specified letter. 
letter = self.params.get('virtio_drive_letter') if letter is not None: values = (re.sub(r'^\w+', letter, val) for val in values) for path, value in list(zip(paths, values)): if value: path_text = path.childNodes[0] assert path_text.nodeType == doc.TEXT_NODE path_text.data = value else: settings = doc.getElementsByTagName("settings") for s in settings: for c in s.getElementsByTagName("component"): if (c.getAttribute('name') == "Microsoft-Windows-PnpCustomizationsWinPE"): s.removeChild(c) # Last but not least important, replacing the virtio installer command # And process check in finish command command_lines = doc.getElementsByTagName("CommandLine") dummy_re_dirver = {'KVM_TEST_VIRTIO_NETWORK_INSTALLER': 'virtio_network_installer_path', 'KVM_TEST_VIRTIO_BALLOON_INSTALLER': 'virtio_balloon_installer_path', 'KVM_TEST_VIRTIO_QXL_INSTALLER': 'virtio_qxl_installer_path'} process_check_re = 'PROCESS_CHECK' dummy_re = "" for dummy in dummy_re_dirver: if dummy_re: dummy_re += "|%s" % dummy else: dummy_re = dummy for command_line in command_lines: command_line_text = command_line.childNodes[0] assert command_line_text.nodeType == doc.TEXT_NODE if re.findall(dummy_re, command_line_text.data): dummy = re.findall(dummy_re, command_line_text.data)[0] driver = getattr(self, dummy_re_dirver[dummy]) if driver.endswith("msi"): driver = 'msiexec /passive /package ' + driver elif 'INSTALLER' in dummy: driver = self.update_driver_hardware_id(driver) t = command_line_text.data t = re.sub(dummy_re, driver, t) command_line_text.data = t if process_check_re in command_line_text.data: t = command_line_text.data t = re.sub(process_check_re, self.process_check, t) command_line_text.data = t contents = doc.toxml() logging.debug("Unattended install contents:") for line in contents.splitlines(): logging.debug(line) fp = open(answer_path, 'w') doc.writexml(fp) fp.close() def answer_suse_xml(self, answer_path): # There's nothing to replace on SUSE files to date. Yay! 
doc = xml.dom.minidom.parse(self.unattended_file) contents = doc.toxml() logging.debug("Unattended install contents:") for line in contents.splitlines(): logging.debug(line) fp = open(answer_path, 'w') doc.writexml(fp) fp.close() def preseed_initrd(self): """ Puts a preseed file inside a gz compressed initrd file. Debian and Ubuntu use preseed as the OEM install mechanism. The only way to get fully automated setup without resorting to kernel params is to add a preseed.cfg file at the root of the initrd image. """ logging.debug("Remastering initrd.gz file with preseed file") dest_fname = 'preseed.cfg' remaster_path = os.path.join(self.image_path, "initrd_remaster") if not os.path.isdir(remaster_path): os.makedirs(remaster_path) base_initrd = os.path.basename(self.initrd) os.chdir(remaster_path) process.run("gzip -d < ../%s | fakeroot cpio --extract --make-directories " "--no-absolute-filenames" % base_initrd, verbose=DEBUG, shell=True) process.run("cp %s %s" % (self.unattended_file, dest_fname), verbose=DEBUG) # For libvirt initrd.gz will be renamed to initrd.img in setup_cdrom() process.run("find . | fakeroot cpio -H newc --create | gzip -9 > ../%s" % base_initrd, verbose=DEBUG, shell=True) os.chdir(self.image_path) process.run("rm -rf initrd_remaster", verbose=DEBUG) contents = open(self.unattended_file).read() logging.debug("Unattended install contents:") for line in contents.splitlines(): logging.debug(line) def set_unattended_param_in_kernel(self, unattended_file_url): ''' Check if kernel parameter that sets the unattended installation file is present. Add the parameter with the passed URL if it does not exist, otherwise replace the existing URL. 
:param unattended_file_url: URL to unattended installation file :return: modified kernel parameters ''' unattended_param = '%s=%s' % (self.unattended_file_kernel_param_name, unattended_file_url) if '%s=' % self.unattended_file_kernel_param_name in self.kernel_params: kernel_params = re.sub('%s=[\w\d:\-\./]+' % (self.unattended_file_kernel_param_name), unattended_param, self.kernel_params) else: kernel_params = '%s %s' % (self.kernel_params, unattended_param) return kernel_params def setup_unattended_http_server(self): ''' Setup a builtin http server for serving the kickstart/preseed file Does nothing if unattended file is not a kickstart/preseed file ''' if self.unattended_file.endswith('.ks') or self.unattended_file.endswith('.preseed'): # Red Hat kickstart install or Ubuntu preseed install dest_fname = 'ks.cfg' answer_path = os.path.join(self.tmpdir, dest_fname) self.answer_kickstart(answer_path) if self.unattended_server_port is None: self.unattended_server_port = utils_misc.find_free_port( 8000, 8099, self.url_auto_content_ip) start_unattended_server_thread(self.unattended_server_port, self.tmpdir) else: return # Point installation to this kickstart url unattended_file_url = 'http://%s:%s/%s' % (self.url_auto_content_ip, self.unattended_server_port, dest_fname) kernel_params = self.set_unattended_param_in_kernel( unattended_file_url) # reflect change on params self.kernel_params = kernel_params def setup_boot_disk(self): if self.unattended_file.endswith('.sif'): dest_fname = 'winnt.sif' setup_file = 'winnt.bat' boot_disk = utils_disk.FloppyDisk(self.floppy, self.qemu_img_binary, self.tmpdir, self.vfd_size) answer_path = boot_disk.get_answer_file_path(dest_fname) self.answer_windows_ini(answer_path) setup_file_path = os.path.join(self.unattended_dir, setup_file) boot_disk.copy_to(setup_file_path) if self.install_virtio == "yes": boot_disk.setup_virtio_win2003(self.virtio_floppy, self.virtio_oemsetup_id) boot_disk.copy_to(self.finish_program) elif 
self.unattended_file.endswith('.ks'): # Red Hat kickstart install dest_fname = 'ks.cfg' if self.params.get('unattended_delivery_method') == 'integrated': unattended_file_url = 'cdrom:/dev/sr0:/isolinux/%s' % ( dest_fname) kernel_params = self.set_unattended_param_in_kernel( unattended_file_url) # Standard setting is kickstart disk in /dev/sr0 and # install cdrom in /dev/sr1. As we merge them together, # we need to change repo configuration to /dev/sr0 if 'repo=cdrom' in kernel_params: kernel_params = re.sub('repo=cdrom[:\w\d\-/]*', 'repo=cdrom:/dev/sr0', kernel_params) self.kernel_params = None boot_disk = utils_disk.CdromInstallDisk( self.cdrom_unattended, self.tmpdir, self.cdrom_cd1_mount, kernel_params) elif self.params.get('unattended_delivery_method') == 'url': if self.unattended_server_port is None: self.unattended_server_port = utils_misc.find_free_port( 8000, 8099, self.url_auto_content_ip) path = os.path.join(os.path.dirname(self.cdrom_unattended), 'ks') boot_disk = RemoteInstall(path, self.url_auto_content_ip, self.unattended_server_port, dest_fname) unattended_file_url = boot_disk.get_url() kernel_params = self.set_unattended_param_in_kernel( unattended_file_url) # Standard setting is kickstart disk in /dev/sr0 and # install cdrom in /dev/sr1. 
When we get ks via http, # we need to change repo configuration to /dev/sr0 kernel_params = re.sub('repo=cdrom[:\w\d\-/]*', 'repo=cdrom:/dev/sr0', kernel_params) self.kernel_params = kernel_params elif self.params.get('unattended_delivery_method') == 'cdrom': boot_disk = utils_disk.CdromDisk(self.cdrom_unattended, self.tmpdir) elif self.params.get('unattended_delivery_method') == 'floppy': boot_disk = utils_disk.FloppyDisk(self.floppy, self.qemu_img_binary, self.tmpdir, self.vfd_size) ks_param = '%s=floppy' % self.unattended_file_kernel_param_name kernel_params = self.kernel_params if '%s=' % self.unattended_file_kernel_param_name in kernel_params: # Reading ks from floppy directly doesn't work in some OS, # options 'ks=hd:/dev/fd0' can reading ks from mounted # floppy, so skip repace it; if not re.search("fd\d+", kernel_params): kernel_params = re.sub('%s=[\w\d\-:\./]+' % (self.unattended_file_kernel_param_name), ks_param, kernel_params) else: kernel_params = '%s %s' % (kernel_params, ks_param) kernel_params = re.sub('repo=cdrom[:\w\d\-/]*', 'repo=cdrom:/dev/sr0', kernel_params) self.kernel_params = kernel_params else: raise ValueError("Neither cdrom_unattended nor floppy set " "on the config file, please verify") answer_path = boot_disk.get_answer_file_path(dest_fname) self.answer_kickstart(answer_path) elif self.unattended_file.endswith('.xml'): if "autoyast" in self.kernel_params: # SUSE autoyast install dest_fname = "autoinst.xml" if (self.cdrom_unattended and self.params.get('unattended_delivery_method') == 'cdrom'): boot_disk = utils_disk.CdromDisk(self.cdrom_unattended, self.tmpdir) elif self.floppy: autoyast_param = 'autoyast=device://fd0/autoinst.xml' kernel_params = self.kernel_params if 'autoyast=' in kernel_params: kernel_params = re.sub('autoyast=[\w\d\-:\./]+', autoyast_param, kernel_params) else: kernel_params = '%s %s' % ( kernel_params, autoyast_param) self.kernel_params = kernel_params boot_disk = utils_disk.FloppyDisk(self.floppy, 
self.qemu_img_binary, self.tmpdir, self.vfd_size) else: raise ValueError("Neither cdrom_unattended nor floppy set " "on the config file, please verify") answer_path = boot_disk.get_answer_file_path(dest_fname) self.answer_suse_xml(answer_path) else: # Windows unattended install dest_fname = "autounattend.xml" if self.params.get('unattended_delivery_method') == 'cdrom': boot_disk = utils_disk.CdromDisk(self.cdrom_unattended, self.tmpdir) if self.install_virtio == "yes": boot_disk.setup_virtio_win2008(self.virtio_floppy, self.cdrom_virtio) else: self.cdrom_virtio = None else: boot_disk = utils_disk.FloppyDisk(self.floppy, self.qemu_img_binary, self.tmpdir, self.vfd_size) if self.install_virtio == "yes": boot_disk.setup_virtio_win2008(self.virtio_floppy) answer_path = boot_disk.get_answer_file_path(dest_fname) self.answer_windows_xml(answer_path) boot_disk.copy_to(self.finish_program) else: raise ValueError('Unknown answer file type: %s' % self.unattended_file) boot_disk.close() @error_context.context_aware def setup_cdrom(self): """ Mount cdrom and copy vmlinuz and initrd.img. 
""" error_context.context("Copying vmlinuz and initrd.img from install cdrom %s" % self.cdrom_cd1) if not os.path.isdir(self.image_path): os.makedirs(self.image_path) if (self.params.get('unattended_delivery_method') in ['integrated', 'url']): i = iso9660.Iso9660Mount(self.cdrom_cd1) self.cdrom_cd1_mount = i.mnt_dir else: i = iso9660.iso9660(self.cdrom_cd1) if i is None: raise exceptions.TestFail("Could not instantiate an iso9660 class") i.copy(os.path.join(self.boot_path, os.path.basename(self.kernel)), self.kernel) assert(os.path.getsize(self.kernel) > 0) i.copy(os.path.join(self.boot_path, os.path.basename(self.initrd)), self.initrd) assert(os.path.getsize(self.initrd) > 0) if self.unattended_file.endswith('.preseed'): self.preseed_initrd() if self.params.get("vm_type") == "libvirt": if self.vm.driver_type == 'qemu': # Virtinstall command needs files "vmlinuz" and "initrd.img" os.chdir(self.image_path) base_kernel = os.path.basename(self.kernel) base_initrd = os.path.basename(self.initrd) if base_kernel != 'vmlinuz': process.run("mv %s vmlinuz" % base_kernel, verbose=DEBUG) if base_initrd != 'initrd.img': process.run("mv %s initrd.img" % base_initrd, verbose=DEBUG) if (self.params.get('unattended_delivery_method') != 'integrated'): i.close() utils_disk.cleanup(self.cdrom_cd1_mount) elif ((self.vm.driver_type == 'xen') and (self.params.get('hvm_or_pv') == 'pv')): logging.debug("starting unattended content web server") self.url_auto_content_port = utils_misc.find_free_port(8100, 8199, self.url_auto_content_ip) start_auto_content_server_thread(self.url_auto_content_port, self.cdrom_cd1_mount) self.medium = 'url' self.url = ('http://%s:%s' % (self.url_auto_content_ip, self.url_auto_content_port)) pxe_path = os.path.join( os.path.dirname(self.image_path), 'xen') if not os.path.isdir(pxe_path): os.makedirs(pxe_path) pxe_kernel = os.path.join(pxe_path, os.path.basename(self.kernel)) pxe_initrd = os.path.join(pxe_path, os.path.basename(self.initrd)) process.run("cp %s 
%s" % (self.kernel, pxe_kernel)) process.run("cp %s %s" % (self.initrd, pxe_initrd)) if 'repo=cdrom' in self.kernel_params: # Red Hat self.kernel_params = re.sub('repo=[:\w\d\-/]*', 'repo=http://%s:%s' % (self.url_auto_content_ip, self.url_auto_content_port), self.kernel_params) @error_context.context_aware def setup_url_auto(self): """ Configures the builtin web server for serving content """ auto_content_url = 'http://%s:%s' % (self.url_auto_content_ip, self.url_auto_content_port) self.params['auto_content_url'] = auto_content_url @error_context.context_aware def setup_url(self): """ Download the vmlinuz and initrd.img from URL. """ # it's only necessary to download kernel/initrd if running bare qemu if self.vm_type == 'qemu': error_context.context( "downloading vmlinuz/initrd.img from %s" % self.url) if not os.path.exists(self.image_path): os.mkdir(self.image_path) os.chdir(self.image_path) kernel_basename = os.path.basename(self.kernel) initrd_basename = os.path.basename(self.initrd) sha1sum_kernel_cmd = 'sha1sum %s' % kernel_basename sha1sum_kernel_output = decode_to_text(process.system_output(sha1sum_kernel_cmd, ignore_status=True, verbose=DEBUG)) try: sha1sum_kernel = sha1sum_kernel_output.split()[0] except IndexError: sha1sum_kernel = '' sha1sum_initrd_cmd = 'sha1sum %s' % initrd_basename sha1sum_initrd_output = decode_to_text(process.system_output(sha1sum_initrd_cmd, ignore_status=True, verbose=DEBUG)) try: sha1sum_initrd = sha1sum_initrd_output.split()[0] except IndexError: sha1sum_initrd = '' url_kernel = os.path.join(self.url, self.boot_path, os.path.basename(self.kernel)) url_initrd = os.path.join(self.url, self.boot_path, os.path.basename(self.initrd)) if not sha1sum_kernel == self.params.get('sha1sum_vmlinuz', None): if os.path.isfile(self.kernel): os.remove(self.kernel) logging.info('Downloading %s -> %s', url_kernel, self.image_path) download.get_file(url_kernel, os.path.join(self.image_path, os.path.basename(self.kernel))) if not sha1sum_initrd == 
self.params.get('sha1sum_initrd', None): if os.path.isfile(self.initrd): os.remove(self.initrd) logging.info('Downloading %s -> %s', url_initrd, self.image_path) download.get_file(url_initrd, os.path.join(self.image_path, os.path.basename(self.initrd))) if 'repo=cdrom' in self.kernel_params: # Red Hat self.kernel_params = re.sub('repo=[:\w\d\-/]*', 'repo=%s' % self.url, self.kernel_params) elif 'autoyast=' in self.kernel_params: # SUSE self.kernel_params = ( self.kernel_params + " ip=dhcp install=" + self.url) elif self.vm_type == 'libvirt': logging.info("Not downloading vmlinuz/initrd.img from %s, " "letting virt-install do it instead") else: logging.info("No action defined/needed for the current virt " "type: '%s'" % self.vm_type) def setup_nfs(self): """ Copy the vmlinuz and initrd.img from nfs. """ error_context.context( "copying the vmlinuz and initrd.img from NFS share") m_cmd = ("mount %s:%s %s -o ro" % (self.nfs_server, self.nfs_dir, self.nfs_mount)) process.run(m_cmd, verbose=DEBUG) if not os.path.isdir(self.image_path): os.makedirs(self.image_path) try: kernel_fetch_cmd = ("cp %s/%s/%s %s" % (self.nfs_mount, self.boot_path, os.path.basename(self.kernel), self.image_path)) process.run(kernel_fetch_cmd, verbose=DEBUG) initrd_fetch_cmd = ("cp %s/%s/%s %s" % (self.nfs_mount, self.boot_path, os.path.basename(self.initrd), self.image_path)) process.run(initrd_fetch_cmd, verbose=DEBUG) finally: utils_disk.cleanup(self.nfs_mount) if 'autoyast=' in self.kernel_params: # SUSE self.kernel_params = (self.kernel_params + " ip=dhcp " "install=nfs://" + self.nfs_server + ":" + self.nfs_dir) def setup_import(self): self.unattended_file = None self.kernel_params = None def setup(self): """ Configure the environment for unattended install. Uses an appropriate strategy according to each install model. 
""" logging.info("Starting unattended install setup") if DEBUG: utils_misc.display_attributes(self) if self.syslog_server_enabled == 'yes': start_syslog_server_thread(self.syslog_server_ip, self.syslog_server_port, self.syslog_server_tcp) if self.medium in ["cdrom", "kernel_initrd"]: if self.kernel and self.initrd: self.setup_cdrom() elif self.medium == "url": self.setup_url() elif self.medium == "nfs": self.setup_nfs() elif self.medium == "import": self.setup_import() else: raise ValueError("Unexpected installation method %s" % self.medium) if self.unattended_file: if self.floppy or self.cdrom_unattended: self.setup_boot_disk() if self.params.get("store_boot_disk") == "yes": logging.info("Storing the boot disk to result directory " "for further debug") src_dir = self.floppy or self.cdrom_unattended dst_dir = self.results_dir shutil.copy(src_dir, dst_dir) else: self.setup_unattended_http_server() # Update params dictionary as some of the values could be updated for a in self.attributes: self.params[a] = getattr(self, a) def start_syslog_server_thread(address, port, tcp): global _syslog_server_thread global _syslog_server_thread_event syslog_server.set_default_format('[UnattendedSyslog ' '(%s.%s)] %s') if _syslog_server_thread is None: _syslog_server_thread_event = threading.Event() _syslog_server_thread = threading.Thread( target=syslog_server.syslog_server, args=(address, port, tcp, terminate_syslog_server_thread)) _syslog_server_thread.start() def terminate_syslog_server_thread(): global _syslog_server_thread, _syslog_server_thread_event if _syslog_server_thread is None: return False if _syslog_server_thread_event is None: return False if _syslog_server_thread_event.isSet(): return True return False def copy_file_from_nfs(src, dst, mount_point, image_name): logging.info("Test failed before the install process start." 
" So just copy a good image from nfs for following tests.") utils_misc.mount(src, mount_point, "nfs", perm="ro") image_src = utils_misc.get_path(mount_point, image_name) shutil.copy(image_src, dst) utils_misc.umount(src, mount_point, "nfs") def string_in_serial_log(serial_log_file_path, string): """ Check if string appears in serial console log file. :param serial_log_file_path: Path to the installation serial log file. :param string: String to look for in serial log file. :return: Whether the string is found in serial log file. :raise: IOError: Serial console log file could not be read. """ if not string: return with open(serial_log_file_path, 'r') as serial_log_file: serial_log_msg = serial_log_file.read() if string in serial_log_msg: logging.debug("Message read from serial console log: %s", string) return True else: return False def attempt_to_log_useful_files(test, vm): """ Tries to use ssh or serial_console to get logs from usual locations. """ if not vm.is_alive(): return base_dst_dir = os.path.join(test.outputdir, vm.name) sessions = [] close = [] try: try: session = vm.wait_for_login() close.append(session) sessions.append(session) except Exception as details: pass if vm.serial_console: sessions.append(vm.serial_console) for i, console in enumerate(sessions): failures = False try: console.cmd("true") except Exception as details: logging.info("Skipping log_useful_files #%s: %s", i, details) continue failures = False for path_glob in ["/*.log", "/tmp/*.log", "/var/tmp/*.log"]: try: status, paths = console.cmd_status_output("ls -1 %s" % path_glob) if status: continue except Exception as details: failures = True continue for path in paths.splitlines(): if not path: continue if path.startswith(os.path.sep): rel_path = path[1:] else: rel_path = path dst = os.path.join(test.outputdir, vm.name, str(i), rel_path) dst_dir = os.path.dirname(dst) if not os.path.exists(dst_dir): os.makedirs(dst_dir) with open(dst, 'w') as fd_dst: try: fd_dst.write(console.cmd("cat %s" % 
path)) logging.info('Attached "%s" log file from guest ' 'at "%s"', path, base_dst_dir) except Exception as details: logging.warning("Unknown exception while " "attempt_to_log_useful_files(): " "%s", details) fd_dst.write("Unknown exception while getting " "content: %s" % details) failures = True if not failures: # All commands succeeded, no need to use next session break finally: for session in close: session.close() @error_context.context_aware def run(test, params, env): """ Unattended install test: 1) Starts a VM with an appropriated setup to start an unattended OS install. 2) Wait until the install reports to the install watcher its end. :param test: QEMU test object. :param params: Dictionary with the test parameters.<|fim▁hole|> :param env: Dictionary with test environment. """ @error_context.context_aware def copy_images(): error_context.base_context( "Copy image from NFS after installation failure") image_copy_on_error = params.get("image_copy_on_error", "no") if image_copy_on_error == "yes": logging.info("Running image_copy to copy pristine image from NFS.") try: error_context.context( "Quit qemu-kvm before copying guest image") vm.monitor.quit() except Exception as e: logging.warn(e) from virttest import utils_test error_context.context("Copy image from NFS Server") image = params.get("images").split()[0] t_params = params.object_params(image) qemu_image = qemu_storage.QemuImg(t_params, data_dir.get_data_dir(), image) ver_to = utils_test.get_image_version(qemu_image) utils_test.run_image_copy(test, params, env) qemu_image = qemu_storage.QemuImg(t_params, data_dir.get_data_dir(), image) ver_from = utils_test.get_image_version(qemu_image) utils_test.update_qcow2_image_version(qemu_image, ver_from, ver_to) src = params.get('images_good') vt_data_dir = data_dir.get_data_dir() base_dir = params.get("images_base_dir", vt_data_dir) dst = storage.get_image_filename(params, base_dir) if params.get("storage_type") == "iscsi": dd_cmd = "dd if=/dev/zero of=%s bs=1M 
count=1" % dst txt = "iscsi used, need destroy data in %s" % dst txt += " by command: %s" % dd_cmd logging.info(txt) process.system(dd_cmd) image_name = os.path.basename(dst) mount_point = params.get("dst_dir") if mount_point and src: funcatexit.register(env, params.get("type"), copy_file_from_nfs, src, dst, mount_point, image_name) vm = env.get_vm(params["main_vm"]) local_dir = params.get("local_dir", os.path.abspath(vt_data_dir)) local_dir = utils_misc.get_path(vt_data_dir, local_dir) for media in params.get("copy_to_local", "").split(): media_path = params.get(media) if not media_path: logging.warn("Media '%s' is not available, will not " "be copied into local directory", media) continue media_name = os.path.basename(media_path) nfs_link = utils_misc.get_path(vt_data_dir, media_path) local_link = os.path.join(local_dir, media_name) if os.path.isfile(local_link): file_hash = crypto.hash_file(local_link, algorithm="md5") expected_hash = crypto.hash_file(nfs_link, algorithm="md5") if file_hash == expected_hash: continue msg = "Copy %s to %s in local host." 
% (media_name, local_link) error_context.context(msg, logging.info) download.get_file(nfs_link, local_link) params[media] = local_link unattended_install_config = UnattendedInstallConfig(test, params, vm) unattended_install_config.setup() # params passed explicitly, because they may have been updated by # unattended install config code, such as when params['url'] == auto vm.create(params=params) install_error_str = params.get("install_error_str") install_error_exception_str = ("Installation error reported in serial " "console log: %s" % install_error_str) rh_upgrade_error_str = params.get("rh_upgrade_error_str", "RH system upgrade failed") post_finish_str = params.get("post_finish_str", "Post set up finished") install_timeout = int(params.get("install_timeout", 4800)) wait_ack = params.get("wait_no_ack", "no") == "no" migrate_background = params.get("migrate_background") == "yes" if migrate_background: mig_timeout = float(params.get("mig_timeout", "3600")) mig_protocol = params.get("migration_protocol", "tcp") logging.info("Waiting for installation to finish. Timeout set to %d s " "(%d min)", install_timeout, install_timeout // 60) error_context.context("waiting for installation to finish") start_time = time.time() log_file = vm.serial_console_log if log_file is None: raise virt_vm.VMConfigMissingError(vm.name, "serial") logging.debug("Monitoring serial console log for completion message: %s", log_file) serial_read_fails = 0 # As the install process start, we may need collect information from # the image. So use the test case instead this simple function in the # following code. 
if mount_point and src: funcatexit.unregister(env, params.get("type"), copy_file_from_nfs, src, dst, mount_point, image_name) send_key_timeout = int(params.get("send_key_timeout", 60)) kickstart_reboot_bug = params.get("kickstart_reboot_bug", "no") == "yes" while (time.time() - start_time) < install_timeout: try: vm.verify_alive() if (params.get("send_key_at_install") and (time.time() - start_time) < send_key_timeout): vm.send_key(params.get("send_key_at_install")) # Due to a race condition, sometimes we might get a MonitorError # before the VM gracefully shuts down, so let's capture MonitorErrors. except (virt_vm.VMDeadError, qemu_monitor.MonitorError) as e: if wait_ack: try: install_error_str_found = string_in_serial_log( log_file, install_error_str) rh_upgrade_error_str_found = string_in_serial_log( log_file, rh_upgrade_error_str) post_finish_str_found = string_in_serial_log( log_file, post_finish_str) except IOError: logging.warn("Could not read final serial log file") else: if install_error_str_found: raise exceptions.TestFail(install_error_exception_str) if rh_upgrade_error_str_found: raise exceptions.TestFail("rh system upgrade failed, please " "check serial log") if post_finish_str_found: break # Bug `reboot` param from the kickstart is not actually restarts # the VM instead it shutsoff this is temporary workaround # for the test to proceed if unattended_install_config.unattended_file: with open(unattended_install_config.unattended_file) as unattended_fd: reboot_in_unattended = "reboot" in unattended_fd.read() if (reboot_in_unattended and kickstart_reboot_bug and not vm.is_alive()): try: vm.start() break except: logging.warn("Failed to start unattended install " "image workaround reboot kickstart " "parameter bug") # Print out the original exception before copying images. 
logging.error(e) copy_images() raise e else: break try: test.verify_background_errors() except Exception as e: attempt_to_log_useful_files(test, vm) copy_images() raise e if wait_ack: try: install_error_str_found = string_in_serial_log( log_file, install_error_str) rh_upgrade_error_str_found = string_in_serial_log( log_file, rh_upgrade_error_str) post_finish_str_found = string_in_serial_log( log_file, post_finish_str) except IOError: # Only make noise after several failed reads serial_read_fails += 1 if serial_read_fails > 10: logging.warn( "Cannot read from serial log file after %d tries", serial_read_fails) else: if install_error_str_found: attempt_to_log_useful_files(test, vm) raise exceptions.TestFail(install_error_exception_str) if rh_upgrade_error_str_found: raise exceptions.TestFail("rh system upgrade failed, please " "check serial log") if post_finish_str_found: break # Due to libvirt automatically start guest after import # we only need to wait for successful login. if params.get("medium") == "import": try: vm.login() break except (remote.LoginError, Exception) as e: pass if migrate_background: vm.migrate(timeout=mig_timeout, protocol=mig_protocol) else: time.sleep(1) else: logging.warn("Timeout elapsed while waiting for install to finish ") attempt_to_log_useful_files(test, vm) copy_images() raise exceptions.TestFail("Timeout elapsed while waiting for install to " "finish") logging.debug('cleaning up threads and mounts that may be active') global _url_auto_content_server_thread global _url_auto_content_server_thread_event if _url_auto_content_server_thread is not None: _url_auto_content_server_thread_event.set() _url_auto_content_server_thread.join(3) _url_auto_content_server_thread = None utils_disk.cleanup(unattended_install_config.cdrom_cd1_mount) global _unattended_server_thread global _unattended_server_thread_event if _unattended_server_thread is not None: _unattended_server_thread_event.set() _unattended_server_thread.join(3) 
_unattended_server_thread = None global _syslog_server_thread global _syslog_server_thread_event if _syslog_server_thread is not None: _syslog_server_thread_event.set() _syslog_server_thread.join(3) _syslog_server_thread = None time_elapsed = time.time() - start_time logging.info("Guest reported successful installation after %d s (%d min)", time_elapsed, time_elapsed // 60) if params.get("shutdown_cleanly", "yes") == "yes": shutdown_cleanly_timeout = int(params.get("shutdown_cleanly_timeout", 120)) logging.info("Wait for guest to shutdown cleanly") if params.get("medium", "cdrom") == "import": vm.shutdown() try: if utils_misc.wait_for(vm.is_dead, shutdown_cleanly_timeout, 1, 1): logging.info("Guest managed to shutdown cleanly") except qemu_monitor.MonitorError as e: logging.warning("Guest apparently shut down, but got a " "monitor error: %s", e)<|fim▁end|>
<|file_name|>TestPlugins.js<|end_file_name|><|fim▁begin|>/* ************************************************************************ * * qxcompiler - node.js based replacement for the Qooxdoo python * toolchain * * https://github.com/qooxdoo/qooxdoo-compiler * * Copyright: * 2011-2018 Zenesis Limited, http://www.zenesis.com * * License: * MIT: https://opensource.org/licenses/MIT * * This software is provided under the same licensing terms as Qooxdoo, * please see the LICENSE file in the Qooxdoo project's top-level directory * for details. * * Authors: * * John Spackman ([email protected], @johnspackman) * * ************************************************************************/ qx.Class.define("testapp.test.TestPlugins", { extend: qx.dev.unit.TestCase, <|fim▁hole|> var plugin = new testapp.plugins.PluginOne(); this.assertEquals("testapp.plugins.PluginOne: Plugin One Hello\n", plugin.sayHello()); }, this); qx.io.PartLoader.require(["pluginFramework", "pluginTwo"], () => { this.debug("pluginTwo loaded"); var plugin = new testapp.plugins.PluginTwo(); this.assertEquals("testapp.plugins.PluginTwo: Plugin One Hello\n", plugin.sayHello()); }, this); } } });<|fim▁end|>
members: { testSimple: function() { qx.io.PartLoader.require(["pluginFramework", "pluginOne"], () => { this.debug("pluginOne loaded");
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import include, url from ginger.views import utils __all__ = ('include', 'url', 'scan', 'scan_to_include') def scan(module, predicate=None): view_classes = utils.find_views(module, predicate=predicate) urls = [] for view in view_classes: if hasattr(view, 'as_urls'): urls.extend(view.as_urls()) else: urls.append(view.as_url()) pattern = urls return pattern def scan_to_include(module, predicate=None, app_name=None, namespace=None):<|fim▁hole|><|fim▁end|>
return scan(module, predicate), app_name, namespace
<|file_name|>give_player_dialog.hpp<|end_file_name|><|fim▁begin|>class GivePlayerDialog { idd = -1; movingenable = 0; enableSimulation = true; class controlsBackground { class Life_RscTitleBackground2:Life_RscText { colorBackground[] = {"(profilenamespace getvariable ['GUI_BCG_RGB_R',0.3843])", "(profilenamespace getvariable ['GUI_BCG_RGB_G',0.7019])", "(profilenamespace getvariable ['GUI_BCG_RGB_B',0.8862])", "(profilenamespace getvariable ['GUI_BCG_RGB_A',0.7])"}; idc = -1; x = 0.35;<|fim▁hole|> }; class MainBackground2:Life_RscText { colorBackground[] = {0, 0, 0, 0.7}; idc = -1; x = 0.35; y = 0.2 + (11 / 250); w = 0.3; h = 0.6 - (22 / 250); }; }; class Controls { class CashTitle5 : Life_RscStructuredText { idc = 2710; text = "You"; colorText[] = {0.8784,0.8471,0.651,1}; x = 0.39; y = 0.26; w = 0.3; h = 0.2; }; class RscTextT_10052 : RscTextT { idc = 14001; text = ""; colorText[] = {1,1,1,1}; x = 0.39; y = 0.27; w = 0.6; h = 0.2; }; class moneyEdit2 : Life_RscEdit { idc = 14000; colorText[] = {0.8784,0.8471,0.651,1}; text = "1"; sizeEx = 0.030; x = 0.4; y = 0.41; w = 0.2; h = 0.03; }; class Title2 : Life_RscTitle { colorBackground[] = {0, 0, 0, 0}; idc = -1; text = "Transfer Coins"; colorText[] = {1,1,1,1}; x = 0.35; y = 0.2; w = 0.6; h = (1 / 25); }; class DepositButton2 : life_RscButtonMenu { idc = -1; text = "Give"; colorBackground[] = {"(profilenamespace getvariable ['GUI_BCG_RGB_R',0.3843])", "(profilenamespace getvariable ['GUI_BCG_RGB_G',0.7019])", "(profilenamespace getvariable ['GUI_BCG_RGB_B',0.8862])", 0.5}; onButtonClick = "[(ctrlText 14000)] spawn GivePlayerAmount; ((ctrlParent (_this select 0)) closeDisplay 9000);"; colorText[] = {0.8784,0.8471,0.651,1}; x = 0.432; y = 0.512; w = (6 / 40); h = (1 / 25); }; class RscTextT_10005 : RscTextT { idc = 14003; text = ""; colorText[] = {0.8784,0.8471,0.651,1}; x = 0.39; y = 0.58; w = 0.3; h = 0.2; }; class CloseButtonKey2 : Life_RscButtonMenu { idc = -1; text = "Close"; onButtonClick = "((ctrlParent (_this 
select 0)) closeDisplay 9000);"; x = 0.35; y = 0.8 - (1 / 25); w = (6.25 / 40); h = (1 / 25); }; }; };<|fim▁end|>
y = 0.2; w = 0.3; h = (1 / 25);
<|file_name|>main.go<|end_file_name|><|fim▁begin|>// Copyright 2013 Prometheus Team // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License.<|fim▁hole|>// minimally includes 1. applying pending commit logs, 2. compacting SSTables, // 3. purging stale SSTables, and 4. removing old tombstones. package main import ( "flag" "github.com/prometheus/prometheus/storage/metric/tiered" "time" "github.com/golang/glog" ) var ( storageRoot = flag.String("storage.root", "", "The path to the storage root for Prometheus.") ) func main() { flag.Parse() if storageRoot == nil || *storageRoot == "" { glog.Fatal("Must provide a path...") } persistences, err := tiered.NewLevelDBPersistence(*storageRoot) if err != nil { glog.Fatal(err) } defer persistences.Close() start := time.Now() glog.Info("Starting compaction...") size, _ := persistences.Sizes() glog.Info("Original Size: ", size) persistences.Prune() glog.Info("Finished in ", time.Since(start)) size, _ = persistences.Sizes() glog.Info("New Size: ", size) }<|fim▁end|>
// Pruner is responsible for cleaning all Prometheus disk databases, which
<|file_name|>no-undef.js<|end_file_name|><|fim▁begin|>/** * @fileoverview Rule to flag references to undeclared variables. * @author Mark Macdonald */ "use strict"; //------------------------------------------------------------------------------ // Helpers //------------------------------------------------------------------------------ /** * Checks if the given node is the argument of a typeof operator. * @param {ASTNode} node The AST node being checked. * @returns {boolean} Whether or not the node is the argument of a typeof operator. */ function hasTypeOfOperator(node) { const parent = node.parent; return parent.type === "UnaryExpression" && parent.operator === "typeof"; } //------------------------------------------------------------------------------ // Rule Definition //------------------------------------------------------------------------------ module.exports = { meta: { docs: { description: "disallow the use of undeclared variables unless mentioned in `/*global */` comments", category: "Variables", recommended: true }, schema: [ { type: "object", properties: { typeof: { type: "boolean" } }, additionalProperties: false } ] }, create(context) { const options = context.options[0]; const considerTypeOf = options && options.typeof === true || false; return { "Program:exit"(/* node */) { const globalScope = context.getScope(); globalScope.through.forEach(function(ref) { const identifier = ref.identifier; if (!considerTypeOf && hasTypeOfOperator(identifier)) { return; } <|fim▁hole|> }); }); } }; } };<|fim▁end|>
context.report({ node: identifier, message: "'{{name}}' is not defined.", data: identifier
<|file_name|>tmdrv.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # Copyright 2016, 2017 Andrew Conrad # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. <|fim▁hole|>import tmdrv_devices import usb1 from importlib import import_module from os import path from subprocess import check_call, CalledProcessError device_list = ['thrustmaster_t500rs', 'thrustmaster_tmx', 'thrustmaster_tx', 'thrustmaster_tsxw'] _context = usb1.USBContext() def initialize(device_name='thrustmaster_tx'): try: device = import_module('tmdrv_devices.' + device_name) except ModuleNotFoundError: print('Device name "' + device_name + '" is invalid.') raise try: device except UnboundLocalError: print('Device name "' + device_name + '" is invalid.') raise # Send all control packets for initialization for m in device.control: try: _control_init( device.idVendor, device.idProduct[m['step'] - 1], m['request_type'], m['request'], m['value'], m['index'], m['data'], ) except usb1.USBErrorNotFound: print('Error getting handle for device {:0=4x}:{:0=4x} ({} Step {}).'.format(device.idVendor, device.idProduct[m['step']-1], device.name, m['step'])) raise except usb1.USBErrorNoDevice: # Caught when device switches modes pass except usb1.USBErrorPipe: # Possibly caught when device switches modes on older libusb pass except usb1.USBErrorIO: # Possibly caught when device switches modes on newer # libusb. 
This still has to be investigated, there might # be another issue going on here. pass # Wait for device to switch connected = False while not connected: handle = _context.openByVendorIDAndProductID( device.idVendor, device.idProduct[m['step']], ) if handle is not None: connected = True # Load configuration to remove deadzones if device.jscal is not None: dev_path = '/dev/input/by-id/' + device.dev_by_id # Sometimes the device symlink is not ready in time, so we wait n = 9 while not path.islink(dev_path): if n > 0: time.sleep(.5) n -= 1 else: print('Device "{}" not found, skipping device calibration'.format(dev_path)) raise FileNotFoundError _jscal(device.jscal, dev_path) def _jscal(configuration, device_file): try: check_call(['jscal', '-s', configuration, device_file]) except FileNotFoundError: print('jscal not found, skipping device calibration.') except CalledProcessError as err: print('jscal non-zero exit code {}, device may not be calibrated'.format(str(err)[-1])) def _control_init(idVendor, idProduct, request_type, request, value, index, data): handle = _context.openByVendorIDAndProductID( idVendor, idProduct, ) if handle is None: raise usb1.USBErrorNotFound('Device not found or wrong permissions') handle.setAutoDetachKernelDriver(True) handle.claimInterface(0) # Send control packet that will switch modes handle.controlWrite( request_type, request, value, index, data, ) if __name__ == '__main__': parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('-d', '--device', default='thrustmaster_tx', help='Specify device to use') parser.add_argument('-D', '--supported-devices', action='store_true', help='List all supported devices') args = parser.parse_args() if args.supported_devices: for d in device_list: print(d) else: initialize(args.device)<|fim▁end|>
"""Tool to initialize Thrustmaster racing wheels.""" import argparse import time
<|file_name|>finally.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /*! The Finally trait provides a method, `finally` on stack closures that emulates Java-style try/finally blocks. Using the `finally` method is sometimes convenient, but the type rules prohibit any shared, mutable state between the "try" case and the "finally" case. For advanced cases, the `try_finally` function can also be used. See that function for more details. # Example ``` use std::finally::Finally; (|| { // ... }).finally(|| { // this code is always run }) ``` */ #![experimental] use ops::Drop; /// A trait for executing a destructor unconditionally after a block of code, /// regardless of whether the blocked fails. pub trait Finally<T> { /// Executes this object, unconditionally running `dtor` after this block of /// code has run. fn finally(&mut self, dtor: ||) -> T; } impl<'a,T> Finally<T> for ||: 'a -> T { fn finally(&mut self, dtor: ||) -> T { try_finally(&mut (), self, |_, f| (*f)(), |_| dtor()) } } impl<T> Finally<T> for fn() -> T { fn finally(&mut self, dtor: ||) -> T { try_finally(&mut (), (), |_, _| (*self)(), |_| dtor()) } } /** * The most general form of the `finally` functions. The function * `try_fn` will be invoked first; whether or not it panics, the * function `finally_fn` will be invoked next. The two parameters * `mutate` and `drop` are used to thread state through the two * closures. 
`mutate` is used for any shared, mutable state that both * closures require access to; `drop` is used for any state that the * `try_fn` requires ownership of. * * **WARNING:** While shared, mutable state between the try and finally * function is often necessary, one must be very careful; the `try` * function could have panicked at any point, so the values of the shared * state may be inconsistent. * * # Example * * ``` * use std::finally::try_finally;<|fim▁hole|> * struct State<'a> { buffer: &'a mut [u8], len: uint } * # let mut buf = []; * let mut state = State { buffer: &mut buf, len: 0 }; * try_finally( * &mut state, (), * |state, ()| { * // use state.buffer, state.len * }, * |state| { * // use state.buffer, state.len to cleanup * }) * ``` */ pub fn try_finally<T,U,R>(mutate: &mut T, drop: U, try_fn: |&mut T, U| -> R, finally_fn: |&mut T|) -> R { let f = Finallyalizer { mutate: mutate, dtor: finally_fn, }; try_fn(&mut *f.mutate, drop) } struct Finallyalizer<'a,A:'a> { mutate: &'a mut A, dtor: |&mut A|: 'a } #[unsafe_destructor] impl<'a,A> Drop for Finallyalizer<'a,A> { #[inline] fn drop(&mut self) { (self.dtor)(self.mutate); } }<|fim▁end|>
*
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup # type: ignore[import] with open("README.md", "r") as fh: long_description = fh.read() setup( name="objname", version="0.12.0", packages=["objname"], package_data={ "objname": ["__init__.py", "py.typed", "_module.py",<|fim▁hole|> "test_objname.py"], }, zip_safe=False, author="Alan Cristhian Ruiz", author_email="[email protected]", description="A library with a base class that " "stores the assigned name of an object.", long_description=long_description, long_description_content_type="text/markdown", classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Topic :: Software Development', 'Topic :: Software Development :: Object Brokering', 'Typing :: Typed' ], license="MIT", keywords="data structure debug", url="https://github.com/AlanCristhian/objname", )<|fim▁end|>
<|file_name|>16.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
export { Moonrise16 as default } from "../../";
<|file_name|>shapiro.rs<|end_file_name|><|fim▁begin|>extern crate diffgeom; extern crate gr_engine; #[macro_use] extern crate generic_array; extern crate numeric_algs; use diffgeom::coordinates::Point; use diffgeom::tensors::Vector; use gr_engine::coord_systems::schwarzschild::{Mass, Schwarzschild}; use gr_engine::particle::Particle; use numeric_algs::integration::{DPIntegrator, Integrator, StepSize}; use std::f64::consts::PI; struct Sun; impl Mass for Sun { fn mass() -> f64 { M } } type Coords = Schwarzschild<Sun>; const M: f64 = 4.9e-6; // mass of the Sun in seconds, with c = G = 1 const D: f64 = 2.33; // radius of the Sun in seconds const YE: f64 = 498.67; // Earth "y" coordinate in seconds const YV: f64 = 370.7; // Venus "y" coordinate in seconds fn propagate_single_photon( photon: &mut Particle<Coords>, integrator: &mut DPIntegrator<Particle<Coords>>, r_max: f64, ) -> f64 { let mut last_pos = photon.get_pos().clone(); let mut i = 1; while photon.get_pos()[1] < r_max { last_pos = photon.get_pos().clone(); integrator.propagate_in_place(photon, Particle::derivative, StepSize::UseDefault); i += 1; if i % 100 == 0 { println!("Iteration {}... 
r = {}", i, photon.get_pos()[1]); } } let pos = photon.get_pos(); let (t, r) = (pos[0], pos[1]); let (last_t, last_r) = (last_pos[0], last_pos[1]); let coeff = (r_max - last_r) / (r - last_r); last_t + (t - last_t) * coeff } fn main() { let u0 = (D * D * D / (D - M * 2.0)).sqrt(); let r_e = (D * D + YE * YE).sqrt(); let r_v = (D * D + YV * YV).sqrt(); let t_flat = 2.0 * (YE + YV); <|fim▁hole|> let mut photon1 = Particle::new(start_point.clone(), u_init1); let mut photon2 = Particle::new(start_point.clone(), u_init2); let mut integrator = DPIntegrator::<Particle<Coords>>::new(0.01, 0.0001, 0.1, 1e-12); println!("Propagating the first photon..."); let t1 = propagate_single_photon(&mut photon1, &mut integrator, r_e); integrator.reset(); println!("Propagating the second photon..."); let t2 = propagate_single_photon(&mut photon2, &mut integrator, r_v); println!("Propagation finished."); println!("t1 = {}", t1); println!("t2 = {}", t2); let dt = (t1 - t2) * 2.0 * (1.0 - 2.0 * M / r_e).sqrt(); println!("dt = {}", dt); println!("delay = {}", dt - t_flat); }<|fim▁end|>
let start_point = Point::<Coords>::new(arr![f64; 0.0, D, PI / 2.0, 0.0]); let u_init1 = Vector::<Coords>::new(start_point.clone(), arr![f64; u0, 0.0, 0.0, 1.0]); let u_init2 = Vector::<Coords>::new(start_point.clone(), arr![f64; -u0, 0.0, 0.0, -1.0]);
<|file_name|>serialize.cc<|end_file_name|><|fim▁begin|>// { dg-options "-std=gnu++11" } // { dg-require-cstdint "" } // // 2012-01-28 Edward M. Smith-Rowland <[email protected]> // // Copyright (C) 2012-2015 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the // terms of the GNU General Public License as published by the // Free Software Foundation; either version 3, or (at your option) // any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License along // with this library; see the file COPYING3. If not see // <http://www.gnu.org/licenses/>. // Class template rice_distribution // 26.5.1.6 Random number distribution requirements [rand.req.dist] #include <ext/random> #include <sstream> #include <testsuite_hooks.h> void test01() { bool test __attribute__((unused)) = true; std::stringstream str;<|fim▁hole|> __gnu_cxx::rice_distribution<double> u(1.5, 3.0), v; std::minstd_rand0 rng; u(rng); // advance str << u; str >> v; VERIFY( u == v ); } int main() { test01(); return 0; }<|fim▁end|>
<|file_name|>gsharpnote.go<|end_file_name|><|fim▁begin|>package gx var gsharpnote = []byte{ 0xff, 0xfb, 0x50, 0xc4, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x58, 0x69, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x08, 0xfa, 0x00, 0x03, 0xa6, 0xc8, 0x00, 0x02, 0x04, 0x07, 0x09, 0x0c, 0x0e, 0x11, 0x14, 0x16, 0x19, 0x1b, 0x1e, 0x20, 0x23, 0x26, 0x28, 0x2b, 0x2d, 0x30, 0x32, 0x35, 0x37, 0x3a, 0x3d, 0x3f, 0x42, 0x44, 0x47, 0x49, 0x4c, 0x4e, 0x51, 0x54, 0x56, 0x59, 0x5b, 0x5e, 0x60, 0x63, 0x65, 0x68, 0x6b, 0x6d, 0x70, 0x72, 0x75, 0x77, 0x7a, 0x7d, 0x7f, 0x82, 0x84, 0x87, 0x89, 0x8c, 0x8e, 0x91, 0x93, 0x96, 0x99, 0x9b, 0x9e, 0xa0, 0xa3, 0xa5, 0xa8, 0xaa, 0xad, 0xb0, 0xb2, 0xb5, 0xb7, 0xba, 0xbc, 0xbf, 0xc2, 0xc4, 0xc7, 0xc9, 0xcc, 0xce, 0xd1, 0xd3, 0xd6, 0xd9, 0xdb, 0xde, 0xe0, 0xe3, 0xe5, 0xe8, 0xea, 0xed, 0xf0, 0xf2, 0xf5, 0xf7, 0xfa, 0xfc, 0x00, 0x00, 0x00, 0x2c, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xfa, 0x50, 0xc4, 0xbe, 0x21, 0x00, 0x00, 0x0b, 0x54, 0xc7, 0x3d, 0x39, 0xc9, 0x00, 0x01, 0x6a, 0x89, 0xa8, 0xb7, 0x38, 0x70, 0x00, 0x3c, 0xf3, 0xcf, 0x0c, 0x30, 0xc3, 0x0c, 0x30, 0xc0, 0xff, 0xbf, 0x89, 0xb4, 0x53, 0x62, 0x06, 0x97, 0xfd, 0xb9, 0xf3, 0x8e, 0x26, 0x0d, 0x02, 0x55, 0xff, 0xb1, 0xe6, 0x35, 0x11, 0x83, 0x84, 0x7b, 0x40, 0xe1, 0x00, 0x18, 0x3f, 0x0e, 0x90, 0x2e, 0x7c, 0x32, 0xdf, 0xf8, 0xc9, 0x8a, 0x00, 0x59, 0x04, 0x43, 0xff, 0x1c, 0xf1, 0xcf, 0x27, 0xc8, 0xa1, 0x14, 0xff, 0xf3, 0x42, 0x2e, 0x4f, 0x9b, 0x97, 0x0b, 0x9f, 0xff, 0x9a, 0x17, 0xcb, 0xe9, 0x97, 0x0b, 0x88, 0x7f, 0xc1, 0xf0, 0x20, 0x60, 0x1f, 0x7f, 0xfe, 0x50, 0x30, 0x0f, 
0x81, 0x03, 0x00, 0xf8, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x02, 0x02, 0x01, 0x81, 0x00, 0x80, 0x18, 0x14, 0x5c, 0x64, 0x19, 0xd1, 0x76, 0x4c, 0xe2, 0x77, 0x10, 0x04, 0xcc, 0x03, 0x8f, 0x35, 0x90, 0x65, 0x2a, 0x85, 0xb1, 0x97, 0x4c, 0x18, 0x1b, 0xf4, 0x16, 0x54, 0xd8, 0x7c, 0x9b, 0xc1, 0x68, 0x0d, 0x47, 0xc0, 0x68, 0xf0, 0xdb, 0xf1, 0x18, 0x74, 0x88, 0xd7, 0xcb, 0x03, 0x42, 0x5f, 0x84, 0x81, 0xa1, 0x2f, 0xe1, 0x20, 0x68, 0x4a, 0x77, 0xf5, 0x03, 0x42, 0x50, 0x90, 0x35, 0xff, 0x12, 0x84, 0x81, 0xa1, 0x28, 0x48, 0xf7, 0xfe, 0x58, 0xa9, 0xd0, 0x34, 0xea, 0x95, 0x00, 0x02, 0x00, 0xff, 0xfa, 0x10, 0xc4, 0xa9, 0x3f, 0x01, 0x80, 0x05, 0x54, 0x1d, 0x31, 0x1d, 0xe0, 0x00, 0x18, 0x71, 0x03, 0xe4, 0xc1, 0xaf, 0x68, 0x40, 0x00, 0x4c, 0xa1, 0x20, 0x06, 0x30, 0x00, 0x01, 0x53, 0x03, 0xa0, 0x99, 0x31, 0x0c, 0x12, 0x93, 0x48, 0xa3, 0x92, 0x30, 0xa7, 0x0a, 0x60, 0x10, 0x63, 0x01, 0x80, 0x25, 0x13, 0x85, 0x00, 0x1c, 0x38, 0x01, 0x49, 0x80, 0x06, 0x5a, 0x0b, 0x2a, 0x5f, 0x17, 0x94, 0x2a, 0x40, 0xc1, 0x41, 0x30, 0x35, 0x17, 0x63, 0x51, 0x98, 0x19, 0x3e, 0x39, 0x8c, 0x21, 0x55, 0x66, 0x6c, 0xc9, 0xf4, 0x9a, 0xa4, 0xd2, 0xa7, 0xb8, 0x56, 0xd4, 0x3e, 0x2a, 0x93, 0x18, 0x88, 0x60, 0x02, 0x31, 0xa6, 0x37, 0x31, 0xff, 0xfa, 0x10, 0xc4, 0x13, 0xbe, 0x06, 0x82, 0xc3, 0x88, 0x1f, 0x26, 0x0d, 0x7b, 0x42, 0x00, 0x86, 0x83, 0xa4, 0x15, 0xaf, 0x6c, 0x41, 0x14, 0x66, 0xf8, 0x82, 0x0e, 0x03, 0x41, 0xa5, 0x7a, 0x70, 0x27, 0x99, 0x45, 0x09, 0x37, 0x50, 0xc2, 0x84, 0x61, 0x4c, 0x01, 0xe3, 0x3f, 0x8c, 0xc2, 0x08, 0x83, 0x8e, 0x81, 0xac, 0xac, 0xff, 0xa7, 0x0c, 0xec, 0xcc, 0xc5, 0x85, 0x8c, 0x18, 0x08, 0x14, 0x16, 0x0a, 0x00, 0x1e, 0x7f, 0x85, 0xab, 0x8b, 0xa9, 0xa0, 0x08, 0xe7, 0xce, 0x18, 0x3e, 0x87, 0x19, 0xcd, 0x6a, 0x89, 0x1f, 0x99, 0xd9, 0x99, 0x06, 0x18, 0x68, 0x19, 0x70, 0xd1, 0x31, 0x2a, 0xc7, 0x86, 0x2c, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x10, 0xc4, 0x38, 0x6f, 0x10, 0x03, 0xc5, 0x50, 0x1d, 0x26, 0x0c, 0x7b, 0x62, 0x00, 0x83, 0x83, 0xe4, 0x01, 0xaf, 0xe0, 0x48, 0xff, 0xff, 0xff, 0xff, 
0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xe4, 0xad, 0x1e, 0x14, 0x4c, 0xc0, 0x3c, 0x39, 0x81, 0xc0, 0x01, 0x41, 0xa0, 0x00, 0x0a, 0x29, 0x83, 0x44, 0x00, 0x21, 0x80, 0xc2, 0x00, 0x09, 0xf8, 0xc0, 0x43, 0x08, 0xca, 0x40, 0x86, 0x1f, 0x77, 0x4a, 0x50, 0x64, 0x15, 0x03, 0x14, 0x70, 0x2a, 0x61, 0x06, 0x17, 0x87, 0x42, 0x68, 0xb2, 0x7f, 0xa4, 0xc1, 0x8b, 0xa3, 0x40, 0xeb, 0x06, 0xa1, 0xc9, 0x1a, 0x3c, 0x1d, 0x6b, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x10, 0xc4, 0x7f, 0x8a, 0x12, 0x83, 0xc5, 0x44, 0x1d, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x71, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0xff, 0xff, 0xff, 0xff, 0xf7, 0xa4, 0x29, 0x75, 0x80, 0x05, 0x9a, 0x3a, 0x18, 0x2d, 0x0a, 0x39, 0xbc, 0x43, 0x8e, 0x1d, 0xa2, 0xb9, 0x8c, 0x86, 0x2e, 0x97, 0x69, 0x4d, 0x53, 0x18, 0x88, 0xe6, 0x77, 0xb5, 0x01, 0x00, 0x02, 0x99, 0x22, 0x08, 0xa9, 0x81, 0x6e, 0x4c, 0x03, 0xc5, 0x50, 0xca, 0xc5, 0xd9, 0xcd, 0x6d, 0xc1, 0x11, 0x34, 0xf4, 0x66, 0x8b, 0x79, 0x66, 0x94, 0x3e, 0xa3, 0x55, 0x34, 0x4b, 0x01, 0x04, 0x67, 0xcc, 0x15, 0xe3, 0x03, 0x91, 0xfd, 0x35, 0x49, 0xad, 0x23, 0x7d, 0x97, 0xff, 0xfa, 0x10, 0xc4, 0x68, 0x50, 0x17, 0x80, 0x03, 0xb8, 0x1d, 0x28, 0xac, 0xfb, 0x42, 0x18, 0x80, 0x83, 0xe4, 0x41, 0xaf, 0x6c, 0x40, 0x31, 0x62, 0xc3, 0x09, 0x0f, 0x05, 0x01, 0x89, 0x02, 0x09, 0x01, 0x93, 0x3a, 0x48, 0x3a, 0x82, 0x13, 0x9c, 0x08, 0x31, 0xab, 0x89, 0x83, 0x20, 0x9a, 0x1c, 0x0e, 0x35, 0xd9, 0xe4, 0x2a, 0x19, 0x38, 0xa9, 0x84, 0x02, 0x17, 0x21, 0x07, 0xd2, 0x10, 0x88, 0xde, 0x7f, 0x8f, 0x32, 0xbc, 0x08, 0x2c, 0xe0, 0x38, 0xc2, 0x1c, 0x27, 0xce, 0x95, 0xce, 0xc8, 0xc4, 0x14, 0x09, 0x4c, 0x0d, 0x80, 0x10, 0xda, 0x12, 0xeb, 0xa1, 0x82, 0x47, 0x8f, 0x02, 0xc5, 0x53, 0x54, 0x46, 0x12, 0x64, 0x16, 0xd1, 0xff, 0xfa, 0x10, 0xc4, 0x76, 0xf3, 0x21, 0x03, 0xc3, 0xac, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x75, 0x03, 0xa4, 0xc1, 0x9f, 0x60, 0x48, 0x85, 0x20, 0x27, 0x9e, 0x1e, 0x10, 0xb0, 0x0c, 0xe2, 0x3c, 0x87, 0x07, 0x06, 0x10, 0x10, 0x93, 0xe8, 0x76, 0x5a, 0xb5, 0x59, 0xff, 0xff, 0xff, 0xff, 
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0x7a, 0x53, 0x19, 0x00, 0xa6, 0x18, 0x46, 0xd4, 0x66, 0x0e, 0xe1, 0xf4, 0x72, 0x96, 0xb3, 0xa7, 0xce, 0x78, 0x65, 0x00, 0x29, 0x66, 0xd1, 0xd4, 0xa9, 0x2a, 0x47, 0x88, 0xad, 0x2a, 0x48, 0x52, 0xe7, 0x03, 0x0f, 0x32, 0x7c, 0x30, 0x42, 0x16, 0x93, 0x60, 0xa7, 0xce, 0x38, 0xa6, 0xff, 0xfa, 0x10, 0xc4, 0xb2, 0x2f, 0x2c, 0x03, 0xc5, 0x68, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x6f, 0x83, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0x33, 0x0d, 0x12, 0x5e, 0x2f, 0x32, 0x96, 0xa7, 0x51, 0x12, 0x2c, 0xef, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x94, 0x32, 0x14, 0x0e, 0x3a, 0x80, 0x93, 0x19, 0x29, 0x06, 0x18, 0x26, 0xd9, 0x39, 0x20, 0x4d, 0x05, 0xc0, 0xb4, 0x10, 0x42, 0x03, 0x08, 0x87, 0x2b, 0x00, 0xaf, 0xf8, 0x3f, 0x95, 0x78, 0x54, 0x50, 0x42, 0x49, 0x85, 0x91, 0x81, 0xb8, 0x9d, 0x9a, 0x93, 0xb7, 0x21, 0xf7, 0xae, 0x64, 0x0a, 0x18, 0x10, 0x29, 0x16, 0x9a, 0x8a, 0x5e, 0xff, 0xfa, 0x10, 0xc4, 0x6b, 0x52, 0x30, 0x83, 0xc5, 0x50, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x73, 0x83, 0xe4, 0x41, 0xaf, 0x64, 0x48, 0x4c, 0xd2, 0x73, 0xb1, 0x85, 0x4e, 0x60, 0x00, 0x6d, 0xca, 0x60, 0xe8, 0x20, 0x47, 0x26, 0x4b, 0x72, 0x7c, 0xc8, 0x86, 0x5e, 0x22, 0x61, 0x40, 0x85, 0xb0, 0x42, 0x5a, 0x3c, 0x0f, 0x13, 0xdb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe8, 0x69, 0xca, 0x30, 0x34, 0x78, 0x2c, 0x43, 0x08, 0xd0, 0x4d, 0x3a, 0xb7, 0x20, 0x23, 0x87, 0x82, 0xc0, 0xc7, 0x00, 0x10, 0x5c, 0xbd, 0xe9, 0x18, 0x92, 0xeb, 0xd0, 0xe7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x10, 0xc4, 0x9f, 0x35, 0x35, 0x03, 0xc3, 0x8c, 0x1f, 0x28, 0x0c, 0xfb, 0x42, 0x00, 0xae, 0x83, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd2, 0x5c, 0x50, 0x50, 0xa3, 0x2c, 0x0c, 0xe0, 0xa0, 0x30, 0xd6, 0x0e, 0xb3, 0xf6, 0x25, 0x4c, 0x31, 0x8e, 0x03, 0x30, 0xc0, 0xb2, 0x07, 0xa0, 0x44, 0x21, 0xc1, 0x88, 0xcc, 0x2d, 0xda, 0x5d, 0x2a, 0x40, 0x31, 0x67, 0x4c, 0x53, 0x8d, 
0xce, 0xcc, 0x1d, 0xc5, 0x70, 0xe5, 0x8d, 0xe5, 0x4f, 0xa5, 0x70, 0xc9, 0x83, 0x52, 0xe9, 0xb8, 0xa6, 0x32, 0x12, 0x47, 0x90, 0xa7, 0xb9, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x10, 0xc4, 0xbd, 0xf6, 0x39, 0x83, 0xc5, 0x3c, 0x1b, 0x26, 0x0c, 0xfb, 0x82, 0x00, 0x7d, 0x03, 0xe4, 0x01, 0xaf, 0x60, 0x48, 0xff, 0xf4, 0x02, 0x00, 0x83, 0x46, 0x01, 0x13, 0x98, 0xbd, 0xc6, 0x08, 0xc4, 0x8a, 0x6c, 0xa1, 0x89, 0x07, 0x21, 0x56, 0x61, 0x26, 0x03, 0xa0, 0xe9, 0xc6, 0x34, 0x12, 0x24, 0x0c, 0x45, 0x1b, 0x09, 0xed, 0x5a, 0x48, 0x75, 0x20, 0x20, 0x17, 0x8a, 0x60, 0x24, 0x31, 0x06, 0x5b, 0x90, 0xf2, 0x6e, 0x37, 0x18, 0x33, 0xa0, 0x40, 0x48, 0xf8, 0x99, 0xe9, 0x98, 0x51, 0x4e, 0x8f, 0x8f, 0x8a, 0x68, 0x03, 0x4b, 0x33, 0xb4, 0x30, 0x59, 0x14, 0xb3, 0x76, 0x77, 0x30, 0x3b, 0x55, 0xd3, 0x22, 0x18, 0xff, 0xfa, 0x10, 0xc4, 0xcf, 0x02, 0x3d, 0x03, 0xc5, 0x64, 0x1f, 0x24, 0x0c, 0xfb, 0x62, 0x00, 0x7e, 0x03, 0xe4, 0x01, 0xaf, 0x6c, 0x40, 0x30, 0x90, 0x72, 0xe5, 0xa2, 0x82, 0x45, 0x91, 0x1e, 0x4e, 0x75, 0x7c, 0x93, 0x5c, 0x04, 0x81, 0xce, 0xf9, 0x84, 0xc0, 0x72, 0x1d, 0xb5, 0x28, 0xf9, 0x89, 0x20, 0x1e, 0x18, 0x1f, 0x80, 0x79, 0xc8, 0xc0, 0x80, 0x17, 0x5c, 0xbe, 0x83, 0x5b, 0xb7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0x93, 0xa8, 0x68, 0x11, 0xe8, 0x86, 0xf1, 0x30, 0xc1, 0x00, 0xc3, 0xe0, 0xb0, 0xc3, 0x26, 0x2d, 0x70, 0x80, 0xa0, 0x3a, 0x84, 0x2a, 0x31, 0xc0, 0x16, 0x06, 0xff, 0xfa, 0x10, 0xc4, 0x72, 0x1a, 0x40, 0x03, 0xc3, 0x8c, 0x1f, 0x26, 0x0d, 0x7b, 0x42, 0x00, 0x75, 0x83, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0xe1, 0x5c, 0xda, 0x45, 0x52, 0xe3, 0x19, 0x01, 0x9c, 0xf2, 0x18, 0x49, 0x07, 0xe9, 0xd8, 0xf2, 0xd3, 0x1c, 0x60, 0x7a, 0x02, 0x37, 0x0f, 0x02, 0x16, 0x42, 0x46, 0xa1, 0x38, 0x68, 0x43, 0x67, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0xa1, 0x28, 0x18, 0x04, 0x02, 0x48, 0xc9, 0x45, 0x30, 0x5f, 0x18, 0xa3, 0x7d, 0xf8, 0x83, 0x3b, 0xd6, 0xc3, 0x1a, 0x15, 0x53, 
0x27, 0x15, 0x1f, 0x91, 0x44, 0x89, 0x4a, 0x7b, 0x95, 0x4e, 0x54, 0x11, 0x0c, 0x13, 0xff, 0xfa, 0x10, 0xc4, 0x9c, 0x52, 0x4b, 0x83, 0xc5, 0x88, 0x1d, 0x24, 0x0c, 0xfb, 0x02, 0x40, 0x71, 0x83, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0x11, 0xc4, 0x30, 0x02, 0x18, 0xd3, 0x1f, 0xa8, 0x88, 0x33, 0x1b, 0xc9, 0x4d, 0x17, 0x9d, 0x6b, 0xa8, 0x82, 0x79, 0x14, 0x51, 0xa2, 0xeb, 0x22, 0x49, 0xf1, 0x10, 0xe3, 0x03, 0x04, 0xc0, 0xc8, 0x5e, 0x0d, 0x3b, 0x20, 0x94, 0xf8, 0x6a, 0x32, 0x27, 0x4c, 0x10, 0x44, 0x00, 0x24, 0x9a, 0x6a, 0x13, 0x4b, 0x9f, 0xe2, 0x86, 0xd4, 0xc0, 0x02, 0x29, 0xad, 0x51, 0x83, 0x68, 0x8f, 0x9c, 0x52, 0xb2, 0x61, 0xeb, 0xa7, 0x99, 0x50, 0x91, 0x85, 0x82, 0x97, 0x2d, 0x07, 0x12, 0x1c, 0x78, 0xb2, 0x73, 0xff, 0xfa, 0x10, 0xc4, 0xf6, 0x65, 0x4f, 0x83, 0xc5, 0x64, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x75, 0x03, 0xe4, 0x81, 0xaf, 0x6c, 0x40, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x4b, 0x25, 0x44, 0xb0, 0x87, 0x0f, 0x54, 0x4c, 0x2f, 0x02, 0xa0, 0xf6, 0xd0, 0xf9, 0xcc, 0x58, 0x80, 0xa8, 0xc1, 0x44, 0x02, 0xcf, 0x08, 0x02, 0x10, 0x2e, 0x11, 0x41, 0x0b, 0x1e, 0x9f, 0x55, 0x45, 0x20, 0xc0, 0x41, 0xd1, 0x1e, 0xc5, 0x98, 0x5e, 0x84, 0x91, 0xee, 0x59, 0xbd, 0x98, 0xb2, 0x80, 0xb1, 0x30, 0x4c, 0x83, 0xce, 0x21, 0x09, 0x54, 0x03, 0x83, 0x16, 0x15, 0x6d, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x10, 0xc4, 0xf8, 0xcc, 0x53, 0x83, 0xc3, 0x74, 0x1f, 0x26, 0x0d, 0x7b, 0x42, 0x00, 0x74, 0x03, 0xe4, 0xc1, 0xaf, 0x68, 0x40, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0x09, 0x8a, 0x5d, 0xe3, 0x08, 0x53, 0x66, 0x83, 0x06, 0xe1, 0x15, 0x38, 0xd8, 0x61, 0xa3, 0xd8, 0x45, 0x32, 0x50, 0x44, 0xf1, 0x6c, 0x4a, 0x62, 0x97, 0xc3, 0xc5, 0x33, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0x2a, 0x4b, 0xe2, 0xf2, 0x85, 0x50, 0x30, 0xbf, 0x30, 0x37, 0x17, 0x43, 0x54, 0x58, 0x0d, 0x3e, 0xb9, 0x8c, 0x21, 0x55, 0x66, 0x73, 0x53, 0xe9, 0x35, 0x49, 0xa5, 0x4f, 0x71, 0xff, 0xfa, 0x10, 0xc4, 0x39, 0xf4, 0x5f, 
0x83, 0xc5, 0x74, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x7b, 0x03, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0x0c, 0x47, 0x43, 0x16, 0x10, 0x8a, 0x62, 0x30, 0x05, 0x20, 0xb3, 0x1e, 0xdb, 0x20, 0x32, 0x19, 0xb3, 0x00, 0x2c, 0x00, 0x85, 0x97, 0x4c, 0x78, 0x10, 0x88, 0x0c, 0xa1, 0xf2, 0x0e, 0xed, 0x01, 0x00, 0x03, 0xc4, 0x96, 0xe1, 0x43, 0x0c, 0xbe, 0xcc, 0x13, 0x05, 0x78, 0xda, 0x71, 0xe5, 0xce, 0x79, 0x88, 0xc7, 0x46, 0x4c, 0x18, 0x10, 0xbc, 0x08, 0x6e, 0x8f, 0x84, 0x48, 0x73, 0xea, 0xbc, 0xe8, 0x28, 0x65, 0x8e, 0x77, 0x60, 0x61, 0x52, 0x26, 0xe7, 0x8f, 0xad, 0x84, 0x62, 0x86, 0x0b, 0x26, 0xff, 0xfa, 0x10, 0xc4, 0xf4, 0x34, 0x62, 0x83, 0xc5, 0x8c, 0x1f, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0xac, 0x03, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0x09, 0x00, 0x34, 0x79, 0x69, 0x84, 0xa5, 0x9c, 0x0a, 0x84, 0x6e, 0x92, 0xcc, 0xd5, 0x60, 0xa9, 0x34, 0x4c, 0xa0, 0x7b, 0x66, 0x14, 0xc0, 0x48, 0x78, 0x98, 0x26, 0xe7, 0x3a, 0x00, 0x02, 0x91, 0x60, 0x21, 0x01, 0x6e, 0x11, 0x9d, 0x0e, 0x8e, 0x5d, 0xdf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x84, 0x8a, 0x2e, 0xc8, 0x09, 0x63, 0xa1, 0xb3, 0x09, 0xb0, 0xd4, 0x3b, 0x8f, 0x4d, 0x13, 0x92, 0x8b, 0x02, 0x0e, 0xa2, 0xc0, 0xf4, 0xf7, 0x4c, 0xa4, 0xff, 0xfa, 0x10, 0xc4, 0xd7, 0x9e, 0x5f, 0x03, 0xc3, 0x78, 0x1f, 0x26, 0x0c, 0xfb, 0x42, 0x00, 0x7b, 0x83, 0xe4, 0x41, 0xaf, 0x6c, 0x40, 0x10, 0x8d, 0x05, 0xa9, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x15, 0x42, 0x48, 0x24, 0x03, 0x09, 0x13, 0x67, 0xe3, 0x06, 0xc1, 0x73, 0x38, 0xbc, 0x80, 0x83, 0xd6, 0x65, 0x32, 0x11, 0x04, 0xe9, 0x6b, 0x49, 0x12, 0x83, 0xc3, 0xc9, 0x33, 0xbd, 0x01, 0x00, 0x02, 0x27, 0x08, 0x01, 0x88, 0x4c, 0x02, 0x28, 0x98, 0x0f, 0x8e, 0x01, 0x9e, 0x1c, 0xce, 0x9a, 0x5c, 0x00, 0x50, 0x84, 0x50, 0x01, 0x5e, 0x24, 0xd2, 0x40, 0x93, 0x31, 0x49, 0xf8, 0xff, 0xfa, 0x10, 0xc4, 0xce, 0x1b, 0x6a, 0x00, 0x04, 0x04, 0x1d, 0x26, 0xac, 0xfb, 0x62, 0x18, 0x7c, 0x83, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0xb2, 0x5a, 0x25, 0x80, 0x02, 
0xa5, 0xc0, 0x94, 0x0c, 0x0a, 0xc6, 0xf4, 0xd1, 0xa6, 0x61, 0x8d, 0x96, 0x04, 0xc3, 0x89, 0x0c, 0x0c, 0x24, 0xbb, 0x68, 0x18, 0x8d, 0x64, 0xcc, 0x12, 0x0e, 0xbd, 0x09, 0x56, 0x00, 0x30, 0xdb, 0xf4, 0xc1, 0xd0, 0x59, 0x8e, 0x48, 0x1f, 0x1c, 0xf9, 0x19, 0x8c, 0xc4, 0x74, 0xc3, 0x82, 0x8b, 0x20, 0x5c, 0x74, 0x02, 0x0f, 0x23, 0xcf, 0xf2, 0x79, 0x95, 0xe0, 0x41, 0x67, 0x02, 0x06, 0x10, 0xe1, 0x4e, 0x74, 0x92, 0x7a, 0xc6, 0x20, 0xa0, 0x52, 0x60, 0x6c, 0x00, 0xff, 0xfa, 0x10, 0xc4, 0x4d, 0x16, 0x72, 0x83, 0xc5, 0x50, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0xaf, 0x03, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0xc6, 0xd0, 0x97, 0x5d, 0x0c, 0x12, 0x3c, 0x78, 0x96, 0x11, 0xf4, 0x48, 0x21, 0x28, 0x40, 0xf7, 0x18, 0x60, 0x02, 0x89, 0xef, 0xd1, 0x12, 0x00, 0x8b, 0x44, 0x58, 0x27, 0xc0, 0xe8, 0x0b, 0x00, 0x84, 0x24, 0xa2, 0x57, 0x35, 0x59, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0x7a, 0x6a, 0x2e, 0x50, 0x08, 0x13, 0x34, 0xc3, 0xb3, 0x53, 0x0a, 0x31, 0x48, 0x3c, 0x31, 0x72, 0xb3, 0x9b, 0x95, 0x0c, 0x7c, 0x07, 0x26, 0x07, 0xaa, 0x9a, 0x02, 0x8b, 0x9a, 0xff, 0xfa, 0x10, 0xc4, 0x4d, 0x47, 0x6f, 0x80, 0x03, 0xa0, 0x1f, 0x24, 0x0c, 0xfb, 0x62, 0x00, 0x82, 0x83, 0xe4, 0x95, 0xaf, 0x6c, 0x43, 0x34, 0x3a, 0x97, 0x62, 0x90, 0xa5, 0xce, 0x06, 0x1e, 0x64, 0xf8, 0x60, 0x88, 0x2c, 0xa6, 0xc6, 0x4f, 0x80, 0x71, 0xcc, 0x26, 0x1a, 0x24, 0xbc, 0x5d, 0x65, 0x2d, 0x4e, 0xa2, 0x24, 0x59, 0xde, 0xd5, 0x41, 0x30, 0xa0, 0x71, 0xd4, 0x04, 0x18, 0xc5, 0x48, 0x30, 0xc3, 0x46, 0xc9, 0x42, 0xe1, 0x34, 0x23, 0x02, 0xd0, 0x41, 0x08, 0x0c, 0x26, 0x1c, 0xa0, 0x12, 0xbf, 0xe0, 0xfe, 0x33, 0x14, 0x38, 0x05, 0x4a, 0x98, 0xa9, 0x46, 0x09, 0xa3, 0x4a, 0x6d, 0x17, 0x21, 0x07, 0x3e, 0xf2, 0x63, 0xc4, 0xff, 0xfa, 0x10, 0xc4, 0xe0, 0xf4, 0x79, 0x03, 0xc3, 0xc4, 0x1f, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0x76, 0x83, 0xe4, 0x81, 0x9f, 0x6c, 0x40, 0x46, 0x12, 0x16, 0x5b, 0x72, 0xf4, 0x20, 0x3c, 0x89, 0x62, 0x41, 0xd5, 0x8c, 0x2a, 0x73, 0x00, 0x23, 0x6e, 0x73, 0x07, 0x41, 0x06, 0x39, 0x1c, 0x5d, 0x33, 
0xe4, 0x46, 0x32, 0xf1, 0x13, 0x0a, 0x04, 0x2d, 0x82, 0x12, 0xd1, 0xe0, 0x78, 0x9e, 0xda, 0xd6, 0x46, 0x01, 0x28, 0xcf, 0x61, 0x0c, 0x2f, 0xc1, 0xdc, 0xf7, 0x94, 0xb7, 0x8c, 0x5a, 0x40, 0x6c, 0xc1, 0x40, 0x01, 0x0f, 0x27, 0x04, 0x94, 0x40, 0x01, 0x91, 0xa4, 0x85, 0x4d, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x10, 0xc4, 0x56, 0xae, 0x83, 0x83, 0xc3, 0xa8, 0x1d, 0x26, 0x0c, 0xfb, 0x02, 0x40, 0xb0, 0x03, 0xa4, 0x41, 0x9f, 0x60, 0x48, 0xff, 0xff, 0xff, 0xfd, 0x2a, 0x41, 0xd0, 0x80, 0x80, 0xd0, 0x9e, 0xab, 0x18, 0x5c, 0x06, 0x19, 0xec, 0x72, 0x39, 0x18, 0xae, 0x01, 0x38, 0x90, 0x4b, 0x04, 0x70, 0xbe, 0x88, 0x7a, 0x2a, 0x21, 0x66, 0xd2, 0xe9, 0x21, 0x8b, 0xba, 0x60, 0x90, 0x6b, 0x5e, 0x60, 0xce, 0x25, 0x47, 0x0a, 0xed, 0x1e, 0x79, 0x69, 0x86, 0x40, 0x0e, 0xa4, 0x9b, 0x8a, 0xa6, 0x48, 0x52, 0x23, 0x29, 0xef, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xdd, 0x4b, 0xd2, 0xf3, 0x05, 0xff, 0xfa, 0x10, 0xc4, 0x0d, 0xf0, 0x87, 0x03, 0xc3, 0xdc, 0x1f, 0x22, 0x0c, 0xfb, 0x82, 0x00, 0x71, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x88, 0x80, 0x60, 0x18, 0x14, 0x0b, 0xf9, 0xa3, 0x64, 0x1f, 0x1d, 0xcd, 0x20, 0x12, 0xc9, 0xde, 0xd0, 0xd4, 0x8a, 0x7d, 0x13, 0x4d, 0xa2, 0xe8, 0x08, 0x00, 0x16, 0x92, 0x1d, 0x48, 0x08, 0x08, 0x62, 0x98, 0x07, 0x0c, 0x51, 0x95, 0x24, 0x3e, 0x9b, 0x2d, 0xc6, 0x04, 0xe8, 0x10, 0x12, 0x3e, 0x26, 0x7a, 0x70, 0x14, 0x53, 0xa3, 0x55, 0x6c, 0xa8, 0x80, 0x05, 0x22, 0x65, 0x21, 0x18, 0x33, 0x8c, 0x09, 0xc2, 0x6c, 0x25, 0x1e, 0x7b, 0x79, 0x95, 0x0f, 0x18, 0x68, 0x59, 0x66, 0xcb, 0xb0, 0x80, 0xff, 0xfa, 0x10, 0xc4, 0xea, 0x1e, 0x91, 0x83, 0xc3, 0xa4, 0x1f, 0x22, 0x0d, 0x7b, 0x22, 0x40, 0x79, 0x83, 0xe4, 0x81, 0xaf, 0x6c, 0x40, 0x32, 0x24, 0xc9, 0xce, 0xbd, 0x89, 0x5e, 0x02, 0x30, 0xe7, 0x84, 0xc2, 0x58, 0x3b, 0x0e, 0xd6, 0x15, 0x0c, 0xc4, 0x88, 0x0f, 0x8c, 0x0f, 0xc0, 0x44, 0xe4, 0x60, 0x40, 0x0b, 0xae, 0x80, 0x81, 0xaf, 0xdb, 0xd7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 
0xff, 0xd0, 0x2f, 0x90, 0xb1, 0x11, 0xe7, 0xc4, 0x68, 0x40, 0x41, 0x03, 0x1a, 0x48, 0x20, 0x44, 0x89, 0x07, 0x86, 0x06, 0x03, 0x80, 0xe5, 0x2c, 0x0a, 0x68, 0xa1, 0x45, 0x53, 0x59, 0x95, 0xcd, 0xa2, 0xa9, 0xff, 0xfa, 0x10, 0xc4, 0xe2, 0xb9, 0x9c, 0x03, 0xc3, 0x94, 0x1d, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xb3, 0x03, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0x71, 0x8c, 0x80, 0xce, 0x78, 0x8c, 0x25, 0x03, 0xdc, 0xec, 0xd1, 0x61, 0x0e, 0x34, 0x39, 0x03, 0x1b, 0x86, 0x81, 0x0a, 0x28, 0x91, 0xa8, 0x4e, 0x1a, 0x10, 0xd9, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0xf5, 0x48, 0x62, 0xea, 0x81, 0x4c, 0x33, 0xb7, 0x30, 0x53, 0x15, 0x23, 0x70, 0x97, 0x65, 0x3a, 0x75, 0xa3, 0x14, 0x0f, 0x58, 0x67, 0x75, 0x4d, 0x93, 0x04, 0x88, 0xfa, 0x7b, 0x86, 0x02, 0xb6, 0xa3, 0x30, 0xc1, 0xa2, 0xbc, 0x18, 0x01, 0x0a, 0xff, 0xfa, 0x10, 0xc4, 0x40, 0xf7, 0x9f, 0x83, 0xc3, 0xb8, 0x1f, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0xa8, 0x83, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0xb9, 0x8f, 0x83, 0xbb, 0x19, 0x8b, 0xa4, 0x24, 0x92, 0x3d, 0x93, 0xae, 0x15, 0xbc, 0x51, 0x06, 0x89, 0x65, 0x01, 0x00, 0x02, 0x47, 0x10, 0x89, 0x0a, 0x24, 0x31, 0xbb, 0x4c, 0x15, 0x08, 0xf0, 0xdc, 0x4b, 0x08, 0xce, 0xba, 0xb0, 0xc9, 0x4d, 0xcc, 0x40, 0x50, 0x04, 0x10, 0x18, 0x1e, 0x18, 0x10, 0x45, 0x1f, 0x0b, 0x56, 0x26, 0xa6, 0x00, 0x51, 0x4d, 0x6b, 0x0c, 0x1a, 0xc4, 0x8c, 0xe2, 0x25, 0x99, 0x0f, 0x55, 0x44, 0xca, 0x85, 0x0c, 0x2c, 0x14, 0xb9, 0x68, 0x38, 0x90, 0xe3, 0xc5, 0x93, 0xff, 0xfa, 0x10, 0xc4, 0x8b, 0xeb, 0xa4, 0x00, 0x03, 0x74, 0x1f, 0x26, 0x0d, 0x7b, 0x42, 0x00, 0x7c, 0x83, 0xa4, 0xd5, 0xaf, 0x68, 0x43, 0x8a, 0x66, 0xa8, 0x5e, 0x10, 0xe1, 0xea, 0x99, 0x85, 0xd0, 0x59, 0x1e, 0xd4, 0x20, 0xd9, 0x8b, 0x00, 0x17, 0x18, 0x28, 0x80, 0x69, 0xe1, 0x00, 0x42, 0x05, 0xc2, 0x28, 0x21, 0x65, 0xd3, 0xe9, 0x30, 0x90, 0xa4, 0x31, 0x63, 0xb0, 0x53, 0x0a, 0x30, 0x74, 0x3c, 0x23, 0x2d, 0xb3, 0x9b, 0x83, 0x0a, 0x10, 0x21, 0x01, 0xb4, 0x27, 0xa0, 0x52, 0x1e, 0xa5, 0xd5, 0x6d, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 
0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0x15, 0x4c, 0x54, 0x03, 0x18, 0x42, 0x9b, 0xff, 0xfa, 0x10, 0xc4, 0xfb, 0x28, 0xaf, 0x03, 0xc3, 0xc4, 0x1f, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0xb3, 0x03, 0xe4, 0x81, 0x9f, 0x60, 0x48, 0x34, 0x18, 0x38, 0x08, 0x79, 0xc7, 0x83, 0x00, 0x1e, 0xe2, 0x19, 0x92, 0x81, 0x27, 0x8b, 0x46, 0x53, 0x14, 0xbe, 0x1e, 0x29, 0xb2, 0x97, 0xc5, 0xe5, 0x0a, 0x9c, 0x62, 0x7e, 0x60, 0x72, 0x2e, 0x46, 0xad, 0xf0, 0x02, 0x7e, 0x72, 0x18, 0x62, 0xab, 0x69, 0xcd, 0x4f, 0xa4, 0xd5, 0x26, 0x95, 0x3d, 0xcf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa5, 0x43, 0x11, 0x90, 0xc5, 0x44, 0x23, 0x18, 0x80, 0x24, 0x1a, 0x63, 0x23, 0x64, 0x46, 0x33, 0x36, 0x04, 0xff, 0xfa, 0x10, 0xc4, 0xc7, 0x9f, 0xb2, 0x03, 0xc3, 0xc4, 0x1f, 0x1e, 0x0d, 0x7f, 0x22, 0x40, 0xad, 0x03, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0x2c, 0x00, 0x85, 0x97, 0x4c, 0x78, 0x10, 0x98, 0x0c, 0xa1, 0xf2, 0x0e, 0xeb, 0xc4, 0xb0, 0x61, 0x61, 0x8c, 0x5c, 0x4c, 0x0f, 0x84, 0xc8, 0xd6, 0xa9, 0xad, 0x4e, 0x11, 0x48, 0xc5, 0x44, 0xcc, 0x00, 0x01, 0x15, 0x13, 0xad, 0x4d, 0x09, 0x8d, 0xe7, 0xf8, 0x8b, 0xa9, 0xa0, 0x08, 0x33, 0x72, 0x03, 0x07, 0xb0, 0xef, 0x39, 0x87, 0x54, 0xc3, 0xef, 0x3f, 0x33, 0x00, 0xe3, 0x0d, 0x05, 0x2e, 0x1a, 0x26, 0x23, 0xd8, 0xf0, 0xe5, 0x85, 0x72, 0x4c, 0x11, 0x14, 0x01, 0x37, 0x98, 0x60, 0x01, 0xe1, 0xff, 0xfa, 0x10, 0xc4, 0x15, 0x7e, 0xb5, 0x82, 0xc3, 0x8c, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x70, 0x83, 0xa5, 0x21, 0x9f, 0x68, 0x41, 0xf0, 0x10, 0xd7, 0x98, 0xb5, 0x00, 0x40, 0x08, 0x28, 0xce, 0xc4, 0x06, 0x08, 0x54, 0x24, 0x02, 0x62, 0x75, 0xd9, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0xfa, 0x2a, 0x48, 0xa2, 0xec, 0x80, 0x96, 0x3a, 0x19, 0x30, 0x9b, 0x0c, 0xc3, 0xb9, 0xf4, 0xab, 0x39, 0x38, 0xa8, 0x30, 0xea, 0x24, 0x13, 0x4b, 0x74, 0xca, 0x41, 0x08, 0xd0, 0x4a, 0xd7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x42, 0x12, 0x4b, 0xff, 0xfa, 0x10, 0xc4, 0x8b, 0xbc, 0xc1, 0x82, 
0xc4, 0x58, 0x1d, 0x20, 0xad, 0x7b, 0x62, 0x08, 0x72, 0x83, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0x3c, 0x61, 0x1e, 0x6c, 0xfc, 0x60, 0xdc, 0x2d, 0xe7, 0x1c, 0x0f, 0xe0, 0x7b, 0x0c, 0x66, 0x46, 0x20, 0x9d, 0x2e, 0x12, 0x44, 0xa0, 0xf0, 0xf2, 0x4c, 0xef, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x8a, 0x01, 0x00, 0x01, 0x75, 0x82, 0xc2, 0x42, 0xa7, 0x80, 0x18, 0x4c, 0x0a, 0xc8, 0x00, 0xd2, 0x46, 0xbb, 0x4d, 0x86, 0x50, 0x10, 0x52, 0x28, 0x04, 0xa4, 0x08, 0x80, 0x86, 0x80, 0x49, 0x9e, 0xa4, 0x4a, 0xb0, 0x14, 0xa6, 0x19, 0x22, 0x0c, 0x84, 0xff, 0xfa, 0x10, 0xc4, 0x8d, 0xa1, 0xca, 0x03, 0xc3, 0xd8, 0x1f, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0xaa, 0x83, 0xe4, 0x81, 0x9f, 0x70, 0x40, 0x60, 0x3e, 0x30, 0xa6, 0x75, 0x30, 0xc0, 0x73, 0xf7, 0x98, 0x83, 0x80, 0x10, 0xa8, 0xa6, 0x97, 0x09, 0x76, 0x4d, 0x32, 0x8b, 0xb5, 0x82, 0x14, 0xac, 0x10, 0x41, 0xa7, 0x99, 0x83, 0x00, 0x9e, 0x1b, 0xf0, 0x37, 0x39, 0xe1, 0x2b, 0x19, 0x38, 0xb9, 0x84, 0x02, 0x17, 0x21, 0x14, 0xd2, 0x20, 0x88, 0xde, 0x7f, 0x8f, 0x72, 0xb0, 0x04, 0x16, 0x70, 0x20, 0x61, 0x0a, 0x15, 0xa7, 0x47, 0xa7, 0xf4, 0x62, 0x06, 0x05, 0x66, 0x06, 0x80, 0x0c, 0x6d, 0x09, 0x70, 0xd1, 0xe1, 0x23, 0xc7, 0x91, 0x61, 0xff, 0xfa, 0x10, 0xc4, 0x4e, 0xce, 0xcd, 0x83, 0xc3, 0x7c, 0x1d, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xa7, 0x83, 0xe4, 0xc1, 0x9f, 0x68, 0x40, 0x41, 0x50, 0xe1, 0x41, 0xcc, 0x40, 0xe1, 0x8c, 0x37, 0x81, 0x44, 0xff, 0xd0, 0x8a, 0x80, 0xc6, 0x82, 0x24, 0x17, 0xe7, 0xa1, 0x03, 0x08, 0x42, 0x52, 0x54, 0x29, 0xad, 0x56, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe4, 0xd0, 0x78, 0xb6, 0xa6, 0x48, 0xc7, 0x25, 0x26, 0x11, 0xa2, 0x2c, 0x75, 0x92, 0xc5, 0x27, 0x0f, 0x22, 0x18, 0xc8, 0x06, 0x4c, 0x07, 0x5b, 0x69, 0x54, 0x80, 0x91, 0xe1, 0x54, 0xf7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x10, 0xc4, 0x66, 0x7b, 0xd3, 0x03, 0xc3, 0xb4, 0x1f, 0x22, 0x0d, 0x7b, 0x62, 0x00, 0x75, 0x03, 0xe5, 0x01, 0x9f, 0x6c, 0x40, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 
0xef, 0x2e, 0x08, 0x24, 0x31, 0x80, 0x36, 0x65, 0x6a, 0x18, 0x32, 0x0e, 0xa9, 0xc2, 0x15, 0x02, 0x1e, 0x34, 0x29, 0x8d, 0x8f, 0x24, 0x6b, 0x26, 0x40, 0x69, 0x76, 0x88, 0x9a, 0x64, 0x3d, 0x47, 0xa1, 0x40, 0x63, 0xa6, 0x07, 0x29, 0x88, 0x47, 0x20, 0xc4, 0x4e, 0x71, 0x40, 0xb0, 0x43, 0x24, 0x61, 0x40, 0x44, 0xd4, 0x46, 0x34, 0xb1, 0x2b, 0x33, 0x93, 0xbb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xee, 0xf4, 0x2a, 0xff, 0xfa, 0x10, 0xc4, 0xf1, 0x90, 0xde, 0x03, 0xc3, 0x90, 0x1d, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xb3, 0x03, 0xa4, 0x41, 0x9f, 0x60, 0x48, 0x6e, 0x29, 0xb8, 0x21, 0x3c, 0xc4, 0xf8, 0xc0, 0xf4, 0x5c, 0x4d, 0x64, 0x5f, 0xd8, 0xe0, 0xda, 0x4c, 0x50, 0x68, 0xc0, 0xc1, 0x50, 0x06, 0x8e, 0x09, 0x5e, 0x4c, 0x91, 0x39, 0xd8, 0xc2, 0xa7, 0x00, 0x04, 0x6c, 0xce, 0x60, 0xe4, 0x21, 0x87, 0x20, 0x8b, 0xda, 0x7c, 0x09, 0x06, 0x5e, 0x22, 0x61, 0x40, 0x85, 0xb0, 0x45, 0x74, 0x78, 0x1e, 0x2b, 0xb7, 0x4c, 0x41, 0x4d, 0x45, 0x64, 0xca, 0x10, 0x24, 0xb9, 0xd8, 0x11, 0x85, 0x28, 0x37, 0x9e, 0x14, 0x95, 0xa1, 0x89, 0xb8, 0x0b, 0x98, 0x21, 0xff, 0xfa, 0x10, 0xc4, 0x5a, 0x8c, 0xe1, 0x83, 0xc5, 0x74, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0xab, 0x83, 0xe4, 0x81, 0x9f, 0x6c, 0x40, 0x00, 0x11, 0xc4, 0x60, 0xd0, 0xa1, 0x31, 0x0e, 0x69, 0xf1, 0x53, 0x68, 0x3a, 0x10, 0x10, 0x1a, 0x13, 0xd5, 0x43, 0x0b, 0xa0, 0xb9, 0x3d, 0x9c, 0x44, 0x23, 0x15, 0xe0, 0x23, 0x16, 0x09, 0x60, 0x90, 0x17, 0xd1, 0x0f, 0x46, 0x44, 0x2c, 0xda, 0x5d, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0x0a, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x40, 0xff, 0xfa, 0x10, 0xc4, 0xb2, 0xcb, 0xde, 0x82, 0xc4, 0x24, 0x1d, 0x22, 0xad, 0x7b, 0x62, 0x08, 0x71, 0x03, 0xe4, 0xc1, 0xaf, 0x68, 0x40, 0x31, 0x67, 0x4c, 0x53, 0x0d, 0xcd, 0xcc, 0x1e, 0xc5, 0x48, 0xe6, 0x8d, 0xd8, 0x4f, 0xbd, 0x60, 0xca, 0x03, 0x53, 0x39, 0x98, 0xa4, 0x32, 0x12, 0x47, 0x8f, 0xa7, 
0xb8, 0x97, 0xa5, 0xe6, 0x0b, 0x11, 0x00, 0xc0, 0x30, 0x2c, 0x17, 0xd3, 0x49, 0xa8, 0x36, 0x3c, 0x5a, 0x40, 0x25, 0x95, 0x4d, 0xa9, 0xa9, 0x14, 0xfa, 0x26, 0x9b, 0x3b, 0xda, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x01, 0x00, 0x02, 0xb7, 0x21, 0xd4, 0xff, 0xfa, 0x10, 0xc4, 0xcf, 0xc3, 0xe8, 0x03, 0xc3, 0xac, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x75, 0x83, 0xa4, 0xc1, 0x9f, 0x60, 0x48, 0x90, 0x80, 0x8e, 0x29, 0x80, 0x50, 0xc5, 0x19, 0x35, 0x44, 0x09, 0xac, 0xdc, 0x01, 0x3a, 0x08, 0x04, 0x8f, 0x89, 0x9e, 0x9c, 0x05, 0x14, 0xe8, 0xd5, 0x6c, 0x28, 0x80, 0x05, 0x22, 0x65, 0x21, 0x18, 0x32, 0x8c, 0x29, 0xc1, 0x3c, 0x2f, 0x1e, 0x5b, 0x79, 0x94, 0x10, 0x18, 0x68, 0x59, 0x66, 0xcb, 0xb0, 0x80, 0x32, 0x24, 0xc9, 0xc5, 0xaa, 0x84, 0x25, 0x78, 0x08, 0xc3, 0x9e, 0x13, 0x09, 0x60, 0xf2, 0x3b, 0x44, 0x56, 0x73, 0x12, 0x00, 0x40, 0x30, 0x3f, 0x01, 0x13, 0x91, 0x80, 0x80, 0x2e, 0xff, 0xfa, 0x10, 0xc4, 0xb5, 0x56, 0xf3, 0x03, 0xc5, 0x88, 0x1d, 0x20, 0x0d, 0x7b, 0x02, 0x40, 0xaf, 0x03, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0xba, 0x02, 0x06, 0xbf, 0x6f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0x21, 0xb0, 0xd0, 0x92, 0x26, 0x45, 0x21, 0x06, 0x81, 0xc3, 0x01, 0x40, 0x14, 0x18, 0x1a, 0x38, 0x18, 0x0c, 0x03, 0xe9, 0x41, 0x27, 0x14, 0x21, 0x2a, 0x5b, 0x25, 0xc6, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0xfe, 0x8a, 0x4c, 0x45, 0x52, 0xe3, 0x19, 0x01, 0x9c, 0xf0, 0x18, 0x4a, 0x07, 0x89, 0xda, 0x42, 0xb0, 0x1c, 0x70, 0x6e, 0xff, 0xfa, 0x10, 0xc4, 0x53, 0xa9, 0xef, 0x83, 0xc3, 0xc0, 0x1f, 0x22, 0x0d, 0x7b, 0x62, 0x00, 0xac, 0x83, 0xa4, 0x81, 0xaf, 0x6c, 0x40, 0x0a, 0x39, 0x0d, 0x02, 0x14, 0x31, 0x2d, 0x50, 0x9c, 0x34, 0x1d, 0xb2, 0xcf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0x09, 0x0c, 0x5d, 0x50, 0x29, 0x86, 0x76, 0xe6, 0x0a, 0xa2, 0xa0, 0x6e, 0x56, 0xea, 0x67, 0x56, 0xb0, 0x62, 0x81, 0xeb, 0xb9, 
0xdd, 0x53, 0x64, 0xc1, 0x22, 0x3e, 0x9e, 0xe5, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x01, 0x00, 0x02, 0xb6, 0xa3, 0x30, 0xc1, 0xa2, 0x3e, 0xff, 0xfa, 0x10, 0xc4, 0x24, 0x70, 0xf3, 0x03, 0xc3, 0xb0, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x71, 0x83, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0x0c, 0x01, 0x05, 0x5c, 0xc8, 0x91, 0xdc, 0x8c, 0xf5, 0xd1, 0xd2, 0x49, 0x1e, 0xc9, 0xd7, 0x0a, 0xde, 0x28, 0x83, 0x44, 0xb0, 0x08, 0x00, 0x15, 0x58, 0x94, 0x28, 0x80, 0xe1, 0x87, 0xba, 0x60, 0x84, 0x3e, 0xc6, 0xbe, 0xd5, 0x62, 0x71, 0xd2, 0x86, 0x36, 0x5a, 0x61, 0x41, 0xc0, 0xa0, 0x00, 0xe0, 0x71, 0x20, 0x02, 0x27, 0x79, 0x1a, 0xaa, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x89, 0xa9, 0x80, 0x14, 0x73, 0x52, 0xc3, 0x06, 0x91, 0x29, 0x38, 0x6f, 0x67, 0xc3, 0xd1, 0x51, 0x32, 0xff, 0xfa, 0x10, 0xc4, 0xd7, 0x3f, 0xfc, 0x83, 0xc3, 0xcc, 0x1f, 0x24, 0x0c, 0xfb, 0x02, 0x40, 0xb0, 0x03, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0xa1, 0x43, 0x0b, 0x05, 0x2e, 0x5a, 0x0e, 0x24, 0x38, 0xf1, 0x84, 0xe7, 0x5e, 0x55, 0x9e, 0x1c, 0x41, 0xc6, 0x49, 0x84, 0x40, 0x45, 0x1d, 0x3b, 0x1a, 0x49, 0xc2, 0x03, 0xc6, 0x37, 0x01, 0x98, 0x30, 0x00, 0x5d, 0x44, 0x6f, 0x4b, 0x44, 0x2f, 0xcf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa6, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0xaa, 0x4c, 0x24, 0x29, 0x0c, 0x58, 0xec, 0x0c, 0xc2, 0x8c, 0x1a, 0x8f, 0x0a, 0xca, 0xac, 0xe6, 0xe0, 0x82, 0x84, 0xff, 0xfa, 0x10, 0xc4, 0x10, 0x99, 0xf3, 0x83, 0xc3, 0x9c, 0x1f, 0x24, 0x0c, 0xfb, 0x62, 0x00, 0x6e, 0x83, 0xe4, 0xc1, 0xaf, 0x68, 0x40, 0x00, 0x38, 0x36, 0x84, 0xf4, 0xa4, 0x43, 0xd4, 0xfa, 0xad, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0xbd, 0x25, 0xc9, 0x00, 0x80, 0x65, 0x1e, 0x75, 0xf4, 0x61, 0x38, 0x2c, 0x67, 0x78, 0x0f, 0x60, 0x72, 0x82, 0xe9, 0x8e, 0xc1, 0x44, 0xc0, 0x55, 0x1e, 0x41, 0x29, 0x6e, 0x86, 0x88, 0xb3, 0xbd, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 
0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x48, 0x62, 0xff, 0xfa, 0x10, 0xc4, 0x95, 0x3e, 0xf6, 0x03, 0xc3, 0xe0, 0x1d, 0x26, 0xad, 0x7b, 0x42, 0x18, 0x79, 0x03, 0xa4, 0x85, 0xaf, 0x6c, 0x43, 0xf2, 0x83, 0x4e, 0x31, 0x3f, 0x30, 0x3d, 0x16, 0xe3, 0x59, 0x97, 0xf5, 0x3f, 0xf9, 0x0c, 0x41, 0x55, 0x94, 0xe6, 0xa7, 0xd2, 0x56, 0x93, 0x4a, 0x9e, 0xe2, 0x18, 0x8c, 0x86, 0x24, 0x42, 0x41, 0x88, 0x1a, 0x41, 0xa6, 0x25, 0xb6, 0x48, 0x61, 0xb3, 0x60, 0x82, 0xc0, 0x08, 0x59, 0x7c, 0xc8, 0x81, 0x09, 0x80, 0xca, 0xcf, 0xa0, 0xee, 0xd5, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x00, 0x03, 0x00, 0x00, 0x78, 0x96, 0xff, 0xfa, 0x10, 0xc4, 0xed, 0xd0, 0xff, 0x83, 0xc5, 0x84, 0x1d, 0x24, 0x0c, 0xfb, 0x02, 0x40, 0xad, 0x83, 0xa4, 0x01, 0xaf, 0xe0, 0x48, 0x0c, 0x40, 0x31, 0x8b, 0x89, 0x81, 0xe0, 0x9a, 0x1a, 0xc4, 0x36, 0x09, 0xc0, 0x2a, 0x18, 0xa8, 0x99, 0x80, 0x00, 0x24, 0x42, 0x57, 0xa9, 0xa1, 0x31, 0xbc, 0xfa, 0xb1, 0x75, 0x34, 0x01, 0x06, 0x6e, 0x40, 0x60, 0xf2, 0x1e, 0x67, 0x2e, 0x2a, 0xd4, 0x7d, 0xa8, 0x26, 0x60, 0x1c, 0x61, 0xa0, 0xa5, 0xc3, 0x42, 0x42, 0x3d, 0x8f, 0x0e, 0x58, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x55, 0x55, 0x60, 0xa9, 0x44, 0x3c, 0xa0, 0x4b, 0x66, 0x14, 0x80, 0x78, 0x78, 0x8c, 0x36, 0x27, 0x3a, 0x04, 0x01, 0x91, 0xff, 0xfa, 0x10, 0xc4, 0xa8, 0x89, 0xfb, 0x83, 0xc5, 0x8c, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x71, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x66, 0x10, 0x00, 0x16, 0xe1, 0x10, 0xd0, 0xf1, 0x97, 0xd7, 0x67, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xdf, 0xe8, 0x48, 0xa2, 0xec, 0x81, 0x96, 0x3a, 0x17, 0x30, 0x9d, 0x0c, 0x43, 0xba, 0xf4, 0x83, 0x39, 0x48, 0xa0, 0x30, 0xf2, 0x24, 0x13, 0x4b, 0x74, 0xca, 0x41, 0x08, 0xd0, 0x4a, 0xd6, 0xd5, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x55, 0x55, 0x48, 0x52, 0xeb, 0x00, 0x0b, 0x34, 0xf2, 0x30, 0x60, 0x13, 0xb3, 0x7e, 0x66, 0xe0, 0x3b, 0xe5, 0x33, 0x1d, 0xff, 0xfa, 0x10, 0xc4, 0xac, 0xeb, 0xf7, 0x80, 0x03, 
0xb4, 0x1d, 0x28, 0xac, 0xfb, 0x42, 0x18, 0x8b, 0x03, 0xa4, 0x55, 0xaf, 0x6c, 0x41, 0x0a, 0x57, 0x2e, 0x12, 0xa5, 0x4c, 0x62, 0x23, 0x59, 0xde, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0x08, 0x2e, 0xb0, 0x58, 0x38, 0x54, 0xf1, 0x80, 0xbe, 0x60, 0x5e, 0x40, 0x06, 0x9a, 0x75, 0xca, 0x6c, 0xf2, 0x80, 0x02, 0x91, 0x40, 0x25, 0x20, 0x3c, 0x04, 0x2c, 0x02, 0x4c, 0xf5, 0x22, 0x55, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x60, 0x28, 0xbc, 0x3a, 0x44, 0x29, 0x08, 0xc0, 0x74, 0x61, 0xcc, 0xdd, 0xe1, 0x98, 0xe4, 0xef, 0xff, 0xfa, 0x10, 0xc4, 0x15, 0xb9, 0xfb, 0x83, 0xc3, 0xa8, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xae, 0x03, 0xa4, 0xc1, 0x9f, 0x70, 0x40, 0x30, 0xc7, 0x00, 0x20, 0xd1, 0x4d, 0x2e, 0x13, 0x3c, 0xa2, 0x85, 0x17, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x98, 0x21, 0x4a, 0xc1, 0x04, 0x1a, 0x39, 0x98, 0x2f, 0x0a, 0x01, 0xbe, 0x13, 0x7d, 0x9d, 0xf2, 0xc1, 0x92, 0x8b, 0x98, 0x40, 0x21, 0x72, 0x11, 0x4d, 0x22, 0x08, 0x8e, 0xe7, 0xf8, 0x7b, 0x93, 0x50, 0x14, 0x99, 0xd0, 0xc1, 0x84, 0xd8, 0x68, 0x9d, 0xcd, 0xa5, 0x91, 0x89, 0x48, 0x1b, 0x98, 0x20, 0x00, 0x61, 0xcc, 0xe0, 0x90, 0x97, 0xff, 0xfa, 0x10, 0xc4, 0x9a, 0x08, 0xfc, 0x03, 0xc5, 0x6c, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x7a, 0x83, 0xe4, 0x41, 0x9f, 0x70, 0x40, 0xb0, 0xbe, 0xe3, 0x54, 0xb1, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0x90, 0x54, 0x48, 0x50, 0x93, 0x10, 0x78, 0x63, 0x0d, 0xe0, 0x43, 0x3f, 0xf8, 0x1d, 0xc0, 0x83, 0x41, 0x0e, 0x0b, 0xf3, 0xd0, 0x81, 0x86, 0x1d, 0x29, 0x2a, 0x16, 0x2d, 0x5d, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xeb, 0x4c, 0x41, 0xe2, 0xda, 0x99, 0x22, 0x1c, 0x94, 0x98, 0x47, 0x88, 0x81, 0xd7, 0x0b, 0x03, 0x9c, 0x4c, 0x84, 0xff, 0xfa, 0x10, 0xc4, 0xa4, 0xdf, 0xf4, 0x03, 0xc3, 0x74, 0x1f, 0x26, 0x0c, 0xfb, 0x42, 0x00, 0x76, 0x03, 0xe4, 0x41, 0xaf, 0x6c, 0x40, 0x63, 0x40, 0x09, 0x10, 0x1d, 0x6d, 0xa6, 
0x92, 0x02, 0x46, 0x85, 0x56, 0xb9, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf4, 0xa4, 0x29, 0x73, 0x81, 0x06, 0x99, 0x7d, 0x18, 0x25, 0x0a, 0xf9, 0xb3, 0xe3, 0xd1, 0x1c, 0xb2, 0xf9, 0x88, 0x88, 0x2d, 0x17, 0x59, 0x4b, 0x53, 0x08, 0x89, 0x06, 0x77, 0xb5, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x47, 0xa1, 0x40, 0x63, 0x26, 0x06, 0x29, 0x83, 0x47, 0x20, 0xc5, 0xae, 0x70, 0xcc, 0x1e, 0x08, 0x74, 0x8c, 0x28, 0x04, 0xff, 0xfa, 0x10, 0xc4, 0x19, 0x3c, 0xf6, 0x02, 0xc4, 0x2c, 0x1d, 0x29, 0x0c, 0xfb, 0x62, 0x08, 0x72, 0x83, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0x9a, 0x88, 0xc6, 0x96, 0x25, 0x66, 0x72, 0x77, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0xde, 0x85, 0x81, 0x1c, 0x04, 0x15, 0x36, 0x63, 0x2d, 0x18, 0x2a, 0x8f, 0x49, 0xb8, 0x5d, 0x36, 0x1d, 0x6c, 0x89, 0x91, 0x16, 0x18, 0x68, 0x78, 0x18, 0x0c, 0x30, 0x10, 0xba, 0x64, 0x4e, 0x12, 0x0e, 0xaa, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x8c, 0x28, 0x18, 0x00, 0x23, 0x66, 0xff, 0xfa, 0x10, 0xc4, 0x79, 0xbe, 0xfc, 0x03, 0xc5, 0x80, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x75, 0x03, 0xe4, 0x81, 0x9f, 0x70, 0x40, 0x93, 0x07, 0x01, 0x10, 0x38, 0xec, 0x60, 0x73, 0xde, 0x4a, 0x32, 0xd1, 0x33, 0x0a, 0x04, 0x2d, 0x82, 0x2b, 0xa2, 0x60, 0xf1, 0x5d, 0xbe, 0x2b, 0x11, 0x30, 0x40, 0xc5, 0xe7, 0x08, 0x71, 0x86, 0xe8, 0x52, 0x9f, 0xe1, 0x9e, 0x41, 0x8c, 0xe8, 0x15, 0x98, 0x30, 0x00, 0x61, 0xc2, 0x68, 0x04, 0x90, 0xb0, 0x82, 0xa4, 0x89, 0x09, 0x49, 0xba, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0x4c, 0x14, 0x07, 0x03, 0x95, 0x3a, 0xcc, 0x30, 0xa0, 0x09, 0x73, 0xbf, 0xc3, 0x8c, 0x39, 0x80, 0xff, 0xfa, 0x10, 0xc4, 0xe5, 0x6e, 0xfc, 0x02, 0xc5, 0x4c, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x7d, 0x83, 0xa4, 0x55, 0xaf, 0x6c, 0x41, 0x64, 0x68, 0xfc, 0x18, 0x14, 0x48, 0xc4, 0x9d, 0x43, 0x90, 0xd0, 0x06, 0x97, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0x24, 
0x31, 0x77, 0x4c, 0x11, 0x8d, 0x6b, 0x4c, 0x1a, 0xc4, 0x80, 0xe2, 0x3d, 0x98, 0x4f, 0x45, 0x20, 0xc8, 0x81, 0xd5, 0x13, 0x75, 0x50, 0x24, 0xbd, 0x1e, 0x2e, 0x9e, 0xe5, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x55, 0x4b, 0xd2, 0xf3, 0x05, 0x88, 0x98, 0x18, 0x06, 0x05, 0xc2, 0xf6, 0x69, 0x8d, 0x05, 0x87, 0x9b, 0x49, 0x81, 0x2c, 0xff, 0xfa, 0x10, 0xc4, 0xbb, 0x87, 0xfb, 0x03, 0xc5, 0x50, 0x1f, 0x26, 0x0d, 0x7b, 0x42, 0x00, 0x75, 0x83, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0xac, 0xed, 0x4d, 0x48, 0xa6, 0xb1, 0x34, 0xd9, 0xde, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0x28, 0x0a, 0x1c, 0x1a, 0x40, 0xa0, 0x2b, 0xcc, 0xc0, 0x60, 0x95, 0x8c, 0xd1, 0xb2, 0x8c, 0xd1, 0xac, 0x8c, 0x2c, 0xdc, 0xc1, 0x84, 0x01, 0xc0, 0x82, 0xc1, 0xe3, 0xc1, 0x05, 0x12, 0xf0, 0x7f, 0x15, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x7a, 0x53, 0xd0, 0x1a, 0x59, 0x99, 0xc1, 0x82, 0x98, 0xaa, 0x9b, 0x81, 0xbb, 0x31, 0xd5, 0xaf, 0x99, 0x00, 0xc1, 0x83, 0xff, 0xfa, 0x10, 0xc4, 0xfd, 0x7e, 0xff, 0x83, 0xc5, 0x88, 0x1d, 0x24, 0x0c, 0xfb, 0x02, 0x40, 0xaf, 0x03, 0xe4, 0x01, 0xaf, 0x60, 0x48, 0x83, 0x97, 0x8d, 0x0d, 0x13, 0x0c, 0x88, 0xf2, 0x73, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x43, 0x78, 0x5d, 0x33, 0x40, 0x83, 0xef, 0x73, 0x0c, 0x81, 0x4c, 0x3e, 0x8a, 0x73, 0x93, 0x17, 0xa0, 0x60, 0x30, 0x59, 0x01, 0xf3, 0x81, 0x23, 0x08, 0x80, 0x28, 0x61, 0x41, 0x05, 0xbf, 0x97, 0xe9, 0x4e, 0xa1, 0xe0, 0x4a, 0x22, 0x1f, 0xc0, 0xc3, 0x04, 0x00, 0x4f, 0x83, 0x41, 0xd8, 0x1c, 0x5a, 0xe0, 0xa0, 0xa0, 0x3b, 0x8c, 0x1a, 0x31, 0x80, 0x12, 0xff, 0xfa, 0x10, 0xc4, 0x5f, 0x81, 0xfb, 0x83, 0xc5, 0x84, 0x1f, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x71, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x8d, 0xe0, 0xb8, 0xdf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xbf, 0xd0, 0x8a, 0xa5, 0xc6, 0x32, 0x01, 0x3a, 0x5e, 0x30, 0x96, 0x0e, 0x93, 0xb5, 0xa5, 0x3c, 0x38, 0xf0, 0xd4, 0x14, 0x72, 0x1a, 0x06, 0x28, 0x62, 0x5a, 0xa1, 0x38, 0x68, 0x3b, 0x67, 0xff, 
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa1, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x48, 0x62, 0xea, 0x81, 0xff, 0xfa, 0x10, 0xc4, 0xb4, 0x6c, 0xfd, 0x03, 0xc5, 0x70, 0x1d, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0x7e, 0x83, 0xe4, 0x41, 0xaf, 0x6c, 0x40, 0x4a, 0x33, 0xb7, 0x30, 0x57, 0x14, 0xc3, 0x74, 0xd7, 0x41, 0x3a, 0xf5, 0x83, 0x16, 0x0d, 0x58, 0xce, 0x4a, 0x9b, 0x24, 0x49, 0x11, 0xf4, 0xf7, 0x10, 0x5d, 0x51, 0x08, 0x81, 0x02, 0x10, 0x6e, 0x03, 0x01, 0xa2, 0x07, 0x33, 0x82, 0xb0, 0x83, 0x42, 0x97, 0x11, 0x95, 0x08, 0x41, 0x52, 0xec, 0x98, 0x04, 0x78, 0x08, 0xa1, 0xe6, 0x0e, 0x59, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x01, 0x00, 0x03, 0x22, 0x4a, 0xff, 0xfa, 0x10, 0xc4, 0x7c, 0xf1, 0xf6, 0x03, 0xc3, 0xa4, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x81, 0x83, 0xe4, 0x01, 0xaf, 0x64, 0x48, 0x71, 0x42, 0x00, 0x58, 0x26, 0x05, 0x42, 0xfc, 0x68, 0xc9, 0x08, 0x27, 0x8b, 0x61, 0x8d, 0x3a, 0x60, 0x02, 0x22, 0x82, 0x49, 0xa7, 0xc1, 0x34, 0xfa, 0x35, 0x62, 0x69, 0xc8, 0x05, 0x1c, 0xd4, 0xb8, 0xc1, 0xa4, 0x4b, 0x4e, 0x14, 0xda, 0x48, 0xf3, 0xd4, 0xcc, 0xa0, 0x50, 0xc2, 0xc1, 0x4b, 0x96, 0x83, 0x89, 0x0e, 0x44, 0x61, 0x39, 0xd5, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x55, 0x55, 0x79, 0x55, 0xf8, 0x61, 0x07, 0x19, 0x66, 0x11, 0x01, 0x24, 0x74, 0xd8, 0x6d, 0xe7, 0x06, 0x10, 0x18, 0xdc, 0xff, 0xfa, 0x10, 0xc4, 0xc0, 0x58, 0xfb, 0x03, 0xc5, 0x64, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x72, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x0a, 0x60, 0xc0, 0x01, 0x75, 0x11, 0xbd, 0x2d, 0x10, 0xbf, 0x34, 0x52, 0x0e, 0x04, 0x32, 0x23, 0xd8, 0x33, 0x0b, 0xf0, 0x72, 0x3d, 0xe7, 0x2d, 0x33, 0x16, 0x70, 0x0a, 0x22, 0x09, 0xc0, 0x59, 0xc2, 0xe1, 0x2a, 0x00, 0x80, 0x69, 0x25, 0x59, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x55, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 
0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x40, 0x29, 0x69, 0x8c, 0xff, 0xfa, 0x10, 0xc4, 0x3f, 0xbc, 0xfc, 0x83, 0xc5, 0x4c, 0x1f, 0x26, 0x0d, 0x7b, 0x42, 0x00, 0x7f, 0x83, 0xe4, 0x01, 0xaf, 0x6c, 0x40, 0x62, 0x4e, 0x3b, 0x8c, 0x21, 0x84, 0xb4, 0xe9, 0xa1, 0xa8, 0x0e, 0x08, 0x4d, 0x31, 0x88, 0x10, 0xa0, 0x02, 0xa7, 0x49, 0x52, 0x82, 0x61, 0xe1, 0x8c, 0xef, 0x4b, 0xfc, 0x14, 0x06, 0x09, 0x28, 0x62, 0xa5, 0x98, 0x25, 0x8d, 0x21, 0xb4, 0x8c, 0x82, 0x9c, 0xcb, 0xa1, 0x87, 0x0d, 0xa8, 0x73, 0x41, 0x49, 0x24, 0x35, 0x22, 0x5a, 0x91, 0x71, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x4a, 0x91, 0xff, 0xfa, 0x10, 0xc4, 0x12, 0x7a, 0xfd, 0x03, 0xc5, 0x74, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x80, 0x83, 0xe4, 0x01, 0x9f, 0x64, 0x48, 0x90, 0x44, 0x86, 0x49, 0x29, 0x08, 0xc7, 0x24, 0xc2, 0x86, 0x71, 0x80, 0x30, 0xa1, 0x42, 0x20, 0x48, 0x2a, 0x39, 0xa2, 0x42, 0x59, 0x95, 0x99, 0x49, 0xba, 0x60, 0x3c, 0x4a, 0x2e, 0x20, 0x20, 0xc3, 0xc4, 0xc0, 0xe8, 0x4d, 0x8d, 0x59, 0x9b, 0x3c, 0xff, 0x54, 0x32, 0x64, 0xcc, 0x00, 0x04, 0x88, 0x4a, 0xf5, 0x34, 0x26, 0x6f, 0x3e, 0xaa, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0x87, 0xd2, 0x20, 0xc5, 0x14, 0xe2, 0xa8, 0xc2, 0x34, 0x47, 0xce, 0xab, 0x59, 0x38, 0xc4, 0x3c, 0x13, 0xcc, 0x0e, 0x80, 0xff, 0xfa, 0x10, 0xc4, 0x75, 0xcf, 0xff, 0x83, 0xc5, 0x7c, 0x1d, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0xaf, 0x83, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0x50, 0xe2, 0x90, 0x28, 0x4b, 0xb8, 0x5e, 0xf1, 0xf6, 0x4e, 0x75, 0x5c, 0xa3, 0x10, 0xd4, 0x00, 0x7b, 0xcc, 0x30, 0x01, 0x58, 0xf7, 0xe0, 0x8c, 0xcc, 0x5a, 0x80, 0x40, 0xc1, 0x44, 0x00, 0xce, 0xc4, 0x04, 0x08, 0x46, 0x11, 0xc1, 0x2b, 0xba, 0xfa, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x6a, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x40, 0x10, 0x38, 0xff, 0xfa, 0x10, 0xc4, 0x12, 0xb2, 0xf5, 0x02, 0xc3, 0x88, 
0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x7e, 0x03, 0xa4, 0x55, 0xaf, 0x6c, 0x41, 0x13, 0x44, 0x03, 0xd1, 0xd3, 0x0b, 0x70, 0xdc, 0x3d, 0x63, 0x4f, 0xb3, 0x15, 0x90, 0x2e, 0x0c, 0x09, 0x10, 0xcf, 0xa0, 0x8d, 0x0e, 0x42, 0x83, 0x16, 0xd5, 0x36, 0xd2, 0x14, 0xbf, 0xc0, 0x02, 0x4d, 0x5c, 0x0c, 0x18, 0x84, 0xdc, 0xe0, 0x11, 0xb3, 0x0f, 0x09, 0x44, 0xc7, 0x42, 0x15, 0x85, 0xd2, 0x54, 0xa9, 0x8c, 0x44, 0x6b, 0x3b, 0xda, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x01, 0x00, 0x02, 0xff, 0xfa, 0x10, 0xc4, 0x9d, 0x9d, 0xf5, 0x80, 0x03, 0xe8, 0x1d, 0x26, 0xad, 0x7b, 0x42, 0x18, 0x75, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x5f, 0x17, 0xd4, 0x42, 0x40, 0x11, 0x0c, 0xc0, 0x84, 0x60, 0x8c, 0xf8, 0xe1, 0x5c, 0xe7, 0xea, 0x06, 0x19, 0x4c, 0x86, 0x68, 0xa8, 0x94, 0x89, 0x34, 0xea, 0x35, 0x56, 0x8a, 0x2f, 0x0e, 0x91, 0x0a, 0x42, 0x30, 0x19, 0x18, 0x73, 0x34, 0x18, 0x6c, 0x38, 0xbb, 0xcc, 0x21, 0xc0, 0x28, 0x34, 0x7f, 0x4b, 0x84, 0xcf, 0x28, 0xa1, 0x44, 0xb5, 0x4c, 0x41, 0x4d, 0x45, 0x01, 0x00, 0x03, 0xe0, 0x8d, 0xe0, 0x03, 0x8d, 0x7f, 0xcc, 0x1b, 0x85, 0xd0, 0xe2, 0xda, 0x03, 0x4f, 0x69, 0xa8, 0xcb, 0x47, 0xff, 0xfa, 0x10, 0xc4, 0x5b, 0x34, 0xfc, 0x03, 0xc3, 0x90, 0x1d, 0x26, 0x0c, 0xfb, 0x82, 0x00, 0xb3, 0x03, 0xa4, 0x41, 0x9f, 0x60, 0x48, 0xcc, 0x30, 0x28, 0xb3, 0x05, 0xdb, 0x41, 0xc1, 0xe4, 0xb9, 0xf5, 0x5e, 0xe4, 0xd4, 0x03, 0x26, 0x74, 0x34, 0x61, 0x36, 0x1a, 0xe7, 0x71, 0x69, 0xb0, 0x62, 0x4e, 0x07, 0x26, 0x08, 0x00, 0x1c, 0x73, 0x38, 0x24, 0x25, 0xec, 0x2f, 0xb8, 0xd5, 0x2c, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xe5, 0x41, 0x51, 0x21, 0x42, 0xcc, 0x43, 0x43, 0x18, 0x70, 0x01, 0x99, 0xff, 0xe0, 0xc5, 0x09, 0x1a, 0x08, 0x60, 0x5f, 0x9f, 0x04, 0x0c, 0x30, 0xe9, 0x4a, 0xff, 0xfa, 0x10, 0xc4, 0xb6, 0xa6, 0xf5, 0x03, 0xc3, 0xc8, 0x1f, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x76, 0x03, 0xe4, 0x81, 0xaf, 0x6c, 0x40, 0xa8, 0x5d, 0xb5, 0x75, 0xff, 0xff, 0xff, 0xff, 
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xad, 0x07, 0x8b, 0x6a, 0x64, 0x88, 0x72, 0x4e, 0x61, 0x22, 0x21, 0x47, 0x5e, 0xeb, 0xce, 0x71, 0x52, 0x01, 0x8d, 0x00, 0x24, 0x40, 0x75, 0xb6, 0x9a, 0x48, 0x09, 0x1a, 0x13, 0x5a, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0xea, 0x4c, 0x41, 0x4d, 0x48, 0x52, 0xf7, 0x02, 0x0d, 0x32, 0xfa, 0x30, 0x4c, 0x15, 0xd3, 0x6a, 0x27, 0x92, 0x39, 0xff, 0xfa, 0x10, 0xc4, 0xfd, 0x72, 0xf4, 0x02, 0xc3, 0x7c, 0x1f, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0x77, 0x03, 0xa5, 0x21, 0x9f, 0x68, 0x41, 0xa5, 0xd3, 0x11, 0x10, 0x5c, 0xae, 0xb2, 0x96, 0xa6, 0x11, 0x12, 0x0c, 0xef, 0x51, 0xe8, 0x50, 0x18, 0xc9, 0x81, 0x4a, 0x60, 0x11, 0xc8, 0x31, 0xbb, 0x9c, 0x13, 0x11, 0x82, 0x25, 0x23, 0x0b, 0x01, 0x26, 0xa2, 0x31, 0xa5, 0x09, 0x43, 0x34, 0x9f, 0x9f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x45, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x55, 0x58, 0x12, 0x00, 0x41, 0x53, 0xa6, 0x32, 0xd1, 0x82, 0x88, 0xf6, 0x9b, 0x6a, 0xd3, 0xe1, 0xd3, 0xc8, 0x99, 0x11, 0xff, 0xfa, 0x10, 0xc4, 0x44, 0x16, 0xfd, 0x03, 0xc3, 0xd8, 0x1f, 0x24, 0x0c, 0xfb, 0x02, 0x40, 0xb2, 0x03, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0x61, 0x86, 0x87, 0x81, 0x80, 0xc3, 0x01, 0x0b, 0xd6, 0x44, 0xe1, 0x20, 0xec, 0x30, 0xa0, 0x60, 0x01, 0x0d, 0x9a, 0x4c, 0x1b, 0x84, 0x58, 0xe3, 0x49, 0x88, 0x4f, 0x71, 0x28, 0xcb, 0x44, 0xcc, 0x28, 0x10, 0xb9, 0x08, 0xae, 0x89, 0x83, 0xc5, 0x76, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7a, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0xaa, 0x64, 0xc9, 0xe0, 0x1c, 0xb9, 0xd8, 0x31, 0x85, 0x18, 0x40, 0x9e, 0x0b, 0x98, 0xa1, 0x89, 0xb8, 0x0e, 0x98, 0xff, 0xfa, 0x10, 0xc4, 0x87, 0x85, 0xf4, 0x83, 0xc3, 0xc8, 0x1f, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0x71, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x21, 0x00, 0x21, 0xcc, 0x60, 0xd0, 0xa1, 0x31, 0x06, 0xd1, 0xa2, 0xa2, 0x60, 0xa0, 0x38, 0x21, 0x53, 0xac, 0xa3, 0x0a, 0x00, 0x8d, 0x3c, 0x06, 0x36, 0x03, 0x99, 0x05, 0x47, 
0x8f, 0xc1, 0x81, 0x44, 0x26, 0x25, 0x2a, 0x1c, 0x90, 0x86, 0xab, 0x3f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xef, 0x4d, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x48, 0x62, 0xee, 0x98, 0x23, 0x1a, 0xd5, 0x98, 0x36, 0x88, 0xe1, 0xc5, 0x4b, 0x24, 0x9e, 0xff, 0xfa, 0x10, 0xc4, 0xa2, 0x22, 0xf4, 0x83, 0xc3, 0xc8, 0x1d, 0x26, 0xad, 0x7b, 0x42, 0x18, 0x71, 0x83, 0xa4, 0xc5, 0xaf, 0x68, 0x43, 0xaa, 0x31, 0x91, 0x02, 0xa7, 0x13, 0x75, 0x53, 0x24, 0xbd, 0x1e, 0x2e, 0x9e, 0xe2, 0x5e, 0x97, 0x98, 0x28, 0x44, 0xc0, 0xc0, 0x30, 0x30, 0x17, 0x93, 0x4f, 0x48, 0x22, 0x3d, 0x99, 0xcc, 0x19, 0x45, 0x13, 0x6c, 0xea, 0x45, 0x35, 0x89, 0xa4, 0xce, 0xf7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe9, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x4e, 0x62, 0x00, 0x65, 0x43, 0xff, 0xfa, 0x10, 0xc4, 0x67, 0xbe, 0xfd, 0x80, 0x04, 0x08, 0x1d, 0x24, 0xac, 0xfb, 0x62, 0x18, 0xb1, 0x03, 0xa4, 0x81, 0x9f, 0x60, 0x48, 0x02, 0x3a, 0x66, 0x01, 0x03, 0x8c, 0x64, 0x75, 0x37, 0xa6, 0x51, 0x04, 0x60, 0x24, 0xa0, 0x40, 0x64, 0x34, 0x42, 0xf4, 0x98, 0x28, 0x67, 0x93, 0xf0, 0xc0, 0x82, 0x56, 0x00, 0x2e, 0x39, 0x8d, 0x71, 0x82, 0x18, 0x95, 0x9b, 0x02, 0xb4, 0x61, 0xc7, 0xa8, 0x98, 0xb0, 0x91, 0x81, 0x80, 0xa2, 0xba, 0x74, 0x29, 0xb9, 0x11, 0x84, 0xe2, 0xca, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0x84, 0x2b, 0xb0, 0x31, 0x0f, 0xbd, 0x30, 0x7e, 0x0d, 0xc3, 0x9b, 0xc4, 0xf9, 0x3f, 0x43, 0x83, 0xff, 0xfa, 0x10, 0xc4, 0x90, 0xc4, 0xff, 0x83, 0xc5, 0x78, 0x1f, 0x20, 0x0d, 0x7b, 0x02, 0x40, 0xae, 0x83, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0x33, 0x0d, 0x30, 0xc0, 0x22, 0xe0, 0x22, 0x7a, 0x36, 0x0f, 0x0d, 0xdb, 0x4e, 0xa1, 0xe0, 0x4a, 0xd1, 0x17, 0xb0, 0xc3, 0x04, 0x04, 0xcf, 0x82, 0xc3, 0x50, 0x0c, 0x5a, 0xe0, 0x60, 0xa0, 0x3b, 0x8c, 0x1a, 0x31, 0x80, 0x12, 0x8d, 0xc0, 0xb8, 0xdf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 
0xff, 0xff, 0xff, 0xff, 0xbf, 0xd1, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x48, 0x92, 0xff, 0xfa, 0x10, 0xc4, 0xe1, 0xa8, 0xfa, 0x83, 0xc3, 0x88, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xa9, 0x83, 0xe4, 0x81, 0xaf, 0x6c, 0x40, 0xe3, 0x19, 0x40, 0x9d, 0x2e, 0x18, 0x4c, 0x07, 0x19, 0xdb, 0x62, 0x8b, 0x1c, 0x80, 0x66, 0x0e, 0x39, 0x0b, 0x03, 0x14, 0x31, 0x2d, 0x50, 0x9c, 0x34, 0x19, 0xb2, 0x90, 0xc5, 0xd5, 0x02, 0x94, 0x68, 0x6a, 0x60, 0xb2, 0x29, 0x06, 0xed, 0xae, 0x5e, 0x76, 0x2a, 0xc6, 0x30, 0x1a, 0xb1, 0x9d, 0x95, 0x36, 0x48, 0x92, 0x23, 0xe9, 0xee, 0x2a, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x2e, 0xa8, 0x84, 0xff, 0xfa, 0x10, 0xc4, 0x0f, 0xb7, 0xfc, 0x83, 0xc3, 0xf4, 0x1f, 0x22, 0x0d, 0x7b, 0x62, 0x00, 0xac, 0x03, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0x40, 0x81, 0x08, 0x27, 0x01, 0x80, 0xf1, 0x03, 0x99, 0xe6, 0x58, 0x11, 0xa5, 0x4b, 0x85, 0xca, 0x04, 0x20, 0xa9, 0x76, 0x4c, 0x02, 0x3c, 0x04, 0x4c, 0xf3, 0x26, 0xeb, 0x4a, 0x4f, 0xf1, 0x82, 0x81, 0xbb, 0x98, 0x0f, 0x0a, 0x71, 0x9d, 0x73, 0xa2, 0x9c, 0xeb, 0x86, 0x1c, 0xa8, 0x10, 0x02, 0x60, 0x27, 0xda, 0xb0, 0x13, 0x3f, 0xa3, 0xe5, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x55, 0x55, 0x80, 0xd3, 0x90, 0x0a, 0x39, 0xa9, 0x71, 0x83, 0x38, 0x98, 0x9c, 0x1b, 0xb5, 0x41, 0xe5, 0xaa, 0x99, 0x40, 0xff, 0xfa, 0x10, 0xc4, 0xad, 0x5c, 0xfc, 0x03, 0xc3, 0xb4, 0x1d, 0x24, 0x0c, 0xfb, 0x02, 0x40, 0xb0, 0x03, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0xa1, 0x84, 0x82, 0x97, 0x2d, 0x07, 0x12, 0x1c, 0x88, 0xc2, 0x73, 0xad, 0xea, 0x67, 0x83, 0x90, 0x3a, 0xd3, 0x30, 0x9e, 0x0b, 0x23, 0xbd, 0x44, 0x17, 0x31, 0x2e, 0x02, 0xc3, 0x04, 0x30, 0x0b, 0x39, 0x10, 0x10, 0x04, 0x05, 0xa0, 0x80, 0x69, 0x76, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x72, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 
0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x40, 0x10, 0x60, 0xff, 0xfa, 0x10, 0xc4, 0x65, 0xa9, 0xfa, 0x83, 0xc3, 0x94, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xa9, 0x83, 0xe4, 0xc1, 0xaf, 0x68, 0x40, 0x90, 0x86, 0x47, 0x08, 0x29, 0x86, 0xf8, 0x3c, 0x1f, 0xf0, 0x97, 0xc9, 0x8c, 0xf8, 0x05, 0x0f, 0x05, 0xe1, 0xec, 0x21, 0x52, 0x92, 0x10, 0x81, 0x28, 0xe5, 0x5b, 0x69, 0x8a, 0x80, 0x63, 0x08, 0x33, 0x6e, 0x43, 0x07, 0x41, 0x01, 0x39, 0x36, 0x5b, 0x23, 0xe4, 0x3d, 0x32, 0x70, 0x04, 0xd8, 0x6a, 0x8a, 0x52, 0x95, 0x43, 0xc4, 0xb6, 0x6a, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0xaa, 0x2f, 0xf0, 0x50, 0x18, 0x14, 0xa1, 0x8c, 0x92, 0x60, 0x9e, 0x34, 0x06, 0xd7, 0xf1, 0xee, 0x73, 0xee, 0x86, 0xff, 0xfa, 0x10, 0xc4, 0x29, 0xf0, 0xf5, 0x82, 0xc3, 0xb8, 0x1f, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0x7a, 0x83, 0xa5, 0x21, 0x9f, 0x6c, 0x41, 0x20, 0x32, 0xa1, 0xcd, 0xea, 0x49, 0x21, 0xa9, 0x12, 0xd4, 0x8b, 0x88, 0x62, 0x2a, 0x18, 0x81, 0x09, 0x63, 0x10, 0xe9, 0x06, 0x98, 0x18, 0xd9, 0x30, 0x5c, 0x26, 0xc2, 0x80, 0x58, 0x05, 0x25, 0x01, 0xe4, 0x42, 0x14, 0x06, 0x57, 0xf4, 0x1c, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xbb, 0xd1, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x78, 0x94, 0x5c, 0x40, 0x41, 0x87, 0x99, 0x81, 0xc0, 0x9d, 0x1a, 0xa2, 0x36, 0xd9, 0xfa, 0xac, 0xff, 0xfa, 0x10, 0xc4, 0xaa, 0xaf, 0xfb, 0x03, 0xc3, 0x80, 0x1d, 0x26, 0x0c, 0x7b, 0x62, 0x00, 0xaf, 0x83, 0xa4, 0x41, 0x9f, 0x60, 0x48, 0x64, 0xca, 0x98, 0x00, 0x09, 0x10, 0x95, 0xea, 0x58, 0x4c, 0xde, 0x7f, 0x90, 0xfa, 0xa4, 0x30, 0x41, 0x36, 0xa4, 0x30, 0x77, 0x0f, 0xf3, 0x94, 0x75, 0xa0, 0x3e, 0x94, 0x53, 0x2e, 0x10, 0x30, 0xb0, 0x52, 0xe1, 0xa1, 0x21, 0x1e, 0xc7, 0x88, 0x2c, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xf5, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x5a, 0xa8, 0xc4, 0x2d, 0x01, 0xee, 0x09, 0x86, 0x00, 0x30, 0x1e, 0xf9, 0x13, 0x89, 0x8b, 0x40, 0x0a, 0x18, 0x28, 0x80, 0xff, 0xfa, 0x10, 0xc4, 0x9f, 0x4b, 0xf4, 0x03, 0xc3, 0x9c, 0x1d, 
0x24, 0x0c, 0xfb, 0x82, 0x00, 0x71, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x19, 0xe0, 0x80, 0x81, 0x08, 0xc2, 0x38, 0x25, 0x2f, 0xaf, 0xa2, 0xe3, 0x02, 0x89, 0x35, 0xc2, 0x3f, 0xa3, 0x30, 0xd5, 0x0f, 0x43, 0xf5, 0x55, 0x7b, 0x31, 0x8f, 0x03, 0x60, 0x80, 0xb1, 0x09, 0x58, 0x8c, 0x63, 0xa2, 0x11, 0x1c, 0x5c, 0x54, 0xdb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x15, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x01, 0x00, 0x02, 0xff, 0xfa, 0x10, 0xc4, 0xc8, 0xad, 0xf4, 0x83, 0xc3, 0xd4, 0x1f, 0x22, 0x0d, 0x7b, 0x62, 0x00, 0x70, 0x03, 0xe5, 0x01, 0x9f, 0x68, 0x40, 0x42, 0x97, 0xf8, 0x00, 0x49, 0xab, 0x81, 0x83, 0x20, 0x99, 0x9c, 0x11, 0x35, 0xb1, 0xe2, 0x27, 0x98, 0xf8, 0x42, 0xb0, 0xba, 0x4a, 0x95, 0x31, 0x88, 0x8d, 0x67, 0x56, 0x4b, 0xe2, 0xfa, 0x88, 0x48, 0x01, 0x21, 0x98, 0x12, 0x8c, 0x01, 0xa0, 0xac, 0x27, 0x9d, 0x3d, 0x40, 0x82, 0xe9, 0xb8, 0xd0, 0x15, 0x12, 0x7c, 0x93, 0x4e, 0xa3, 0xe2, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x52, 0x92, 0x50, 0x43, 0xff, 0xfa, 0x10, 0xc4, 0xfe, 0x06, 0xfc, 0x03, 0xc3, 0xac, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xb1, 0x03, 0xa4, 0x81, 0x9f, 0x60, 0x48, 0xa5, 0xc2, 0x94, 0x0c, 0x07, 0x47, 0x0c, 0xcd, 0xf6, 0x69, 0x0d, 0x2e, 0x0c, 0xc2, 0x09, 0x0c, 0x04, 0x24, 0xbe, 0x68, 0x40, 0x90, 0x65, 0x0c, 0x92, 0x6e, 0xbe, 0x09, 0xa6, 0x08, 0x20, 0xd1, 0xd4, 0xc1, 0x68, 0x51, 0x8d, 0xe0, 0x9c, 0x84, 0xee, 0x16, 0x8c, 0x8c, 0x5c, 0xc2, 0x01, 0x8b, 0x90, 0x8a, 0x69, 0x10, 0x44, 0x77, 0x3f, 0xca, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0xaa, 0x6c, 0xe8, 0x50, 0x66, 0x82, 0x79, 0x48, 0x61, 0x66, 0x20, 0x67, 0xa2, 0xab, 0x68, 0x62, 0xa6, 0x08, 0xe6, 0xff, 0xfa, 0x10, 0xc4, 0x9d, 0xf6, 0xf4, 0x83, 0xc3, 0xcc, 0x1f, 0x20, 0x0d, 0x7b, 0x02, 0x40, 0x6f, 0x83, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0x09, 0x80, 0x28, 0x79, 0x68, 0x04, 0xa0, 0xc0, 0x08, 
0x42, 0x36, 0x4a, 0x4d, 0xa7, 0x48, 0xb0, 0x43, 0x50, 0x8b, 0xdc, 0x61, 0x80, 0x03, 0x67, 0xc1, 0x02, 0x14, 0x44, 0x5a, 0xa1, 0x81, 0x46, 0x74, 0x00, 0x50, 0x43, 0xa1, 0x2c, 0x09, 0xac, 0xdd, 0xd7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd3, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x41, 0xe2, 0xe2, 0x99, 0x22, 0x1c, 0xd2, 0x98, 0x49, 0x88, 0x11, 0xd8, 0x6a, 0xe2, 0x9c, 0x5c, 0xff, 0xfa, 0x10, 0xc4, 0xf9, 0xcb, 0xfc, 0x03, 0xc3, 0xac, 0x1f, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0xb2, 0x03, 0xa4, 0x41, 0xaf, 0x64, 0x48, 0x7c, 0x02, 0x36, 0x8f, 0x02, 0xd6, 0x5a, 0x47, 0x21, 0x34, 0x68, 0x4d, 0x6b, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa1, 0x21, 0x4b, 0xdc, 0x08, 0x34, 0xcd, 0xe0, 0xc1, 0x38, 0x56, 0x4d, 0xb1, 0x9e, 0x08, 0xe7, 0x97, 0x4c, 0x4c, 0x41, 0x72, 0xbb, 0xc9, 0xd2, 0x98, 0x44, 0x48, 0x33, 0xbd, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x47, 0xa1, 0x40, 0x62, 0xff, 0xfa, 0x10, 0xc4, 0xb6, 0xc2, 0xfb, 0x03, 0xc3, 0x8c, 0x1f, 0x28, 0x0c, 0xfb, 0x42, 0x00, 0xac, 0x83, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0xa6, 0x04, 0x54, 0xcc, 0x00, 0xc7, 0x18, 0xc8, 0x1e, 0x6f, 0xcc, 0x66, 0x08, 0xb0, 0x46, 0x16, 0x02, 0x4d, 0x44, 0x09, 0xa5, 0x09, 0x43, 0x34, 0x9f, 0x8b, 0xc4, 0x80, 0x00, 0x84, 0xa9, 0x84, 0x98, 0x60, 0x82, 0x35, 0xe6, 0xb9, 0x72, 0x7c, 0x71, 0x6f, 0xa6, 0x30, 0x44, 0x60, 0xe1, 0x65, 0xc7, 0x2f, 0x42, 0x15, 0x93, 0x2e, 0x48, 0x3a, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0x86, 0x12, 0x1c, 0xc2, 0x20, 0xdf, 0xd4, 0xc2, 0x00, 0x51, 0x8e, 0x7d, 0x1c, 0x64, 0xff, 0x17, 0x0c, 0xdc, 0x6c, 0xff, 0xfa, 0x10, 0xc4, 0xcc, 0x12, 0xfd, 0x03, 0xc3, 0xd0, 0x1f, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0xb3, 0x03, 0xe4, 0x01, 0x9f, 0x60, 0x48, 0xc4, 0x02, 0x8b, 0x28, 0x5c, 0xb2, 0xfe, 0x0f, 0x1d, 0xcf, 0xf1, 0xb6, 0x4e, 0x00, 0xe5, 0xce, 0xa2, 0x0c, 0x28, 0xc2, 0x2c, 0xf0, 0x3c, 0xd0, 0x8c, 0x4d, 0x40, 0x84, 0xc1, 0x08, 
0x01, 0x0e, 0x63, 0x06, 0x85, 0x09, 0x88, 0x36, 0x84, 0x15, 0x3f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x6a, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0x4c, 0x14, 0x07, 0x04, 0x2a, 0x76, 0x90, 0x61, 0x44, 0x10, 0xa7, 0x82, 0x06, 0x64, 0x73, 0x40, 0xff, 0xfa, 0x10, 0xc4, 0xc1, 0x9a, 0xf4, 0x80, 0x03, 0xe0, 0x1d, 0x26, 0xac, 0xfb, 0x62, 0x18, 0x6e, 0x83, 0xe4, 0xc1, 0xaf, 0x68, 0x40, 0x99, 0x12, 0x08, 0x20, 0x30, 0x84, 0xc4, 0xa5, 0x43, 0x92, 0x34, 0xd5, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe9, 0x40, 0x31, 0x6c, 0x4c, 0x72, 0x8d, 0xed, 0x4c, 0x20, 0x45, 0x08, 0xe7, 0xf5, 0xc4, 0x4f, 0xe5, 0x48, 0xcb, 0x02, 0x52, 0x69, 0x9e, 0xa5, 0xf2, 0x09, 0x47, 0x8e, 0xa7, 0xb9, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x4b, 0xd2, 0xf3, 0x05, 0xff, 0xfa, 0x10, 0xc4, 0xc1, 0x20, 0xf5, 0x03, 0xc3, 0xc4, 0x1f, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0x75, 0x83, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x11, 0x30, 0xfe, 0x30, 0x34, 0x17, 0x73, 0x52, 0x08, 0x16, 0x3e, 0x19, 0xcc, 0x29, 0x45, 0x65, 0x6c, 0xe9, 0xf2, 0x9a, 0xc4, 0xd2, 0x67, 0x7a, 0x5e, 0xe1, 0x81, 0xa5, 0x45, 0x02, 0x3e, 0xa6, 0x01, 0xc4, 0xb0, 0x65, 0x19, 0x95, 0xa6, 0x65, 0x64, 0x60, 0xe6, 0xe6, 0x08, 0x20, 0x0e, 0x04, 0x1a, 0x0f, 0x22, 0x08, 0x28, 0x97, 0x83, 0xf8, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x78, 0x52, 0xff, 0xfa, 0x10, 0xc4, 0x67, 0xaf, 0xfc, 0x03, 0xc3, 0xe4, 0x1f, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0xab, 0x03, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0xd0, 0x2a, 0x69, 0x95, 0xc1, 0x82, 0x78, 0xad, 0x9b, 0x5d, 0xbc, 0x41, 0xd1, 0xb0, 0x98, 0xf0, 0xc1, 0x83, 0x82, 0x97, 0x8d, 0x0d, 0x13, 0x0c, 0x89, 0x02, 0x73, 0xb1, 0x45, 0x37, 0x01, 0x10, 0xfb, 0xf3, 0x07, 0xc0, 0xe2, 0x39, 0xaa, 0x51, 0x73, 0xf2, 0x3a, 0x33, 0x30, 0xd3, 0x0c, 0x02, 0x2e, 0x02, 0x27, 
0xa5, 0x40, 0xf0, 0xdd, 0xba, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x57, 0x49, 0x32, 0x50, 0xb8, 0xbb, 0x06, 0x14, 0xe0, 0x36, 0x78, 0x9e, 0x1f, 0xc0, 0x73, 0xb8, 0x29, 0xff, 0xfa, 0x10, 0xc4, 0x5f, 0x77, 0xfb, 0x03, 0xc5, 0x78, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x71, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x10, 0x02, 0x11, 0x96, 0xed, 0x19, 0x10, 0xee, 0xe4, 0x5c, 0xdf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd2, 0x5e, 0x10, 0x50, 0x46, 0x70, 0xa7, 0x9c, 0x86, 0x16, 0x41, 0xfa, 0x7a, 0x38, 0xb4, 0x86, 0x2a, 0x00, 0x76, 0x0a, 0x08, 0xe1, 0x2c, 0x23, 0x22, 0x0c, 0x88, 0xc0, 0x36, 0x1a, 0x5d, 0x55, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x48, 0x62, 0xea, 0xff, 0xfa, 0x10, 0xc4, 0x25, 0xed, 0xf5, 0x03, 0xc3, 0xac, 0x1f, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0x7a, 0x03, 0xe4, 0x81, 0xaf, 0x6c, 0x40, 0x80, 0x4a, 0x34, 0x35, 0x30, 0x5b, 0x14, 0x43, 0x78, 0xd7, 0x1b, 0x3b, 0x55, 0x43, 0x18, 0x0d, 0x5d, 0x4e, 0xca, 0x9b, 0x26, 0x29, 0x11, 0xd4, 0xf7, 0x10, 0x89, 0xa2, 0x30, 0x42, 0x03, 0x20, 0xda, 0x06, 0x03, 0x43, 0x86, 0x67, 0x21, 0x34, 0x06, 0x89, 0x02, 0x17, 0x20, 0x15, 0x01, 0x57, 0xe9, 0x42, 0x90, 0x45, 0x0c, 0xb2, 0x65, 0x95, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x60, 0x48, 0x16, 0x30, 0x40, 0x13, 0x0c, 0xc0, 0x90, 0x60, 0x8d, 0x01, 0x21, 0x4c, 0xec, 0x6c, 0xff, 0xfa, 0x10, 0xc4, 0xea, 0xd7, 0xfc, 0x83, 0xc3, 0xb8, 0x1f, 0x24, 0x0c, 0xfb, 0x62, 0x00, 0xb2, 0x03, 0xa4, 0x81, 0x9f, 0x60, 0x48, 0x31, 0x67, 0x40, 0x01, 0x11, 0x41, 0x24, 0xd3, 0xe0, 0x9a, 0x7d, 0x1f, 0x3f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x98, 0x0d, 0x39, 0x00, 0xa3, 0x9a, 0x58, 0x18, 0x32, 0x89, 0xa9, 0xc0, 0xcb, 0x5f, 0x1e, 0x4a, 0xa9, 0x93, 0x0b, 0x18, 0x48, 0x29, 0x72, 0xd0, 0x71, 0x21, 0xc8, 0x8d, 0x27, 0x3a, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0x6f, 0x52, 0xec, 0x1c, 0x81, 0xd6, 0xa9, 0x84, 0xf0, 0x5d, 
0x1d, 0xe3, 0x22, 0x09, 0x89, 0x60, 0x17, 0x18, 0x20, 0xff, 0xfa, 0x10, 0xc4, 0xc5, 0xda, 0xfb, 0x03, 0xc5, 0x60, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x73, 0x83, 0xe4, 0x81, 0x9f, 0x6c, 0x40, 0x80, 0x59, 0xc8, 0x80, 0x80, 0x17, 0xbd, 0x04, 0x03, 0x4f, 0xb7, 0xaf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa1, 0x1f, 0x84, 0x81, 0x0e, 0x88, 0x07, 0x69, 0x86, 0x08, 0x2d, 0x1e, 0xfa, 0x92, 0xd9, 0x8b, 0x48, 0x01, 0x0d, 0x04, 0xe0, 0x1c, 0xe1, 0x70, 0x92, 0x00, 0x90, 0x6a, 0x5d, 0x5b, 0x75, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x41, 0xd2, 0xdb, 0x18, 0xc3, 0x9c, 0x76, 0x18, 0x45, 0x09, 0x19, 0xd4, 0xe3, 0x31, 0x1c, 0x28, 0xff, 0xfa, 0x10, 0xc4, 0xd3, 0xbe, 0xf5, 0x03, 0xc3, 0x7c, 0x1f, 0x26, 0x0c, 0xfb, 0x42, 0x00, 0x80, 0x03, 0xe4, 0x01, 0xaf, 0x6c, 0x40, 0x92, 0x63, 0x30, 0x21, 0x40, 0x01, 0x8c, 0x25, 0x4a, 0x19, 0x0f, 0x0b, 0x67, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7a, 0x43, 0x17, 0x34, 0x1a, 0x71, 0x91, 0xe9, 0x82, 0x18, 0xb4, 0x1b, 0x08, 0xbe, 0x69, 0xc5, 0xb0, 0x18, 0x60, 0x9a, 0xf2, 0x79, 0x54, 0xb9, 0x3a, 0x48, 0x91, 0xa7, 0xb8, 0x4c, 0x41, 0x4d, 0x45, 0x43, 0x11, 0x50, 0xc3, 0x88, 0x49, 0x71, 0x10, 0x10, 0x69, 0x83, 0x15, 0x93, 0x16, 0x02, 0x6c, 0x2c, 0x05, 0x80, 0x92, 0x50, 0xff, 0xfa, 0x10, 0xc4, 0x2a, 0x50, 0xf4, 0x03, 0xc3, 0xac, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x70, 0x83, 0xa4, 0xc1, 0x8f, 0x6c, 0x40, 0x1e, 0x4c, 0x21, 0x58, 0x25, 0x7f, 0x41, 0xcf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0xbd, 0x0b, 0x04, 0x32, 0x1c, 0x08, 0x88, 0xcb, 0xe9, 0x30, 0x6e, 0x22, 0x63, 0x8b, 0x6b, 0xaf, 0x3d, 0xaa, 0x43, 0x31, 0x35, 0x31, 0x41, 0x63, 0x04, 0x00, 0x07, 0x07, 0x84, 0x02, 0x0f, 0x43, 0xc2, 0xf9, 0x4c, 0x41, 0x87, 0xd3, 0x10, 0xc5, 0x1c, 0xe2, 0xb8, 0xc2, 0x24, 0x4a, 0xce, 0x9e, 0xda, 0x30, 0xe1, 0x65, 0x33, 0x1c, 0x05, 0x8c, 0x22, 0x09, 0x2c, 0xff, 0xfa, 0x10, 0xc4, 0x47, 0xb6, 0xfb, 0x83, 0xc5, 0x3c, 0x1f, 0x24, 
0x0c, 0xfb, 0x82, 0x00, 0x79, 0x03, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0xb9, 0x77, 0x10, 0x4e, 0x3c, 0x30, 0x9c, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0xe8, 0xe2, 0x56, 0x1c, 0x39, 0xe1, 0xd1, 0x1a, 0x60, 0x80, 0x01, 0x2c, 0x69, 0x0b, 0x07, 0x02, 0x60, 0xee, 0x00, 0x50, 0x60, 0x39, 0x80, 0x1e, 0x73, 0x20, 0x61, 0x18, 0x17, 0x2c, 0x50, 0xc0, 0xe0, 0xeb, 0xe9, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x48, 0xa2, 0xf8, 0x82, 0x96, 0x3a, 0x91, 0x30, 0x9f, 0x0a, 0xe3, 0xbd, 0x94, 0x09, 0x39, 0xff, 0xfa, 0x10, 0xc4, 0x11, 0xee, 0xf4, 0x82, 0xc3, 0x88, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x78, 0x03, 0xa4, 0x95, 0xaf, 0x6c, 0x41, 0x68, 0x80, 0x48, 0xf2, 0x1c, 0x17, 0x48, 0xf4, 0x96, 0x41, 0x91, 0xa0, 0x55, 0x37, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xee, 0x48, 0x52, 0xef, 0x18, 0x04, 0x9a, 0xb7, 0x18, 0x32, 0x09, 0x79, 0xc1, 0xf3, 0x50, 0x1e, 0x42, 0x69, 0x8f, 0x84, 0x2b, 0xd7, 0x49, 0x52, 0xa4, 0x31, 0x11, 0x8c, 0xea, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0xaa, 0x4b, 0xe2, 0xfa, 0x85, 0xc8, 0x01, 0x21, 0x98, 0x13, 0x8b, 0xf1, 0xa2, 0x2c, 0x23, 0x9d, 0x9d, 0x40, 0x42, 0xff, 0xfa, 0x10, 0xc4, 0xbb, 0x87, 0xfb, 0x03, 0xc5, 0x50, 0x1f, 0x26, 0x0d, 0x7b, 0x42, 0x00, 0x75, 0x83, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0xe9, 0xb8, 0xd0, 0x14, 0x92, 0x7c, 0x93, 0x4e, 0x9e, 0xe7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd2, 0x84, 0xa9, 0x21, 0x0c, 0x3a, 0x7c, 0x19, 0x80, 0xc0, 0x84, 0x81, 0xcc, 0xed, 0x6c, 0x10, 0xd4, 0x66, 0x4c, 0x30, 0xb0, 0xc1, 0x43, 0x4b, 0x7e, 0x2c, 0x08, 0x34, 0x06, 0x4c, 0xf1, 0x26, 0x5a, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0x7c, 0x13, 0x4c, 0x18, 0x51, 0x9f, 0xb9, 0x82, 0xc0, 0xa5, 0x1b, 0xb1, 0x39, 0xa9, 0xda, 0x2e, 0x19, 0x18, 0xc9, 0xff, 0xfa, 0x10, 0xc4, 0x4f, 0x97, 0xfc, 0x83, 0xc5, 0x98, 0x1f, 0x24, 0x0c, 0xfb, 0x02, 0x40, 0x76, 0x83, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0x84, 0x03, 0x17, 0x21, 0x05, 0xd2, 0x20, 0x88, 0xfe, 0x7f, 
0x9f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x43, 0xe6, 0x9a, 0x80, 0x23, 0xce, 0x67, 0x8c, 0x26, 0x43, 0x9c, 0xed, 0xa5, 0x49, 0x0c, 0x48, 0xc0, 0xf4, 0xc0, 0xf8, 0x03, 0x8e, 0x67, 0x04, 0x84, 0xba, 0x88, 0x0b, 0x1a, 0xc5, 0x8d, 0xd5, 0x4e, 0x91, 0xa0, 0x87, 0xa1, 0x1f, 0xb8, 0xc3, 0x00, 0x02, 0x4f, 0x82, 0x82, 0xd0, 0x98, 0xb5, 0x02, 0x02, 0x8c, 0xe8, 0x00, 0xa0, 0x87, 0x42, 0x58, 0xff, 0xfa, 0x10, 0xc4, 0x53, 0x7b, 0xfb, 0x03, 0xc5, 0x74, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x71, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x13, 0xa3, 0x75, 0x9f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0xa1, 0x07, 0x8b, 0x8a, 0x64, 0x84, 0x73, 0x46, 0x61, 0x26, 0x1f, 0x87, 0x64, 0x6b, 0x46, 0x71, 0x91, 0xd0, 0x18, 0xda, 0x3c, 0x0b, 0x59, 0x69, 0x1c, 0x84, 0xd1, 0xa1, 0x15, 0xaf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x85, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x48, 0x52, 0xeb, 0xff, 0xfa, 0x10, 0xc4, 0x20, 0x53, 0xfd, 0x83, 0xc5, 0x8c, 0x1d, 0x22, 0x0d, 0x7b, 0x22, 0x40, 0x82, 0x03, 0xe4, 0x01, 0xaf, 0x6c, 0x40, 0x01, 0x0d, 0x33, 0x78, 0x30, 0x50, 0x15, 0x73, 0x6e, 0xa7, 0x72, 0x3a, 0x25, 0xb3, 0x13, 0x0e, 0x58, 0x57, 0x79, 0x3a, 0x53, 0x08, 0x89, 0x06, 0x77, 0xa5, 0xee, 0x11, 0x09, 0x11, 0xa0, 0x0b, 0x63, 0x30, 0x0d, 0x20, 0xa3, 0x2a, 0x7b, 0x19, 0x32, 0xb9, 0x82, 0x02, 0xb0, 0xa0, 0x32, 0x48, 0x14, 0x03, 0x11, 0x00, 0x94, 0x3d, 0x41, 0xfc, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x64, 0x29, 0x38, 0xff, 0xfa, 0x10, 0xc4, 0x73, 0x57, 0xfe, 0x83, 0xc5, 0x8c, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x89, 0x83, 0xe3, 0xc1, 0xaf, 0xe4, 0x48, 0x23, 0x1e, 0x60, 0x20, 0x18, 0x19, 0x8b, 0xc9, 0xa7, 0x3c, 0x12, 0x1f, 0x1d, 0x66, 0x40, 0xe1, 0x81, 0x0a, 0x80, 0x34, 0x90, 0x4d, 0x72, 0x69, 0x13, 0x9d, 0x88, 0x29, 0x98, 0x00, 0x43, 
0x5e, 0xd3, 0x06, 0xc1, 0x1e, 0x38, 0xa0, 0x65, 0x13, 0xd6, 0x4e, 0x32, 0xb1, 0x33, 0x0a, 0x04, 0x2e, 0x42, 0x0f, 0xa4, 0x20, 0xf1, 0x7c, 0xff, 0x15, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x75, 0x95, 0x00, 0x71, 0x67, 0x10, 0xc6, 0x11, 0x60, 0xfa, 0x75, 0x12, 0x60, 0x07, 0x0b, 0x0d, 0x98, 0xff, 0xfa, 0x10, 0xc4, 0xc2, 0x0a, 0xfa, 0x83, 0xc5, 0x60, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x6e, 0x83, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0xd8, 0x04, 0x60, 0xd0, 0x09, 0x75, 0xd1, 0xb1, 0x2d, 0xd2, 0x00, 0xe7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd0, 0x8a, 0x21, 0x81, 0x03, 0xa1, 0x3d, 0xca, 0x30, 0xbc, 0x09, 0x13, 0xdc, 0xc3, 0x72, 0x31, 0x64, 0x01, 0x52, 0x60, 0x9a, 0x07, 0x90, 0x40, 0x02, 0xa0, 0x47, 0x44, 0x81, 0xb5, 0x74, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0xaa, 0x40, 0x31, 0x69, 0x4c, 0x72, 0x8e, 0x0c, 0xcc, 0x20, 0xc4, 0xf0, 0xe8, 0x6d, 0xbd, 0x4f, 0xf5, 0x48, 0xcb, 0xff, 0xfa, 0x10, 0xc4, 0xc4, 0xdd, 0xfc, 0x02, 0xc5, 0x3c, 0x1f, 0x26, 0x0d, 0x7b, 0x42, 0x00, 0x7f, 0x83, 0xa4, 0x55, 0xaf, 0x6c, 0x41, 0x02, 0x51, 0x69, 0xa2, 0xa5, 0xf2, 0x09, 0x47, 0x8e, 0xa7, 0xb9, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf4, 0xa0, 0xb8, 0x20, 0xd0, 0xc0, 0x43, 0xa6, 0x36, 0xd1, 0x82, 0x80, 0xf5, 0x9b, 0x72, 0x53, 0xb1, 0xd0, 0x46, 0x98, 0x89, 0x12, 0x1d, 0x58, 0xf2, 0x06, 0x97, 0xa8, 0x89, 0xc6, 0x42, 0xb5, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x4e, 0x61, 0xc0, 0x65, 0x83, 0x03, 0x34, 0xc0, 0x03, 0x90, 0x63, 0x45, 0x38, 0x66, 0x31, 0x08, 0x05, 0x25, 0x02, 0x03, 0xff, 0xfa, 0x10, 0xc4, 0x7a, 0x73, 0xfc, 0x83, 0xc5, 0x74, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x7a, 0x03, 0xe4, 0x81, 0x9f, 0x60, 0x48, 0x21, 0xa2, 0x17, 0xa5, 0x01, 0x43, 0x3c, 0x9d, 0xdf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x77, 0xa0, 0x04, 0x00, 0x0f, 0x0a, 0x64, 0x05, 0x4d, 0x32, 0xba, 0x30, 0x4d, 0x15, 0xf3, 0x69, 0x77, 0x98, 0x39, 0xf6, 0x33, 0x1c, 0x1a, 0x30, 0x70, 0x52, 0xf1, 0xa1, 0xa2, 0x3f, 
0x91, 0x20, 0x4e, 0x2c, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x81, 0x13, 0x0c, 0xc6, 0xff, 0xfa, 0x10, 0xc4, 0x71, 0x43, 0xff, 0x83, 0xc5, 0x78, 0x1d, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0xaf, 0x03, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0x08, 0xe5, 0x9c, 0xc2, 0x40, 0x43, 0x0e, 0xbc, 0x17, 0xc4, 0xc4, 0x58, 0x12, 0x8c, 0x0f, 0x40, 0x54, 0xe4, 0x80, 0x20, 0x0b, 0xbe, 0x5e, 0xc1, 0xb5, 0xdb, 0x52, 0xe2, 0x20, 0x48, 0xa3, 0x0d, 0xb0, 0xc3, 0x04, 0x0c, 0x4f, 0x81, 0xc5, 0xc0, 0xc5, 0xac, 0x01, 0x40, 0x41, 0x40, 0x77, 0x18, 0x34, 0x62, 0x80, 0x21, 0x1b, 0x38, 0xa9, 0xba, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0xaa, 0x2f, 0x08, 0x28, 0x23, 0x38, 0x33, 0xce, 0x23, 0x0b, 0x20, 0xf5, 0x3d, 0x2e, 0x57, 0xa3, 0x15, 0x20, 0x37, 0xff, 0xfa, 0x10, 0xc4, 0x9f, 0x65, 0xf4, 0x83, 0xc3, 0x8c, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x7a, 0x83, 0xe4, 0x41, 0xaf, 0x6c, 0x40, 0x07, 0x04, 0x70, 0x96, 0x11, 0x91, 0x06, 0x44, 0x60, 0x1b, 0x0d, 0x2e, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x84, 0x25, 0x02, 0x01, 0x30, 0xd0, 0x36, 0x3f, 0x30, 0x6d, 0x17, 0x23, 0x8c, 0x38, 0x05, 0x3d, 0x76, 0x43, 0x22, 0x11, 0x4e, 0xa6, 0xb2, 0x91, 0x48, 0x3a, 0x3c, 0x95, 0x3d, 0xca, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x01, 0x00, 0x02, 0xff, 0xfa, 0x10, 0xc4, 0x6c, 0x2d, 0xf4, 0x83, 0xc3, 0xa0, 0x1f, 0x26, 0x0d, 0x7b, 0x42, 0x00, 0x75, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x5e, 0x97, 0xd8, 0x44, 0x44, 0x1b, 0x08, 0xc0, 0x60, 0x61, 0xcc, 0xd4, 0xa1, 0xa8, 0xdf, 0x6d, 0x0b, 0x98, 0x46, 0x75, 0xfe, 0x9e, 0x2a, 0x48, 0xa2, 0x8d, 0x12, 0xcc, 0x09, 0x02, 0xc6, 0x08, 0x02, 0x61, 0x98, 0x10, 0x0c, 0x11, 0x9e, 0xa4, 0x2d, 0x9d, 0x2d, 0x86, 0x24, 0xe8, 0x00, 0x22, 0x28, 0x25, 0xda, 0x7c, 0x13, 0x4f, 0xa3, 0xe5, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 
0x55, 0x55, 0x55, 0x55, 0x78, 0xd1, 0xb0, 0x02, 0xff, 0xfa, 0x10, 0xc4, 0xd8, 0x3f, 0xfb, 0x83, 0xc5, 0x5c, 0x1b, 0x26, 0x0c, 0xfb, 0x82, 0x00, 0x78, 0x03, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0x69, 0xb1, 0xe1, 0x83, 0xa8, 0xb4, 0x9c, 0x8d, 0xbe, 0x51, 0xf2, 0xb3, 0x99, 0x80, 0xe1, 0x87, 0x85, 0x96, 0x4c, 0xb8, 0x88, 0x07, 0x1e, 0x44, 0x9c, 0xeb, 0xca, 0x9f, 0x60, 0xe4, 0x0e, 0x95, 0xcc, 0x27, 0x03, 0x08, 0xee, 0xe1, 0x1a, 0x4c, 0x4b, 0x00, 0xc8, 0xc1, 0x04, 0x02, 0xce, 0x44, 0x04, 0x00, 0xbd, 0xe5, 0xf4, 0x1a, 0x7d, 0x3a, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0x47, 0xe1, 0x20, 0x44, 0xa2, 0x05, 0xda, 0x61, 0x82, 0x09, 0xc7, 0xbf, 0x64, 0x3a, 0x02, 0xff, 0xfa, 0x10, 0xc4, 0x4a, 0x42, 0xfc, 0x02, 0xc5, 0x64, 0x1f, 0x24, 0x0c, 0xfb, 0x62, 0x00, 0x7a, 0x03, 0xa4, 0x55, 0xaf, 0x6c, 0x41, 0x2d, 0x21, 0x60, 0x9c, 0x03, 0xac, 0x2e, 0x12, 0x00, 0x12, 0x0d, 0x5d, 0x56, 0xdf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd2, 0x98, 0xa8, 0x06, 0x30, 0x83, 0x37, 0x62, 0x30, 0x76, 0x0f, 0x53, 0x96, 0x25, 0x7c, 0x3e, 0x83, 0xb0, 0x12, 0x92, 0x2c, 0x34, 0x45, 0x29, 0x4a, 0xa1, 0xe2, 0x1b, 0x35, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x48, 0x62, 0xe6, 0x82, 0x4e, 0x32, 0x3d, 0x30, 0x45, 0x16, 0x43, 0x63, 0x77, 0xbf, 0x38, 0xf6, 0xff, 0xfa, 0x10, 0xc4, 0x08, 0xfd, 0xfd, 0x00, 0x05, 0x78, 0x1d, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0x80, 0x83, 0xa4, 0xd5, 0x9f, 0x6c, 0x43, 0x03, 0x0c, 0x13, 0x5e, 0x4e, 0xaa, 0x97, 0x27, 0x49, 0x12, 0x34, 0xf7, 0x10, 0x4a, 0x2a, 0x20, 0x71, 0x08, 0xee, 0x21, 0x11, 0x06, 0x98, 0x70, 0x59, 0x20, 0x50, 0x26, 0xc4, 0x40, 0x58, 0x09, 0x25, 0x01, 0xe4, 0xc2, 0x14, 0x04, 0x57, 0xf4, 0x1d, 0xdf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa1, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x5e, 0x43, 0xa0, 0xc1, 0xff, 0xfa, 0x10, 0xc4, 0xdb, 0xed, 0xf5, 0x03, 0xc3, 0xc0, 0x1d, 0x24, 0x0c, 
0xfb, 0x02, 0x40, 0x76, 0x03, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0x86, 0x8c, 0x7d, 0x93, 0x05, 0xa1, 0xe4, 0x37, 0x6e, 0xa5, 0x13, 0xb6, 0x8e, 0x32, 0x72, 0xd3, 0x0c, 0x0e, 0x03, 0x00, 0x06, 0x03, 0x97, 0x40, 0x89, 0xbe, 0x47, 0xc8, 0xda, 0xa4, 0x30, 0x43, 0x36, 0xa6, 0x30, 0x75, 0x10, 0x93, 0x91, 0x95, 0xd6, 0x3e, 0x54, 0x73, 0x2e, 0x10, 0x30, 0xb0, 0x52, 0xd9, 0xa2, 0xa2, 0x3d, 0x8f, 0x12, 0x58, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x55, 0x55, 0x64, 0xaa, 0x16, 0x2c, 0xa1, 0xda, 0x09, 0x85, 0x20, 0x33, 0x1e, 0x17, 0x14, 0x49, 0x89, 0xc0, 0x0a, 0x18, 0xff, 0xfa, 0x10, 0xc4, 0xef, 0x2d, 0xfc, 0x03, 0xc5, 0x90, 0x1f, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0x74, 0x03, 0xe4, 0x81, 0x9f, 0x6c, 0x40, 0x21, 0x80, 0x09, 0xc0, 0x40, 0xc0, 0x21, 0x3d, 0x0e, 0x09, 0xf7, 0x5d, 0x22, 0x8b, 0xe2, 0x0a, 0x58, 0xea, 0x3c, 0xc2, 0x7c, 0x29, 0x0e, 0xf9, 0xcf, 0x7c, 0xe5, 0xe1, 0xc1, 0x63, 0xe8, 0x60, 0x5d, 0x23, 0xd2, 0x59, 0x06, 0x46, 0x80, 0xd4, 0xdf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xbb, 0xd2, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x42, 0x49, 0x67, 0x8c, 0xff, 0xfa, 0x10, 0xc4, 0x20, 0x2e, 0xf4, 0x80, 0x03, 0xcc, 0x1d, 0x26, 0xad, 0x7b, 0x42, 0x18, 0x71, 0x03, 0xe4, 0xc1, 0xaf, 0x68, 0x40, 0x43, 0x4d, 0xde, 0x0c, 0x1e, 0x85, 0x5c, 0xe6, 0x21, 0xdd, 0x8f, 0xb1, 0x6c, 0xca, 0x43, 0x12, 0xe5, 0xb9, 0x24, 0x2a, 0x12, 0x87, 0x90, 0x67, 0x7a, 0x02, 0x00, 0x04, 0xbe, 0x2f, 0x28, 0x5c, 0x80, 0x02, 0x09, 0x81, 0x48, 0xbe, 0x1a, 0x39, 0xc1, 0xf9, 0xdf, 0xd0, 0x00, 0x2e, 0x9d, 0x8d, 0x01, 0x49, 0x27, 0xc9, 0x34, 0xea, 0x35, 0x6a, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0x4a, 0x92, 0x10, 0xc3, 0xa7, 0xc2, 0x98, 0x0c, 0x06, 0xc8, 0x24, 0xcd, 0xae, 0xc2, 0x8d, 0x2e, 0x64, 0xc2, 0x8b, 0xff, 0xfa, 0x10, 0xc4, 0x95, 0xc6, 0xf5, 0x03, 0xc3, 0xb4, 0x1f, 0x24, 0x0c, 0xfb, 0x62, 0x00, 0x78, 0x03, 0xa4, 0x81, 0x9f, 0x60, 0x48, 0x0c, 0x0c, 0x34, 0xb7, 0xe3, 0x40, 0x83, 0xc0, 0x65, 0x0f, 0x10, 
0x77, 0x5e, 0xa4, 0xf7, 0x06, 0x14, 0x67, 0xee, 0x60, 0xac, 0x29, 0xc6, 0xe8, 0x0e, 0x8e, 0x76, 0x0b, 0x86, 0x42, 0x32, 0x60, 0xc0, 0xc5, 0xe0, 0x41, 0x74, 0x88, 0x22, 0x3f, 0x9f, 0xe7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd1, 0x83, 0xd5, 0xc0, 0x28, 0xe7, 0xec, 0x18, 0x40, 0x86, 0x69, 0xcf, 0x8a, 0x50, 0x1f, 0xd9, 0xa9, 0x9a, 0x05, 0x18, 0x68, 0x19, 0x70, 0xd1, 0x31, 0x1b, 0xff, 0xfa, 0x10, 0xc4, 0x31, 0x2d, 0xfa, 0x83, 0xc5, 0x5c, 0x1f, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0x6e, 0x03, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0xc7, 0x84, 0x2c, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf4, 0x21, 0xa8, 0xd0, 0xa2, 0x26, 0x24, 0xe1, 0x81, 0xc0, 0xe0, 0x9a, 0x00, 0x80, 0x31, 0x0b, 0x06, 0x88, 0x0a, 0x03, 0x0c, 0xf8, 0x40, 0x41, 0x86, 0x4a, 0x54, 0x43, 0x75, 0xba, 0xcf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xbf, 0xd0, 0x2c, 0xd0, 0x08, 0x93, 0x54, 0xc3, 0xeb, 0x53, 0x0c, 0x91, 0x46, 0x3e, 0x93, 0x72, 0x93, 0x17, 0x90, 0x52, 0xff, 0xfa, 0x10, 0xc4, 0xd3, 0x5c, 0xfb, 0x03, 0xc3, 0x88, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xae, 0x03, 0xe4, 0x41, 0xaf, 0x64, 0x48, 0x30, 0x50, 0x00, 0x51, 0x6f, 0xa1, 0xcc, 0x46, 0x00, 0xb1, 0x45, 0xf5, 0x2e, 0xe2, 0x42, 0x97, 0x58, 0x08, 0x69, 0x9f, 0xb1, 0x82, 0x90, 0xa9, 0x9b, 0x86, 0x3b, 0x01, 0xd3, 0x2d, 0x98, 0xa8, 0x72, 0xc2, 0xbb, 0xca, 0x6a, 0x98, 0x44, 0x47, 0xb3, 0xbd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x2a, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x01, 0x00, 0x01, 0x6a, 0x82, 0xc3, 0x44, 0x2a, 0x01, 0xff, 0xfa, 0x10, 0xc4, 0xed, 0xed, 0xf5, 0x03, 0xc3, 0xf0, 0x1f, 0x22, 0x0d, 0x7b, 0x62, 0x00, 0x72, 0x03, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0x9c, 0xcc, 0x06, 0x49, 0x50, 0xcd, 0x97, 0x28, 0x4c, 0xfe, 0xc0, 0x60, 0xd4, 0x18, 0x14, 0x5f, 0x82, 0x80, 0xe1, 0xe0, 0x92, 0x89, 0x68, 0x3d, 0x50, 0x10, 0x00, 0x37, 0x15, 0x64, 0x15, 0x2c, 
0x09, 0xa1, 0x81, 0x38, 0xa4, 0x9a, 0x11, 0xb9, 0x61, 0xdb, 0xb6, 0x62, 0x8a, 0x01, 0x40, 0xa6, 0x1a, 0x6a, 0x2b, 0x19, 0x33, 0xca, 0x25, 0xaa, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0xaa, 0x88, 0x29, 0x98, 0x10, 0x43, 0x5e, 0xd3, 0x06, 0xa1, 0x24, 0x38, 0x84, 0x66, 0x93, 0xd4, 0x50, 0x32, 0xb1, 0xff, 0xfa, 0x10, 0xc4, 0x21, 0xdb, 0xfc, 0x03, 0xc3, 0xb8, 0x1d, 0x24, 0x0c, 0xfb, 0x02, 0x40, 0xb0, 0x83, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0x53, 0x0a, 0x04, 0x2e, 0x42, 0x0f, 0xa4, 0x20, 0xf1, 0x7c, 0xff, 0x1b, 0x64, 0xe0, 0x0c, 0x5c, 0xea, 0x30, 0xc2, 0x84, 0x26, 0xce, 0xfe, 0x4e, 0x70, 0xc4, 0xcc, 0x09, 0x4c, 0x10, 0x80, 0x18, 0xe6, 0x50, 0x68, 0x50, 0x10, 0x82, 0x31, 0xa0, 0x54, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0xd5, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x45, 0x10, 0xe0, 0x82, 0x21, 0x3d, 0xc8, 0x30, 0xbe, 0x08, 0x73, 0xdd, 0x63, 0x3c, 0x31, 0x64, 0x01, 0xff, 0xfa, 0x10, 0xc4, 0x57, 0x1e, 0xf5, 0x00, 0x03, 0x9c, 0x1f, 0x24, 0x0c, 0xfb, 0x62, 0x00, 0x79, 0x83, 0xa4, 0xd5, 0xaf, 0x68, 0x43, 0x12, 0xb0, 0x4f, 0x05, 0x90, 0x40, 0x02, 0xc0, 0x47, 0x44, 0x81, 0xb5, 0x74, 0x80, 0x62, 0xd2, 0x98, 0xe5, 0x1c, 0x18, 0x98, 0x42, 0x89, 0xc1, 0xd1, 0xbb, 0x6b, 0x9f, 0xfa, 0x81, 0x97, 0x03, 0xa2, 0xd3, 0x19, 0x4b, 0xe4, 0x12, 0x8f, 0x1b, 0x4f, 0x73, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe9, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x55, 0x55, 0x2f, 0xe8, 0x54, 0x10, 0x20, 0xa9, 0x89, 0x94, 0x60, 0x88, 0x35, 0x26, 0xc7, 0x52, 0x40, 0x71, 0xce, 0xe6, 0xff, 0xfa, 0x10, 0xc4, 0x0f, 0x03, 0xfc, 0x83, 0xc3, 0xe8, 0x1f, 0x22, 0x0d, 0x7b, 0x62, 0x00, 0xae, 0x83, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x1a, 0x34, 0x9d, 0xad, 0x49, 0x24, 0x51, 0xc8, 0x89, 0x66, 0x43, 0xd4, 0xe6, 0x1c, 0x06, 0x55, 0x30, 0x3b, 0x4c, 0x10, 0x39, 0x06, 0x2a, 0x13, 0x8a, 0x62, 0x10, 0x80, 0x92, 0x50, 0x40, 0x32, 0x1a, 0x22, 0x5a, 0x50, 0x14, 0x33, 0xc9, 0xdd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 
0xf7, 0x7a, 0x15, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x67, 0xa8, 0xc8, 0x0d, 0x26, 0x63, 0xa3, 0x18, 0x2c, 0x8c, 0xb9, 0xba, 0xdc, 0x5e, 0x1d, 0xab, 0xa9, 0xff, 0xfa, 0x10, 0xc4, 0x4d, 0x99, 0xff, 0x83, 0xc5, 0x64, 0x1d, 0x26, 0x0c, 0x7b, 0x62, 0x00, 0xb3, 0x03, 0xa4, 0x01, 0xaf, 0xe0, 0x48, 0x92, 0x10, 0x18, 0x58, 0x59, 0x68, 0xcb, 0xa0, 0x82, 0xe4, 0x4a, 0x92, 0x0e, 0xc5, 0x14, 0xdc, 0x04, 0x01, 0xbb, 0x09, 0x83, 0xd0, 0x77, 0x1c, 0xc1, 0x2a, 0x89, 0xf7, 0x1f, 0x19, 0x88, 0x79, 0x86, 0x02, 0x17, 0x01, 0x13, 0xd1, 0xe0, 0x78, 0x7e, 0xdf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xef, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x55, 0x52, 0xe2, 0x60, 0x47, 0xa3, 0x07, 0xd8, 0x61, 0x82, 0x07, 0xe7, 0xc0, 0x63, 0x70, 0x62, 0xd6, 0x01, 0x26, 0x0a, 0xff, 0xfa, 0x10, 0xc4, 0x5e, 0xd4, 0xfc, 0x03, 0xc3, 0xf4, 0x1f, 0x20, 0x0c, 0xfb, 0x02, 0x40, 0xaa, 0x83, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x00, 0x00, 0x77, 0x28, 0x34, 0x62, 0x80, 0x21, 0x1b, 0x10, 0xa8, 0x80, 0x10, 0x70, 0x46, 0x80, 0x67, 0x9c, 0x06, 0x16, 0x81, 0xda, 0x7a, 0x84, 0xa9, 0xc6, 0x2a, 0x80, 0x6a, 0x0e, 0x09, 0x20, 0xec, 0x23, 0x22, 0x1c, 0x44, 0x62, 0x1a, 0xed, 0x2e, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x85, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x48, 0x62, 0xea, 0x80, 0x4a, 0x34, 0xb3, 0x30, 0x5f, 0x13, 0xe3, 0x7c, 0xd6, 0xf3, 0x3b, 0xd5, 0x23, 0xff, 0xfa, 0x10, 0xc4, 0x9b, 0xae, 0xf7, 0x80, 0x04, 0x30, 0x1d, 0x20, 0xad, 0x7b, 0x62, 0x08, 0x7b, 0x03, 0xa5, 0x15, 0x9f, 0x68, 0x43, 0x1a, 0x0b, 0x57, 0x4e, 0x0a, 0x83, 0x26, 0x29, 0x11, 0xd4, 0xf3, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf4, 0x80, 0x80, 0x01, 0x4c, 0x51, 0x08, 0x44, 0x68, 0x57, 0x63, 0x01, 0x41, 0x53, 0x33, 0x14, 0x75, 0xa3, 0x71, 0x6c, 0x42, 0x49, 0x43, 0xd9, 0xba, 0xdd, 0x57, 0xc5, 0x0f, 0x68, 0x96, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x46, 0xe2, 0xa0, 0xb1, 0x12, 0x63, 0x07, 0xc4, 0xc0, 0xe8, 0x93, 0x0d, 0x5a, 
0xb1, 0xbc, 0xe0, 0x2c, 0x0c, 0x64, 0xe4, 0xff, 0xfa, 0x10, 0xc4, 0x7b, 0x99, 0xfc, 0x03, 0xc3, 0xa8, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xb0, 0x83, 0xa4, 0x81, 0x9f, 0x60, 0x48, 0xc2, 0x84, 0x80, 0xc1, 0x01, 0xc1, 0xe2, 0x41, 0x04, 0xd2, 0x70, 0xbe, 0x41, 0x6a, 0x50, 0x09, 0x24, 0xd2, 0xc8, 0xc1, 0x84, 0x4f, 0x4d, 0xf7, 0x5b, 0xa0, 0xf0, 0xd6, 0x4c, 0x98, 0x58, 0xc2, 0x41, 0x4b, 0x96, 0x8a, 0x09, 0x16, 0x44, 0x69, 0x39, 0xdf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa1, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x6f, 0x51, 0x1c, 0x04, 0xe1, 0xe6, 0xf9, 0x85, 0xb0, 0x75, 0x1e, 0xa5, 0x29, 0xb9, 0x8a, 0xc0, 0x1f, 0x18, 0x27, 0x80, 0xff, 0xfa, 0x10, 0xc4, 0x01, 0x7b, 0xfb, 0x83, 0xc3, 0xbc, 0x1f, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0xac, 0x83, 0xe4, 0x81, 0x9f, 0x6c, 0x40, 0x99, 0xe1, 0x40, 0x02, 0x05, 0x42, 0x20, 0x10, 0xb7, 0xe9, 0xf4, 0x82, 0xc1, 0xc2, 0x43, 0x99, 0x02, 0xc2, 0x98, 0x6f, 0x82, 0x71, 0xff, 0xa9, 0x10, 0x81, 0x8d, 0x08, 0x38, 0x2f, 0x0f, 0x63, 0x06, 0x9c, 0x80, 0x84, 0x89, 0x53, 0x6a, 0xcd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0xfd, 0x0a, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xff, 0xfa, 0x10, 0xc4, 0x49, 0x16, 0xfc, 0x03, 0xc3, 0xb0, 0x1f, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0xaf, 0x03, 0xa4, 0x81, 0xaf, 0x6c, 0x40, 0x4c, 0x54, 0x03, 0x18, 0x41, 0x9b, 0xb0, 0x18, 0x3c, 0x07, 0x89, 0xcb, 0xc2, 0xb0, 0x1f, 0x51, 0xc8, 0x09, 0x59, 0x16, 0x18, 0x42, 0x94, 0xa5, 0x50, 0xf0, 0xed, 0x94, 0x86, 0x2e, 0x68, 0x24, 0xe3, 0x2b, 0xb3, 0x04, 0x91, 0x62, 0x36, 0x5d, 0x7b, 0x13, 0x95, 0x5e, 0x30, 0xe1, 0x15, 0xa4, 0xea, 0xa9, 0x72, 0x74, 0x91, 0x23, 0x4f, 0x72, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x41, 0x28, 0xa8, 0x81, 0x84, 0x23, 0xff, 0xfa, 0x10, 0xc4, 0xab, 0x11, 0xfb, 0x83, 0xc3, 0xc4, 0x1f, 0x24, 0x0d, 0x7b, 
0x62, 0x00, 0xab, 0x83, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0x38, 0x82, 0x84, 0x1a, 0x62, 0x99, 0x64, 0x86, 0x05, 0x32, 0x28, 0x58, 0x09, 0x09, 0x42, 0xb2, 0x60, 0x42, 0x80, 0x22, 0xb3, 0xe8, 0x3b, 0xa0, 0x20, 0x00, 0x6e, 0x49, 0x2e, 0x20, 0x38, 0xc5, 0xfc, 0xc0, 0xf0, 0x5c, 0x0d, 0x62, 0x9f, 0xe4, 0xe0, 0x9a, 0x0c, 0x54, 0x6c, 0xc0, 0x81, 0x10, 0x00, 0x8e, 0x69, 0x58, 0x4c, 0x97, 0x3e, 0xad, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x8d, 0xaa, 0x40, 0x08, 0x66, 0xc4, 0xc6, 0x0e, 0x62, 0x1a, 0x72, 0x02, 0xbe, 0x07, 0xc2, 0xff, 0xfa, 0x10, 0xc4, 0x5b, 0xe6, 0xfc, 0x83, 0xc3, 0xbc, 0x1d, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0xb2, 0x83, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0x92, 0x65, 0x82, 0x06, 0x16, 0x0a, 0x5b, 0x34, 0x54, 0x47, 0xb1, 0xe2, 0x8b, 0x0d, 0x35, 0x5a, 0xc5, 0x8c, 0x01, 0x8e, 0x61, 0x18, 0x0b, 0xc7, 0x55, 0x04, 0xc6, 0x70, 0xe0, 0x80, 0x08, 0xe6, 0x02, 0x0c, 0x17, 0x51, 0x23, 0xd1, 0xd1, 0x5f, 0xe7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd2, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0xaa, 0x41, 0xe0, 0x80, 0x40, 0xd1, 0x1e, 0x89, 0x98, 0x5d, 0x85, 0xb1, 0xec, 0xfa, 0x1d, 0x98, 0xaf, 0x81, 0x10, 0xff, 0xfa, 0x10, 0xc4, 0xfc, 0xa6, 0xfb, 0x80, 0x05, 0x64, 0x1d, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x77, 0x83, 0xa5, 0x15, 0x9f, 0x68, 0x43, 0xb0, 0x4a, 0x84, 0x8c, 0xbe, 0xe8, 0x76, 0x18, 0x18, 0xb3, 0xa9, 0x9b, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa5, 0x00, 0xa5, 0x9e, 0x31, 0x0d, 0x37, 0x76, 0x30, 0x7a, 0x15, 0x33, 0x9a, 0x67, 0x5c, 0x3e, 0xe5, 0x93, 0x29, 0x0c, 0x4c, 0xd6, 0x64, 0x90, 0xa8, 0x4a, 0x1e, 0x3d, 0x9d, 0xed, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x2e, 0x10, 0x50, 0x38, 0xff, 0xfa, 0x10, 0xc4, 0xe1, 0x1c, 0xfd, 0x03, 0xc4, 0x04, 0x1f, 0x20, 0x0d, 0x7b, 0x62, 0x00, 0xae, 0x83, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x24, 0xf1, 0x88, 0xba, 0x60, 0x86, 0x3e, 0x86, 0xc1, 0x35, 0x5a, 0x71, 
0x32, 0x06, 0x10, 0x4a, 0x48, 0x00, 0xb7, 0x10, 0x88, 0xbf, 0x24, 0xce, 0xd2, 0x2e, 0x29, 0x49, 0x08, 0x22, 0x13, 0x22, 0x0a, 0x46, 0x02, 0x23, 0x8e, 0x65, 0x77, 0x36, 0x86, 0x6f, 0x06, 0x60, 0x44, 0x80, 0x10, 0x74, 0x17, 0x42, 0x04, 0x83, 0x28, 0x64, 0x93, 0x75, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x7a, 0x93, 0xdc, 0x18, 0x51, 0x9f, 0xb9, 0x82, 0xa0, 0xa9, 0x1b, 0x8f, 0x3a, 0xc9, 0xd7, 0x2f, 0x19, 0xff, 0xfa, 0x10, 0xc4, 0xdc, 0xce, 0xfd, 0x03, 0xc3, 0xdc, 0x1f, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0xb2, 0x03, 0xa4, 0x01, 0xaf, 0x60, 0x48, 0x08, 0xc9, 0x83, 0x03, 0x17, 0x81, 0x05, 0xd2, 0x20, 0x88, 0xfe, 0x7f, 0x90, 0xb5, 0x70, 0x06, 0x39, 0xfb, 0x46, 0x10, 0x21, 0xa6, 0x73, 0xbe, 0x98, 0x07, 0xf2, 0x6e, 0x66, 0x41, 0x46, 0x1a, 0x06, 0x5c, 0x34, 0x4c, 0x46, 0xf1, 0xe1, 0x4b, 0x1f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0x0a, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0xaa, 0x47, 0xd1, 0xa1, 0x44, 0xcc, 0x49, 0xc3, 0x89, 0x03, 0x80, 0x68, 0x02, 0x00, 0x36, 0x10, 0x1a, 0x20, 0x18, 0xff, 0xfa, 0x10, 0xc4, 0x85, 0x3b, 0xf3, 0x03, 0xc3, 0x70, 0x1d, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x71, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x0c, 0x33, 0xe1, 0x81, 0x06, 0x15, 0x29, 0x22, 0x1b, 0x1d, 0xd6, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0xfe, 0x84, 0x56, 0x2e, 0x29, 0x92, 0x11, 0xcd, 0x09, 0x84, 0xa8, 0x77, 0x1d, 0xa5, 0xaa, 0xd9, 0xc7, 0xc6, 0xc0, 0xa3, 0x88, 0xd0, 0x2d, 0x43, 0xd2, 0xd9, 0x09, 0xa3, 0x41, 0xeb, 0x55, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x42, 0x48, 0x34, 0x01, 0xff, 0xfa, 0x10, 0xc4, 0x1f, 0x38, 0xf6, 0x00, 0x03, 0xac, 0x1f, 0x22, 0x0d, 0x7b, 0x62, 0x00, 0x81, 0x03, 0xa4, 0xd5, 0x9f, 0x6c, 0x43, 0x80, 0x22, 0x65, 0x61, 0x18, 0x32, 0x0c, 0x19, 0xc1, 0x94, 0x2b, 0x1e, 0x33, 0x49, 0x8e, 0x8a, 0x2a, 0x56, 0xbc, 0x8f, 0xa8, 0x3c, 0x44, 0x9b, 0x3b, 0xd0, 0x10, 0x00, 0x27, 0x32, 0x08, 0x45, 0x49, 
0x08, 0x22, 0x98, 0x05, 0x8c, 0x51, 0x93, 0xec, 0x40, 0x9a, 0x7d, 0xc3, 0x06, 0xcb, 0xe8, 0xbd, 0x14, 0x29, 0x38, 0x4a, 0x29, 0x49, 0xd5, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x01, 0x00, 0x03, 0x21, 0x4a, 0xff, 0xfa, 0x10, 0xc4, 0x4c, 0xca, 0xfa, 0x83, 0xc3, 0x8c, 0x1d, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xaa, 0x03, 0xa4, 0xc1, 0x9f, 0x70, 0x40, 0x41, 0x52, 0x20, 0x08, 0x06, 0x05, 0xa2, 0xfa, 0x69, 0x1b, 0x07, 0x07, 0x9f, 0x69, 0x8e, 0x38, 0x60, 0x42, 0xa0, 0xba, 0x48, 0x26, 0xb9, 0x34, 0xc9, 0xc5, 0xa0, 0x04, 0x7b, 0x30, 0x0a, 0x37, 0x79, 0x30, 0x7c, 0x15, 0x43, 0x99, 0x87, 0x6b, 0x3f, 0x26, 0x03, 0x33, 0x1b, 0x30, 0xe0, 0xa2, 0xca, 0x17, 0x8d, 0x09, 0x03, 0xc8, 0x73, 0xfc, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x55, 0x55, 0x6d, 0x93, 0x30, 0x21, 0x73, 0xa8, 0xe3, 0x0a, 0x10, 0xa3, 0x3b, 0xed, 0x3c, 0x83, 0x13, 0x10, 0x29, 0x30, 0xff, 0xfa, 0x10, 0xc4, 0x1f, 0xdd, 0xfc, 0x03, 0xc5, 0x98, 0x1d, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0x73, 0x83, 0xe4, 0x81, 0x9f, 0x6c, 0x40, 0x42, 0x00, 0x83, 0x99, 0x41, 0x21, 0x40, 0x42, 0x08, 0xc6, 0x89, 0x53, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0x22, 0x88, 0x70, 0x41, 0x10, 0x9e, 0xe3, 0x18, 0x5f, 0x03, 0xd9, 0xee, 0xf1, 0x82, 0x18, 0xb3, 0x00, 0x69, 0x30, 0x4f, 0x82, 0xc8, 0x20, 0x01, 0x54, 0x24, 0x22, 0x44, 0x5a, 0xba, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x40, 0x31, 0x69, 0x4c, 0x72, 0x0e, 0x0c, 0x4c, 0x21, 0xc4, 0xc8, 0xe9, 0x4d, 0xae, 0x4e, 0x06, 0xff, 0xfa, 0x10, 0xc4, 0x40, 0xcc, 0xf5, 0x03, 0xc3, 0xc4, 0x1f, 0x22, 0x0d, 0x7b, 0x62, 0x00, 0x76, 0x83, 0xe4, 0x81, 0xaf, 0x6c, 0x40, 0x4e, 0x31, 0x70, 0x1d, 0x18, 0x98, 0xca, 0x3d, 0x20, 0x94, 0x78, 0x6d, 0x3d, 0xcf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa5, 0x2f, 0x4b, 0x9c, 0x0c, 0x3c, 0xc5, 0xf8, 0xc0, 0xf0, 0x5b, 0xcd, 0x67, 0x9f, 0xc8, 0xff, 0x65, 0x31, 0x25, 0x16, 0x4b, 0x9c, 0x9f, 0x29, 0x5c, 0x4d, 0x26, 
0x77, 0xaa, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0x4a, 0xa1, 0xc0, 0x64, 0xa6, 0x08, 0x69, 0x85, 0x87, 0x20, 0xc3, 0xda, 0x71, 0x4c, 0x1a, 0x10, 0x1a, 0x4a, 0x08, 0x04, 0xff, 0xfa, 0x10, 0xc4, 0x5a, 0xdd, 0xfb, 0x83, 0xc3, 0xac, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xac, 0x03, 0xa4, 0xc1, 0x8f, 0x6c, 0x40, 0x47, 0x04, 0x4b, 0x4a, 0x02, 0xb3, 0x39, 0x3b, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xef, 0x42, 0xf9, 0x18, 0x00, 0x09, 0x32, 0x65, 0x2b, 0x18, 0x33, 0x8e, 0xb9, 0xc1, 0x9d, 0x05, 0x1e, 0x6c, 0x59, 0x95, 0x15, 0x18, 0x78, 0x88, 0x08, 0x0c, 0x20, 0x10, 0xb7, 0xe4, 0x4d, 0x12, 0x0e, 0xd5, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x55, 0x55, 0x8a, 0x29, 0xb9, 0x80, 0x01, 0xbb, 0x09, 0x83, 0xc0, 0x7a, 0x1c, 0xb7, 0x2b, 0x79, 0xf6, 0x20, 0x19, 0x88, 0xff, 0xfa, 0x10, 0xc4, 0xde, 0x9c, 0xfc, 0x03, 0xc5, 0x90, 0x1d, 0x20, 0x0d, 0x7f, 0x02, 0x40, 0x73, 0x83, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0x79, 0x86, 0x02, 0x17, 0x01, 0x09, 0x68, 0xf0, 0x3c, 0x3f, 0x6f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0xab, 0xa4, 0x59, 0x1e, 0x8c, 0x17, 0x61, 0x86, 0x08, 0x25, 0x9e, 0xfe, 0x8f, 0xd1, 0x8b, 0x58, 0x06, 0x98, 0x28, 0x00, 0x01, 0xdc, 0xa0, 0x91, 0x88, 0x80, 0x3a, 0x35, 0x71, 0x53, 0x6a, 0x48, 0x92, 0xed, 0x01, 0x95, 0x3a, 0xd6, 0x30, 0x9c, 0x0c, 0x53, 0xbb, 0x04, 0x7c, 0x39, 0x40, 0xa4, 0x30, 0xf4, 0x24, 0x10, 0x4b, 0x44, 0xc9, 0x41, 0xff, 0xfa, 0x10, 0xc4, 0x67, 0x6a, 0xf5, 0x00, 0x03, 0xb0, 0x1f, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0x7a, 0x03, 0xa4, 0xd5, 0xaf, 0x68, 0x43, 0x10,<|fim▁hole|> 0xd1, 0x08, 0x20, 0xb1, 0x90, 0x2d, 0x03, 0x02, 0x61, 0xbd, 0x34, 0x40, 0x98, 0xa3, 0xb1, 0xfc, 0xff, 0xfa, 0x10, 0xc4, 0x9f, 0xa3, 0xf5, 0x80, 0x03, 0xe8, 0x1d, 0x26, 0xad, 0x7b, 0x42, 0x18, 0x77, 0x03, 0xe4, 0x81, 0x9f, 0x6c, 0x40, 0x12, 0x79, 0x06, 0xd6, 0x7a, 0x4c, 0xa3, 0x51, 0x36, 0x19, 0x37, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x95, 0x2a, 0x2a, 0x01, 0x18, 0x2c, 0x09, 0xa2, 
0x60, 0x44, 0x38, 0x06, 0x7d, 0x93, 0x2e, 0x6a, 0xd0, 0x06, 0x16, 0x4a, 0x60, 0x01, 0x05, 0xd8, 0x40, 0xf4, 0x68, 0x26, 0x63, 0x93, 0xf1, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x82, 0xd4, 0xa0, 0x12, 0x49, 0xa1, 0x91, 0x82, 0xf8, 0xa0, 0x9b, 0xdf, 0xb7, 0xe1, 0xdf, 0xac, 0x99, 0xff, 0xfa, 0x10, 0xc4, 0xae, 0x91, 0xfc, 0x03, 0xc5, 0x84, 0x1d, 0x24, 0x0c, 0xfb, 0x02, 0x40, 0x77, 0x83, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0x20, 0xb1, 0x84, 0x83, 0x97, 0x2d, 0x14, 0x12, 0x2c, 0x88, 0xe2, 0x73, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x44, 0x25, 0x58, 0xc1, 0xc4, 0x1c, 0x29, 0x98, 0x42, 0x05, 0xa1, 0xd1, 0x62, 0x10, 0x98, 0x80, 0x01, 0x61, 0x81, 0xa8, 0x04, 0x9b, 0x04, 0x5c, 0x04, 0x7b, 0x48, 0xc1, 0xe5, 0xdb, 0x41, 0x61, 0x21, 0x22, 0x4c, 0x81, 0xe1, 0x4c, 0x37, 0xc1, 0x00, 0xff, 0xec, 0x74, 0xc2, 0x0d, 0x08, 0x38, 0x2f, 0x0f, 0x63, 0x06, 0x9c, 0x70, 0x85, 0xff, 0xfa, 0x10, 0xc4, 0xa6, 0x0f, 0xfb, 0x03, 0xc5, 0x78, 0x1f, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0x6e, 0x83, 0xe4, 0xc1, 0x9f, 0x68, 0x40, 0x44, 0xac, 0x6a, 0xcd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0xfd, 0x08, 0xaa, 0x5b, 0x63, 0x20, 0x53, 0x96, 0x83, 0x08, 0xe1, 0x0f, 0x3a, 0xe4, 0x60, 0x23, 0x89, 0x10, 0xcc, 0x6a, 0x00, 0x22, 0x02, 0x2d, 0x84, 0xd1, 0x40, 0x50, 0xd0, 0xa6, 0xcf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x42, 0x48, 0x62, 0xe6, 0x82, 0x4c, 0x32, 0xbb, 0x30, 0x4b, 0x15, 0xe3, 0x68, 0x37, 0xa1, 0x39, 0x95, 0xe3, 0xff, 0xfa, 0x10, 0xc4, 0xb0, 0xe0, 0xfd, 0x03, 0xc5, 0x74, 0x1d, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0x7e, 0x03, 0xe4, 0x41, 0xaf, 0x6c, 0x40, 0x10, 0x11, 0x5a, 0x4e, 0xaa, 0x97, 0x26, 0x09, 0x12, 0x14, 0xf7, 0x3f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x80, 0x10, 0x00, 0x23, 0xc8, 0xa8, 0x21, 0x83, 0x23, 0x34, 0x81, 0x03, 0x92, 0x62, 0xed, 0x38, 0x46, 0x11, 0x06, 0x38, 0x44, 0x15, 0x03, 0x4d, 0x74, 0x60, 0x4b, 0x22, 0x86, 
0x59, 0x32, 0xd5, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x74, 0x95, 0xff, 0xfa, 0x10, 0xc4, 0x15, 0x78, 0xfc, 0x03, 0xc5, 0x64, 0x1d, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x79, 0x03, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0x4c, 0x44, 0x40, 0x07, 0x53, 0x02, 0xe1, 0x42, 0x34, 0xb6, 0x70, 0x93, 0xd5, 0x60, 0xc7, 0x95, 0x00, 0x00, 0x48, 0x84, 0xd7, 0x52, 0xc2, 0x67, 0x73, 0xfc, 0x8d, 0xa8, 0x00, 0x04, 0x33, 0x62, 0x83, 0x07, 0x11, 0x13, 0x38, 0xe9, 0x60, 0xa3, 0xdf, 0x4b, 0x32, 0xc1, 0x23, 0x0b, 0x05, 0x2d, 0x9a, 0x2a, 0x22, 0x78, 0xf1, 0x45, 0x8e, 0xaa, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x6d, 0x55, 0x78, 0xff, 0xfa, 0x10, 0xc4, 0x96, 0xe1, 0xff, 0x83, 0xc5, 0x64, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0xac, 0x03, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0xb1, 0x87, 0x18, 0x26, 0x11, 0x80, 0xcc, 0x75, 0x40, 0x51, 0xa7, 0x0c, 0x0a, 0x00, 0x8d, 0xe0, 0x20, 0xc1, 0x75, 0x12, 0x3d, 0x1d, 0x14, 0x9e, 0x65, 0xc6, 0x07, 0x09, 0x01, 0x2e, 0x38, 0x06, 0x4c, 0x36, 0x43, 0x28, 0xfe, 0x55, 0x27, 0x4c, 0x65, 0x40, 0x98, 0x58, 0x2d, 0x41, 0x6f, 0x10, 0x8c, 0x94, 0x82, 0x87, 0x16, 0x95, 0x36, 0xea, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0xaa, 0x40, 0x29, 0x67, 0x8c, 0x43, 0x4d, 0xfd, 0x8c, 0x1f, 0x05, 0x34, 0xe7, 0x19, 0xd0, 0x8f, 0xc9, 0x5c, 0xca, 0xff, 0xfa, 0x10, 0xc4, 0xb7, 0xe4, 0xfb, 0x83, 0xc5, 0x50, 0x1f, 0x24, 0x0d, 0x7b, 0x42, 0x00, 0x79, 0x03, 0xe4, 0x81, 0xaf, 0x6c, 0x40, 0xc2, 0x92, 0x05, 0x9f, 0x24, 0x2a, 0x12, 0x87, 0x8f, 0x67, 0x7b, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf4, 0x97, 0x08, 0x28, 0x1c, 0x12, 0x78, 0xc4, 0x5d, 0x30, 0x47, 0x1f, 0x23, 0x64, 0x5a, 0xa1, 0x38, 0xf9, 0x03, 0x0a, 0x25, 0x2a, 0x00, 0x2e, 0x04, 0x22, 0x2f, 0xc9, 0x13, 0xb4, 0x8b, 0x95, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x56, 0xd4, 0x3e, 0x25, 0x22, 0x29, 0x10, 0xc0, 0x1c, 0x62, 0xcc, 
0x89, 0xe2, 0x18, 0xd2, 0xf1, 0x00, 0xff, 0xfa, 0x10, 0xc4, 0x79, 0x79, 0xfb, 0x83, 0xc5, 0x74, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x75, 0x83, 0xa4, 0xc1, 0x9f, 0x60, 0x48, 0x1c, 0x04, 0x83, 0x4a, 0xf4, 0xcc, 0x4e, 0x32, 0x8a, 0x14, 0x5d, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x0c, 0x08, 0x29, 0x60, 0xc2, 0xc3, 0x19, 0x37, 0x98, 0x22, 0x09, 0x41, 0xb0, 0xf3, 0x3f, 0x9c, 0x92, 0x81, 0x8c, 0x89, 0x98, 0x10, 0x02, 0x2a, 0x27, 0x5a, 0x9a, 0x11, 0x19, 0xcf, 0xaa, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x85, 0xa7, 0x41, 0x8e, 0x09, 0xcd, 0x11, 0x84, 0xa8, 0x7d, 0x9d, 0x96, 0xac, 0x81, 0x88, 0xe8, 0x22, 0xff, 0xfa, 0x10, 0xc4, 0x85, 0x48, 0xff, 0x83, 0xc5, 0x8c, 0x1d, 0x20, 0x0d, 0x7b, 0x02, 0x40, 0xb1, 0x03, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0x98, 0x1f, 0x00, 0x91, 0xcd, 0x20, 0x50, 0x97, 0x51, 0x01, 0x63, 0x60, 0xb1, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0xd3, 0xa4, 0x78, 0x22, 0x88, 0x47, 0xef, 0x30, 0xc0, 0x00, 0x43, 0xe0, 0xc0, 0x87, 0x07, 0x16, 0xa0, 0x28, 0x28, 0xce, 0xc2, 0x06, 0x08, 0x64, 0x24, 0x84, 0x74, 0xee, 0xea, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0x2f, 0x10, 0x28, 0x13, 0x3c, 0x43, 0xc2, 0x73, 0x0b, 0x11, 0x0c, 0x3c, 0xf5, 0x5f, 0x53, 0x14, 0xd0, 0x40, 0x03, 0xff, 0xfa, 0x10, 0xc4, 0xa0, 0xd5, 0xfb, 0x80, 0x05, 0x50, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x7b, 0x83, 0xa4, 0x95, 0xaf, 0x6c, 0x43, 0x04, 0x48, 0xb5, 0xd1, 0xdd, 0x04, 0x42, 0x00, 0x8d, 0xaa, 0x9b, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x42, 0x12, 0x41, 0xa0, 0x0c, 0x01, 0x13, 0x2f, 0x00, 0xc1, 0x98, 0x5f, 0xce, 0x16, 0x21, 0x08, 0xf2, 0x9a, 0x4c, 0x7c, 0x49, 0x41, 0x5c, 0xa4, 0xc1, 0x41, 0xe2, 0x24, 0xd9, 0xde, 0xd5, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x4e, 0x64, 0xff, 0xfa, 0x10, 0xc4, 0xd3, 0x81, 0xf4, 0x03, 0xc3, 0x80, 0x1f, 0x28, 0x0c, 0xfb, 0x42, 
0x00, 0x74, 0x83, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x10, 0x8a, 0x92, 0x0b, 0x45, 0x30, 0x0f, 0x18, 0x83, 0x2b, 0x78, 0x7b, 0x35, 0xbb, 0x85, 0x0c, 0xa2, 0x02, 0xf4, 0x4f, 0x24, 0xe1, 0x28, 0xa5, 0x47, 0xc4, 0x2d, 0x12, 0xa0, 0x01, 0x19, 0x73, 0x01, 0x38, 0xc0, 0xd4, 0x6e, 0x4d, 0x3f, 0x65, 0xb0, 0xfa, 0xff, 0x32, 0x49, 0x0c, 0x28, 0x92, 0xe3, 0x97, 0xe1, 0x11, 0xc9, 0xb0, 0x48, 0x16, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x88, 0x27, 0x38, 0x10, 0xff, 0xfa, 0x10, 0xc4, 0x4d, 0xb4, 0xf4, 0x83, 0xc3, 0x6c, 0x1d, 0x26, 0x0c, 0xfb, 0x82, 0x00, 0x7b, 0x83, 0xe4, 0x01, 0xaf, 0x60, 0x48, 0x63, 0x56, 0xf3, 0x06, 0x61, 0x2c, 0x38, 0x4e, 0x69, 0x73, 0xce, 0x52, 0x32, 0x91, 0x53, 0x0a, 0x04, 0x2e, 0x42, 0x0f, 0xa4, 0x21, 0x11, 0x9c, 0xff, 0x1b, 0x64, 0xcc, 0x08, 0x4c, 0xea, 0x40, 0xc2, 0x7c, 0x2a, 0xce, 0xf8, 0x4f, 0xc8, 0xc4, 0xc4, 0x0a, 0xcc, 0x10, 0x80, 0x20, 0xe6, 0x50, 0x48, 0x50, 0x10, 0x82, 0x31, 0xa2, 0x52, 0x2a, 0x45, 0x10, 0xe0, 0x83, 0x21, 0x3d, 0xc2, 0x30, 0xbe, 0x07, 0x13, 0xde, 0x62, 0xca, 0x31, 0x66, 0x00, 0xb2, 0x20, 0x9f, 0x03, 0x90, 0x2c, 0x02, 0xa8, 0xff, 0xfa, 0x10, 0xc4, 0x41, 0x74, 0xfc, 0x03, 0xc5, 0x64, 0x1f, 0x24, 0x0c, 0xfb, 0x62, 0x00, 0x79, 0x03, 0xe4, 0x41, 0xaf, 0x6c, 0x40, 0x48, 0x44, 0x92, 0x35, 0x75, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf4, 0xa6, 0x32, 0x01, 0x4c, 0x31, 0x0d, 0xa9, 0x4c, 0x1d, 0x44, 0x10, 0xe4, 0x8d, 0x72, 0xcf, 0x8c, 0xf8, 0xc9, 0x80, 0x53, 0x65, 0xaa, 0xa9, 0x92, 0x3c, 0x8f, 0x13, 0x5a, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe8, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x48, 0x52, 0xff, 0xfa, 0x10, 0xc4, 0xb7, 0x30, 0xfb, 0x82, 0xc5, 0x48, 0x1f, 0x26, 0x0d, 0x7b, 0x42, 0x00, 0x7a, 0x83, 0xa5, 0x21, 0x9f, 0x6c, 0x41, 0xe7, 0x03, 0x0f, 0x31, 0xfc, 0x30, 0x3e, 0x16, 0xd3, 0x5c, 0x67, 0xe6, 0x38, 
0x26, 0x33, 0x0b, 0x12, 0x59, 0x2d, 0xf2, 0xb0, 0xa5, 0x71, 0x32, 0x2c, 0xef, 0x50, 0xc8, 0x60, 0x39, 0x0a, 0x02, 0x5c, 0x62, 0x22, 0x0c, 0x30, 0xa8, 0xb2, 0x73, 0x00, 0x9a, 0x06, 0x96, 0x81, 0x02, 0x90, 0x18, 0x44, 0x0e, 0x50, 0x00, 0x56, 0x7f, 0x07, 0xf2, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x66, 0x28, 0x70, 0x0a, 0x93, 0xff, 0xfa, 0x10, 0xc4, 0x36, 0x20, 0xfb, 0x83, 0xc5, 0x88, 0x1d, 0x24, 0x0c, 0xfb, 0x02, 0x40, 0x72, 0x03, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0x31, 0x92, 0x0c, 0x14, 0xc6, 0x74, 0xdc, 0x16, 0x37, 0x8e, 0xad, 0xdc, 0xc8, 0x08, 0x8c, 0x24, 0x2c, 0xb6, 0xe5, 0xd0, 0x43, 0x72, 0x25, 0x49, 0x07, 0x61, 0xc4, 0x8b, 0x31, 0x04, 0x38, 0xed, 0x30, 0x8c, 0x12, 0x03, 0xaa, 0x86, 0x53, 0x31, 0x0e, 0x05, 0x03, 0x03, 0xb0, 0x17, 0x38, 0x20, 0x08, 0x02, 0xef, 0x97, 0xb0, 0x7d, 0xf3, 0xfc, 0x60, 0xc9, 0x62, 0x3c, 0xb8, 0x2d, 0x83, 0x0a, 0x50, 0x49, 0x3c, 0x41, 0x1f, 0x23, 0x9d, 0x82, 0xc0, 0x48, 0x83, 0x08, 0x80, 0x4b, 0x76, 0x88, 0x08, 0xff, 0xfa, 0x10, 0xc4, 0x69, 0xc0, 0xfc, 0x83, 0xc5, 0x94, 0x1f, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0x76, 0x03, 0xe4, 0x81, 0xaf, 0x6c, 0x40, 0x7e, 0xc1, 0x2a, 0x37, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x4a, 0x44, 0x97, 0xc8, 0x0c, 0xa9, 0xd6, 0xa1, 0x84, 0xe0, 0x5e, 0x9d, 0xdf, 0x22, 0xa1, 0xca, 0x84, 0xa1, 0xc7, 0xa0, 0xe0, 0x82, 0x5a, 0x26, 0xea, 0x0d, 0x0d, 0x03, 0x6c, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xef, 0x4d, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x40, 0x31, 0x67, 0x4c, 0xff, 0xfa, 0x10, 0xc4, 0xa3, 0xdf, 0xf4, 0x02, 0xc3, 0x70, 0x1f, 0x26, 0x0d, 0x7b, 0x42, 0x00, 0x7a, 0x03, 0xa4, 0x95, 0xaf, 0x68, 0x41, 0x33, 0x8d, 0xae, 0xcc, 0x1d, 0x45, 0x90, 0xe4, 0xad, 0xef, 0xcf, 0x8d, 0x78, 0xc9, 0x03, 0xd3, 0x59, 0xa4, 0xa6, 0x32, 0x12, 0x47, 0x91, 0xa7, 0xb8, 0x02, 0x00, 0x04, 0x4d, 0x10, 0x82, 0x0b, 0x17, 0x00, 
0xd0, 0x30, 0x2a, 0x1b, 0xd3, 0x47, 0xa9, 0x82, 0x3b, 0xdf, 0xc0, 0xa7, 0x90, 0xe6, 0xbf, 0xd2, 0x65, 0x1a, 0x89, 0xb0, 0xc8, 0x56, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x01, 0x00, 0x02, 0x55, 0x12, 0x07, 0x18, 0x3c, 0xff, 0xfa, 0x10, 0xc4, 0x06, 0x5e, 0xf5, 0x03, 0xc3, 0xa8, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x77, 0x83, 0xa4, 0x81, 0x9f, 0x60, 0x48, 0x05, 0xc2, 0x60, 0x50, 0x40, 0x86, 0x87, 0x15, 0xf2, 0x6b, 0xd3, 0x06, 0x1e, 0x5a, 0x60, 0x81, 0x85, 0xac, 0x16, 0x07, 0x1a, 0x00, 0x26, 0x7b, 0x93, 0xab, 0x06, 0xa6, 0x80, 0x24, 0x93, 0x43, 0x43, 0x05, 0xd1, 0x45, 0x37, 0x9f, 0x71, 0x03, 0xbb, 0x5b, 0x32, 0x41, 0x63, 0x09, 0x07, 0x2e, 0x5a, 0x28, 0x24, 0x59, 0x11, 0xc4, 0xe7, 0x55, 0x84, 0xab, 0x18, 0x38, 0x43, 0x85, 0x53, 0x08, 0x40, 0xba, 0x3a, 0x1c, 0x44, 0x33, 0xfe, 0x2e, 0x33, 0x50, 0x93, 0x0c, 0x02, 0x2e, 0x02, 0x3d, 0xa4, 0xff, 0xfa, 0x10, 0xc4, 0xbe, 0x6a, 0xff, 0x83, 0xc5, 0x84, 0x1f, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0xab, 0x83, 0xa4, 0xc1, 0x9f, 0x6c, 0x40, 0x60, 0xf0, 0x7d, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x84, 0x16, 0x12, 0x12, 0x2c, 0xc8, 0x34, 0x29, 0x87, 0x08, 0x18, 0x1f, 0xff, 0x8b, 0xf8, 0x91, 0xa1, 0x06, 0x05, 0xe1, 0xf4, 0x60, 0xd3, 0x8e, 0x10, 0xa8, 0x96, 0x1d, 0x59, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0xa1, 0x2c, 0xc9, 0x8a, 0x01, 0xa8, 0x79, 0xf3, 0xd1, 0x86, 0x20, 0xb0, 0x9f, 0x22, 0x3d, 0x41, 0x8b, 0x80, 0x2d, 0xff, 0xfa, 0x10, 0xc4, 0x14, 0x20, 0xf4, 0x03, 0xc3, 0x88, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x76, 0x03, 0xe4, 0x41, 0xaf, 0x6c, 0x40, 0x98, 0x27, 0x80, 0x40, 0xf3, 0x09, 0x28, 0x20, 0x04, 0x2a, 0x20, 0xb8, 0xb2, 0xdc, 0x92, 0x18, 0xbd, 0xa0, 0x93, 0x0c, 0xce, 0xcc, 0x13, 0x45, 0x70, 0xda, 0x9d, 0xe4, 0x4e, 0x75, 0x70, 0xc4, 0x04, 0x57, 0x33, 0xaa, 0xa5, 0xc9, 0x82, 0x44, 0x85, 0x3d, 0xcf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 
0xff, 0xa1, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x2f, 0x68, 0x8c, 0x40, 0xff, 0xfa, 0x10, 0xc4, 0x68, 0x63, 0xf5, 0x83, 0xc3, 0xc8, 0x1f, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0x7b, 0x03, 0xe4, 0x81, 0x9f, 0x60, 0x48, 0xa2, 0x11, 0x1e, 0x23, 0x00, 0x22, 0x0b, 0x32, 0x0a, 0xb1, 0xe3, 0x18, 0x99, 0x24, 0x2a, 0x06, 0x83, 0xa3, 0x99, 0x40, 0x21, 0x30, 0x11, 0x43, 0xec, 0x1d, 0xd6, 0xe4, 0x93, 0xe2, 0x03, 0x8c, 0x3f, 0xcc, 0x0d, 0x85, 0xd0, 0xd4, 0xda, 0x04, 0x4f, 0xc6, 0x83, 0x26, 0x6c, 0xc1, 0x04, 0x40, 0x02, 0x39, 0xa6, 0xa1, 0x34, 0xb9, 0xfe, 0x2a, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0xaa, 0x86, 0xd4, 0x00, 0x02, 0x29, 0xb1, 0x51, 0x83, 0x78, 0x8b, 0x9c, 0x68, 0xb1, 0x11, 0xee, 0xa6, 0x99, 0x60, 0xff, 0xfa, 0x10, 0xc4, 0xd3, 0x2f, 0xff, 0x83, 0xc5, 0x74, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0xb0, 0x03, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0x91, 0x85, 0x82, 0x97, 0x2d, 0x15, 0x12, 0x1c, 0x78, 0xa2, 0x73, 0xad, 0x35, 0x3c, 0xc3, 0x94, 0x3b, 0x47, 0x30, 0xa2, 0x08, 0x23, 0xc1, 0x63, 0x1b, 0x39, 0xb0, 0x70, 0xc8, 0x60, 0x53, 0x09, 0x00, 0x81, 0x80, 0x04, 0x27, 0xa0, 0xc2, 0x35, 0xd7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0xd5, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x55, 0x55, 0x2e, 0x30, 0x38, 0x48, 0x19, 0x71, 0xc0, 0x2a, 0x61, 0xb6, 0x17, 0xc7, 0xf4, 0x68, 0xc2, 0x63, 0x2e, 0x04, 0xff, 0xfa, 0x10, 0xc4, 0xd2, 0x53, 0xf5, 0x00, 0x03, 0x9c, 0x1f, 0x24, 0x0c, 0xfb, 0x62, 0x00, 0x7c, 0x83, 0xa4, 0x95, 0xaf, 0x68, 0x43, 0x43, 0x41, 0x6a, 0x0b, 0x78, 0x5c, 0x64, 0xa4, 0x18, 0x38, 0x93, 0xa9, 0xb6, 0x80, 0x52, 0xd9, 0x18, 0x85, 0x9b, 0xfa, 0x18, 0x3f, 0x0a, 0x39, 0xcf, 0x23, 0x93, 0x1f, 0xb2, 0xa9, 0x95, 0x85, 0x24, 0xcb, 0x3e, 0x4b, 0xd4, 0x13, 0x0f, 0x1c, 0xce, 0xf7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe9, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x4b, 0xe2, 0xf2, 0x85, 0x48, 0x18, 0x28, 0x26, 0x06, 0x62, 0xf0, 
0x69, 0xf7, 0x04, 0x27, 0xb7, 0xff, 0xfa, 0x10, 0xc4, 0xc2, 0xe6, 0xf7, 0x02, 0xc4, 0x3c, 0x1d, 0x22, 0xad, 0x7b, 0x62, 0x08, 0x75, 0x83, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x31, 0x82, 0x2a, 0xa2, 0x4d, 0x99, 0x49, 0x26, 0xa9, 0x34, 0xa9, 0xee, 0x2b, 0x6a, 0x1f, 0x15, 0x49, 0x8a, 0x44, 0x30, 0x03, 0x18, 0xd3, 0x1e, 0x78, 0x88, 0x33, 0xfc, 0x40, 0x87, 0x01, 0x20, 0xd2, 0xbd, 0x33, 0x13, 0x8c, 0xa2, 0x85, 0x13, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf4, 0x2a, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0x6e, 0xc8, 0xce, 0x08, 0x20, 0x64, 0xe2, 0x98, 0x30, 0x0c, 0x61, 0xbe, 0x84, 0x44, 0x9e, 0x03, 0x91, 0x93, 0x90, 0xff, 0xfa, 0x10, 0xc4, 0xd1, 0x04, 0xff, 0x83, 0xc5, 0x70, 0x1d, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xb1, 0x03, 0xa4, 0x01, 0xaf, 0x60, 0x48, 0x98, 0x50, 0x51, 0x66, 0x0b, 0xb6, 0x82, 0x84, 0x4a, 0x73, 0xfc, 0x81, 0xd3, 0xa0, 0xc7, 0x04, 0xe6, 0x90, 0xc2, 0x4c, 0x40, 0x4e, 0xc6, 0x56, 0xc8, 0xc4, 0x6c, 0x11, 0xcc, 0x0f, 0x00, 0x48, 0xe6, 0x90, 0x28, 0x4b, 0xa8, 0x80, 0xb1, 0xb2, 0x58, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0xf5, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x4e, 0x91, 0xe0, 0x8a, 0x20, 0x17, 0xbc, 0xc3, 0x00, 0x05, 0x0f, 0x82, 0x83, 0x84, 0x0c, 0x5a, 0x80, 0xff, 0xfa, 0x10, 0xc4, 0x69, 0xb2, 0xfc, 0x03, 0xc4, 0x08, 0x1f, 0x20, 0x0c, 0xfb, 0x22, 0x40, 0xaa, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x60, 0xa3, 0x3b, 0x08, 0x18, 0x21, 0x50, 0x92, 0x11, 0xc3, 0xbb, 0xa2, 0xf1, 0x02, 0x81, 0x33, 0xc4, 0x3c, 0xa5, 0x30, 0xb1, 0x10, 0x43, 0xd0, 0xb5, 0xcb, 0x31, 0x4f, 0x03, 0xc0, 0x30, 0x46, 0x89, 0x5d, 0x19, 0xd0, 0x68, 0x44, 0x11, 0xb3, 0x53, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xe5, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x48, 0x52, 0xff, 0xfa, 0x10, 0xc4, 0xcb, 0x45, 0xf5, 0x03, 0xc3, 0xd0, 0x1f, 0x22, 0x0d, 0x7b, 0x62, 0x00, 
0x72, 0x83, 0xe4, 0xc1, 0x9f, 0x68, 0x40, 0xeb, 0x01, 0x0b, 0x34, 0x74, 0x30, 0x58, 0x14, 0x93, 0x77, 0x27, 0x2a, 0x3b, 0x25, 0x73, 0x19, 0x0c, 0x58, 0xae, 0xd2, 0x9a, 0xa4, 0x51, 0x11, 0xec, 0xef, 0x40, 0x40, 0x00, 0xa6, 0x48, 0x82, 0x2a, 0x60, 0x83, 0x93, 0x00, 0xd1, 0x54, 0x32, 0x8f, 0x76, 0xb3, 0x53, 0x70, 0x50, 0x9a, 0x5a, 0x33, 0x45, 0xbc, 0xb3, 0x4a, 0x21, 0x51, 0xaa, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0x4d, 0x12, 0xa0, 0x41, 0x19, 0xf3, 0x05, 0x78, 0xc0, 0xec, 0x7e, 0xcd, 0x5d, 0x6b, 0x28, 0xe0, 0x65, 0xcc, 0x60, 0xb0, 0xff, 0xfa, 0x10, 0xc4, 0x1f, 0x25, 0xfc, 0x03, 0xc3, 0xac, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xb0, 0x03, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0xc2, 0x43, 0xc1, 0x40, 0x62, 0x40, 0x82, 0x40, 0x64, 0xce, 0x92, 0x0e, 0xc0, 0x09, 0xce, 0x04, 0x18, 0xd5, 0xc4, 0xc1, 0x98, 0x4c, 0x0e, 0x0c, 0x9a, 0xb4, 0xf2, 0x95, 0x0c, 0xa4, 0x54, 0xc2, 0x01, 0x0b, 0x90, 0x83, 0xe9, 0x08, 0x44, 0x6f, 0x3f, 0xcf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa2, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0x5f, 0xc2, 0xc0, 0x00, 0xab, 0x3f, 0xa0, 0x30, 0xd7, 0x0f, 0x13, 0xf5, 0xf5, 0x5e, 0x31, 0x91, 0x04, 0x33, 0x05, 0xe0, 0xff, 0xfa, 0x10, 0xc4, 0xef, 0x61, 0xfc, 0x03, 0xc3, 0xdc, 0x1f, 0x20, 0x0d, 0x7b, 0x02, 0x40, 0xac, 0x83, 0xe4, 0x81, 0x9f, 0x6c, 0x40, 0x14, 0x38, 0x4f, 0x30, 0x49, 0x06, 0x08, 0x21, 0x1c, 0x59, 0xca, 0x4d, 0xa7, 0x4a, 0x15, 0x09, 0x32, 0x07, 0x68, 0xc2, 0x90, 0x15, 0x4f, 0x0e, 0x08, 0xf0, 0xe7, 0x00, 0x12, 0x24, 0x38, 0x38, 0x30, 0x80, 0x84, 0x9f, 0x43, 0xb2, 0xd5, 0xab, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0xfd, 0x15, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x4c, 0x64, 0x02, 0xff, 0xfa, 0x10, 0xc4, 0xb8, 0x9f, 0xfb, 0x03, 0xc3, 0x84, 0x1f, 0x26, 0x0d, 0x7b, 0x42, 0x00, 0xac, 0x03, 0xa4, 0xc1, 0xaf, 0x68, 0x40, 0x98, 0x61, 0x1b, 0x52, 0x98, 0x3a, 0x87, 0xf1, 0xc9, 0xda, 0xd7, 0x9f, 0x29, 0xe1, 
0x93, 0x00, 0xa6, 0xcb, 0x55, 0x52, 0xa4, 0xa9, 0x1e, 0x26, 0xb4, 0x02, 0x00, 0x04, 0x85, 0x2e, 0x70, 0x30, 0xf3, 0x1f, 0xc3, 0x04, 0x01, 0x6b, 0x35, 0xee, 0x7d, 0x83, 0x88, 0x63, 0x30, 0xb1, 0x25, 0x81, 0x79, 0x94, 0xb5, 0x2b, 0x89, 0x91, 0x67, 0x56, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x01, 0x00, 0x02, 0x55, 0xff, 0xfa, 0x10, 0xc4, 0x0e, 0xf4, 0xfc, 0x83, 0xc3, 0xc0, 0x1f, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0xb1, 0x83, 0xa4, 0x81, 0x9f, 0x60, 0x48, 0x0c, 0x03, 0x21, 0x30, 0x54, 0xa6, 0x40, 0x39, 0x06, 0x01, 0xd3, 0x8e, 0x22, 0x84, 0x10, 0x91, 0x83, 0x01, 0x11, 0xc1, 0x16, 0xd2, 0xc0, 0xac, 0xce, 0x4f, 0xc5, 0x9e, 0x15, 0x14, 0x10, 0x92, 0x61, 0x64, 0x60, 0x6e, 0x27, 0x66, 0xa8, 0x2d, 0xb8, 0x7e, 0xeb, 0x99, 0x22, 0x86, 0x04, 0x0a, 0x45, 0xa5, 0x62, 0x97, 0x93, 0x34, 0x9c, 0xea, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0xaa, 0x87, 0x15, 0x39, 0x80, 0x01, 0xb7, 0x29, 0x83, 0xb0, 0x7f, 0x1c, 0xa1, 0x2d, 0x29, 0xf4, 0x22, 0x19, 0x78, 0xff, 0xfa, 0x10, 0xc4, 0x86, 0x0e, 0xfb, 0x83, 0xc3, 0x90, 0x1f, 0x22, 0x0c, 0xfb, 0x02, 0x40, 0xb0, 0x03, 0xa4, 0x41, 0x9f, 0x60, 0x48, 0x89, 0x85, 0x02, 0x17, 0x01, 0x09, 0x68, 0xf0, 0x3c, 0x47, 0x6f, 0x8c, 0x19, 0x36, 0x06, 0x97, 0x03, 0xb0, 0x61, 0x4a, 0x0a, 0x67, 0x87, 0x64, 0x50, 0x73, 0x90, 0x78, 0x09, 0x10, 0x61, 0x10, 0x09, 0x6e, 0xd1, 0x01, 0x0e, 0x6b, 0x52, 0xa3, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xef, 0x4d, 0x48, 0x92, 0xf9, 0x01, 0x95, 0x3a, 0xd2, 0x30, 0x9e, 0x0b, 0x53, 0xbc, 0xc4, 0x2c, 0x39, 0x60, 0x8c, 0x48, 0xf4, 0x1c, 0x10, 0x48, 0xc4, 0xdd, 0x41, 0xff, 0xfa, 0x10, 0xc4, 0xc9, 0x88, 0xf4, 0x00, 0x03, 0x88, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x76, 0x83, 0xa5, 0x15, 0x9f, 0x68, 0x43, 0xa1, 0xa0, 0x4d, 0x9f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x94, 0x86, 0x2f, 0xe8, 0x04, 0x83, 0x53, 0x13, 0x06, 0x51, 0x30, 0x38, 0x27, 0x6b, 0x53, 0xc7, 0x4e, 
0x31, 0xe0, 0x95, 0x62, 0x74, 0x55, 0x32, 0x42, 0x91, 0x1b, 0x4f, 0x73, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe9, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x2e, 0xa8, 0x5c, 0x30, 0xff, 0xfa, 0x10, 0xc4, 0x61, 0xe0, 0xfd, 0x03, 0xc4, 0x04, 0x1f, 0x22, 0x0d, 0x7b, 0x62, 0x00, 0xae, 0x83, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x30, 0xf9, 0x85, 0xbc, 0x60, 0x6c, 0x3f, 0x66, 0xab, 0x95, 0x9c, 0x6e, 0xd2, 0x66, 0x06, 0x4c, 0x3a, 0x06, 0xa8, 0xc7, 0x80, 0x45, 0x80, 0x89, 0x9d, 0x64, 0x3d, 0x5a, 0x48, 0x75, 0x1c, 0x20, 0x17, 0x8a, 0x60, 0x28, 0x31, 0x06, 0x60, 0x90, 0xea, 0x6f, 0x37, 0x18, 0x33, 0xa0, 0x40, 0x48, 0xf8, 0x97, 0x69, 0x98, 0x51, 0x4e, 0x8f, 0x8a, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x7c, 0x53, 0xff, 0xfa, 0x10, 0xc4, 0xb2, 0xc7, 0xfd, 0x03, 0xc4, 0x04, 0x1f, 0x20, 0x0c, 0xfb, 0x22, 0x40, 0xad, 0x03, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0x40, 0x12, 0x49, 0xa1, 0xa1, 0x82, 0xd8, 0xa4, 0x9b, 0xbf, 0xb9, 0x21, 0xdc, 0xad, 0x99, 0x10, 0xb1, 0x84, 0x83, 0x97, 0x2d, 0x14, 0x12, 0x2c, 0x88, 0xe2, 0x73, 0xb0, 0x72, 0xb1, 0x82, 0x88, 0x7f, 0xb9, 0x84, 0x10, 0x60, 0x1d, 0x07, 0x23, 0x29, 0xff, 0x18, 0x19, 0xa8, 0x49, 0x86, 0x01, 0x17, 0x01, 0x1e, 0xd2, 0x30, 0x78, 0x3e, 0xdd, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x43, 0x61, 0x61, 0x23, 0xff, 0xfa, 0x10, 0xc4, 0x53, 0x8f, 0xf4, 0x80, 0x03, 0x7c, 0x1d, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x7c, 0x83, 0xa4, 0xd5, 0x9f, 0x6c, 0x43, 0x4c, 0x84, 0xc2, 0x98, 0x1c, 0x20, 0x0f, 0x1a, 0x00, 0x20, 0x93, 0x91, 0x06, 0x84, 0x10, 0x06, 0x01, 0xf4, 0xa0, 0xd3, 0x8c, 0x10, 0xb0, 0x96, 0x55, 0x73, 0x68, 0xaa, 0x5c, 0x63, 0x20, 0x53, 0x9e, 0x43, 0x09, 0x21, 0x03, 0x3b, 0x10, 0x5b, 0xe3, 0x8b, 0x0f, 0x40, 0x46, 0xe1, 0xe0, 0x42, 0xc8, 0x48, 0xd4, 0x27, 0x0d, 0x09, 0x6c, 0xaa, 
0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x42, 0xff, 0xfa, 0x10, 0xc4, 0xb1, 0xfa, 0xf5, 0x00, 0x03, 0xe8, 0x1f, 0x24, 0xad, 0x7b, 0x62, 0x18, 0x71, 0x83, 0xe5, 0x01, 0x9f, 0x68, 0x40, 0x50, 0x30, 0x08, 0x04, 0x91, 0x92, 0x8a, 0x60, 0xba, 0x31, 0x86, 0xf8, 0x31, 0x1e, 0x77, 0x6d, 0x86, 0x30, 0x2e, 0xa6, 0x4e, 0x2a, 0x57, 0x22, 0x89, 0x12, 0x94, 0x8b, 0x89, 0xca, 0x83, 0x43, 0x04, 0xc5, 0x62, 0x18, 0x00, 0x0c, 0x69, 0x8e, 0x74, 0x45, 0x19, 0x2d, 0xe5, 0x53, 0x45, 0xe7, 0x56, 0x35, 0x10, 0x4f, 0x22, 0x8a, 0x34, 0x5d, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x01, 0x00, 0x03, 0x22, 0x49, 0xf1, 0xff, 0xfa, 0x10, 0xc4, 0x9e, 0x36, 0xfc, 0x03, 0xc3, 0xac, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0xb1, 0x03, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0x10, 0xe3, 0x03, 0x04, 0xc0, 0xd0, 0x5d, 0x8d, 0x43, 0x20, 0x74, 0xfa, 0x6a, 0x32, 0x67, 0x4c, 0x10, 0x44, 0x00, 0x23, 0x9a, 0x6a, 0x13, 0x4b, 0x9f, 0x56, 0x1b, 0x48, 0x43, 0x0c, 0x93, 0x7b, 0x43, 0x08, 0x11, 0x47, 0x39, 0xf1, 0x71, 0xc3, 0xfb, 0x5d, 0x33, 0x61, 0xa3, 0x11, 0x0b, 0x2c, 0xb9, 0x72, 0x0b, 0xfe, 0x3c, 0x71, 0x39, 0xda, 0x4c, 0x41, 0x4d, 0x45, 0xaa, 0xaa, 0x6d, 0x53, 0x8c, 0x39, 0x43, 0xac, 0x93, 0x0a, 0x20, 0x8a, 0x3c, 0x0c, 0x34, 0x93, 0x13, 0x40, 0x20, 0x30, 0x43, 0xff, 0xfa, 0x10, 0xc4, 0x1a, 0x96, 0xff, 0x83, 0xc5, 0x70, 0x1d, 0x24, 0x0c, 0xfb, 0x82, 0x00, 0xaa, 0x03, 0xe4, 0xc1, 0x9f, 0x6c, 0x40, 0x00, 0x53, 0x90, 0x81, 0x80, 0x40, 0x5a, 0x0c, 0x21, 0x1d, 0x7d, 0x22, 0x90, 0x60, 0x20, 0xe8, 0x8f, 0x63, 0x4c, 0x2f, 0x42, 0x60, 0xf7, 0x0c, 0xe8, 0x4c, 0x58, 0xc0, 0x60, 0x88, 0x26, 0x41, 0xe3, 0x2f, 0x39, 0x54, 0x03, 0x83, 0x16, 0x15, 0x6d, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0x35, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 
0x55, 0x55, 0x55, 0x40, 0xff, 0xfa, 0x10, 0xc4, 0xfb, 0x85, 0xf5, 0x03, 0xc3, 0xe0, 0x1f, 0x22, 0x0d, 0x7b, 0x62, 0x00, 0x71, 0x83, 0xe4, 0xc1, 0xaf, 0x68, 0x40, 0x29, 0x6c, 0x8c, 0x62, 0xce, 0x1d, 0x0c, 0x20, 0x05, 0x0c, 0xe8, 0x09, 0xc3, 0x0f, 0xe1, 0x4c, 0xcb, 0x42, 0x11, 0x25, 0x9f, 0x25, 0xea, 0x09, 0x87, 0x8e, 0x67, 0x7a, 0x97, 0xc5, 0xe5, 0x0a, 0xa0, 0x61, 0x7e, 0x60, 0x6a, 0x2e, 0x86, 0xa4, 0xf0, 0x2a, 0x7c, 0xf3, 0x18, 0x42, 0xaa, 0xcc, 0xd9, 0x93, 0xe9, 0x35, 0x49, 0xa5, 0x4f, 0x71, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x4e, 0x51, 0xd0, 0x45, 0x53, 0x22, 0x94, 0x8c, 0x00, 0xc7, 0x24, 0xc7, 0x96, 0x6f, 0x8c, 0x7e, 0x0c, 0x00, 0x48, 0x05, 0xff, 0xfa, 0x10, 0xc4, 0x2b, 0xc8, 0xf4, 0x03, 0xc3, 0xac, 0x1f, 0x26, 0x0c, 0xfb, 0x62, 0x00, 0x71, 0x03, 0xa4, 0xc1, 0x8f, 0x6c, 0x40, 0x07, 0x43, 0x74, 0x2c, 0x49, 0xb2, 0x86, 0x49, 0x37, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x86, 0x7c, 0x2a, 0x04, 0x08, 0x60, 0xcb, 0xd3, 0x30, 0x6e, 0x1c, 0xc3, 0x8b, 0xc9, 0xd1, 0x3d, 0xa8, 0x63, 0x2f, 0x2b, 0x31, 0x01, 0x00, 0x10, 0x00, 0x38, 0x0c, 0xb7, 0x83, 0xcc, 0xf2, 0x3e, 0x2a, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x81, 0xd3, 0x00, 0xc7, 0x0c, 0xff, 0xfa, 0x10, 0xc4, 0x4d, 0xe8, 0xf5, 0x03, 0xc3, 0xcc, 0x1f, 0x20, 0x0d, 0x7f, 0x02, 0x40, 0x73, 0x03, 0xa4, 0x81, 0x9f, 0x70, 0x40, 0xe4, 0x98, 0xc2, 0x4c, 0x42, 0x4e, 0xc0, 0xd7, 0x50, 0xc4, 0x64, 0x12, 0x4c, 0x0f, 0x00, 0x48, 0xe6, 0x90, 0x28, 0x4b, 0x80, 0x80, 0xb1, 0xb2, 0x58, 0x4a, 0xd1, 0xe1, 0x44, 0xcb, 0x03, 0x43, 0x98, 0x70, 0x01, 0x71, 0xff, 0xf0, 0xaf, 0x98, 0xd1, 0x00, 0x31, 0x83, 0x08, 0x00, 0x9f, 0x8c, 0x04, 0x30, 0x8c, 0xa4, 0x08, 0x61, 0xf7, 0x75, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x48, 0xa2, 0xec, 0x99, 0x60, 0x1d, 0x0d, 0x98, 0x4c, 0x86, 0xc1, 0xdc, 0x1a, 0x76, 0x9c, 0x94, 0x5c, 0xff, 0xfa, 0x10, 0xc4, 0x3f, 0x49, 0xf3, 0x83, 0xc3, 0xac, 0x1f, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0x6e, 
0x83, 0xe4, 0xc1, 0xaf, 0x68, 0x40, 0x0e, 0x3a, 0x8b, 0x03, 0xd4, 0x3d, 0x32, 0x91, 0x04, 0x68, 0x2d, 0x6b, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0x7a, 0x52, 0x14, 0xba, 0xc0, 0x02, 0xcd, 0x1d, 0x0c, 0x16, 0x85, 0x14, 0xde, 0x49, 0xc6, 0x0e, 0xd1, 0x54, 0xc6, 0x43, 0x17, 0x4b, 0xb4, 0xa0, 0xa9, 0x8c, 0x44, 0x73, 0x3b, 0xd5, 0x4c, 0x41, 0x4d, 0x45, 0x55, 0x44, 0xe1, 0x10, 0x31, 0x09, 0x80, 0x65, 0x13, 0x01, 0xd1, 0xc2, 0x33, 0x99, 0x9a, 0x13, 0x45, 0x80, 0x0b, 0x10, 0x8a, 0xff, 0xfa, 0x10, 0xc4, 0x86, 0x85, 0xf6, 0x00, 0x03, 0xf8, 0x1d, 0x26, 0xad, 0x7b, 0x42, 0x18, 0x77, 0x03, 0xe4, 0x81, 0x9f, 0x6c, 0x40, 0x00, 0x2b, 0xc4, 0xa2, 0x48, 0x12, 0x86, 0x29, 0x3f, 0x3f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x94, 0x6d, 0x2c, 0x0a, 0x10, 0xa7, 0x30, 0xdc, 0x0c, 0x10, 0xc9, 0x24, 0xd7, 0xcf, 0x16, 0x8e, 0x46, 0xbc, 0xc7, 0x0d, 0x8c, 0x34, 0x54, 0x0c, 0x16, 0x1c, 0x18, 0x24, 0x16, 0x4d, 0x21, 0x08, 0xea, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x82, 0x13, 0x9c, 0x08, 0x31, 0xa7, 0x89, 0x83, 0x20, 0x9a, 0x1c, 0x0a, 0x36, 0x19, 0xe4, 0x2a, 0x19, 0xff, 0xfa, 0x10, 0xc4, 0xde, 0x27, 0xfc, 0x83, 0xc3, 0xc4, 0x1f, 0x24, 0x0c, 0xfb, 0x02, 0x40, 0xb1, 0x03, 0xe4, 0x41, 0x9f, 0x60, 0x48, 0x38, 0xb9, 0x84, 0x02, 0x17, 0x21, 0x07, 0xd1, 0x50, 0x88, 0xde, 0x7f, 0x9f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x43, 0xcc, 0xaf, 0x02, 0x0b, 0x38, 0x0e, 0x30, 0x87, 0x0a, 0x13, 0xa5, 0x33, 0xbc, 0x31, 0x05, 0x02, 0x73, 0x03, 0x60, 0x06, 0x36, 0x84, 0xba, 0xe8, 0x60, 0x91, 0xe3, 0xc4, 0xb1, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0x55, 0x53, 0x54, 0x46, 0x12, 0x64, 0x16, 0xd1, 0x85, 0x20, 0x25, 0x9e, 0x1f, 0x10, 0x30, 0x0c, 0xe2, 0xff, 0xfa, 0x10, 0xc4, 0x92, 0x4a, 0xf3, 0x83, 0xc3, 0x9c, 0x1f, 0x24, 0x0c, 0xfb, 0x62, 0x00, 0x6f, 0x83, 0xe4, 0xc1, 0x9f, 0x68, 0x40, 0x3c, 0x87, 0x05, 0x07, 0x10, 0x10, 0x93, 0xe8, 0x76, 0x5a, 0xb5, 0x7f, 0xff, 0xff, 0xff, 
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0xa1, 0x07, 0x8b, 0x6a, 0x63, 0x8c, 0x71, 0x5a, 0x61, 0x16, 0x24, 0x07, 0x53, 0xec, 0xbe, 0x70, 0xb2, 0x41, 0x8c, 0x80, 0x65, 0x00, 0x36, 0x36, 0x95, 0x48, 0x62, 0x3c, 0x2e, 0x9e, 0x48, 0x52, 0xe7, 0x03, 0x0f, 0x32, 0x7c, 0x30, 0x42, 0x16, 0x73, 0x61, 0x67, 0xca, 0x38, 0xc6, 0x13, 0x0d, 0x12, 0x5e, 0x2f, 0x32, 0x96, 0xa7, 0x51, 0xff, 0xfa, 0x10, 0xc4, 0xcb, 0x6f, 0xfd, 0x03, 0xc5, 0x70, 0x1f, 0x24, 0x0d, 0x7b, 0x62, 0x00, 0x7e, 0x83, 0xe4, 0x41, 0xaf, 0x6c, 0x40, 0x12, 0x2c, 0xef, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x82, 0xdd, 0x08, 0x86, 0x8e, 0xa8, 0x20, 0xea, 0x3a, 0x4b, 0x06, 0x11, 0x79, 0x6e, 0x43, 0x64, 0x15, 0x37, 0x00, 0x07, 0x06, 0x00, 0x11, 0x07, 0x95, 0x82, 0x15, 0xa5, 0xc1, 0xfc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd4, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x78, 0xff, 0xfa, 0x10, 0xc4, 0xd6, 0x61, 0xf5, 0x83, 0xc3, 0xc0, 0x1d, 0x24, 0x0c, 0xfb, 0x02, 0x40, 0x7b, 0x83, 0xe4, 0x01, 0xaf, 0x60, 0x48, 0x54, 0x50, 0x42, 0x49, 0x81, 0x91, 0x81, 0xa8, 0x9e, 0x9a, 0x8e, 0xb7, 0x41, 0xf7, 0xae, 0x64, 0x0a, 0x18, 0x10, 0x29, 0x16, 0x9a, 0x8a, 0x5e, 0x4c, 0xd2, 0x73, 0xb1, 0x85, 0x4e, 0x60, 0x00, 0x6d, 0xca, 0x60, 0xe8, 0x20, 0x87, 0x25, 0x8b, 0x82, 0x7c, 0xc8, 0xc6, 0x5e, 0x22, 0x61, 0x40, 0x85, 0xb0, 0x42, 0x5a, 0x3c, 0x0f, 0x13, 0xdb, 0x4c, 0x41, 0x4d, 0x45, 0x33, 0x2e, 0x38, 0x32, 0xaa, }<|fim▁end|>
0xd0, 0x46, 0x97, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0x24, 0x31, 0x75, 0x40, 0x24, 0x1a, 0x59, 0x98, 0x30, 0x89, 0xd1, 0xbf, 0x5b, 0x6f, 0x9d, 0xfa, 0x81, 0x8e, 0x05, 0xad, 0x67, 0x05, 0x53, 0x26, 0x29, 0x11, 0xb4, 0xf3, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf4, 0xd5, 0x4c, 0x41, 0x44,
<|file_name|>Dag.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.lops.compile; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.io.DoubleWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.mapred.SequenceFileInputFormat; import org.apache.sysml.api.DMLScript; import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM; import org.apache.sysml.conf.ConfigurationManager; import org.apache.sysml.conf.DMLConfig; import org.apache.sysml.hops.AggBinaryOp; import org.apache.sysml.hops.BinaryOp; import org.apache.sysml.hops.Hop.FileFormatTypes; import org.apache.sysml.hops.HopsException; import org.apache.sysml.hops.OptimizerUtils; import org.apache.sysml.lops.AppendM; import org.apache.sysml.lops.BinaryM; import org.apache.sysml.lops.CombineBinary; import org.apache.sysml.lops.Data; import org.apache.sysml.lops.Data.OperationTypes; import org.apache.sysml.lops.FunctionCallCP; import org.apache.sysml.lops.Lop; import org.apache.sysml.lops.Lop.Type; import 
org.apache.sysml.lops.LopProperties.ExecLocation; import org.apache.sysml.lops.LopProperties.ExecType; import org.apache.sysml.lops.LopsException; import org.apache.sysml.lops.MapMult; import org.apache.sysml.lops.OutputParameters; import org.apache.sysml.lops.OutputParameters.Format; import org.apache.sysml.lops.PMMJ; import org.apache.sysml.lops.ParameterizedBuiltin; import org.apache.sysml.lops.PickByCount; import org.apache.sysml.lops.SortKeys; import org.apache.sysml.lops.Unary; import org.apache.sysml.parser.DataExpression; import org.apache.sysml.parser.Expression; import org.apache.sysml.parser.Expression.DataType; import org.apache.sysml.parser.ParameterizedBuiltinFunctionExpression; import org.apache.sysml.parser.StatementBlock; import org.apache.sysml.runtime.DMLRuntimeException; import org.apache.sysml.runtime.controlprogram.parfor.ProgramConverter; import org.apache.sysml.runtime.controlprogram.parfor.util.IDSequence; import org.apache.sysml.runtime.instructions.CPInstructionParser; import org.apache.sysml.runtime.instructions.Instruction; import org.apache.sysml.runtime.instructions.Instruction.INSTRUCTION_TYPE; import org.apache.sysml.runtime.instructions.InstructionParser; import org.apache.sysml.runtime.instructions.MRJobInstruction; import org.apache.sysml.runtime.instructions.SPInstructionParser; import org.apache.sysml.runtime.instructions.cp.CPInstruction; import org.apache.sysml.runtime.instructions.cp.CPInstruction.CPINSTRUCTION_TYPE; import org.apache.sysml.runtime.instructions.cp.VariableCPInstruction; import org.apache.sysml.runtime.matrix.MatrixCharacteristics; import org.apache.sysml.runtime.matrix.data.InputInfo; import org.apache.sysml.runtime.matrix.data.OutputInfo; import org.apache.sysml.runtime.matrix.sort.PickFromCompactInputFormat; /** * * Class to maintain a DAG of lops and compile it into * runtime instructions, incl piggybacking into jobs. 
* * @param <N> the class parameter has no affect and is * only kept for documentation purposes. */ public class Dag<N extends Lop> { private static final Log LOG = LogFactory.getLog(Dag.class.getName()); private static final int CHILD_BREAKS_ALIGNMENT = 2; private static final int CHILD_DOES_NOT_BREAK_ALIGNMENT = 1; private static final int MRCHILD_NOT_FOUND = 0; private static final int MR_CHILD_FOUND_BREAKS_ALIGNMENT = 4; private static final int MR_CHILD_FOUND_DOES_NOT_BREAK_ALIGNMENT = 5; private static IDSequence job_id = null; private static IDSequence var_index = null; private int total_reducers = -1; private String scratch = ""; private String scratchFilePath = null; private double gmrMapperFootprint = 0; static { job_id = new IDSequence(); var_index = new IDSequence(); } // hash set for all nodes in dag private ArrayList<Lop> nodes = null; /* * Hashmap to translates the nodes in the DAG to a sequence of numbers * key: Lop ID * value: Sequence Number (0 ... |DAG|) * * This map is primarily used in performing DFS on the DAG, and subsequently in performing ancestor-descendant checks. 
*/ private HashMap<Long, Integer> IDMap = null; private static class NodeOutput { String fileName; String varName; OutputInfo outInfo; ArrayList<Instruction> preInstructions; // instructions added before a MR instruction ArrayList<Instruction> postInstructions; // instructions added after a MR instruction ArrayList<Instruction> lastInstructions; NodeOutput() { fileName = null; varName = null; outInfo = null; preInstructions = new ArrayList<Instruction>(); postInstructions = new ArrayList<Instruction>(); lastInstructions = new ArrayList<Instruction>(); } public String getFileName() { return fileName; } public void setFileName(String fileName) { this.fileName = fileName; } public String getVarName() { return varName; } public void setVarName(String varName) { this.varName = varName; } public OutputInfo getOutInfo() { return outInfo; } public void setOutInfo(OutputInfo outInfo) { this.outInfo = outInfo; } public ArrayList<Instruction> getPreInstructions() { return preInstructions; } public void addPreInstruction(Instruction inst) { preInstructions.add(inst); } public ArrayList<Instruction> getPostInstructions() { return postInstructions; } public void addPostInstruction(Instruction inst) { postInstructions.add(inst); } public ArrayList<Instruction> getLastInstructions() { return lastInstructions; } public void addLastInstruction(Instruction inst) { lastInstructions.add(inst); } } public Dag() { //allocate internal data structures nodes = new ArrayList<Lop>(); IDMap = new HashMap<Long, Integer>(); // get number of reducers from dml config total_reducers = ConfigurationManager.getNumReducers(); } /////// // filename handling private String getFilePath() { if ( scratchFilePath == null ) { scratchFilePath = scratch + Lop.FILE_SEPARATOR + Lop.PROCESS_PREFIX + DMLScript.getUUID() + Lop.FILE_SEPARATOR + Lop.FILE_SEPARATOR + ProgramConverter.CP_ROOT_THREAD_ID + Lop.FILE_SEPARATOR; } return scratchFilePath; } public static String getNextUniqueFilenameSuffix() { return "temp" + 
job_id.getNextID(); } public String getNextUniqueFilename() { return getFilePath() + getNextUniqueFilenameSuffix(); } public static String getNextUniqueVarname(DataType dt) { return (dt==DataType.MATRIX ? Lop.MATRIX_VAR_NAME_PREFIX : Lop.FRAME_VAR_NAME_PREFIX) + var_index.getNextID(); } /////// // Dag modifications /** * Method to add a node to the DAG. * * @param node low-level operator * @return true if node was not already present, false if not. */ public boolean addNode(Lop node) { if (nodes.contains(node)) return false; nodes.add(node); return true; } /** * Method to compile a dag generically * * @param sb statement block * @param config dml configuration * @return list of instructions * @throws LopsException if LopsException occurs * @throws IOException if IOException occurs * @throws DMLRuntimeException if DMLRuntimeException occurs */ public ArrayList<Instruction> getJobs(StatementBlock sb, DMLConfig config) throws LopsException, IOException, DMLRuntimeException { if (config != null) { total_reducers = config.getIntValue(DMLConfig.NUM_REDUCERS); scratch = config.getTextValue(DMLConfig.SCRATCH_SPACE) + "/"; } // hold all nodes in a vector (needed for ordering) ArrayList<Lop> node_v = new ArrayList<Lop>(); node_v.addAll(nodes); /* * Sort the nodes by topological order. * * 1) All nodes with level i appear prior to the nodes in level i+1. 
* 2) All nodes within a level are ordered by their ID i.e., in the order * they are created */ doTopologicalSort_strict_order(node_v); // do greedy grouping of operations ArrayList<Instruction> inst = doGreedyGrouping(sb, node_v); return inst; } private static void deleteUpdatedTransientReadVariables(StatementBlock sb, ArrayList<Lop> nodeV, ArrayList<Instruction> inst) throws DMLRuntimeException { if ( sb == null ) return; if( LOG.isTraceEnabled() ) LOG.trace("In delete updated variables"); // CANDIDATE list of variables which could have been updated in this statement block HashMap<String, Lop> labelNodeMapping = new HashMap<String, Lop>(); // ACTUAL list of variables whose value is updated, AND the old value of the variable // is no longer accessible/used. HashSet<String> updatedLabels = new HashSet<String>(); HashMap<String, Lop> updatedLabelsLineNum = new HashMap<String, Lop>(); // first capture all transient read variables for ( Lop node : nodeV ) { if (node.getExecLocation() == ExecLocation.Data && ((Data) node).isTransient() && ((Data) node).getOperationType() == OperationTypes.READ && ((Data) node).getDataType() == DataType.MATRIX) { // "node" is considered as updated ONLY IF the old value is not used any more // So, make sure that this READ node does not feed into any (transient/persistent) WRITE boolean hasWriteParent=false; for(Lop p : node.getOutputs()) { if(p.getExecLocation() == ExecLocation.Data) { // if the "p" is of type Data, then it has to be a WRITE hasWriteParent = true; break; } } if ( !hasWriteParent ) { // node has no parent of type WRITE, so this is a CANDIDATE variable // add it to labelNodeMapping so that it is considered in further processing labelNodeMapping.put(node.getOutputParameters().getLabel(), node); } } } // capture updated transient write variables for ( Lop node : nodeV ) { if (node.getExecLocation() == ExecLocation.Data && ((Data) node).isTransient() && ((Data) node).getOperationType() == OperationTypes.WRITE && ((Data) 
node).getDataType() == DataType.MATRIX && labelNodeMapping.containsKey(node.getOutputParameters().getLabel()) // check to make sure corresponding (i.e., with the same label/name) transient read is present && !labelNodeMapping.containsValue(node.getInputs().get(0)) // check to avoid cases where transient read feeds into a transient write ) { updatedLabels.add(node.getOutputParameters().getLabel()); updatedLabelsLineNum.put(node.getOutputParameters().getLabel(), node); } } // generate RM instructions Instruction rm_inst = null; for ( String label : updatedLabels ) { rm_inst = VariableCPInstruction.prepareRemoveInstruction(label); rm_inst.setLocation(updatedLabelsLineNum.get(label)); if( LOG.isTraceEnabled() ) LOG.trace(rm_inst.toString()); inst.add(rm_inst); } } private static void generateRemoveInstructions(StatementBlock sb, ArrayList<Instruction> deleteInst) throws DMLRuntimeException { if ( sb == null ) return; if( LOG.isTraceEnabled() ) LOG.trace("In generateRemoveInstructions()"); Instruction inst = null; // RULE 1: if in IN and not in OUT, then there should be an rmvar or rmfilevar inst // (currently required for specific cases of external functions) for (String varName : sb.liveIn().getVariableNames()) { if (!sb.liveOut().containsVariable(varName)) { // DataType dt = in.getVariable(varName).getDataType(); // if( !(dt==DataType.MATRIX || dt==DataType.UNKNOWN) ) // continue; //skip rm instructions for non-matrix objects inst = VariableCPInstruction.prepareRemoveInstruction(varName); inst.setLocation(sb.getEndLine(), sb.getEndLine(), -1, -1); deleteInst.add(inst); if( LOG.isTraceEnabled() ) LOG.trace(" Adding " + inst.toString()); } } // RULE 2: if in KILL and not in IN and not in OUT, then there should be an rmvar or rmfilevar inst // (currently required for specific cases of nested loops) // i.e., local variables which are created within the block, and used entirely within the block /*for (String varName : sb.getKill().getVariableNames()) { if 
((!sb.liveIn().containsVariable(varName)) && (!sb.liveOut().containsVariable(varName))) { // DataType dt = // sb.getKill().getVariable(varName).getDataType(); // if( !(dt==DataType.MATRIX || dt==DataType.UNKNOWN) ) // continue; //skip rm instructions for non-matrix objects inst = createCleanupInstruction(varName); deleteInst.add(inst); if (DMLScript.DEBUG) System.out.println("Adding instruction (r2) " + inst.toString()); } }*/ } private static ArrayList<ArrayList<Lop>> createNodeVectors(int size) { ArrayList<ArrayList<Lop>> arr = new ArrayList<ArrayList<Lop>>(); // for each job type, we need to create a vector. // additionally, create another vector for execNodes for (int i = 0; i < size; i++) { arr.add(new ArrayList<Lop>()); } return arr; } private static void clearNodeVectors(ArrayList<ArrayList<Lop>> arr) { for (ArrayList<Lop> tmp : arr) { tmp.clear(); } } private static boolean isCompatible(ArrayList<Lop> nodes, JobType jt, int from, int to) throws LopsException { int base = jt.getBase(); for ( Lop node : nodes ) { if ((node.getCompatibleJobs() & base) == 0) { if( LOG.isTraceEnabled() ) LOG.trace("Not compatible "+ node.toString()); return false; } } return true; } /** * Function that determines if the two input nodes can be executed together * in at least one job. * * @param node1 low-level operator 1 * @param node2 low-level operator 2 * @return true if nodes can be executed together */ private static boolean isCompatible(Lop node1, Lop node2) { return( (node1.getCompatibleJobs() & node2.getCompatibleJobs()) > 0); } /** * Function that checks if the given node executes in the job specified by jt. * * @param node low-level operator * @param jt job type * @return true if node executes in the specified job type */ private static boolean isCompatible(Lop node, JobType jt) { if ( jt == JobType.GMRCELL ) jt = JobType.GMR; return ((node.getCompatibleJobs() & jt.getBase()) > 0); } /* * Add node, and its relevant children to job-specific node vectors. 
*/ private void addNodeByJobType(Lop node, ArrayList<ArrayList<Lop>> arr, ArrayList<Lop> execNodes, boolean eliminate) throws LopsException { if (!eliminate) { // Check if this lop defines a MR job. if ( node.definesMRJob() ) { // find the corresponding JobType JobType jt = JobType.findJobTypeFromLop(node); if ( jt == null ) { throw new LopsException(node.printErrorLocation() + "No matching JobType is found for a the lop type: " + node.getType() + " \n"); } // Add "node" to corresponding job vector if ( jt == JobType.GMR ) { if ( node.hasNonBlockedInputs() ) { int gmrcell_index = JobType.GMRCELL.getId(); arr.get(gmrcell_index).add(node); int from = arr.get(gmrcell_index).size(); addChildren(node, arr.get(gmrcell_index), execNodes); int to = arr.get(gmrcell_index).size(); if (!isCompatible(arr.get(gmrcell_index),JobType.GMR, from, to)) // check against GMR only, not against GMRCELL throw new LopsException(node.printErrorLocation() + "Error during compatibility check \n"); } else { // if "node" (in this case, a group lop) has any inputs from RAND // then add it to RAND job. 
Otherwise, create a GMR job if (hasChildNode(node, arr.get(JobType.DATAGEN.getId()) )) { arr.get(JobType.DATAGEN.getId()).add(node); // we should NOT call 'addChildren' because appropriate // child nodes would have got added to RAND job already } else { int gmr_index = JobType.GMR.getId(); arr.get(gmr_index).add(node); int from = arr.get(gmr_index).size(); addChildren(node, arr.get(gmr_index), execNodes); int to = arr.get(gmr_index).size(); if (!isCompatible(arr.get(gmr_index),JobType.GMR, from, to)) throw new LopsException(node.printErrorLocation() + "Error during compatibility check \n"); } } } else { int index = jt.getId(); arr.get(index).add(node); int from = arr.get(index).size(); addChildren(node, arr.get(index), execNodes); int to = arr.get(index).size(); // check if all added nodes are compatible with current job if (!isCompatible(arr.get(index), jt, from, to)) { throw new LopsException( "Unexpected error in addNodeByType."); } } return; } } if ( eliminate ) { // Eliminated lops are directly added to GMR queue. // Note that eliminate flag is set only for 'group' lops if ( node.hasNonBlockedInputs() ) arr.get(JobType.GMRCELL.getId()).add(node); else arr.get(JobType.GMR.getId()).add(node); return; } /* * If this lop does not define a job, check if it uses the output of any * specialized job. i.e., if this lop has a child node in any of the * job-specific vector, then add it to the vector. Note: This lop must * be added to ONLY ONE of the job-specific vectors. */ int numAdded = 0; for ( JobType j : JobType.values() ) { if ( j.getId() > 0 && hasDirectChildNode(node, arr.get(j.getId()))) { if (isCompatible(node, j)) { arr.get(j.getId()).add(node); numAdded += 1; } } } if (numAdded > 1) { throw new LopsException("Unexpected error in addNodeByJobType(): A given lop can ONLY be added to a single job vector (numAdded = " + numAdded + ")." ); } } /* * Remove the node from all job-specific node vectors. This method is * invoked from removeNodesForNextIteration(). 
*/ private static void removeNodeByJobType(Lop node, ArrayList<ArrayList<Lop>> arr) { for ( JobType jt : JobType.values()) if ( jt.getId() > 0 ) arr.get(jt.getId()).remove(node); } /** * As some jobs only write one output, all operations in the mapper need to * be redone and cannot be marked as finished. * * @param execNodes list of exec low-level operators * @param jobNodes list of job low-level operators * @param finishedNodes list of finished low-level operators * @throws LopsException if LopsException occurs */ private void handleSingleOutputJobs(ArrayList<Lop> execNodes, ArrayList<ArrayList<Lop>> jobNodes, ArrayList<Lop> finishedNodes) throws LopsException { /* * If the input of a MMCJ/MMRJ job (must have executed in a Mapper) is used * by multiple lops then we should mark it as not-finished. */ ArrayList<Lop> nodesWithUnfinishedOutputs = new ArrayList<Lop>(); int[] jobIndices = {JobType.MMCJ.getId()}; Lop.Type[] lopTypes = { Lop.Type.MMCJ}; // TODO: SortByValue should be treated similar to MMCJ, since it can // only sort one file now for ( int jobi=0; jobi < jobIndices.length; jobi++ ) { int jindex = jobIndices[jobi]; if (!jobNodes.get(jindex).isEmpty()) { ArrayList<Lop> vec = jobNodes.get(jindex); // first find all nodes with more than one parent that is not finished. 
for (int i = 0; i < vec.size(); i++) { Lop node = vec.get(i); if (node.getExecLocation() == ExecLocation.MapOrReduce || node.getExecLocation() == ExecLocation.Map) { Lop MRparent = getParentNode(node, execNodes, ExecLocation.MapAndReduce); if ( MRparent != null && MRparent.getType() == lopTypes[jobi]) { int numParents = node.getOutputs().size(); if (numParents > 1) { for (int j = 0; j < numParents; j++) { if (!finishedNodes.contains(node.getOutputs() .get(j))) nodesWithUnfinishedOutputs.add(node); } } } } } // need to redo all nodes in nodesWithOutput as well as their children for ( Lop node : vec ) { if (node.getExecLocation() == ExecLocation.MapOrReduce || node.getExecLocation() == ExecLocation.Map) { if (nodesWithUnfinishedOutputs.contains(node)) finishedNodes.remove(node); if (hasParentNode(node, nodesWithUnfinishedOutputs)) finishedNodes.remove(node); } } } } } /** * Method to check if a lop can be eliminated from checking * * @param node low-level operator * @param execNodes list of exec nodes * @return true if lop can be eliminated */ private static boolean canEliminateLop(Lop node, ArrayList<Lop> execNodes) { // this function can only eliminate "aligner" lops such a group if (!node.isAligner()) return false; // find the child whose execLoc = 'MapAndReduce' int ret = getChildAlignment(node, execNodes, ExecLocation.MapAndReduce); if (ret == CHILD_BREAKS_ALIGNMENT) return false; else if (ret == CHILD_DOES_NOT_BREAK_ALIGNMENT) return true; else if (ret == MRCHILD_NOT_FOUND) return false; else if (ret == MR_CHILD_FOUND_BREAKS_ALIGNMENT) return false; else if (ret == MR_CHILD_FOUND_DOES_NOT_BREAK_ALIGNMENT) return true; else throw new RuntimeException("Should not happen. \n"); } /** * Method to generate createvar instructions, which creates a new entry * in the symbol table. 
One instruction is generated for every LOP that is * 1) type Data and * 2) persistent and * 3) matrix and * 4) read * * Transient reads needn't be considered here since the previous program * block would already create appropriate entries in the symbol table. * * @param nodes_v list of nodes * @param inst list of instructions * @throws LopsException if LopsException occurs * @throws IOException if IOException occurs */ private static void generateInstructionsForInputVariables(ArrayList<Lop> nodes_v, ArrayList<Instruction> inst) throws LopsException, IOException { for(Lop n : nodes_v) { if (n.getExecLocation() == ExecLocation.Data && !((Data) n).isTransient() && ((Data) n).getOperationType() == OperationTypes.READ && (n.getDataType() == DataType.MATRIX || n.getDataType() == DataType.FRAME) ) { if ( !((Data)n).isLiteral() ) { try { String inst_string = n.getInstructions(); CPInstruction currInstr = CPInstructionParser.parseSingleInstruction(inst_string); currInstr.setLocation(n); inst.add(currInstr); } catch (DMLRuntimeException e) { throw new LopsException(n.printErrorLocation() + "error generating instructions from input variables in Dag -- \n", e); } } } } } /** * Determine whether to send <code>node</code> to MR or to process it in the control program. * It is sent to MR in the following cases: * * 1) if input lop gets processed in MR then <code>node</code> can be piggybacked * * 2) if the exectype of write lop itself is marked MR i.e., memory estimate > memory budget. * * @param node low-level operator * @return true if lop should be sent to MR */ private static boolean sendWriteLopToMR(Lop node) { if ( DMLScript.rtplatform == RUNTIME_PLATFORM.SINGLE_NODE ) return false; Lop in = node.getInputs().get(0); Format nodeFormat = node.getOutputParameters().getFormat(); // Case of a transient read feeding into only one output persistent binaryblock write // Move the temporary file on HDFS to required persistent location, insteadof copying. 
if ( in.getExecLocation() == ExecLocation.Data && in.getOutputs().size() == 1 && !((Data)node).isTransient() && ((Data)in).isTransient() && ((Data)in).getOutputParameters().isBlocked() && node.getOutputParameters().isBlocked() ) { return false; } //send write lop to MR if (1) it is marked with exec type MR (based on its memory estimate), or //(2) if the input lop is in MR and the write format allows to pack it into the same job (this does //not apply to csv write because MR csvwrite is a separate MR job type) return (node.getExecType() == ExecType.MR || (in.getExecType() == ExecType.MR && nodeFormat != Format.CSV)); } /** * Computes the memory footprint required to execute <code>node</code> in the mapper. * It is used only for those nodes that use inputs from distributed cache. The returned * value is utilized in limiting the number of instructions piggybacked onto a single GMR mapper. * * @param node low-level operator * @return memory footprint */ private static double computeFootprintInMapper(Lop node) { // Memory limits must be checked only for nodes that use distributed cache if ( ! 
node.usesDistributedCache() ) // default behavior return 0.0; OutputParameters in1dims = node.getInputs().get(0).getOutputParameters(); OutputParameters in2dims = node.getInputs().get(1).getOutputParameters(); double footprint = 0; if ( node instanceof MapMult ) { int dcInputIndex = node.distributedCacheInputIndex()[0]; footprint = AggBinaryOp.getMapmmMemEstimate( in1dims.getNumRows(), in1dims.getNumCols(), in1dims.getRowsInBlock(), in1dims.getColsInBlock(), in1dims.getNnz(), in2dims.getNumRows(), in2dims.getNumCols(), in2dims.getRowsInBlock(), in2dims.getColsInBlock(), in2dims.getNnz(), dcInputIndex, false); } else if ( node instanceof PMMJ ) { int dcInputIndex = node.distributedCacheInputIndex()[0]; footprint = AggBinaryOp.getMapmmMemEstimate( in1dims.getNumRows(), 1, in1dims.getRowsInBlock(), in1dims.getColsInBlock(), in1dims.getNnz(), in2dims.getNumRows(), in2dims.getNumCols(), in2dims.getRowsInBlock(), in2dims.getColsInBlock(), in2dims.getNnz(), dcInputIndex, true); } else if ( node instanceof AppendM ) { footprint = BinaryOp.footprintInMapper( in1dims.getNumRows(), in1dims.getNumCols(), in2dims.getNumRows(), in2dims.getNumCols(), in1dims.getRowsInBlock(), in1dims.getColsInBlock()); } else if ( node instanceof BinaryM ) { footprint = BinaryOp.footprintInMapper( in1dims.getNumRows(), in1dims.getNumCols(), in2dims.getNumRows(), in2dims.getNumCols(), in1dims.getRowsInBlock(), in1dims.getColsInBlock()); } else { // default behavior return 0.0; } return footprint; } /** * Determines if <code>node</code> can be executed in current round of MR jobs or if it needs to be queued for later rounds. * If the total estimated footprint (<code>node</code> and previously added nodes in GMR) is less than available memory on * the mappers then <code>node</code> can be executed in current round, and <code>true</code> is returned. Otherwise, * <code>node</code> must be queued and <code>false</code> is returned. 
* * @param node low-level operator * @param footprintInMapper mapper footprint * @return true if node can be executed in current round of jobs */ private static boolean checkMemoryLimits(Lop node, double footprintInMapper) { boolean addNode = true; // Memory limits must be checked only for nodes that use distributed cache if ( ! node.usesDistributedCache() ) // default behavior return addNode; double memBudget = Math.min(AggBinaryOp.MAPMULT_MEM_MULTIPLIER, BinaryOp.APPEND_MEM_MULTIPLIER) * OptimizerUtils.getRemoteMemBudgetMap(true); if ( footprintInMapper <= memBudget ) return addNode; else return !addNode; } /** * Method to group a vector of sorted lops. * * @param sb statement block * @param node_v list of low-level operators * @return list of instructions * @throws LopsException if LopsException occurs * @throws IOException if IOException occurs * @throws DMLRuntimeException if DMLRuntimeException occurs */ private ArrayList<Instruction> doGreedyGrouping(StatementBlock sb, ArrayList<Lop> node_v) throws LopsException, IOException, DMLRuntimeException { if( LOG.isTraceEnabled() ) LOG.trace("Grouping DAG ============"); // nodes to be executed in current iteration ArrayList<Lop> execNodes = new ArrayList<Lop>(); // nodes that have already been processed ArrayList<Lop> finishedNodes = new ArrayList<Lop>(); // nodes that are queued for the following iteration ArrayList<Lop> queuedNodes = new ArrayList<Lop>(); ArrayList<ArrayList<Lop>> jobNodes = createNodeVectors(JobType.getNumJobTypes()); // list of instructions ArrayList<Instruction> inst = new ArrayList<Instruction>(); //ArrayList<Instruction> preWriteDeleteInst = new ArrayList<Instruction>(); ArrayList<Instruction> writeInst = new ArrayList<Instruction>(); ArrayList<Instruction> deleteInst = new ArrayList<Instruction>(); ArrayList<Instruction> endOfBlockInst = new ArrayList<Instruction>(); // remove files for transient reads that are updated. 
deleteUpdatedTransientReadVariables(sb, node_v, writeInst); generateRemoveInstructions(sb, endOfBlockInst); generateInstructionsForInputVariables(node_v, inst); boolean done = false; String indent = " "; while (!done) { if( LOG.isTraceEnabled() ) LOG.trace("Grouping nodes in DAG"); execNodes.clear(); queuedNodes.clear(); clearNodeVectors(jobNodes); gmrMapperFootprint=0; for ( Lop node : node_v ) { // finished nodes don't need to be processed if (finishedNodes.contains(node)) continue; if( LOG.isTraceEnabled() ) LOG.trace("Processing node (" + node.getID() + ") " + node.toString() + " exec nodes size is " + execNodes.size()); //if node defines MR job, make sure it is compatible with all //its children nodes in execNodes if(node.definesMRJob() && !compatibleWithChildrenInExecNodes(execNodes, node)) { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Queueing node " + node.toString() + " (code 1)"); queuedNodes.add(node); removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes); continue; } // if child is queued, this node will be processed in the later // iteration if (hasChildNode(node,queuedNodes)) { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Queueing node " + node.toString() + " (code 2)"); queuedNodes.add(node); // if node has more than two inputs, // remove children that will be needed in a future // iterations // may also have to remove parent nodes of these children removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes); continue; } // if inputs come from different jobs, then queue if ( node.getInputs().size() >= 2) { int jobid = Integer.MIN_VALUE; boolean queueit = false; for(int idx=0; idx < node.getInputs().size(); idx++) { int input_jobid = jobType(node.getInputs().get(idx), jobNodes); if (input_jobid != -1) { if ( jobid == Integer.MIN_VALUE ) jobid = input_jobid; else if ( jobid != input_jobid ) { queueit = true; break; } } } if ( queueit ) { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Queueing 
node " + node.toString() + " (code 3)"); queuedNodes.add(node); removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes); continue; } } // See if this lop can be eliminated // This check is for "aligner" lops (e.g., group) boolean eliminate = false; eliminate = canEliminateLop(node, execNodes); if (eliminate) { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Adding -"+ node.toString()); execNodes.add(node); finishedNodes.add(node); addNodeByJobType(node, jobNodes, execNodes, eliminate); continue; } // If the node defines a MR Job then make sure none of its // children that defines a MR Job are present in execNodes if (node.definesMRJob()) { if (hasMRJobChildNode(node, execNodes)) { // "node" must NOT be queued when node=group and the child that defines job is Rand // this is because "group" can be pushed into the "Rand" job. if (! (node.getType() == Lop.Type.Grouping && checkDataGenAsChildNode(node,execNodes)) ) { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Queueing node " + node.toString() + " (code 4)"); queuedNodes.add(node); removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes); continue; } } } // if "node" has more than one input, and has a descendant lop // in execNodes that is of type RecordReader // then all its inputs must be ancestors of RecordReader. 
If // not, queue "node" if (node.getInputs().size() > 1 && hasChildNode(node, execNodes, ExecLocation.RecordReader)) { // get the actual RecordReader lop Lop rr_node = getChildNode(node, execNodes, ExecLocation.RecordReader); // all inputs of "node" must be ancestors of rr_node boolean queue_it = false; for (Lop n : node.getInputs()) { // each input should be ancestor of RecordReader lop if (!n.equals(rr_node) && !isChild(rr_node, n, IDMap)) { queue_it = true; // i.e., "node" must be queued break; } } if (queue_it) { // queue node if( LOG.isTraceEnabled() ) LOG.trace(indent + "Queueing -" + node.toString() + " (code 5)"); queuedNodes.add(node); // TODO: does this have to be modified to handle // recordreader lops? removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes); continue; } else { // nothing here.. subsequent checks have to be performed // on "node" ; } } // data node, always add if child not queued // only write nodes are kept in execnodes if (node.getExecLocation() == ExecLocation.Data) { Data dnode = (Data) node; boolean dnode_queued = false; if ( dnode.getOperationType() == OperationTypes.READ ) { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Adding Data -"+ node.toString()); // TODO: avoid readScalar instruction, and read it on-demand just like the way Matrices are read in control program if ( node.getDataType() == DataType.SCALAR //TODO: LEO check the following condition is still needed && node.getOutputParameters().getFile_name() != null ) { // this lop corresponds to reading a scalar from HDFS file // add it to execNodes so that "readScalar" instruction gets generated execNodes.add(node); // note: no need to add it to any job vector } } else if (dnode.getOperationType() == OperationTypes.WRITE) { // Skip the transient write <code>node</code> if the input is a // transient read with the same variable name. i.e., a dummy copy. // Hence, <code>node</code> can be avoided. 
// TODO: this case should ideally be handled in the language layer // prior to the construction of Hops Dag Lop input = dnode.getInputs().get(0); if ( dnode.isTransient() && input.getExecLocation() == ExecLocation.Data && ((Data)input).isTransient() && dnode.getOutputParameters().getLabel().equals(input.getOutputParameters().getLabel()) ) { // do nothing, <code>node</code> must not processed any further. ; } else if ( execNodes.contains(input) && !isCompatible(node, input) && sendWriteLopToMR(node)) { // input is in execNodes but it is not compatible with write lop. So, queue the write lop. if( LOG.isTraceEnabled() ) LOG.trace(indent + "Queueing -" + node.toString()); queuedNodes.add(node); dnode_queued = true; } else { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Adding Data -"+ node.toString()); execNodes.add(node); if ( sendWriteLopToMR(node) ) { addNodeByJobType(node, jobNodes, execNodes, false); } } } if (!dnode_queued) finishedNodes.add(node); continue; } // map or reduce node, can always be piggybacked with parent if (node.getExecLocation() == ExecLocation.MapOrReduce) { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Adding -"+ node.toString()); execNodes.add(node); finishedNodes.add(node); addNodeByJobType(node, jobNodes, execNodes, false); continue; } // RecordReader node, add, if no parent needs reduce, else queue if (node.getExecLocation() == ExecLocation.RecordReader) { // "node" should not have any children in // execNodes .. it has to be the first one in the job! 
if (!hasChildNode(node, execNodes, ExecLocation.Map) && !hasChildNode(node, execNodes, ExecLocation.MapAndReduce)) { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Adding -"+ node.toString()); execNodes.add(node); finishedNodes.add(node); addNodeByJobType(node, jobNodes, execNodes, false); } else { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Queueing -"+ node.toString() + " (code 6)"); queuedNodes.add(node); removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes); } continue; } // map node, add, if no parent needs reduce, else queue if (node.getExecLocation() == ExecLocation.Map) { boolean queueThisNode = false; int subcode = -1; if ( node.usesDistributedCache() ) { // if an input to <code>node</code> comes from distributed cache // then that input must get executed in one of the previous jobs. int[] dcInputIndexes = node.distributedCacheInputIndex(); for( int dcInputIndex : dcInputIndexes ){ Lop dcInput = node.getInputs().get(dcInputIndex-1); if ( (dcInput.getType() != Lop.Type.Data && dcInput.getExecType()==ExecType.MR) && execNodes.contains(dcInput) ) { queueThisNode = true; subcode = 1; } } // Limit the number of distributed cache inputs based on the available memory in mappers double memsize = computeFootprintInMapper(node); //gmrMapperFootprint += computeFootprintInMapper(node); if ( gmrMapperFootprint>0 && !checkMemoryLimits(node, gmrMapperFootprint+memsize ) ) { queueThisNode = true; subcode = 2; } if(!queueThisNode) gmrMapperFootprint += memsize; } if (!queueThisNode && !hasChildNode(node, execNodes,ExecLocation.MapAndReduce)&& !hasMRJobChildNode(node, execNodes)) { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Adding -"+ node.toString()); execNodes.add(node); finishedNodes.add(node); addNodeByJobType(node, jobNodes, execNodes, false); } else { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Queueing -"+ node.toString() + " (code 7 - " + "subcode " + subcode + ")"); queuedNodes.add(node); removeNodesForNextIteration(node, 
finishedNodes, execNodes, queuedNodes, jobNodes); } continue; } // reduce node, make sure no parent needs reduce, else queue if (node.getExecLocation() == ExecLocation.MapAndReduce) { // TODO: statiko -- keep the middle condition // discuss about having a lop that is MapAndReduce but does // not define a job if( LOG.isTraceEnabled() ) LOG.trace(indent + "Adding -"+ node.toString()); execNodes.add(node); finishedNodes.add(node); addNodeByJobType(node, jobNodes, execNodes, eliminate); continue; } // aligned reduce, make sure a parent that is reduce exists if (node.getExecLocation() == ExecLocation.Reduce) { if ( compatibleWithChildrenInExecNodes(execNodes, node) && (hasChildNode(node, execNodes, ExecLocation.MapAndReduce) || hasChildNode(node, execNodes, ExecLocation.Map) ) ) { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Adding -"+ node.toString()); execNodes.add(node); finishedNodes.add(node); addNodeByJobType(node, jobNodes, execNodes, false); } else { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Queueing -"+ node.toString() + " (code 8)"); queuedNodes.add(node); removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes); } continue; } // add Scalar to execNodes if it has no child in exec nodes // that will be executed in a MR job. 
if (node.getExecLocation() == ExecLocation.ControlProgram) { for ( Lop lop : node.getInputs() ) { if (execNodes.contains(lop) && !(lop.getExecLocation() == ExecLocation.Data) && !(lop.getExecLocation() == ExecLocation.ControlProgram)) { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Queueing -"+ node.toString() + " (code 9)"); queuedNodes.add(node); removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes); break; } } if (queuedNodes.contains(node)) continue; if( LOG.isTraceEnabled() ) LOG.trace(indent + "Adding - scalar"+ node.toString()); execNodes.add(node); addNodeByJobType(node, jobNodes, execNodes, false); finishedNodes.add(node); continue; } } // no work to do if ( execNodes.isEmpty() ) { if( !queuedNodes.isEmpty() ) { //System.err.println("Queued nodes should be 0"); throw new LopsException("Queued nodes should not be 0 at this point \n"); } if( LOG.isTraceEnabled() ) LOG.trace("All done! queuedNodes = "+ queuedNodes.size()); done = true; } else { // work to do if( LOG.isTraceEnabled() ) LOG.trace("Generating jobs for group -- Node count="+ execNodes.size()); // first process scalar instructions generateControlProgramJobs(execNodes, inst, writeInst, deleteInst); // copy unassigned lops in execnodes to gmrnodes for (int i = 0; i < execNodes.size(); i++) { Lop node = execNodes.get(i); if (jobType(node, jobNodes) == -1) { if ( isCompatible(node, JobType.GMR) ) { if ( node.hasNonBlockedInputs() ) { jobNodes.get(JobType.GMRCELL.getId()).add(node); addChildren(node, jobNodes.get(JobType.GMRCELL.getId()), execNodes); } else { jobNodes.get(JobType.GMR.getId()).add(node); addChildren(node, jobNodes.get(JobType.GMR.getId()), execNodes); } } else { if( LOG.isTraceEnabled() ) LOG.trace(indent + "Queueing -" + node.toString() + " (code 10)"); execNodes.remove(i); finishedNodes.remove(node); queuedNodes.add(node); removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes); } } } // next generate MR instructions if 
(!execNodes.isEmpty()) generateMRJobs(execNodes, inst, writeInst, deleteInst, jobNodes); handleSingleOutputJobs(execNodes, jobNodes, finishedNodes); } } // add write and delete inst at the very end. //inst.addAll(preWriteDeleteInst); inst.addAll(writeInst); inst.addAll(deleteInst); inst.addAll(endOfBlockInst); return inst; } private boolean compatibleWithChildrenInExecNodes(ArrayList<Lop> execNodes, Lop node) { for( Lop tmpNode : execNodes ) { // for lops that execute in control program, compatibleJobs property is set to LopProperties.INVALID // we should not consider such lops in this check if (isChild(tmpNode, node, IDMap) && tmpNode.getExecLocation() != ExecLocation.ControlProgram //&& tmpNode.getCompatibleJobs() != LopProperties.INVALID && (tmpNode.getCompatibleJobs() & node.getCompatibleJobs()) == 0) return false; } return true; } /** * Exclude rmvar instruction for varname from deleteInst, if exists * * @param varName variable name * @param deleteInst list of instructions */ private static void excludeRemoveInstruction(String varName, ArrayList<Instruction> deleteInst) { //for(Instruction inst : deleteInst) { for(int i=0; i < deleteInst.size(); i++) { Instruction inst = deleteInst.get(i); if ((inst.getType() == INSTRUCTION_TYPE.CONTROL_PROGRAM || inst.getType() == INSTRUCTION_TYPE.SPARK) && ((CPInstruction)inst).getCPInstructionType() == CPINSTRUCTION_TYPE.Variable && ((VariableCPInstruction)inst).isRemoveVariable(varName) ) { deleteInst.remove(i); } } } /** * Generate rmvar instructions for the inputs, if their consumer count becomes zero. 
 *
 * @param node low-level operator
 * @param inst list of instructions
 * @param delteInst list of instructions
 * @throws DMLRuntimeException if DMLRuntimeException occurs
 */
private void processConsumersForInputs(Lop node, ArrayList<Instruction> inst, ArrayList<Instruction> delteInst) throws DMLRuntimeException {
	// reduce the consumer count for all input lops
	// if the count becomes zero, then the variable associated w/ input can be removed
	// (NOTE(review): parameter name "delteInst" is a typo for "deleteInst"; kept as-is)
	for(Lop in : node.getInputs() ) {
		if(DMLScript.ENABLE_DEBUG_MODE) {
			// debug mode: tag the generated rmvar with the consuming lop's location info
			processConsumers(in, inst, delteInst, node);
		}
		else {
			processConsumers(in, inst, delteInst, null);
		}
	}
}

/**
 * Decrements the consumer count of <code>node</code> and, once it reaches zero,
 * emits an immediate rmvar instruction for its output variable and removes any
 * duplicate rmvar scheduled for the end of the block.
 *
 * @param node input lop whose consumer count is decremented
 * @param inst list receiving the generated rmvar instruction
 * @param deleteInst end-of-block delete instructions; a now-redundant rmvar is excluded from it
 * @param locationInfo optional lop whose location is attached to the instruction (may be null)
 * @throws DMLRuntimeException if DMLRuntimeException occurs
 */
private static void processConsumers(Lop node, ArrayList<Instruction> inst, ArrayList<Instruction> deleteInst, Lop locationInfo) throws DMLRuntimeException {
	// reduce the consumer count for all input lops
	// if the count becomes zero, then the variable associated w/ input can be removed
	if ( node.removeConsumer() == 0 ) {
		if ( node.getExecLocation() == ExecLocation.Data && ((Data)node).isLiteral() ) {
			// literals are not backed by a runtime variable; nothing to remove
			return;
		}

		String label = node.getOutputParameters().getLabel();
		Instruction currInstr = VariableCPInstruction.prepareRemoveInstruction(label);
		if (locationInfo != null)
			currInstr.setLocation(locationInfo);
		else
			currInstr.setLocation(node);

		inst.add(currInstr);
		// the variable is removed inline now; drop the redundant end-of-block rmvar
		excludeRemoveInstruction(label, deleteInst);
	}
}

/**
 * Method to generate instructions that are executed in Control Program. At
 * this point, this DAG has no dependencies on the MR dag. ie.
none of the
 * inputs are outputs of MR jobs
 *
 * @param execNodes list of low-level operators
 * @param inst list of instructions
 * @param writeInst list of write instructions
 * @param deleteInst list of delete instructions
 * @throws LopsException if LopsException occurs
 * @throws DMLRuntimeException if DMLRuntimeException occurs
 */
private void generateControlProgramJobs(ArrayList<Lop> execNodes,
	ArrayList<Instruction> inst, ArrayList<Instruction> writeInst, ArrayList<Instruction> deleteInst)
	throws LopsException, DMLRuntimeException
{
	// nodes to be deleted from execnodes
	ArrayList<Lop> markedNodes = new ArrayList<Lop>();

	// variable names to be deleted
	ArrayList<String> var_deletions = new ArrayList<String>();
	// maps each deleted variable to the lop providing location info for the rmvar
	HashMap<String, Lop> var_deletionsLineNum = new HashMap<String, Lop>();

	// per-node flag: generate rmvar instructions for the node's inputs afterwards
	boolean doRmVar = false;

	for (int i = 0; i < execNodes.size(); i++) {
		Lop node = execNodes.get(i);
		doRmVar = false;

		// mark input scalar read nodes for deletion
		// TODO: statiko -- check if this condition ever evaluated to TRUE
		if (node.getExecLocation() == ExecLocation.Data
				&& ((Data) node).getOperationType() == Data.OperationTypes.READ
				&& ((Data) node).getDataType() == DataType.SCALAR
				&& node.getOutputParameters().getFile_name() == null ) {
			markedNodes.add(node);
			continue;
		}

		// output scalar instructions and mark nodes for deletion
		if (node.getExecLocation() == ExecLocation.ControlProgram) {
			if (node.getDataType() == DataType.SCALAR) {
				// Output from lops with SCALAR data type must
				// go into Temporary Variables (Var0, Var1, etc.)
				NodeOutput out = setupNodeOutputs(node, ExecType.CP, false, false);
				inst.addAll(out.getPreInstructions()); // dummy
				deleteInst.addAll(out.getLastInstructions());
			}
			else {
				// Output from lops with non-SCALAR data type must
				// go into Temporary Files (temp0, temp1, etc.)
				NodeOutput out = setupNodeOutputs(node, ExecType.CP, false, false);
				inst.addAll(out.getPreInstructions());

				// if any parent is a transient write, the cleanup of this node's output
				// is deferred (recorded in var_deletions) instead of scheduled directly
				boolean hasTransientWriteParent = false;
				for ( Lop parent : node.getOutputs() ) {
					if ( parent.getExecLocation() == ExecLocation.Data
							&& ((Data)parent).getOperationType() == Data.OperationTypes.WRITE
							&& ((Data)parent).isTransient() ) {
						hasTransientWriteParent = true;
						break;
					}
				}

				if ( !hasTransientWriteParent ) {
					deleteInst.addAll(out.getLastInstructions());
				}
				else {
					var_deletions.add(node.getOutputParameters().getLabel());
					var_deletionsLineNum.put(node.getOutputParameters().getLabel(), node);
				}
			}

			String inst_string = "";

			// Lops with arbitrary number of inputs (ParameterizedBuiltin, GroupedAggregate, DataGen)
			// are handled separately, by simply passing ONLY the output variable to getInstructions()
			if (node.getType() == Lop.Type.ParameterizedBuiltin
					|| node.getType() == Lop.Type.GroupedAgg
					|| node.getType() == Lop.Type.DataGen ){
				inst_string = node.getInstructions(node.getOutputParameters().getLabel());
			}

			// Lops with arbitrary number of inputs and outputs are handled
			// separately as well by passing arrays of inputs and outputs
			else if ( node.getType() == Lop.Type.FunctionCallCP ) {
				String[] inputs = new String[node.getInputs().size()];
				String[] outputs = new String[node.getOutputs().size()];
				int count = 0;
				for( Lop in : node.getInputs() )
					inputs[count++] = in.getOutputParameters().getLabel();
				count = 0;
				for( Lop out : node.getOutputs() ) {
					outputs[count++] = out.getOutputParameters().getLabel();
				}
				inst_string = node.getInstructions(inputs, outputs);
			}
			else if (node.getType() == Lop.Type.MULTIPLE_CP) { // ie, MultipleCP class
				inst_string = node.getInstructions(node.getOutputParameters().getLabel());
			}
			else {
				// generic case: dispatch on input arity to the matching getInstructions overload
				if ( node.getInputs().isEmpty() ) {
					// currently, such a case exists only for Rand lop
					inst_string = node.getInstructions(node.getOutputParameters().getLabel());
				}
				else if (node.getInputs().size() == 1) {
					inst_string = node.getInstructions(node.getInputs()
						.get(0).getOutputParameters().getLabel(),
						node.getOutputParameters().getLabel());
				}
				else if (node.getInputs().size() == 2) {
					inst_string = node.getInstructions(
						node.getInputs().get(0).getOutputParameters().getLabel(),
						node.getInputs().get(1).getOutputParameters().getLabel(),
						node.getOutputParameters().getLabel());
				}
				else if (node.getInputs().size() == 3 || node.getType() == Type.Ternary) {
					inst_string = node.getInstructions(
						node.getInputs().get(0).getOutputParameters().getLabel(),
						node.getInputs().get(1).getOutputParameters().getLabel(),
						node.getInputs().get(2).getOutputParameters().getLabel(),
						node.getOutputParameters().getLabel());
				}
				else if (node.getInputs().size() == 4) {
					inst_string = node.getInstructions(
						node.getInputs().get(0).getOutputParameters().getLabel(),
						node.getInputs().get(1).getOutputParameters().getLabel(),
						node.getInputs().get(2).getOutputParameters().getLabel(),
						node.getInputs().get(3).getOutputParameters().getLabel(),
						node.getOutputParameters().getLabel());
				}
				else if (node.getInputs().size() == 5) {
					inst_string = node.getInstructions(
						node.getInputs().get(0).getOutputParameters().getLabel(),
						node.getInputs().get(1).getOutputParameters().getLabel(),
						node.getInputs().get(2).getOutputParameters().getLabel(),
						node.getInputs().get(3).getOutputParameters().getLabel(),
						node.getInputs().get(4).getOutputParameters().getLabel(),
						node.getOutputParameters().getLabel());
				}
				else if (node.getInputs().size() == 6) {
					inst_string = node.getInstructions(
						node.getInputs().get(0).getOutputParameters().getLabel(),
						node.getInputs().get(1).getOutputParameters().getLabel(),
						node.getInputs().get(2).getOutputParameters().getLabel(),
						node.getInputs().get(3).getOutputParameters().getLabel(),
						node.getInputs().get(4).getOutputParameters().getLabel(),
						node.getInputs().get(5).getOutputParameters().getLabel(),
						node.getOutputParameters().getLabel());
				}
				else if (node.getInputs().size() == 7) {
					inst_string = node.getInstructions(
						node.getInputs().get(0).getOutputParameters().getLabel(),
						node.getInputs().get(1).getOutputParameters().getLabel(),
						node.getInputs().get(2).getOutputParameters().getLabel(),
						node.getInputs().get(3).getOutputParameters().getLabel(),
						node.getInputs().get(4).getOutputParameters().getLabel(),
						node.getInputs().get(5).getOutputParameters().getLabel(),
						node.getInputs().get(6).getOutputParameters().getLabel(),
						node.getOutputParameters().getLabel());
				}
				else {
					String[] inputs = new String[node.getInputs().size()];
					for( int j=0; j<node.getInputs().size(); j++ )
						inputs[j] = node.getInputs().get(j).getOutputParameters().getLabel();
					inst_string = node.getInstructions(inputs,
						node.getOutputParameters().getLabel());
				}
			}

			try {
				if( LOG.isTraceEnabled() )
					LOG.trace("Generating instruction - "+ inst_string);
				Instruction currInstr = InstructionParser.parseSingleInstruction(inst_string);
				if(currInstr == null) {
					throw new LopsException("Error parsing the instruction:" + inst_string);
				}
				// attach best-available source location: the node itself, else an output, else an input
				if (node._beginLine != 0)
					currInstr.setLocation(node);
				else if ( !node.getOutputs().isEmpty() )
					currInstr.setLocation(node.getOutputs().get(0));
				else if ( !node.getInputs().isEmpty() )
					currInstr.setLocation(node.getInputs().get(0));

				inst.add(currInstr);
			} catch (Exception e) {
				throw new LopsException(node.printErrorLocation() + "Problem generating simple inst - " + inst_string, e);
			}

			markedNodes.add(node);
			doRmVar = true;
			//continue;
		}
		else if (node.getExecLocation() == ExecLocation.Data ) {
			Data dnode = (Data)node;
			Data.OperationTypes op = dnode.getOperationType();

			if ( op == Data.OperationTypes.WRITE ) {
				NodeOutput out = null;
				if ( sendWriteLopToMR(node) ) {
					// In this case, Data WRITE lop goes into MR, and
					// we don't have to do anything here
					doRmVar = false;
				}
				else {
					out = setupNodeOutputs(node, ExecType.CP, false, false);
					if ( dnode.getDataType() == DataType.SCALAR ) {
						// processing is same for both transient and persistent scalar writes
						writeInst.addAll(out.getLastInstructions());
						//inst.addAll(out.getLastInstructions());
						doRmVar = false;
					}
					else {
						// setupNodeOutputs() handles both transient and persistent matrix writes
						if ( dnode.isTransient() ) {
							//inst.addAll(out.getPreInstructions()); // dummy ?
							deleteInst.addAll(out.getLastInstructions());
							doRmVar = false;
						}
						else {
							// In case of persistent write lop, write instruction will be generated
							// and that instruction must be added to <code>inst</code> so that it gets
							// executed immediately. If it is added to <code>deleteInst</code> then it
							// gets executed at the end of program block's execution
							inst.addAll(out.getLastInstructions());
							doRmVar = true;
						}
					}
					markedNodes.add(node);
					//continue;
				}
			}
			else {
				// generate a temp label to hold the value that is read from HDFS
				if ( node.getDataType() == DataType.SCALAR ) {
					node.getOutputParameters().setLabel(Lop.SCALAR_VAR_NAME_PREFIX + var_index.getNextID());
					String io_inst = node.getInstructions(node.getOutputParameters().getLabel(),
							node.getOutputParameters().getFile_name());
					CPInstruction currInstr = CPInstructionParser.parseSingleInstruction(io_inst);
					currInstr.setLocation(node);
					inst.add(currInstr);

					Instruction tempInstr = VariableCPInstruction.prepareRemoveInstruction(node.getOutputParameters().getLabel());
					tempInstr.setLocation(node);
					deleteInst.add(tempInstr);
				}
				else {
					throw new LopsException("Matrix READs are not handled in CP yet!");
				}
				markedNodes.add(node);
				doRmVar = true;
				//continue;
			}
		}

		// see if rmvar instructions can be generated for node's inputs
		if(doRmVar)
			processConsumersForInputs(node, inst, deleteInst);
		doRmVar = false;
	}

	// emit deferred rmvar instructions for outputs consumed by transient writes
	for ( String var : var_deletions ) {
		Instruction rmInst = VariableCPInstruction.prepareRemoveInstruction(var);
		if( LOG.isTraceEnabled() )
			LOG.trace(" Adding var_deletions: " + rmInst.toString());
		rmInst.setLocation(var_deletionsLineNum.get(var));
		deleteInst.add(rmInst);
	}

	// delete all marked nodes
	for ( Lop node : markedNodes ) {
		execNodes.remove(node);
	}
}

/**
 * Method to remove all child nodes of a queued node
that should be executed
 * in a following iteration.
 *
 * @param node low-level operator
 * @param finishedNodes list of finished nodes
 * @param execNodes list of exec nodes
 * @param queuedNodes list of queued nodes
 * @param jobvec list of lists of low-level operators
 * @throws LopsException if LopsException occurs
 */
private void removeNodesForNextIteration(Lop node, ArrayList<Lop> finishedNodes,
		ArrayList<Lop> execNodes, ArrayList<Lop> queuedNodes,
		ArrayList<ArrayList<Lop>> jobvec) throws LopsException {

	// only queued nodes with multiple inputs need to be handled.
	if (node.getInputs().size() == 1)
		return;

	//if all children are queued, then there is nothing to do.
	boolean allQueued = true;
	for( Lop input : node.getInputs() ) {
		if( !queuedNodes.contains(input) ) {
			allQueued = false;
			break;
		}
	}
	if ( allQueued )
		return;

	if( LOG.isTraceEnabled() )
		LOG.trace("  Before remove nodes for next iteration -- size of execNodes " + execNodes.size());

	// Determine if <code>node</code> has inputs from the same job or multiple jobs
	int jobid = Integer.MIN_VALUE; // sentinel: no job id seen yet
	boolean inputs_in_same_job = true;
	for( Lop input : node.getInputs() ) {
		int input_jobid = jobType(input, jobvec);
		if ( jobid == Integer.MIN_VALUE )
			jobid = input_jobid;
		else if ( jobid != input_jobid ) {
			inputs_in_same_job = false;
			break;
		}
	}

	// Determine if there exist any unassigned inputs to <code>node</code>
	// Evaluate only those lops that execute in MR.
	boolean unassigned_inputs = false;
	for( Lop input : node.getInputs() ) {
		//if ( input.getExecLocation() != ExecLocation.ControlProgram && jobType(input, jobvec) == -1 ) {
		if ( input.getExecType() == ExecType.MR && !execNodes.contains(input)) { //jobType(input, jobvec) == -1 ) {
			unassigned_inputs = true;
			break;
		}
	}

	// Determine if any node's children are queued
	boolean child_queued = false;
	for( Lop input : node.getInputs() ) {
		if (queuedNodes.contains(input) ) {
			child_queued = true;
			break;
		}
	}

	if (LOG.isTraceEnabled()) {
		LOG.trace("  Property Flags:");
		LOG.trace("    Inputs in same job: " + inputs_in_same_job);
		LOG.trace("    Unassigned inputs: " + unassigned_inputs);
		LOG.trace("    Child queued: " + child_queued);
	}

	// Evaluate each lop in <code>execNodes</code> for removal.
	// Add lops to be removed to <code>markedNodes</code>.
	ArrayList<Lop> markedNodes = new ArrayList<Lop>();
	for (Lop tmpNode : execNodes ) {

		if (LOG.isTraceEnabled()) {
			LOG.trace("  Checking for removal (" + tmpNode.getID() + ") " + tmpNode.toString());
		}

		// if tmpNode is not a descendant of 'node', then there is no advantage in removing tmpNode for later iterations.
		if(!isChild(tmpNode, node, IDMap))
			continue;

		// handle group input lops
		if(node.getInputs().contains(tmpNode) && tmpNode.isAligner()) {
			markedNodes.add(tmpNode);
			if( LOG.isTraceEnabled() )
				LOG.trace("  Removing for next iteration (code 1): (" + tmpNode.getID() + ") " + tmpNode.toString());
		}

		//if (child_queued) {
		// if one of the children are queued,
		// remove some child nodes on other leg that may be needed later on.
		// For e.g. Group lop.
		if (!hasOtherQueuedParentNode(tmpNode, queuedNodes, node)
				&& branchHasNoOtherUnExecutedParents(tmpNode, node, execNodes, finishedNodes)) {

			// decide removal based on how tmpNode's branch could be piggybacked
			// with node in a later iteration, per node's exec location
			boolean queueit = false;
			int code = -1; // trace code identifying which rule fired
			switch(node.getExecLocation()) {
			case Map:
				if(branchCanBePiggyBackedMap(tmpNode, node, execNodes, queuedNodes, markedNodes))
					queueit = true;
				code=2;
				break;

			case MapAndReduce:
				if(branchCanBePiggyBackedMapAndReduce(tmpNode, node, execNodes, queuedNodes)&& !tmpNode.definesMRJob())
					queueit = true;
				code=3;
				break;

			case Reduce:
				if(branchCanBePiggyBackedReduce(tmpNode, node, execNodes, queuedNodes))
					queueit = true;
				code=4;
				break;

			default:
				//do nothing
			}

			if(queueit) {
				if( LOG.isTraceEnabled() )
					LOG.trace("  Removing for next iteration (code " + code + "): (" + tmpNode.getID() + ") " + tmpNode.toString());
				markedNodes.add(tmpNode);
			}
		}

		/*
		 * "node" has no other queued children.
		 *
		 * If inputs are in the same job and "node" is of type
		 * MapAndReduce, then remove nodes of all types other than
		 * Reduce, MapAndReduce, and the ones that define a MR job as
		 * they can be piggybacked later.
		 *
		 * e.g: A=Rand, B=Rand, C=A%*%B Here, both inputs of MMCJ lop
		 * come from Rand job, and they should not be removed.
		 *
		 * Other examples: -- MMCJ whose children are of type
		 * MapAndReduce (say GMR) -- Inputs coming from two different
		 * jobs .. GMR & REBLOCK
		 */
		//boolean himr = hasOtherMapAndReduceParentNode(tmpNode, execNodes,node);
		//boolean bcbp = branchCanBePiggyBackedMapAndReduce(tmpNode, node, execNodes, finishedNodes);
		//System.out.println("  .. " + inputs_in_same_job + "," + himr + "," + bcbp);
		if ((inputs_in_same_job || unassigned_inputs)
				&& node.getExecLocation() == ExecLocation.MapAndReduce
				&& !hasOtherMapAndReduceParentNode(tmpNode, execNodes,node) // don't remove since it already piggybacked with a MapReduce node
				&& branchCanBePiggyBackedMapAndReduce(tmpNode, node, execNodes, queuedNodes)
				&& !tmpNode.definesMRJob()) {
			if( LOG.isTraceEnabled() )
				LOG.trace("  Removing for next iteration (code 5): ("+ tmpNode.getID() + ") " + tmpNode.toString());
			markedNodes.add(tmpNode);
		}
	} // for i

	// we also need to delete all parent nodes of marked nodes
	for ( Lop enode : execNodes ) {
		if( LOG.isTraceEnabled() ) {
			LOG.trace("  Checking for removal - (" + enode.getID() + ") " + enode.toString());
		}

		if (hasChildNode(enode, markedNodes) && !markedNodes.contains(enode)) {
			markedNodes.add(enode);
			if( LOG.isTraceEnabled() )
				LOG.trace("  Removing for next iteration (code 6) (" + enode.getID() + ") " + enode.toString());
		}
	}

	if ( execNodes.size() != markedNodes.size() ) {
		// delete marked nodes from finishedNodes and execNodes
		// add to queued nodes
		for(Lop n : markedNodes) {
			if ( n.usesDistributedCache() )
				gmrMapperFootprint -= computeFootprintInMapper(n);
			finishedNodes.remove(n);
			execNodes.remove(n);
			removeNodeByJobType(n, jobvec);
			queuedNodes.add(n);
		}
	}
}

/**
 * Checks whether tmpNode's branch can be piggybacked into the reduce phase of
 * Reduce-located <code>node</code>: no intermediate lop on the branch
 * tmpNode->..->node may force its own reduce phase.
 */
private boolean branchCanBePiggyBackedReduce(Lop tmpNode, Lop node, ArrayList<Lop> execNodes, ArrayList<Lop> queuedNodes) {
	if(node.getExecLocation() != ExecLocation.Reduce)
		return false;

	// if tmpNode is descendant of any queued child of node, then branch can not be piggybacked
	for(Lop ni : node.getInputs()) {
		if(queuedNodes.contains(ni) && isChild(tmpNode, ni, IDMap))
			return false;
	}

	for( Lop n : execNodes ) {
		if(n.equals(node))
			continue;

		if(n.equals(tmpNode) && n.getExecLocation() != ExecLocation.Map && n.getExecLocation() != ExecLocation.MapOrReduce)
			return false;

		// check if n is on the branch tmpNode->*->node
		if(isChild(n, node, IDMap) && isChild(tmpNode, n, IDMap)) {
			if(!node.getInputs().contains(tmpNode) // redundant
				&& n.getExecLocation() != ExecLocation.Map && n.getExecLocation() != ExecLocation.MapOrReduce)
				return false;
		}
	}
	return true;
}

/**
 * Checks whether tmpNode's branch can be piggybacked into the map phase of
 * Map-located <code>node</code>, accounting for distributed-cache inputs and
 * the mapper memory budget.
 */
private boolean branchCanBePiggyBackedMap(Lop tmpNode, Lop node, ArrayList<Lop> execNodes, ArrayList<Lop> queuedNodes, ArrayList<Lop> markedNodes) {
	if(node.getExecLocation() != ExecLocation.Map)
		return false;

	// if tmpNode is descendant of any queued child of node, then branch can not be piggybacked
	for(Lop ni : node.getInputs()) {
		if(queuedNodes != null && queuedNodes.contains(ni) && isChild(tmpNode, ni, IDMap))
			return false;
	}

	// since node.location=Map: only Map & MapOrReduce lops must be considered
	if( tmpNode.definesMRJob() || (tmpNode.getExecLocation() != ExecLocation.Map && tmpNode.getExecLocation() != ExecLocation.MapOrReduce))
		return false;

	// if there exist a node "dcInput" that is
	//   -- a) parent of tmpNode, and b) feeds into "node" via distributed cache
	//   then, tmpNode should not be removed.
	// "dcInput" must be executed prior to "node", and removal of tmpNode does not make that happen.
	if(node.usesDistributedCache() ) {
		for(int dcInputIndex : node.distributedCacheInputIndex()) {
			Lop dcInput = node.getInputs().get(dcInputIndex-1);
			if(isChild(tmpNode, dcInput, IDMap))
				return false;
		}
	}

	// if tmpNode requires an input from distributed cache,
	// remove tmpNode only if that input can fit into mappers' memory. If not,
	if ( tmpNode.usesDistributedCache() ) {
		double memsize = computeFootprintInMapper(tmpNode);
		if (node.usesDistributedCache() )
			memsize += computeFootprintInMapper(node);
		if ( markedNodes != null ) {
			for(Lop n : markedNodes) {
				if ( n.usesDistributedCache() )
					memsize += computeFootprintInMapper(n);
			}
		}
		if ( !checkMemoryLimits(node, memsize ) ) {
			return false;
		}
	}

	return ( (tmpNode.getCompatibleJobs() & node.getCompatibleJobs()) > 0);
}

/**
 * Function that checks if <code>tmpNode</code> can be piggybacked with MapAndReduce
 * lop <code>node</code>.
 *
 * Decision depends on the exec location of <code>tmpNode</code>. If the exec location is:
 * MapAndReduce: CAN NOT be piggybacked since it defines its own MR job
 * Reduce: CAN NOT be piggybacked since it must execute before <code>node</code>
 * Map or MapOrReduce: CAN be piggybacked ONLY IF it is compatible w/ <code>tmpNode</code>
 *
 * @param tmpNode temporary low-level operator
 * @param node low-level operator
 * @param execNodes list of exec nodes
 * @param queuedNodes list of queued nodes
 * @return true if tmpNode can be piggybacked on node
 */
private boolean branchCanBePiggyBackedMapAndReduce(Lop tmpNode, Lop node,
		ArrayList<Lop> execNodes, ArrayList<Lop> queuedNodes) {

	if (node.getExecLocation() != ExecLocation.MapAndReduce)
		return false;
	JobType jt = JobType.findJobTypeFromLop(node);

	for ( Lop n : execNodes ) {
		if (n.equals(node))
			continue;

		// Evaluate only nodes on the branch between tmpNode->..->node
		if (n.equals(tmpNode) || (isChild(n, node, IDMap) && isChild(tmpNode, n, IDMap))) {
			// another MapAndReduce ancestor among the queued nodes blocks piggybacking
			if ( hasOtherMapAndReduceParentNode(tmpNode, queuedNodes,node) )
				return false;
			ExecLocation el = n.getExecLocation();
			if (el != ExecLocation.Map && el != ExecLocation.MapOrReduce)
				return false;
			else if (!isCompatible(n, jt))
				return false;
		}
	}
	return true;
}

/**
 * Checks that tmpNode (and every node on the branch tmpNode->..->node) has
 * exactly one unfinished output, i.e., no other parent still waiting to execute.
 */
private boolean branchHasNoOtherUnExecutedParents(Lop tmpNode, Lop node,
		ArrayList<Lop> execNodes, ArrayList<Lop> finishedNodes) {

	//if tmpNode has more than one unfinished output, return false
	if(tmpNode.getOutputs().size() > 1) {
		int cnt = 0;
		for (Lop output : tmpNode.getOutputs() )
			if (!finishedNodes.contains(output))
				cnt++;
		if(cnt != 1)
			return false;
	}

	//check to see if any node between node and tmpNode has more than one unfinished output
	for( Lop n : execNodes ) {
		if(n.equals(node) || n.equals(tmpNode))
			continue;

		if(isChild(n, node, IDMap) && isChild(tmpNode, n, IDMap)) {
			int cnt = 0;
			for (Lop output : n.getOutputs() ) {
				if (!finishedNodes.contains(output))
					cnt++;
			}
			if(cnt != 1)
				return false;
		}
	}
	return true;
}

/**
 * Method to return the job index for a
lop.
 *
 * @param lops low-level operator
 * @param jobvec list of lists of low-level operators
 * @return job index for a low-level operator
 * @throws LopsException if LopsException occurs
 */
private static int jobType(Lop lops, ArrayList<ArrayList<Lop>> jobvec) throws LopsException {
	for ( JobType jt : JobType.values()) {
		int i = jt.getId();
		if (i > 0 && jobvec.get(i) != null && jobvec.get(i).contains(lops)) {
			return i;
		}
	}
	return -1; // lop is not assigned to any job yet
}

/**
 * Method to see if there is a node of type MapAndReduce between tmpNode and node
 * in given node collection
 *
 * @param tmpNode temporary low-level operator
 * @param nodeList list of low-level operators
 * @param node low-level operator
 * @return true if MapAndReduce node between tmpNode and node in nodeList
 */
private boolean hasOtherMapAndReduceParentNode(Lop tmpNode, ArrayList<Lop> nodeList, Lop node) {
	if ( tmpNode.getExecLocation() == ExecLocation.MapAndReduce)
		return true;

	for ( Lop n : tmpNode.getOutputs() ) {
		if ( nodeList.contains(n) && isChild(n,node,IDMap)) {
			if(!n.equals(node) && n.getExecLocation() == ExecLocation.MapAndReduce)
				return true;
			else
				// NOTE(review): this recursion returns the result of the FIRST
				// qualifying output only -- remaining outputs are never examined;
				// looks intentional for tree-shaped plans but verify for DAGs.
				return hasOtherMapAndReduceParentNode(n, nodeList, node);
		}
	}

	return false;
}

/**
 * Method to check if there is a queued node that is a parent of both tmpNode and node
 *
 * @param tmpNode temporary low-level operator
 * @param queuedNodes list of queued nodes
 * @param node low-level operator
 * @return true if there is a queued node that is a parent of tmpNode and node
 */
private boolean hasOtherQueuedParentNode(Lop tmpNode, ArrayList<Lop> queuedNodes, Lop node) {
	if ( queuedNodes.isEmpty() )
		return false;

	// reachability bitmaps index by the dense IDMap position of each lop
	boolean[] nodeMarked = node.get_reachable();
	boolean[] tmpMarked  = tmpNode.get_reachable();
	long nodeid = IDMap.get(node.getID());
	long tmpid = IDMap.get(tmpNode.getID());

	for ( Lop qnode : queuedNodes ) {
		int id = IDMap.get(qnode.getID());
		// queued node must be a proper ancestor of both node and tmpNode
		if ((id != nodeid && nodeMarked[id]) && (id != tmpid && tmpMarked[id]) )
			return true;
	}

	return false;
}

/**
 * Method to print the lops
grouped by job type * * @param jobNodes list of lists of low-level operators * @throws DMLRuntimeException if DMLRuntimeException occurs */ private static void printJobNodes(ArrayList<ArrayList<Lop>> jobNodes) throws DMLRuntimeException { if (LOG.isTraceEnabled()){ for ( JobType jt : JobType.values() ) { int i = jt.getId(); if (i > 0 && jobNodes.get(i) != null && !jobNodes.get(i).isEmpty() ) { LOG.trace(jt.getName() + " Job Nodes:"); for (int j = 0; j < jobNodes.get(i).size(); j++) { LOG.trace(" " + jobNodes.get(i).get(j).getID() + ") " + jobNodes.get(i).get(j).toString()); } } } } } /** * Method to check if there exists any lops with ExecLocation=RecordReader * * @param nodes list of low-level operators * @param loc exec location * @return true if there is a node with RecordReader exec location */ private static boolean hasANode(ArrayList<Lop> nodes, ExecLocation loc) { for ( Lop n : nodes ) { if (n.getExecLocation() == ExecLocation.RecordReader) return true; } return false; } private ArrayList<ArrayList<Lop>> splitGMRNodesByRecordReader(ArrayList<Lop> gmrnodes) { // obtain the list of record reader nodes ArrayList<Lop> rrnodes = new ArrayList<Lop>(); for (Lop gmrnode : gmrnodes ) { if (gmrnode.getExecLocation() == ExecLocation.RecordReader) rrnodes.add(gmrnode); } // We allocate one extra vector to hold lops that do not depend on any // recordreader lops ArrayList<ArrayList<Lop>> splitGMR = createNodeVectors(rrnodes.size() + 1); // flags to indicate whether a lop has been added to one of the node vectors boolean[] flags = new boolean[gmrnodes.size()]; Arrays.fill(flags, false); // first, obtain all ancestors of recordreader lops for (int rrid = 0; rrid < rrnodes.size(); rrid++) { // prepare node list for i^th record reader lop // add record reader lop splitGMR.get(rrid).add(rrnodes.get(rrid)); for (int j = 0; j < gmrnodes.size(); j++) { if (rrnodes.get(rrid).equals(gmrnodes.get(j))) flags[j] = true; else if (isChild(rrnodes.get(rrid), gmrnodes.get(j), IDMap)) { 
splitGMR.get(rrid).add(gmrnodes.get(j)); flags[j] = true; } } } // add all remaining lops to a separate job int jobindex = rrnodes.size(); // the last node vector for (int i = 0; i < gmrnodes.size(); i++) { if (!flags[i]) { splitGMR.get(jobindex).add(gmrnodes.get(i)); flags[i] = true; } } return splitGMR; } /** * Method to generate hadoop jobs. Exec nodes can contains a mixture of node * types requiring different mr jobs. This method breaks the job into * sub-types and then invokes the appropriate method to generate * instructions. * * @param execNodes list of exec nodes * @param inst list of instructions * @param writeinst list of write instructions * @param deleteinst list of delete instructions * @param jobNodes list of list of low-level operators * @throws LopsException if LopsException occurs * @throws DMLRuntimeException if DMLRuntimeException occurs */ private void generateMRJobs(ArrayList<Lop> execNodes, ArrayList<Instruction> inst, ArrayList<Instruction> writeinst, ArrayList<Instruction> deleteinst, ArrayList<ArrayList<Lop>> jobNodes) throws LopsException, DMLRuntimeException { printJobNodes(jobNodes); ArrayList<Instruction> rmvarinst = new ArrayList<Instruction>(); for (JobType jt : JobType.values()) { // do nothing, if jt = INVALID or ANY if ( jt == JobType.INVALID || jt == JobType.ANY ) continue; int index = jt.getId(); // job id is used as an index into jobNodes ArrayList<Lop> currNodes = jobNodes.get(index); // generate MR job if (currNodes != null && !currNodes.isEmpty() ) { if( LOG.isTraceEnabled() ) LOG.trace("Generating " + jt.getName() + " job"); if (jt.allowsRecordReaderInstructions() && hasANode(jobNodes.get(index), ExecLocation.RecordReader)) { // split the nodes by recordReader lops ArrayList<ArrayList<Lop>> rrlist = splitGMRNodesByRecordReader(jobNodes.get(index)); for (int i = 0; i < rrlist.size(); i++) { generateMapReduceInstructions(rrlist.get(i), inst, writeinst, deleteinst, rmvarinst, jt); } } else if ( 
jt.allowsSingleShuffleInstruction() ) { // These jobs allow a single shuffle instruction. // We should split the nodes so that a separate job is produced for each shuffle instruction. Lop.Type splittingLopType = jt.getShuffleLopType(); ArrayList<Lop> nodesForASingleJob = new ArrayList<Lop>(); for (int i = 0; i < jobNodes.get(index).size(); i++) { if (jobNodes.get(index).get(i).getType() == splittingLopType) { nodesForASingleJob.clear(); // Add the lop that defines the split nodesForASingleJob.add(jobNodes.get(index).get(i)); /* * Add the splitting lop's children. This call is redundant when jt=SORT * because a sort job ALWAYS has a SINGLE lop in the entire job * i.e., there are no children to add when jt=SORT. */ addChildren(jobNodes.get(index).get(i), nodesForASingleJob, jobNodes.get(index)); if ( jt.isCompatibleWithParentNodes() ) { /* * If the splitting lop is compatible with parent nodes * then they must be added to the job. For example, MMRJ lop * may have a Data(Write) lop as its parent, which can be * executed along with MMRJ. */ addParents(jobNodes.get(index).get(i), nodesForASingleJob, jobNodes.get(index)); } generateMapReduceInstructions(nodesForASingleJob, inst, writeinst, deleteinst, rmvarinst, jt); } } } else { // the default case generateMapReduceInstructions(jobNodes.get(index), inst, writeinst, deleteinst, rmvarinst, jt); } } } inst.addAll(rmvarinst); } /** * Method to add all parents of "node" in exec_n to node_v. * * @param node low-level operator * @param node_v list of nodes * @param exec_n list of nodes */ private void addParents(Lop node, ArrayList<Lop> node_v, ArrayList<Lop> exec_n) { for (Lop enode : exec_n ) { if (isChild(node, enode, IDMap)) { if (!node_v.contains(enode)) { if( LOG.isTraceEnabled() ) LOG.trace("Adding parent - " + enode.toString()); node_v.add(enode); } } } } /** * Method to add all relevant data nodes for set of exec nodes. 
* * @param node low-level operator * @param node_v list of nodes * @param exec_n list of nodes */ private static void addChildren(Lop node, ArrayList<Lop> node_v, ArrayList<Lop> exec_n) { // add child in exec nodes that is not of type scalar if (exec_n.contains(node) && node.getExecLocation() != ExecLocation.ControlProgram) { if (!node_v.contains(node)) { node_v.add(node); if(LOG.isTraceEnabled()) LOG.trace(" Added child " + node.toString()); } } if (!exec_n.contains(node)) return; // recurse for (Lop n : node.getInputs() ) { addChildren(n, node_v, exec_n); } } /** * Method that determines the output format for a given node. * * @param node low-level operator * @param cellModeOverride override mode * @return output info * @throws LopsException if LopsException occurs */ private static OutputInfo getOutputInfo(Lop node, boolean cellModeOverride) throws LopsException { if ( (node.getDataType() == DataType.SCALAR && node.getExecType() == ExecType.CP) || node instanceof FunctionCallCP ) return null; OutputInfo oinfo = null; OutputParameters oparams = node.getOutputParameters(); if (oparams.isBlocked()) { if ( !cellModeOverride ) oinfo = OutputInfo.BinaryBlockOutputInfo; else { // output format is overridden, for example, due to recordReaderInstructions in the job oinfo = OutputInfo.BinaryCellOutputInfo; // record decision of overriding in lop's outputParameters so that // subsequent jobs that use this lop's output know the correct format. // TODO: ideally, this should be done by having a member variable in Lop // which stores the outputInfo. 
try { oparams.setDimensions(oparams.getNumRows(), oparams.getNumCols(), -1, -1, oparams.getNnz(), oparams.getUpdateType()); } catch(HopsException e) { throw new LopsException(node.printErrorLocation() + "error in getOutputInfo in Dag ", e); } } } else { if (oparams.getFormat() == Format.TEXT || oparams.getFormat() == Format.MM) oinfo = OutputInfo.TextCellOutputInfo; else if ( oparams.getFormat() == Format.CSV ) { oinfo = OutputInfo.CSVOutputInfo; } else { oinfo = OutputInfo.BinaryCellOutputInfo; } } /* Instead of following hardcoding, one must get this information from Lops */ if (node.getType() == Type.SortKeys && node.getExecType() == ExecType.MR) { if( ((SortKeys)node).getOpType() == SortKeys.OperationTypes.Indexes) oinfo = OutputInfo.BinaryBlockOutputInfo; else oinfo = OutputInfo.OutputInfoForSortOutput; } else if (node.getType() == Type.CombineBinary) { // Output format of CombineBinary (CB) depends on how the output is consumed CombineBinary combine = (CombineBinary) node; if ( combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreSort ) { oinfo = OutputInfo.OutputInfoForSortInput; } else if ( combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreCentralMoment || combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreCovUnweighted || combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreGroupedAggUnweighted ) { oinfo = OutputInfo.WeightedPairOutputInfo; } } else if ( node.getType() == Type.CombineTernary) { oinfo = OutputInfo.WeightedPairOutputInfo; } else if (node.getType() == Type.CentralMoment || node.getType() == Type.CoVariance ) { // CMMR always operate in "cell mode", // and the output is always in cell format oinfo = OutputInfo.BinaryCellOutputInfo; } return oinfo; } private String prepareAssignVarInstruction(Lop input, Lop node) { StringBuilder sb = new StringBuilder(); sb.append(ExecType.CP); sb.append(Lop.OPERAND_DELIMITOR); 
sb.append("assignvar"); sb.append(Lop.OPERAND_DELIMITOR); sb.append( input.prepScalarInputOperand(ExecType.CP) ); sb.append(Lop.OPERAND_DELIMITOR); sb.append(node.prepOutputOperand()); return sb.toString(); } /** * Method to setup output filenames and outputInfos, and to generate related instructions * * @param node low-level operator * @param et exec type * @param cellModeOverride override mode * @param copyTWrite ? * @return node output * @throws DMLRuntimeException if DMLRuntimeException occurs * @throws LopsException if LopsException occurs */ private NodeOutput setupNodeOutputs(Lop node, ExecType et, boolean cellModeOverride, boolean copyTWrite) throws DMLRuntimeException, LopsException { OutputParameters oparams = node.getOutputParameters(); NodeOutput out = new NodeOutput(); node.setConsumerCount(node.getOutputs().size()); // Compute the output format for this node out.setOutInfo(getOutputInfo(node, cellModeOverride)); // If node is NOT of type Data then we must generate // a variable to hold the value produced by this node // note: functioncallcp requires no createvar, rmvar since // since outputs are explicitly specified if (node.getExecLocation() != ExecLocation.Data ) { if (node.getDataType() == DataType.SCALAR) { oparams.setLabel(Lop.SCALAR_VAR_NAME_PREFIX + var_index.getNextID()); out.setVarName(oparams.getLabel()); Instruction currInstr = VariableCPInstruction.prepareRemoveInstruction(oparams.getLabel()); currInstr.setLocation(node); out.addLastInstruction(currInstr); } else if(node instanceof ParameterizedBuiltin && ((ParameterizedBuiltin)node).getOp() == org.apache.sysml.lops.ParameterizedBuiltin.OperationTypes.TRANSFORM) { ParameterizedBuiltin pbi = (ParameterizedBuiltin)node; Lop input = pbi.getNamedInput(ParameterizedBuiltinFunctionExpression.TF_FN_PARAM_DATA); if(input.getDataType()== DataType.FRAME) { // Output of transform is in CSV format, which gets subsequently reblocked // TODO: change it to output binaryblock Data dataInput = (Data) 
input; oparams.setFile_name(getNextUniqueFilename()); oparams.setLabel(getNextUniqueVarname(DataType.MATRIX)); // generate an instruction that creates a symbol table entry for the new variable in CSV format Data delimLop = (Data) dataInput.getNamedInputLop( DataExpression.DELIM_DELIMITER, DataExpression.DEFAULT_DELIM_DELIMITER); Instruction createvarInst = VariableCPInstruction.prepareCreateVariableInstruction( oparams.getLabel(), oparams.getFile_name(), true, DataType.MATRIX, OutputInfo.outputInfoToString(OutputInfo.CSVOutputInfo), new MatrixCharacteristics(oparams.getNumRows(), oparams.getNumCols(), -1, -1, oparams.getNnz()), oparams.getUpdateType(), false, delimLop.getStringValue(), true ); createvarInst.setLocation(node); out.addPreInstruction(createvarInst); // temp file as well as the variable has to be deleted at the end Instruction currInstr = VariableCPInstruction.prepareRemoveInstruction(oparams.getLabel()); currInstr.setLocation(node); out.addLastInstruction(currInstr); // finally, add the generated filename and variable name to the list of outputs out.setFileName(oparams.getFile_name()); out.setVarName(oparams.getLabel()); } else { throw new LopsException("Input to transform() has an invalid type: " + input.getDataType() + ", it must be FRAME."); } } else if(!(node instanceof FunctionCallCP)) //general case { // generate temporary filename and a variable name to hold the // output produced by "rootNode" oparams.setFile_name(getNextUniqueFilename()); oparams.setLabel(getNextUniqueVarname(node.getDataType())); // generate an instruction that creates a symbol table entry for the new variable //String createInst = prepareVariableInstruction("createvar", node); //out.addPreInstruction(CPInstructionParser.parseSingleInstruction(createInst)); int rpb = (int) oparams.getRowsInBlock(); int cpb = (int) oparams.getColsInBlock(); Instruction createvarInst = VariableCPInstruction.prepareCreateVariableInstruction( oparams.getLabel(), oparams.getFile_name(), true, 
node.getDataType(), OutputInfo.outputInfoToString(getOutputInfo(node, false)), new MatrixCharacteristics(oparams.getNumRows(), oparams.getNumCols(), rpb, cpb, oparams.getNnz()), oparams.getUpdateType() ); createvarInst.setLocation(node); out.addPreInstruction(createvarInst); // temp file as well as the variable has to be deleted at the end Instruction currInstr = VariableCPInstruction.prepareRemoveInstruction(oparams.getLabel()); currInstr.setLocation(node); out.addLastInstruction(currInstr); // finally, add the generated filename and variable name to the list of outputs out.setFileName(oparams.getFile_name()); out.setVarName(oparams.getLabel()); } else { // If the function call is set with output lops (e.g., multi return builtin), // generate a createvar instruction for each function output FunctionCallCP fcall = (FunctionCallCP) node; if ( fcall.getFunctionOutputs() != null ) { for( Lop fnOut: fcall.getFunctionOutputs()) { OutputParameters fnOutParams = fnOut.getOutputParameters(); //OutputInfo oinfo = getOutputInfo((N)fnOut, false); Instruction createvarInst = VariableCPInstruction.prepareCreateVariableInstruction( fnOutParams.getLabel(), getFilePath() + fnOutParams.getLabel(), true, fnOut.getDataType(), OutputInfo.outputInfoToString(getOutputInfo(fnOut, false)), new MatrixCharacteristics(fnOutParams.getNumRows(), fnOutParams.getNumCols(), (int)fnOutParams.getRowsInBlock(), (int)fnOutParams.getColsInBlock(), fnOutParams.getNnz()), oparams.getUpdateType() ); if (node._beginLine != 0) createvarInst.setLocation(node); else createvarInst.setLocation(fnOut); out.addPreInstruction(createvarInst); } } } } // rootNode is of type Data else { if ( node.getDataType() == DataType.SCALAR ) { // generate assignment operations for final and transient writes if ( oparams.getFile_name() == null && !(node instanceof Data && ((Data)node).isPersistentWrite()) ) { String io_inst = prepareAssignVarInstruction(node.getInputs().get(0), node); CPInstruction currInstr = 
CPInstructionParser.parseSingleInstruction(io_inst); if (node._beginLine != 0) currInstr.setLocation(node); else if ( !node.getInputs().isEmpty() ) currInstr.setLocation(node.getInputs().get(0)); out.addLastInstruction(currInstr); } else { //CP PERSISTENT WRITE SCALARS Lop fname = ((Data)node).getNamedInputLop(DataExpression.IO_FILENAME); String io_inst = node.getInstructions(node.getInputs().get(0).getOutputParameters().getLabel(), fname.getOutputParameters().getLabel()); CPInstruction currInstr = CPInstructionParser.parseSingleInstruction(io_inst); if (node._beginLine != 0) currInstr.setLocation(node); else if ( !node.getInputs().isEmpty() ) currInstr.setLocation(node.getInputs().get(0)); out.addLastInstruction(currInstr); } } else { if ( ((Data)node).isTransient() ) { if ( et == ExecType.CP ) { // If transient matrix write is in CP then its input MUST be executed in CP as well. // get variable and filename associated with the input String inputFileName = node.getInputs().get(0).getOutputParameters().getFile_name(); String inputVarName = node.getInputs().get(0).getOutputParameters().getLabel(); String constVarName = oparams.getLabel(); String constFileName = inputFileName + constVarName; /* * Symbol Table state must change as follows: * * FROM: * mvar1 -> temp21 * * TO: * mVar1 -> temp21 * tVarH -> temp21 */ Instruction currInstr = VariableCPInstruction.prepareCopyInstruction(inputVarName, constVarName); currInstr.setLocation(node); out.addLastInstruction(currInstr); out.setFileName(constFileName); } else { if(copyTWrite) { Instruction currInstr = VariableCPInstruction.prepareCopyInstruction(node.getInputs().get(0).getOutputParameters().getLabel(), oparams.getLabel()); currInstr.setLocation(node); out.addLastInstruction(currInstr); return out; } /* * Since the "rootNode" is a transient data node, we first need to generate a * temporary filename as well as a variable name to hold the <i>immediate</i> * output produced by "rootNode". 
These generated HDFS filename and the * variable name must be changed at the end of an iteration/program block * so that the subsequent iteration/program block can correctly access the * generated data. Therefore, we need to distinguish between the following: * * 1) Temporary file name & variable name: They hold the immediate output * produced by "rootNode". Both names are generated below. * * 2) Constant file name & variable name: They are constant across iterations. * Variable name is given by rootNode's label that is created in the upper layers. * File name is generated by concatenating "temporary file name" and "constant variable name". * * Temporary files must be moved to constant files at the end of the iteration/program block. */ // generate temporary filename & var name String tempVarName = oparams.getLabel() + "temp"; String tempFileName = getNextUniqueFilename(); //String createInst = prepareVariableInstruction("createvar", tempVarName, node.getDataType(), node.getValueType(), tempFileName, oparams, out.getOutInfo()); //out.addPreInstruction(CPInstructionParser.parseSingleInstruction(createInst)); int rpb = (int) oparams.getRowsInBlock(); int cpb = (int) oparams.getColsInBlock(); Instruction createvarInst = VariableCPInstruction.prepareCreateVariableInstruction( tempVarName, tempFileName, true, node.getDataType(), OutputInfo.outputInfoToString(out.getOutInfo()), new MatrixCharacteristics(oparams.getNumRows(), oparams.getNumCols(), rpb, cpb, oparams.getNnz()), oparams.getUpdateType() ); createvarInst.setLocation(node); out.addPreInstruction(createvarInst); String constVarName = oparams.getLabel(); String constFileName = tempFileName + constVarName; oparams.setFile_name(getFilePath() + constFileName); /* * Since this is a node that denotes a transient read/write, we need to make sure * that the data computed for a given variable in a given iteration is passed on * to the next iteration. 
This is done by generating miscellaneous instructions * that gets executed at the end of the program block. * * The state of the symbol table must change * * FROM: * tVarA -> temp21tVarA (old copy of temp21) * tVarAtemp -> temp21 (new copy that should override the old copy) * * TO: * tVarA -> temp21tVarA */ // rename the temp variable to constant variable (e.g., cpvar tVarAtemp tVarA) /*Instruction currInstr = VariableCPInstruction.prepareCopyInstruction(tempVarName, constVarName); if(DMLScript.ENABLE_DEBUG_MODE) { currInstr.setLineNum(node._beginLine); } out.addLastInstruction(currInstr); Instruction tempInstr = VariableCPInstruction.prepareRemoveInstruction(tempVarName); if(DMLScript.ENABLE_DEBUG_MODE) { tempInstr.setLineNum(node._beginLine); } out.addLastInstruction(tempInstr);*/ // Generate a single mvvar instruction (e.g., mvvar tempA A) // instead of two instructions "cpvar tempA A" and "rmvar tempA" Instruction currInstr = VariableCPInstruction.prepareMoveInstruction(tempVarName, constVarName); currInstr.setLocation(node); out.addLastInstruction(currInstr); // finally, add the temporary filename and variable name to the list of outputs out.setFileName(tempFileName); out.setVarName(tempVarName); } } // rootNode is not a transient write. It is a persistent write. else { if(et == ExecType.MR) { //MR PERSISTENT WRITE // create a variable to hold the result produced by this "rootNode" oparams.setLabel("pVar" + var_index.getNextID() ); //String createInst = prepareVariableInstruction("createvar", node); //out.addPreInstruction(CPInstructionParser.parseSingleInstruction(createInst)); int rpb = (int) oparams.getRowsInBlock(); int cpb = (int) oparams.getColsInBlock(); Lop fnameLop = ((Data)node).getNamedInputLop(DataExpression.IO_FILENAME); String fnameStr = (fnameLop instanceof Data && ((Data)fnameLop).isLiteral()) ? 
fnameLop.getOutputParameters().getLabel() : Lop.VARIABLE_NAME_PLACEHOLDER + fnameLop.getOutputParameters().getLabel() + Lop.VARIABLE_NAME_PLACEHOLDER; Instruction createvarInst; // for MatrixMarket format, the creatvar will output the result to a temporary file in textcell format // the CP write instruction (post instruction) after the MR instruction will merge the result into a single // part MM format file on hdfs. if (oparams.getFormat() == Format.CSV) { String tempFileName = getNextUniqueFilename(); String createInst = node.getInstructions(tempFileName); createvarInst= CPInstructionParser.parseSingleInstruction(createInst); //NOTE: no instruction patching because final write from cp instruction String writeInst = node.getInstructions(oparams.getLabel(), fnameLop.getOutputParameters().getLabel() ); CPInstruction currInstr = CPInstructionParser.parseSingleInstruction(writeInst); currInstr.setLocation(node); out.addPostInstruction(currInstr); // remove the variable CPInstruction tempInstr = CPInstructionParser.parseSingleInstruction( "CP" + Lop.OPERAND_DELIMITOR + "rmfilevar" + Lop.OPERAND_DELIMITOR + oparams.getLabel() + Lop.VALUETYPE_PREFIX + Expression.ValueType.UNKNOWN + Lop.OPERAND_DELIMITOR + "true" + Lop.VALUETYPE_PREFIX + "BOOLEAN"); tempInstr.setLocation(node); out.addLastInstruction(tempInstr); } else if (oparams.getFormat() == Format.MM ) { createvarInst= VariableCPInstruction.prepareCreateVariableInstruction( oparams.getLabel(), getNextUniqueFilename(), false, node.getDataType(), OutputInfo.outputInfoToString(getOutputInfo(node, false)), new MatrixCharacteristics(oparams.getNumRows(), oparams.getNumCols(), rpb, cpb, oparams.getNnz()), oparams.getUpdateType() ); //NOTE: no instruction patching because final write from cp instruction String writeInst = node.getInstructions(oparams.getLabel(), fnameLop.getOutputParameters().getLabel()); CPInstruction currInstr = CPInstructionParser.parseSingleInstruction(writeInst); currInstr.setLocation(node); 
out.addPostInstruction(currInstr); // remove the variable CPInstruction tempInstr = CPInstructionParser.parseSingleInstruction( "CP" + Lop.OPERAND_DELIMITOR + "rmfilevar" + Lop.OPERAND_DELIMITOR + oparams.getLabel() + Lop.VALUETYPE_PREFIX + Expression.ValueType.UNKNOWN + Lop.OPERAND_DELIMITOR + "true" + Lop.VALUETYPE_PREFIX + "BOOLEAN"); tempInstr.setLocation(node); out.addLastInstruction(tempInstr); } else { createvarInst= VariableCPInstruction.prepareCreateVariableInstruction( oparams.getLabel(), fnameStr, false, node.getDataType(), OutputInfo.outputInfoToString(getOutputInfo(node, false)), new MatrixCharacteristics(oparams.getNumRows(), oparams.getNumCols(), rpb, cpb, oparams.getNnz()), oparams.getUpdateType() ); // remove the variable CPInstruction currInstr = CPInstructionParser.parseSingleInstruction( "CP" + Lop.OPERAND_DELIMITOR + "rmfilevar" + Lop.OPERAND_DELIMITOR + oparams.getLabel() + Lop.VALUETYPE_PREFIX + Expression.ValueType.UNKNOWN + Lop.OPERAND_DELIMITOR + "false" + Lop.VALUETYPE_PREFIX + "BOOLEAN"); currInstr.setLocation(node); out.addLastInstruction(currInstr); } createvarInst.setLocation(node); out.addPreInstruction(createvarInst); // finally, add the filename and variable name to the list of outputs out.setFileName(oparams.getFile_name()); out.setVarName(oparams.getLabel()); } else { //CP PERSISTENT WRITE // generate a write instruction that writes matrix to HDFS Lop fname = ((Data)node).getNamedInputLop(DataExpression.IO_FILENAME); Instruction currInstr = null; Lop inputLop = node.getInputs().get(0); // Case of a transient read feeding into only one output persistent binaryblock write // Move the temporary file on HDFS to required persistent location, insteadof copying. 
if (inputLop.getExecLocation() == ExecLocation.Data && inputLop.getOutputs().size() == 1 && ((Data)inputLop).isTransient() && ((Data)inputLop).getOutputParameters().isBlocked() && node.getOutputParameters().isBlocked() ) { // transient read feeding into persistent write in blocked representation // simply, move the file //prepare filename (literal or variable in order to support dynamic write) String fnameStr = (fname instanceof Data && ((Data)fname).isLiteral()) ? fname.getOutputParameters().getLabel() : Lop.VARIABLE_NAME_PLACEHOLDER + fname.getOutputParameters().getLabel() + Lop.VARIABLE_NAME_PLACEHOLDER; currInstr = (CPInstruction) VariableCPInstruction.prepareMoveInstruction( inputLop.getOutputParameters().getLabel(), fnameStr, "binaryblock" ); } else { String io_inst = node.getInstructions( node.getInputs().get(0).getOutputParameters().getLabel(), fname.getOutputParameters().getLabel()); if(node.getExecType() == ExecType.SPARK) // This will throw an exception if the exectype of hop is set incorrectly // Note: the exec type and exec location of lops needs to be set to SPARK and ControlProgram respectively currInstr = SPInstructionParser.parseSingleInstruction(io_inst); else currInstr = CPInstructionParser.parseSingleInstruction(io_inst); } if ( !node.getInputs().isEmpty() && node.getInputs().get(0)._beginLine != 0) currInstr.setLocation(node.getInputs().get(0)); else currInstr.setLocation(node); out.addLastInstruction(currInstr); } } } } return out; } /** * Method to generate MapReduce job instructions from a given set of nodes. 
* * @param execNodes list of exec nodes * @param inst list of instructions * @param writeinst list of write instructions * @param deleteinst list of delete instructions * @param rmvarinst list of rmvar instructions * @param jt job type * @throws LopsException if LopsException occurs * @throws DMLRuntimeException if DMLRuntimeException occurs */ private void generateMapReduceInstructions(ArrayList<Lop> execNodes, ArrayList<Instruction> inst, ArrayList<Instruction> writeinst, ArrayList<Instruction> deleteinst, ArrayList<Instruction> rmvarinst, JobType jt) throws LopsException, DMLRuntimeException { ArrayList<Byte> resultIndices = new ArrayList<Byte>(); ArrayList<String> inputs = new ArrayList<String>(); ArrayList<String> outputs = new ArrayList<String>(); ArrayList<InputInfo> inputInfos = new ArrayList<InputInfo>(); ArrayList<OutputInfo> outputInfos = new ArrayList<OutputInfo>(); ArrayList<Long> numRows = new ArrayList<Long>(); ArrayList<Long> numCols = new ArrayList<Long>(); ArrayList<Long> numRowsPerBlock = new ArrayList<Long>(); ArrayList<Long> numColsPerBlock = new ArrayList<Long>(); ArrayList<String> mapperInstructions = new ArrayList<String>(); ArrayList<String> randInstructions = new ArrayList<String>(); ArrayList<String> recordReaderInstructions = new ArrayList<String>(); int numReducers = 0; int replication = 1; ArrayList<String> inputLabels = new ArrayList<String>(); ArrayList<String> outputLabels = new ArrayList<String>(); ArrayList<Instruction> renameInstructions = new ArrayList<Instruction>(); ArrayList<Instruction> variableInstructions = new ArrayList<Instruction>(); ArrayList<Instruction> postInstructions = new ArrayList<Instruction>(); ArrayList<Integer> MRJobLineNumbers = null; if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers = new ArrayList<Integer>(); } ArrayList<Lop> inputLops = new ArrayList<Lop>(); boolean cellModeOverride = false; /* Find the nodes that produce an output */ ArrayList<Lop> rootNodes = new ArrayList<Lop>(); 
getOutputNodes(execNodes, rootNodes, jt); if( LOG.isTraceEnabled() ) LOG.trace("# of root nodes = " + rootNodes.size()); /* Remove transient writes that are simple copy of transient reads */ if (jt == JobType.GMR || jt == JobType.GMRCELL) { ArrayList<Lop> markedNodes = new ArrayList<Lop>(); // only keep data nodes that are results of some computation. for ( Lop rnode : rootNodes ) { if (rnode.getExecLocation() == ExecLocation.Data && ((Data) rnode).isTransient() && ((Data) rnode).getOperationType() == OperationTypes.WRITE && ((Data) rnode).getDataType() == DataType.MATRIX) { // no computation, just a copy if (rnode.getInputs().get(0).getExecLocation() == ExecLocation.Data && ((Data) rnode.getInputs().get(0)).isTransient() && rnode.getOutputParameters().getLabel().equals( rnode.getInputs().get(0).getOutputParameters().getLabel())) { markedNodes.add(rnode); } } } // delete marked nodes rootNodes.removeAll(markedNodes); markedNodes.clear(); if ( rootNodes.isEmpty() ) return; } // structure that maps node to their indices that will be used in the instructions HashMap<Lop, Integer> nodeIndexMapping = new HashMap<Lop, Integer>(); /* Determine all input data files */ for ( Lop rnode : rootNodes ) { getInputPathsAndParameters(rnode, execNodes, inputs, inputInfos, numRows, numCols, numRowsPerBlock, numColsPerBlock, nodeIndexMapping, inputLabels, inputLops, MRJobLineNumbers); } // In case of RAND job, instructions are defined in the input file if (jt == JobType.DATAGEN) randInstructions = inputs; int[] start_index = new int[1]; start_index[0] = inputs.size(); /* Get RecordReader Instructions */ // currently, recordreader instructions are allowed only in GMR jobs if (jt == JobType.GMR || jt == JobType.GMRCELL) { for ( Lop rnode : rootNodes ) { getRecordReaderInstructions(rnode, execNodes, inputs, recordReaderInstructions, nodeIndexMapping, start_index, inputLabels, inputLops, MRJobLineNumbers); if ( recordReaderInstructions.size() > 1 ) throw new LopsException("MapReduce job 
can only have a single recordreader instruction: " + recordReaderInstructions.toString()); } } /* * Handle cases when job's output is FORCED to be cell format. * - If there exist a cell input, then output can not be blocked. * Only exception is when jobType = REBLOCK/CSVREBLOCK (for obvisous reason) * or when jobType = RAND since RandJob takes a special input file, * whose format should not be used to dictate the output format. * - If there exists a recordReader instruction * - If jobtype = GroupedAgg. This job can only run in cell mode. */ // if ( jt != JobType.REBLOCK && jt != JobType.CSV_REBLOCK && jt != JobType.DATAGEN && jt != JobType.TRANSFORM) { for (int i=0; i < inputInfos.size(); i++) if ( inputInfos.get(i) == InputInfo.BinaryCellInputInfo || inputInfos.get(i) == InputInfo.TextCellInputInfo ) cellModeOverride = true; } if ( !recordReaderInstructions.isEmpty() || jt == JobType.GROUPED_AGG ) cellModeOverride = true; /* Get Mapper Instructions */ for (int i = 0; i < rootNodes.size(); i++) { getMapperInstructions(rootNodes.get(i), execNodes, inputs, mapperInstructions, nodeIndexMapping, start_index, inputLabels, inputLops, MRJobLineNumbers); } if (LOG.isTraceEnabled()) { LOG.trace(" Input strings: " + inputs.toString()); if (jt == JobType.DATAGEN) LOG.trace(" Rand instructions: " + getCSVString(randInstructions)); if (jt == JobType.GMR) LOG.trace(" RecordReader instructions: " + getCSVString(recordReaderInstructions)); LOG.trace(" Mapper instructions: " + getCSVString(mapperInstructions)); } /* Get Shuffle and Reducer Instructions */ ArrayList<String> shuffleInstructions = new ArrayList<String>(); ArrayList<String> aggInstructionsReducer = new ArrayList<String>(); ArrayList<String> otherInstructionsReducer = new ArrayList<String>(); for( Lop rn : rootNodes ) { int resultIndex = getAggAndOtherInstructions( rn, execNodes, shuffleInstructions, aggInstructionsReducer, otherInstructionsReducer, nodeIndexMapping, start_index, inputLabels, inputLops, 
MRJobLineNumbers); if ( resultIndex == -1) throw new LopsException("Unexpected error in piggybacking!"); if ( rn.getExecLocation() == ExecLocation.Data && ((Data)rn).getOperationType() == Data.OperationTypes.WRITE && ((Data)rn).isTransient() && rootNodes.contains(rn.getInputs().get(0)) ) { // Both rn (a transient write) and its input are root nodes. // Instead of creating two copies of the data, simply generate a cpvar instruction NodeOutput out = setupNodeOutputs(rn, ExecType.MR, cellModeOverride, true); writeinst.addAll(out.getLastInstructions()); } else { resultIndices.add(Byte.valueOf((byte)resultIndex)); // setup output filenames and outputInfos and generate related instructions NodeOutput out = setupNodeOutputs(rn, ExecType.MR, cellModeOverride, false); outputLabels.add(out.getVarName()); outputs.add(out.getFileName()); outputInfos.add(out.getOutInfo()); if (LOG.isTraceEnabled()) { LOG.trace(" Output Info: " + out.getFileName() + ";" + OutputInfo.outputInfoToString(out.getOutInfo()) + ";" + out.getVarName()); } renameInstructions.addAll(out.getLastInstructions()); variableInstructions.addAll(out.getPreInstructions()); postInstructions.addAll(out.getPostInstructions()); } } /* Determine if the output dimensions are known */ byte[] resultIndicesByte = new byte[resultIndices.size()]; for (int i = 0; i < resultIndicesByte.length; i++) { resultIndicesByte[i] = resultIndices.get(i).byteValue(); } if (LOG.isTraceEnabled()) { LOG.trace(" Shuffle Instructions: " + getCSVString(shuffleInstructions)); LOG.trace(" Aggregate Instructions: " + getCSVString(aggInstructionsReducer)); LOG.trace(" Other instructions =" + getCSVString(otherInstructionsReducer)); LOG.trace(" Output strings: " + outputs.toString()); LOG.trace(" ResultIndices = " + resultIndices.toString()); } /* Prepare the MapReduce job instruction */ MRJobInstruction mr = new MRJobInstruction(jt); // check if this is a map-only job. 
If not, set the number of reducers if ( !shuffleInstructions.isEmpty() || !aggInstructionsReducer.isEmpty() || !otherInstructionsReducer.isEmpty() ) numReducers = total_reducers; // set inputs, outputs, and other other properties for the job mr.setInputOutputLabels(inputLabels.toArray(new String[0]), outputLabels.toArray(new String[0])); mr.setOutputs(resultIndicesByte); mr.setDimsUnknownFilePrefix(getFilePath()); mr.setNumberOfReducers(numReducers); mr.setReplication(replication); // set instructions for recordReader and mapper mr.setRecordReaderInstructions(getCSVString(recordReaderInstructions)); mr.setMapperInstructions(getCSVString(mapperInstructions)); //compute and set mapper memory requirements (for consistency of runtime piggybacking) if( jt == JobType.GMR ) { double mem = 0; for( Lop n : execNodes ) mem += computeFootprintInMapper(n); mr.setMemoryRequirements(mem); } if ( jt == JobType.DATAGEN ) mr.setRandInstructions(getCSVString(randInstructions)); // set shuffle instructions mr.setShuffleInstructions(getCSVString(shuffleInstructions)); // set reducer instruction mr.setAggregateInstructionsInReducer(getCSVString(aggInstructionsReducer)); mr.setOtherInstructionsInReducer(getCSVString(otherInstructionsReducer)); if(DMLScript.ENABLE_DEBUG_MODE) { // set line number information for each MR instruction mr.setMRJobInstructionsLineNumbers(MRJobLineNumbers); } /* Add the prepared instructions to output set */ inst.addAll(variableInstructions); inst.add(mr); inst.addAll(postInstructions); deleteinst.addAll(renameInstructions); for (Lop l : inputLops) { if(DMLScript.ENABLE_DEBUG_MODE) { processConsumers(l, rmvarinst, deleteinst, l); } else { processConsumers(l, rmvarinst, deleteinst, null); } } } /** * converts an array list into a Lop.INSTRUCTION_DELIMITOR separated string * * @param inputStrings list of input strings * @return Lop.INSTRUCTION_DELIMITOR separated string */ private static String getCSVString(ArrayList<String> inputStrings) { StringBuilder sb = 
new StringBuilder(); for ( String str : inputStrings ) { if( str != null ) { if( sb.length()>0 ) sb.append(Lop.INSTRUCTION_DELIMITOR); sb.append( str ); } } return sb.toString(); } /** * Method to populate aggregate and other instructions in reducer. * * @param node low-level operator * @param execNodes list of exec nodes * @param shuffleInstructions list of shuffle instructions * @param aggInstructionsReducer ? * @param otherInstructionsReducer ? * @param nodeIndexMapping node index mapping * @param start_index start index * @param inputLabels list of input labels * @param inputLops list of input lops * @param MRJobLineNumbers MR job line numbers * @return -1 if problem * @throws LopsException if LopsException occurs */ private int getAggAndOtherInstructions(Lop node, ArrayList<Lop> execNodes, ArrayList<String> shuffleInstructions, ArrayList<String> aggInstructionsReducer, ArrayList<String> otherInstructionsReducer, HashMap<Lop, Integer> nodeIndexMapping, int[] start_index, ArrayList<String> inputLabels, ArrayList<Lop> inputLops, ArrayList<Integer> MRJobLineNumbers) throws LopsException { int ret_val = -1; if (nodeIndexMapping.containsKey(node)) return nodeIndexMapping.get(node); // if not an input source and not in exec nodes, return. if (!execNodes.contains(node)) return ret_val; ArrayList<Integer> inputIndices = new ArrayList<Integer>(); // recurse // For WRITE, since the first element from input is the real input (the other elements // are parameters for the WRITE operation), so we only need to take care of the // first element. 
if (node.getType() == Lop.Type.Data && ((Data)node).getOperationType() == Data.OperationTypes.WRITE) { ret_val = getAggAndOtherInstructions(node.getInputs().get(0), execNodes, shuffleInstructions, aggInstructionsReducer, otherInstructionsReducer, nodeIndexMapping, start_index, inputLabels, inputLops, MRJobLineNumbers); inputIndices.add(ret_val); } else { for ( Lop cnode : node.getInputs() ) { ret_val = getAggAndOtherInstructions(cnode, execNodes, shuffleInstructions, aggInstructionsReducer, otherInstructionsReducer, nodeIndexMapping, start_index, inputLabels, inputLops, MRJobLineNumbers); inputIndices.add(ret_val); } } if (node.getExecLocation() == ExecLocation.Data ) { if ( ((Data)node).getFileFormatType() == FileFormatTypes.CSV && !(node.getInputs().get(0) instanceof ParameterizedBuiltin && ((ParameterizedBuiltin)node.getInputs().get(0)).getOp() == org.apache.sysml.lops.ParameterizedBuiltin.OperationTypes.TRANSFORM)) { // Generate write instruction, which goes into CSV_WRITE Job int output_index = start_index[0]; shuffleInstructions.add(node.getInstructions(inputIndices.get(0), output_index)); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } nodeIndexMapping.put(node, output_index); start_index[0]++; return output_index; } else { return ret_val; } } if (node.getExecLocation() == ExecLocation.MapAndReduce) { /* Generate Shuffle Instruction for "node", and return the index associated with produced output */ boolean instGenerated = true; int output_index = start_index[0]; switch(node.getType()) { /* Lop types that take a single input */ case ReBlock: case CSVReBlock: case SortKeys: case CentralMoment: case CoVariance: case GroupedAgg: case DataPartition: shuffleInstructions.add(node.getInstructions(inputIndices.get(0), output_index)); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } break; case ParameterizedBuiltin: if( ((ParameterizedBuiltin)node).getOp() == 
org.apache.sysml.lops.ParameterizedBuiltin.OperationTypes.TRANSFORM ) { shuffleInstructions.add(node.getInstructions(output_index)); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } } break; /* Lop types that take two inputs */ case MMCJ: case MMRJ: case CombineBinary: shuffleInstructions.add(node.getInstructions(inputIndices.get(0), inputIndices.get(1), output_index)); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } break; /* Lop types that take three inputs */ case CombineTernary: shuffleInstructions.add(node.getInstructions(inputIndices .get(0), inputIndices.get(1), inputIndices.get(2), output_index)); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } break; default: instGenerated = false; break; } if ( instGenerated ) { nodeIndexMapping.put(node, output_index); start_index[0]++; return output_index; } else { return inputIndices.get(0); } } /* Get instructions for aligned reduce and other lops below the reduce. 
*/ if (node.getExecLocation() == ExecLocation.Reduce || node.getExecLocation() == ExecLocation.MapOrReduce || hasChildNode(node, execNodes, ExecLocation.MapAndReduce)) { if (inputIndices.size() == 1) { int output_index = start_index[0]; start_index[0]++; if (node.getType() == Type.Aggregate) { aggInstructionsReducer.add(node.getInstructions( inputIndices.get(0), output_index)); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } } else { otherInstructionsReducer.add(node.getInstructions( inputIndices.get(0), output_index)); } if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } nodeIndexMapping.put(node, output_index); return output_index; } else if (inputIndices.size() == 2) { int output_index = start_index[0]; start_index[0]++; otherInstructionsReducer.add(node.getInstructions(inputIndices .get(0), inputIndices.get(1), output_index)); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } nodeIndexMapping.put(node, output_index); // populate list of input labels. 
// only Unary lops can contribute to labels if (node instanceof Unary && node.getInputs().size() > 1) { int index = 0; for (int i = 0; i < node.getInputs().size(); i++) { if (node.getInputs().get(i).getDataType() == DataType.SCALAR) { index = i; break; } } if (node.getInputs().get(index).getExecLocation() == ExecLocation.Data && !((Data) (node.getInputs().get(index))).isLiteral()) { inputLabels.add(node.getInputs().get(index).getOutputParameters().getLabel()); inputLops.add(node.getInputs().get(index)); } if (node.getInputs().get(index).getExecLocation() != ExecLocation.Data) { inputLabels.add(node.getInputs().get(index).getOutputParameters().getLabel()); inputLops.add(node.getInputs().get(index)); } } return output_index; } else if (inputIndices.size() == 3 || node.getType() == Type.Ternary) { int output_index = start_index[0]; start_index[0]++; if (node.getType() == Type.Ternary ) { // in case of CTABLE_TRANSFORM_SCALAR_WEIGHT: inputIndices.get(2) would be -1 otherInstructionsReducer.add(node.getInstructions( inputIndices.get(0), inputIndices.get(1), inputIndices.get(2), output_index)); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } nodeIndexMapping.put(node, output_index); } else if( node.getType() == Type.ParameterizedBuiltin ){ otherInstructionsReducer.add(node.getInstructions( inputIndices.get(0), inputIndices.get(1), inputIndices.get(2), output_index)); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } nodeIndexMapping.put(node, output_index); } else { otherInstructionsReducer.add(node.getInstructions( inputIndices.get(0), inputIndices.get(1), inputIndices.get(2), output_index)); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } nodeIndexMapping.put(node, output_index); return output_index; } return output_index; } else if (inputIndices.size() == 4) { int output_index = start_index[0]; start_index[0]++; otherInstructionsReducer.add(node.getInstructions( inputIndices.get(0), 
inputIndices.get(1), inputIndices.get(2), inputIndices.get(3), output_index)); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } nodeIndexMapping.put(node, output_index); return output_index; } else throw new LopsException("Invalid number of inputs to a lop: " + inputIndices.size()); } return -1; } /** * Method to get record reader instructions for a MR job. * * @param node low-level operator * @param execNodes list of exec nodes * @param inputStrings list of input strings * @param recordReaderInstructions list of record reader instructions * @param nodeIndexMapping node index mapping * @param start_index start index * @param inputLabels list of input labels * @param inputLops list of input lops * @param MRJobLineNumbers MR job line numbers * @return -1 if problem * @throws LopsException if LopsException occurs */ private static int getRecordReaderInstructions(Lop node, ArrayList<Lop> execNodes, ArrayList<String> inputStrings, ArrayList<String> recordReaderInstructions, HashMap<Lop, Integer> nodeIndexMapping, int[] start_index, ArrayList<String> inputLabels, ArrayList<Lop> inputLops, ArrayList<Integer> MRJobLineNumbers) throws LopsException { // if input source, return index if (nodeIndexMapping.containsKey(node)) return nodeIndexMapping.get(node); // not input source and not in exec nodes, then return. 
if (!execNodes.contains(node)) return -1; ArrayList<Integer> inputIndices = new ArrayList<Integer>(); int max_input_index = -1; //N child_for_max_input_index = null; // get mapper instructions for (int i = 0; i < node.getInputs().size(); i++) { // recurse Lop childNode = node.getInputs().get(i); int ret_val = getRecordReaderInstructions(childNode, execNodes, inputStrings, recordReaderInstructions, nodeIndexMapping, start_index, inputLabels, inputLops, MRJobLineNumbers); inputIndices.add(ret_val); if (ret_val > max_input_index) { max_input_index = ret_val; //child_for_max_input_index = childNode; } } // only lops with execLocation as RecordReader can contribute // instructions if ((node.getExecLocation() == ExecLocation.RecordReader)) { int output_index = max_input_index; // cannot reuse index if this is true // need to add better indexing schemes output_index = start_index[0]; start_index[0]++; nodeIndexMapping.put(node, output_index); // populate list of input labels. // only Ranagepick lop can contribute to labels if (node.getType() == Type.PickValues) { PickByCount pbc = (PickByCount) node; if (pbc.getOperationType() == PickByCount.OperationTypes.RANGEPICK) { int scalarIndex = 1; // always the second input is a scalar // if data lop not a literal -- add label if (node.getInputs().get(scalarIndex).getExecLocation() == ExecLocation.Data && !((Data) (node.getInputs().get(scalarIndex))).isLiteral()) { inputLabels.add(node.getInputs().get(scalarIndex).getOutputParameters().getLabel()); inputLops.add(node.getInputs().get(scalarIndex)); } // if not data lop, then this is an intermediate variable. if (node.getInputs().get(scalarIndex).getExecLocation() != ExecLocation.Data) { inputLabels.add(node.getInputs().get(scalarIndex).getOutputParameters().getLabel()); inputLops.add(node.getInputs().get(scalarIndex)); } } } // get recordreader instruction. 
if (node.getInputs().size() == 2) { recordReaderInstructions.add(node.getInstructions(inputIndices .get(0), inputIndices.get(1), output_index)); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } } else throw new LopsException( "Unexpected number of inputs while generating a RecordReader Instruction"); return output_index; } return -1; } /** * Method to get mapper instructions for a MR job. * * @param node low-level operator * @param execNodes list of exec nodes * @param inputStrings list of input strings * @param instructionsInMapper list of instructions in mapper * @param nodeIndexMapping ? * @param start_index starting index * @param inputLabels input labels * @param MRJoblineNumbers MR job line numbers * @return -1 if problem * @throws LopsException if LopsException occurs */ private int getMapperInstructions(Lop node, ArrayList<Lop> execNodes, ArrayList<String> inputStrings, ArrayList<String> instructionsInMapper, HashMap<Lop, Integer> nodeIndexMapping, int[] start_index, ArrayList<String> inputLabels, ArrayList<Lop> inputLops, ArrayList<Integer> MRJobLineNumbers) throws LopsException { // if input source, return index if (nodeIndexMapping.containsKey(node)) return nodeIndexMapping.get(node); // not input source and not in exec nodes, then return. if (!execNodes.contains(node)) return -1; ArrayList<Integer> inputIndices = new ArrayList<Integer>(); int max_input_index = -1; // get mapper instructions for( Lop childNode : node.getInputs()) { int ret_val = getMapperInstructions(childNode, execNodes, inputStrings, instructionsInMapper, nodeIndexMapping, start_index, inputLabels, inputLops, MRJobLineNumbers); inputIndices.add(ret_val); if (ret_val > max_input_index) { max_input_index = ret_val; } } // only map and map-or-reduce without a reduce child node can contribute // to mapper instructions. 
if ((node.getExecLocation() == ExecLocation.Map || node .getExecLocation() == ExecLocation.MapOrReduce) && !hasChildNode(node, execNodes, ExecLocation.MapAndReduce) && !hasChildNode(node, execNodes, ExecLocation.Reduce) ) { int output_index = max_input_index; // cannot reuse index if this is true // need to add better indexing schemes // if (child_for_max_input_index.getOutputs().size() > 1) { output_index = start_index[0]; start_index[0]++; // } nodeIndexMapping.put(node, output_index); // populate list of input labels. // only Unary lops can contribute to labels if (node instanceof Unary && node.getInputs().size() > 1) { // Following code must be executed only for those Unary // operators that have more than one input // It should not be executed for "true" unary operators like // cos(A). int index = 0; for (int i1 = 0; i1 < node.getInputs().size(); i1++) { if (node.getInputs().get(i1).getDataType() == DataType.SCALAR) { index = i1; break; } } // if data lop not a literal -- add label if (node.getInputs().get(index).getExecLocation() == ExecLocation.Data && !((Data) (node.getInputs().get(index))).isLiteral()) { inputLabels.add(node.getInputs().get(index).getOutputParameters().getLabel()); inputLops.add(node.getInputs().get(index)); } // if not data lop, then this is an intermediate variable. if (node.getInputs().get(index).getExecLocation() != ExecLocation.Data) { inputLabels.add(node.getInputs().get(index).getOutputParameters().getLabel()); inputLops.add(node.getInputs().get(index)); } } // get mapper instruction. 
if (node.getInputs().size() == 1) instructionsInMapper.add(node.getInstructions(inputIndices .get(0), output_index)); else if (node.getInputs().size() == 2) { instructionsInMapper.add(node.getInstructions(inputIndices .get(0), inputIndices.get(1), output_index)); } else if (node.getInputs().size() == 3) instructionsInMapper.add(node.getInstructions(inputIndices.get(0), inputIndices.get(1), inputIndices.get(2), output_index)); else if ( node.getInputs().size() == 4) { // Example: Reshape instructionsInMapper.add(node.getInstructions( inputIndices.get(0), inputIndices.get(1), inputIndices.get(2), inputIndices.get(3), output_index )); } else if ( node.getInputs().size() == 5) { // Example: RangeBasedReIndex A[row_l:row_u, col_l:col_u] instructionsInMapper.add(node.getInstructions( inputIndices.get(0), inputIndices.get(1), inputIndices.get(2), inputIndices.get(3), inputIndices.get(4), output_index )); } else if ( node.getInputs().size() == 7 ) { // Example: RangeBasedReIndex A[row_l:row_u, col_l:col_u] = B instructionsInMapper.add(node.getInstructions( inputIndices.get(0), inputIndices.get(1), inputIndices.get(2), inputIndices.get(3), inputIndices.get(4), inputIndices.get(5), inputIndices.get(6), output_index )); } else throw new LopsException("Node with " + node.getInputs().size() + " inputs is not supported in dag.java."); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } return output_index; } return -1; } // Method to populate inputs and also populates node index mapping. private static void getInputPathsAndParameters(Lop node, ArrayList<Lop> execNodes, ArrayList<String> inputStrings, ArrayList<InputInfo> inputInfos, ArrayList<Long> numRows, ArrayList<Long> numCols, ArrayList<Long> numRowsPerBlock, ArrayList<Long> numColsPerBlock, HashMap<Lop, Integer> nodeIndexMapping, ArrayList<String> inputLabels, ArrayList<Lop> inputLops, ArrayList<Integer> MRJobLineNumbers) throws LopsException { // treat rand as an input. 
if (node.getType() == Type.DataGen && execNodes.contains(node) && !nodeIndexMapping.containsKey(node)) { numRows.add(node.getOutputParameters().getNumRows()); numCols.add(node.getOutputParameters().getNumCols()); numRowsPerBlock.add(node.getOutputParameters().getRowsInBlock()); numColsPerBlock.add(node.getOutputParameters().getColsInBlock()); inputStrings.add(node.getInstructions(inputStrings.size(), inputStrings.size())); if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers.add(node._beginLine); } inputInfos.add(InputInfo.TextCellInputInfo); nodeIndexMapping.put(node, inputStrings.size() - 1); return; } // get input file names if (!execNodes.contains(node) && !nodeIndexMapping.containsKey(node) && !(node.getExecLocation() == ExecLocation.Data) && (!(node.getExecLocation() == ExecLocation.ControlProgram && node .getDataType() == DataType.SCALAR)) || (!execNodes.contains(node) && node.getExecLocation() == ExecLocation.Data && ((Data) node).getOperationType() == Data.OperationTypes.READ && ((Data) node).getDataType() != DataType.SCALAR && !nodeIndexMapping .containsKey(node))) { if (node.getOutputParameters().getFile_name() != null) { inputStrings.add(node.getOutputParameters().getFile_name()); } else { // use label name inputStrings.add(Lop.VARIABLE_NAME_PLACEHOLDER + node.getOutputParameters().getLabel() + Lop.VARIABLE_NAME_PLACEHOLDER); } inputLabels.add(node.getOutputParameters().getLabel()); inputLops.add(node); numRows.add(node.getOutputParameters().getNumRows()); numCols.add(node.getOutputParameters().getNumCols()); numRowsPerBlock.add(node.getOutputParameters().getRowsInBlock()); numColsPerBlock.add(node.getOutputParameters().getColsInBlock()); InputInfo nodeInputInfo = null; // Check if file format type is binary or text and update infos if (node.getOutputParameters().isBlocked()) { if (node.getOutputParameters().getFormat() == Format.BINARY) nodeInputInfo = InputInfo.BinaryBlockInputInfo; else throw new LopsException("Invalid format (" + 
node.getOutputParameters().getFormat() + ") encountered for a node/lop (ID=" + node.getID() + ") with blocked output."); } else { if (node.getOutputParameters().getFormat() == Format.TEXT) nodeInputInfo = InputInfo.TextCellInputInfo; else nodeInputInfo = InputInfo.BinaryCellInputInfo; } /* * Hardcode output Key and Value Classes for SortKeys */ // TODO: statiko -- remove this hardcoding -- i.e., lops must encode // the information on key/value classes if (node.getType() == Type.SortKeys) { // SortKeys is the input to some other lop (say, L) // InputInfo of L is the ouputInfo of SortKeys, which is // (compactformat, doubleWriteable, IntWritable) nodeInputInfo = new InputInfo(PickFromCompactInputFormat.class, DoubleWritable.class, IntWritable.class); } else if (node.getType() == Type.CombineBinary) { // CombineBinary is the input to some other lop (say, L) // InputInfo of L is the ouputInfo of CombineBinary // And, the outputInfo of CombineBinary depends on the operation! CombineBinary combine = (CombineBinary) node; if ( combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreSort ) { nodeInputInfo = new InputInfo(SequenceFileInputFormat.class, DoubleWritable.class, IntWritable.class); } else if ( combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreCentralMoment || combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreCovUnweighted || combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreGroupedAggUnweighted ) { nodeInputInfo = InputInfo.WeightedPairInputInfo; } } else if ( node.getType() == Type.CombineTernary ) { nodeInputInfo = InputInfo.WeightedPairInputInfo; } inputInfos.add(nodeInputInfo); nodeIndexMapping.put(node, inputStrings.size() - 1); return; } // if exec nodes does not contain node at this point, return. 
if (!execNodes.contains(node)) return; // process children recursively for ( Lop lop : node.getInputs() ) { getInputPathsAndParameters(lop, execNodes, inputStrings, inputInfos, numRows, numCols, numRowsPerBlock, numColsPerBlock, nodeIndexMapping, inputLabels, inputLops, MRJobLineNumbers); } } /** * Method to find all terminal nodes. * * @param execNodes list of exec nodes * @param rootNodes list of root nodes * @param jt job type */ private static void getOutputNodes(ArrayList<Lop> execNodes, ArrayList<Lop> rootNodes, JobType jt) { for ( Lop node : execNodes ) { // terminal node if (node.getOutputs().isEmpty() && !rootNodes.contains(node)) { rootNodes.add(node); } else { // check for nodes with at least one child outside execnodes int cnt = 0; for (Lop lop : node.getOutputs() ) { cnt += (!execNodes.contains(lop)) ? 1 : 0; } if (cnt > 0 && !rootNodes.contains(node) // not already a rootnode && !(node.getExecLocation() == ExecLocation.Data && ((Data) node).getOperationType() == OperationTypes.READ && ((Data) node).getDataType() == DataType.MATRIX) ) // Not a matrix Data READ { if ( jt.allowsSingleShuffleInstruction() && node.getExecLocation() != ExecLocation.MapAndReduce) continue; if (cnt < node.getOutputs().size()) { if(!node.getProducesIntermediateOutput()) rootNodes.add(node); } else rootNodes.add(node); } } } } /** * check to see if a is the child of b (i.e., there is a directed path from a to b) * * @param a child lop * @param b parent lop * @param IDMap id map * @return true if a child of b */ private static boolean isChild(Lop a, Lop b, HashMap<Long, Integer> IDMap) { int bID = IDMap.get(b.getID()); return a.get_reachable()[bID]; } /** * Method to topologically sort lops * * @param v list of lops */ @SuppressWarnings({ "unchecked", "rawtypes" }) private void doTopologicalSort_strict_order(ArrayList<Lop> v) { //int numNodes = v.size(); /* * Step 1: compute the level for each node in the DAG. Level for each node is * computed as lops are created. 
So, this step is need not be performed here. * Step 2: sort the nodes by level, and within a level by node ID. */ // Step1: Performed at the time of creating Lops // Step2: sort nodes by level, and then by node ID Lop[] nodearray = v.toArray(new Lop[0]); Arrays.sort(nodearray, new LopComparator()); // Copy sorted nodes into "v" and construct a mapping between Lop IDs and sequence of numbers v.clear(); IDMap.clear(); for (int i = 0; i < nodearray.length; i++) { v.add(nodearray[i]); IDMap.put(v.get(i).getID(), i); } /* * Compute of All-pair reachability graph (Transitive Closure) of the DAG. * - Perform a depth-first search (DFS) from every node $u$ in the DAG * - and construct the list of reachable nodes from the node $u$ * - store the constructed reachability information in $u$.reachable[] boolean array */ // // for (int i = 0; i < nodearray.length; i++) { boolean[] arr = v.get(i).create_reachable(nodearray.length); Arrays.fill(arr, false); dagDFS(v.get(i), arr); } // print the nodes in sorted order if (LOG.isTraceEnabled()) { for ( Lop vnode : v ) { StringBuilder sb = new StringBuilder(); sb.append(vnode.getID()); sb.append("("); sb.append(vnode.getLevel()); sb.append(") "); sb.append(vnode.getType()); sb.append("("); for(Lop vin : vnode.getInputs()) { sb.append(vin.getID()); sb.append(","); } sb.append("), "); LOG.trace(sb.toString()); } LOG.trace("topological sort -- done"); } } /** * Method to perform depth-first traversal from a given node in the DAG. * Store the reachability information in marked[] boolean array. 
* * @param root low-level operator * @param marked reachability results */ private void dagDFS(Lop root, boolean[] marked) { //contains check currently required for globalopt, will be removed when cleaned up if( !IDMap.containsKey(root.getID()) ) return; int mapID = IDMap.get(root.getID()); if ( marked[mapID] ) return; marked[mapID] = true; for( Lop lop : root.getOutputs() ) { dagDFS(lop, marked); } } private static boolean hasDirectChildNode(Lop node, ArrayList<Lop> childNodes) { if ( childNodes.isEmpty() ) return false; for( Lop cnode : childNodes ) { if ( cnode.getOutputs().contains(node)) return true; } return false; } private boolean hasChildNode(Lop node, ArrayList<Lop> nodes) { return hasChildNode(node, nodes, ExecLocation.INVALID); } private boolean hasChildNode(Lop node, ArrayList<Lop> childNodes, ExecLocation type) { if ( childNodes.isEmpty() ) return false; int index = IDMap.get(node.getID()); for( Lop cnode : childNodes ) { if ( (type == ExecLocation.INVALID || cnode.getExecLocation() == type) && cnode.get_reachable()[index]) return true; } return false; } private Lop getChildNode(Lop node, ArrayList<Lop> childNodes, ExecLocation type) { if ( childNodes.isEmpty() ) return null; int index = IDMap.get(node.getID()); for( Lop cnode : childNodes ) { if ( cnode.getExecLocation() == type && cnode.get_reachable()[index]) return cnode; } return null; } /* * Returns a node "n" such that * 1) n \in parentNodes * 2) n is an ancestor of "node" * 3) n.ExecLocation = type * * Returns null if no such "n" exists * */ private Lop getParentNode(Lop node, ArrayList<Lop> parentNodes, ExecLocation type) { if ( parentNodes.isEmpty() ) return null; for( Lop pn : parentNodes ) { int index = IDMap.get( pn.getID() ); if ( pn.getExecLocation() == type && node.get_reachable()[index]) return pn; } return null; } // Checks if "node" has any descendants in nodesVec with definedMRJob flag // set to true private boolean hasMRJobChildNode(Lop node, ArrayList<Lop> nodesVec) { if ( 
nodesVec.isEmpty() ) return false; int index = IDMap.get(node.getID()); for( Lop n : nodesVec ) { if ( n.definesMRJob() && n.get_reachable()[index]) return true; } return false; } private boolean checkDataGenAsChildNode(Lop node, ArrayList<Lop> nodesVec) { if( nodesVec.isEmpty() ) return true; int index = IDMap.get(node.getID()); boolean onlyDatagen = true; for( Lop n : nodesVec ) { if ( n.definesMRJob() && n.get_reachable()[index] && JobType.findJobTypeFromLop(n) != JobType.DATAGEN ) onlyDatagen = false; } // return true also when there is no lop in "nodesVec" that defines a MR job. return onlyDatagen; } private static int getChildAlignment(Lop node, ArrayList<Lop> execNodes, ExecLocation type) { for (Lop n : node.getInputs() ) { if (!execNodes.contains(n)) continue; if (execNodes.contains(n) && n.getExecLocation() == type) { if (n.getBreaksAlignment()) return MR_CHILD_FOUND_BREAKS_ALIGNMENT; else return MR_CHILD_FOUND_DOES_NOT_BREAK_ALIGNMENT; } else { int ret = getChildAlignment(n, execNodes, type); if (ret == MR_CHILD_FOUND_DOES_NOT_BREAK_ALIGNMENT || ret == CHILD_DOES_NOT_BREAK_ALIGNMENT) { if (n.getBreaksAlignment()) return CHILD_BREAKS_ALIGNMENT; else return CHILD_DOES_NOT_BREAK_ALIGNMENT; } else if (ret == MRCHILD_NOT_FOUND || ret == CHILD_BREAKS_ALIGNMENT || ret == MR_CHILD_FOUND_BREAKS_ALIGNMENT) return ret; else throw new RuntimeException("Something wrong in getChildAlignment()."); } } return MRCHILD_NOT_FOUND; } private boolean hasParentNode(Lop node, ArrayList<Lop> parentNodes) { if ( parentNodes.isEmpty() ) return false; for( Lop pnode : parentNodes ) { int index = IDMap.get( pnode.getID() ); if ( node.get_reachable()[index]) return true; } return false; } }<|fim▁end|>
* @param queuedNodes list of queued nodes
<|file_name|>opt3.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import subprocess short_name = 'Opt 3' disp_name = 'Option 3 Submenu' otype = 'Routine' need = ['need 1: ', 'need 2: ', 'need 3: '] answers = [] def run(): global answers while True: subprocess.call('clear') i = 0 while i < len(need): ans = input(need[i]) if validate(ans): answers.append(ans) i += 1 final = 'Doing something with ' for a in answers: final = '{}, {}'.format(final, a) print(final) input() return def validate(char): if char: return True<|fim▁hole|> return False<|fim▁end|>
<|file_name|>logger.py<|end_file_name|><|fim▁begin|>""" Component that will help set the level of logging for components. For more details about this component, please refer to the documentation at https://home-assistant.io/components/logger/ """ import logging from collections import OrderedDict import voluptuous as vol import homeassistant.helpers.config_validation as cv DOMAIN = 'logger' DATA_LOGGER = 'logger' SERVICE_SET_DEFAULT_LEVEL = 'set_default_level' SERVICE_SET_LEVEL = 'set_level' LOGSEVERITY = { 'CRITICAL': 50, 'FATAL': 50, 'ERROR': 40, 'WARNING': 30, 'WARN': 30, 'INFO': 20, 'DEBUG': 10, 'NOTSET': 0 } LOGGER_DEFAULT = 'default' LOGGER_LOGS = 'logs' ATTR_LEVEL = 'level' _VALID_LOG_LEVEL = vol.All(vol.Upper, vol.In(LOGSEVERITY)) SERVICE_SET_DEFAULT_LEVEL_SCHEMA = vol.Schema({ATTR_LEVEL: _VALID_LOG_LEVEL}) SERVICE_SET_LEVEL_SCHEMA = vol.Schema({cv.string: _VALID_LOG_LEVEL}) CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ vol.Optional(LOGGER_DEFAULT): _VALID_LOG_LEVEL, vol.Optional(LOGGER_LOGS): vol.Schema({cv.string: _VALID_LOG_LEVEL}), }), }, extra=vol.ALLOW_EXTRA) def set_level(hass, logs): """Set log level for components."""<|fim▁hole|> class HomeAssistantLogFilter(logging.Filter): """A log filter.""" def __init__(self, logfilter): """Initialize the filter.""" super().__init__() self.logfilter = logfilter def filter(self, record): """Filter the log entries.""" # Log with filtered severity if LOGGER_LOGS in self.logfilter: for filtername in self.logfilter[LOGGER_LOGS]: logseverity = self.logfilter[LOGGER_LOGS][filtername] if record.name.startswith(filtername): return record.levelno >= logseverity # Log with default severity default = self.logfilter[LOGGER_DEFAULT] return record.levelno >= default async def async_setup(hass, config): """Set up the logger component.""" logfilter = {} def set_default_log_level(level): """Set the default log level for components.""" logfilter[LOGGER_DEFAULT] = LOGSEVERITY[level] def set_log_levels(logpoints): """Set the 
specified log levels.""" logs = {} # Preserve existing logs if LOGGER_LOGS in logfilter: logs.update(logfilter[LOGGER_LOGS]) # Add new logpoints mapped to correct severity for key, value in logpoints.items(): logs[key] = LOGSEVERITY[value] logfilter[LOGGER_LOGS] = OrderedDict( sorted( logs.items(), key=lambda t: len(t[0]), reverse=True ) ) # Set default log severity if LOGGER_DEFAULT in config.get(DOMAIN): set_default_log_level(config.get(DOMAIN)[LOGGER_DEFAULT]) else: set_default_log_level('DEBUG') logger = logging.getLogger('') logger.setLevel(logging.NOTSET) # Set log filter for all log handler for handler in logging.root.handlers: handler.setLevel(logging.NOTSET) handler.addFilter(HomeAssistantLogFilter(logfilter)) if LOGGER_LOGS in config.get(DOMAIN): set_log_levels(config.get(DOMAIN)[LOGGER_LOGS]) async def async_service_handler(service): """Handle logger services.""" if service.service == SERVICE_SET_DEFAULT_LEVEL: set_default_log_level(service.data.get(ATTR_LEVEL)) else: set_log_levels(service.data) hass.services.async_register( DOMAIN, SERVICE_SET_DEFAULT_LEVEL, async_service_handler, schema=SERVICE_SET_DEFAULT_LEVEL_SCHEMA) hass.services.async_register( DOMAIN, SERVICE_SET_LEVEL, async_service_handler, schema=SERVICE_SET_LEVEL_SCHEMA) return True<|fim▁end|>
hass.services.call(DOMAIN, SERVICE_SET_LEVEL, logs)
<|file_name|>problem04.py<|end_file_name|><|fim▁begin|>#!/usr/local/bin/python3 # Find the number of elements of a list def lenOf(mylist):<|fim▁hole|>print (lenOf("")) print (lenOf([123,123,123]))<|fim▁end|>
return (len(mylist)) print (lenOf("Hello"))
<|file_name|>CustomRequestScopeAttr.java<|end_file_name|><|fim▁begin|>package org.innovateuk.ifs.registration.service; import org.springframework.web.context.request.RequestAttributes; import java.util.HashMap; import java.util.Map; /** * This solves the java.lang.IllegalStateException: Cannot ask for request attribute - request is not active anymore! * Error, Request attributes are reset before the organisation is updates and removed afterwards. * @see https://stackoverflow.com/questions/44121654/inherited-servletrquestattributes-is-marked-completed-before-child-thread-finish * @see https://medium.com/@pranav_maniar/spring-accessing-request-scope-beans-outside-of-web-request-faad27b5ed57 * */ public class CustomRequestScopeAttr implements RequestAttributes { private Map<String, Object> requestAttributeMap = new HashMap<>(); @Override public Object getAttribute(String name, int scope) { if (scope == RequestAttributes.SCOPE_REQUEST) { return this.requestAttributeMap.get(name); } return null; } @Override public void setAttribute(String name, Object value, int scope) { if (scope == RequestAttributes.SCOPE_REQUEST) { this.requestAttributeMap.put(name, value); } } @Override<|fim▁hole|> this.requestAttributeMap.remove(name); } } @Override public String[] getAttributeNames(int scope) { if (scope == RequestAttributes.SCOPE_REQUEST) { return this.requestAttributeMap.keySet().toArray(new String[0]); } return new String[0]; } @Override public void registerDestructionCallback(String name, Runnable callback, int scope) { // Not Supported } @Override public Object resolveReference(String key) { // Not supported return null; } @Override public String getSessionId() { return null; } @Override public Object getSessionMutex() { return null; } }<|fim▁end|>
public void removeAttribute(String name, int scope) { if (scope == RequestAttributes.SCOPE_REQUEST) {
<|file_name|>Category.py<|end_file_name|><|fim▁begin|># SecuML # Copyright (C) 2016-2017 ANSSI # # SecuML is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # SecuML is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with SecuML. If not, see <http://www.gnu.org/licenses/>. import copy import pandas as pd import scipy from SecuML.core.Tools import matrix_tools from .AnnotationQuery import AnnotationQuery class Category(object): def __init__(self, label=None, family=None): self.assignLabelFamily(label, family) self.instances_ids = [] self.probas = [] self.entropy = [] self.likelihood = [] self.df = None self.annotation_queries = {} self.annotated_instances = [] self.num_annotated_instances = 0 # To display the annotation queries in the web GUI self.queries = [] self.queries_confidence = [] def generateAnnotationQuery(self, instance_id, predicted_proba, suggested_label, suggested_family, confidence=None): return AnnotationQuery(instance_id, predicted_proba, suggested_label, suggested_family, confidence=confidence) def assignLabelFamily(self, label, family): self.family = family if label != 'all': self.label = label else: self.label = label def numInstances(self): return len(self.instances_ids) def setWeight(self, weight): self.weight = weight def setNumAnnotations(self, num_annotations): self.num_annotations = num_annotations def addInstance(self, instance_id, probas, annotated): self.instances_ids.append(instance_id) entropy = None proba = None likelihood = None if probas is not None: entropy = scipy.stats.entropy(probas) proba = 
max(probas) self.entropy.append(entropy) self.probas.append(proba) self.likelihood.append(likelihood) if annotated: self.annotated_instances.append(instance_id) self.num_annotated_instances += 1 def finalComputation(self): self.df = pd.DataFrame({'proba': self.probas, 'entropy': self.entropy, 'likelihood': self.likelihood}, index=list(map(str, self.instances_ids))) def annotateAuto(self, iteration): for k, queries in self.annotation_queries.items(): for q, query in enumerate(queries): query.annotateAuto(iteration, self.label) def getManualAnnotations(self, iteration): for k, queries in self.annotation_queries.items(): for q, query in enumerate(queries): query.getManualAnnotation(iteration) def checkAnnotationQueriesAnswered(self, iteration): for k, queries in self.annotation_queries.items(): for q, query in enumerate(queries): if not query.checkAnswered(iteration): return False return True def setLikelihood(self, likelihood): self.likelihood = likelihood self.df['likelihood'] = likelihood def getLikelihood(self, instances): df = pd.DataFrame({'likelihood': self.likelihood}, index=list(map(str, self.instances_ids))) selected_df = df.loc[list(map(str, instances)), :] return selected_df['likelihood'].tolist() def getCategoryLabel(self): return self.label def getCategoryFamily(self): return self.family def toJson(self): obj = {} obj['label'] = self.label obj['family'] = self.family obj['annotation_queries'] = {} for kind, queries in self.annotation_queries.items(): obj['annotation_queries'][kind] = [] for q, query in enumerate(queries): obj['annotation_queries'][kind].append(query.toJson()) return obj @staticmethod def fromJson(obj): category = Category() category.instances_ids = obj['instances_ids'] category.label = obj['label'] return category def exportAnnotationQueries(self): annotation_queries = {} annotation_queries['instance_ids'] = self.queries annotation_queries['confidence'] = self.queries_confidence annotation_queries['label'] = self.label return 
annotation_queries def generateAnnotationQueries(self, cluster_strategy): queries_types = cluster_strategy.split('_') num_queries_types = len(queries_types) total_num_queries = 0 annotated_instances = copy.deepcopy(self.annotated_instances) for q, queries_type in enumerate(queries_types): if q == (num_queries_types - 1): num_queries = self.num_annotations - total_num_queries else: num_queries = self.num_annotations // num_queries_types if queries_type == 'center': queries = self.queryHighLikelihoodInstances( annotated_instances, num_queries) elif queries_type == 'anomalous': queries = self.queryLowLikelihoodInstances( annotated_instances, num_queries) elif queries_type == 'uncertain': queries = self.queryUncertainInstances( annotated_instances, num_queries) elif queries_type == 'random': queries = self.queryRandomInstances( annotated_instances, num_queries) else: raise ValueError() annotated_instances += queries total_num_queries += len(queries) assert(total_num_queries == self.num_annotations) def queryUncertainInstances(self, drop_instances, num_instances): if num_instances == 0: return [] queries_df = self.getSelectedInstancesDataframe(drop_instances) matrix_tools.sortDataFrame(queries_df, 'entropy', False, True) queries_df = queries_df.head(num_instances) self.addAnnotationQueries('uncertain', 'low', queries_df) return list(map(int, queries_df.index.values.tolist())) def queryHighLikelihoodInstances(self, drop_instances, num_instances): if num_instances == 0: return [] queries_df = self.getSelectedInstancesDataframe(drop_instances) matrix_tools.sortDataFrame(queries_df, 'likelihood', False, True) queries_df = queries_df.head(num_instances) self.addAnnotationQueries('high_likelihood', 'high', queries_df) return list(map(int, queries_df.index.values.tolist())) def queryLowLikelihoodInstances(self, drop_instances, num_instances): if num_instances == 0: return [] queries_df = self.getSelectedInstancesDataframe(drop_instances) matrix_tools.sortDataFrame(queries_df, 
'likelihood', True, True) queries_df = queries_df.head(num_instances) self.addAnnotationQueries('low_likelihood', 'low', queries_df) return list(map(int, queries_df.index.values.tolist())) def queryRandomInstances(self, drop_instances, num_instances): if num_instances == 0: return [] queries_df = self.getSelectedInstancesDataframe(drop_instances) queries_df = queries_df.sample(n=num_instances, axis=0) self.addAnnotationQueries('random', 'low', queries_df) return list(map(int, queries_df.index.values.tolist())) def addAnnotationQueries(self, kind, confidence, queries_df):<|fim▁hole|> self.label, self.family, confidence=confidence) self.annotation_queries[kind].append(query) self.queries.append(int(index)) self.queries_confidence.append(confidence) def getSelectedInstancesDataframe(self, drop_instances): if drop_instances is None: selected_instances = self.instances_ids else: selected_instances = [ x for x in self.instances_ids if x not in drop_instances] selected_df = self.df.loc[list(map(str, selected_instances)), :] return selected_df<|fim▁end|>
if kind not in list(self.annotation_queries.keys()): self.annotation_queries[kind] = [] for index, row in queries_df.iterrows(): query = self.generateAnnotationQuery(int(index), row['likelihood'],
<|file_name|>pop3Dclrt.cpp<|end_file_name|><|fim▁begin|>/* program to compute the CLRT from 3D SFS input is a global 3D sfs file and the corresponding local windows 3D sfs file */ #include <iostream> #include <fstream> #include <vector> #include <string> #include <sstream> #include <cmath> #include <iomanip> using namespace std; // parse a string line to a vector of doubles void line_to_vector (string line, vector<double> & v) { stringstream ssline(line); double d; while(ssline >> d) { v.push_back(d); } } // remove the first and last values from the input vector void zero_freq_remove (vector<double> & v) { v.erase(v.begin() + 0); v.erase(v.begin() + v.size()-1); } // help factor calculation for window SFS scaling double calculate_help_fact (double globalSNPsnr, string filepath) { double help_fact; double windowsSNPsnr; int windowsnr = 0; // will count the number of windows in the windows SFS file string line; // string to store each line of the files vector<double> sfs; // vector to store the elements of each line ifstream windows_sfs (filepath); // input windows SFS file // GET WINDOWS SNPs NUMBER while (getline(windows_sfs, line)) // read windows SFS file line by line { line_to_vector(line, sfs); // parse line zero_freq_remove(sfs); // remove first and last SFS values for (int i = 0; i < sfs.size(); i++) windowsSNPsnr += sfs[i]; // sums up SNPs windowsnr += 1; sfs.clear(); } help_fact = globalSNPsnr / (windowsSNPsnr/windowsnr); // global SNPs number / mean windows SNPs number windows_sfs.close(); return help_fact; } // composite likelihood calculation double calculate_CL (vector<double> & v, double & nrSNPs, double & help_fact) { double CL = 0; double pk = 0; // the CL will be the sum of all p^k values for (int i = 0; i < v.size(); i++) { pk = log( pow((v[i]/nrSNPs), v[i]/help_fact) ); // log transformation CL+=pk; } return CL; } // CLRT calculation double calculate_CLRT (double X, double Y) { double CLRT; CLRT = 2*(X-Y); <|fim▁hole|>// help printout void info() 
{ fprintf(stderr,"Required arguments:\tGlobal 3D SFS file path\n"); fprintf(stderr,"\t\t\tWindows 3D SFS file path\n"); fprintf(stderr,"\t\t\tOutput file name\n"); } // // // MAIN // // // int main (int argc, char *argv[]) { // HELP PRINTOUT if (argc==1) { info(); return 0; } // CHECKING CORRECT ARGUMENT NUMBER if (argc < 4) { cout << "Error: not enough arguments\n"; return 0; // terminate } // OPENING FILES, SETTING VARIABLES ifstream global_sfs (argv[1]); // input files (the global ifstream windows_sfs (argv[2]); // and windows SFS files) if ( !global_sfs.is_open() ) { // checking that the first file was successfully open cout<<"Could not open the global SFS file\n"; return 0; // terminate } if ( !windows_sfs.is_open() ) { // checking that the second file was successfully open cout<<"Could not open the windows SFS file\n"; return 0; // terminate } ofstream clrt_output(argv[3], ios::trunc); // output file to store test results string line; // string to store each line of the files vector<double> sfs; // vector to store the elements of each line double global_CL; // double to store the global CL value double windows_CL; // double to store the CL for each window in turn double CLRT; // double to store the test results // CALCULATE THE GLOBAL COMPOSITE LIKELIHOOD getline(global_sfs, line); // retrieving the first and only line of the line_to_vector(line, sfs); // global SFS file and storing it into a vector zero_freq_remove(sfs); // remove first and last SFS values double globalSNPsnr = 0; for (int i = 0; i < sfs.size(); i++) globalSNPsnr += sfs[i]; // number of SNPs in global SFS double help_fact = calculate_help_fact(globalSNPsnr, argv[2]); global_CL = calculate_CL(sfs, globalSNPsnr, help_fact); sfs.clear(); // CALCULATE WINDOWS CL, CALCULATE AND STORE THE CL RATIO TEST RESULT help_fact = 1; while (getline(windows_sfs, line)) // read windows SFS file line by line { line_to_vector(line, sfs); // parse line zero_freq_remove(sfs); // remove first and last SFS values 
double windowSNPsnr = 0; for (int i = 0; i < sfs.size(); i++) windowSNPsnr += sfs[i]; // number of SNPs in windows SFS windows_CL = calculate_CL(sfs, windowSNPsnr, help_fact); CLRT = calculate_CLRT(windows_CL, global_CL); clrt_output << setprecision(10) << CLRT << " \n"; sfs.clear(); } // CLOSING FILES global_sfs.close(); windows_sfs.close(); clrt_output.close(); return 0; }<|fim▁end|>
return CLRT; }
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Support for the (unofficial) Tado API.""" import asyncio from datetime import timedelta import logging from PyTado.interface import Tado from requests import RequestException import requests.exceptions import voluptuous as vol from homeassistant.components.climate.const import PRESET_AWAY, PRESET_HOME from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.dispatcher import dispatcher_send from homeassistant.helpers.event import async_track_time_interval from homeassistant.util import Throttle from .const import ( CONF_FALLBACK, DATA, DOMAIN, SIGNAL_TADO_UPDATE_RECEIVED, UPDATE_LISTENER, UPDATE_TRACK, ) _LOGGER = logging.getLogger(__name__) TADO_COMPONENTS = ["sensor", "climate", "water_heater"] MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=10) SCAN_INTERVAL = timedelta(seconds=15) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.All( cv.ensure_list, [ { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_FALLBACK, default=True): cv.boolean, } ], ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass: HomeAssistant, config: dict): """Set up the Tado component.""" hass.data.setdefault(DOMAIN, {}) if DOMAIN not in config: return True for conf in config[DOMAIN]: hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=conf, ) ) return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up Tado from a config entry.""" _async_import_options_from_data_if_missing(hass, entry) username = entry.data[CONF_USERNAME] password = entry.data[CONF_PASSWORD] fallback = entry.options.get(CONF_FALLBACK, True) tadoconnector = 
TadoConnector(hass, username, password, fallback) try: await hass.async_add_executor_job(tadoconnector.setup) except KeyError: _LOGGER.error("Failed to login to tado") return False except RuntimeError as exc: _LOGGER.error("Failed to setup tado: %s", exc) return ConfigEntryNotReady except requests.exceptions.HTTPError as ex: if ex.response.status_code > 400 and ex.response.status_code < 500: _LOGGER.error("Failed to login to tado: %s", ex) return False raise ConfigEntryNotReady # Do first update await hass.async_add_executor_job(tadoconnector.update) # Poll for updates in the background update_track = async_track_time_interval( hass, lambda now: tadoconnector.update(), SCAN_INTERVAL, ) update_listener = entry.add_update_listener(_async_update_listener) hass.data[DOMAIN][entry.entry_id] = { DATA: tadoconnector, UPDATE_TRACK: update_track, UPDATE_LISTENER: update_listener, } for component in TADO_COMPONENTS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) return True @callback def _async_import_options_from_data_if_missing(hass: HomeAssistant, entry: ConfigEntry): options = dict(entry.options) if CONF_FALLBACK not in options: options[CONF_FALLBACK] = entry.data.get(CONF_FALLBACK, True) hass.config_entries.async_update_entry(entry, options=options) async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry): """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, component) for component in TADO_COMPONENTS ] ) ) hass.data[DOMAIN][entry.entry_id][UPDATE_TRACK]() hass.data[DOMAIN][entry.entry_id][UPDATE_LISTENER]() if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok class TadoConnector: """An object to store the Tado data.""" def __init__(self, hass, username, password, 
fallback): """Initialize Tado Connector.""" self.hass = hass self._username = username self._password = password self._fallback = fallback self.device_id = None self.tado = None self.zones = None self.devices = None self.data = { "zone": {}, "device": {}, } @property def fallback(self): """Return fallback flag to Smart Schedule.""" return self._fallback def setup(self): """Connect to Tado and fetch the zones.""" self.tado = Tado(self._username, self._password) self.tado.setDebugging(True) # Load zones and devices self.zones = self.tado.getZones() self.devices = self.tado.getMe()["homes"] self.device_id = self.devices[0]["id"] @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Update the registered zones.""" for zone in self.zones: self.update_sensor("zone", zone["id"]) for device in self.devices: self.update_sensor("device", device["id"]) def update_sensor(self, sensor_type, sensor): """Update the internal data from Tado.""" _LOGGER.debug("Updating %s %s", sensor_type, sensor) try: if sensor_type == "zone": data = self.tado.getZoneState(sensor) elif sensor_type == "device": devices_data = self.tado.getDevices() if not devices_data: _LOGGER.info("There are no devices to setup on this tado account") return data = devices_data[0] else: _LOGGER.debug("Unknown sensor: %s", sensor_type) return except RuntimeError: _LOGGER.error( "Unable to connect to Tado while updating %s %s", sensor_type, sensor, ) return self.data[sensor_type][sensor] = data _LOGGER.debug( "Dispatching update to %s %s %s: %s", self.device_id, sensor_type, sensor, data, ) dispatcher_send( self.hass, SIGNAL_TADO_UPDATE_RECEIVED.format(self.device_id, sensor_type, sensor), ) def get_capabilities(self, zone_id): """Return the capabilities of the devices.""" return self.tado.getCapabilities(zone_id) def reset_zone_overlay(self, zone_id): """Reset the zone back to the default operation.""" self.tado.resetZoneOverlay(zone_id) self.update_sensor("zone", zone_id) def set_presence( self, 
presence=PRESET_HOME, ): """Set the presence to home or away.""" if presence == PRESET_AWAY: self.tado.setAway() elif presence == PRESET_HOME: self.tado.setHome() def set_zone_overlay( self, zone_id=None, overlay_mode=None, temperature=None, duration=None, device_type="HEATING", mode=None, fan_speed=None, swing=None, ): """Set a zone overlay.""" _LOGGER.debug( "Set overlay for zone %s: overlay_mode=%s, temp=%s, duration=%s, type=%s, mode=%s fan_speed=%s swing=%s", zone_id, overlay_mode, temperature, duration, device_type, mode, fan_speed, swing, ) try: self.tado.setZoneOverlay( zone_id, overlay_mode, temperature, duration,<|fim▁hole|> fanSpeed=fan_speed, swing=swing, ) except RequestException as exc: _LOGGER.error("Could not set zone overlay: %s", exc) self.update_sensor("zone", zone_id) def set_zone_off(self, zone_id, overlay_mode, device_type="HEATING"): """Set a zone to off.""" try: self.tado.setZoneOverlay( zone_id, overlay_mode, None, None, device_type, "OFF" ) except RequestException as exc: _LOGGER.error("Could not set zone overlay: %s", exc) self.update_sensor("zone", zone_id)<|fim▁end|>
device_type, "ON", mode,
<|file_name|>V3DResourceMemory.cpp<|end_file_name|><|fim▁begin|>#include "V3DResourceMemory.h" #include "V3DDevice.h" #include "V3DBuffer.h" #include "V3DImage.h" #include "V3DAdapter.h" /******************************/ /* public - V3DResourceMemory */ /******************************/ V3DResourceMemory* V3DResourceMemory::Create() { return V3D_NEW_T(V3DResourceMemory); } V3D_RESULT V3DResourceMemory::Initialize(IV3DDevice* pDevice, V3DFlags propertyFlags, uint64_t size, const wchar_t* pDebugName) { V3D_ASSERT(pDevice != nullptr); V3D_ASSERT(propertyFlags != 0); V3D_ASSERT(size != 0); m_pDevice = V3D_TO_ADD_REF(static_cast<V3DDevice*>(pDevice)); V3D_ADD_DEBUG_MEMORY_OBJECT(this, V3D_DEBUG_OBJECT_TYPE_RESOURCE_MEMORY, V3D_SAFE_NAME(this, pDebugName)); m_Source.memoryPropertyFlags = ToVkMemoryPropertyFlags(propertyFlags); // ---------------------------------------------------------------------------------------------------- // ƒƒ‚ƒŠ‚ðŠm•Û // ---------------------------------------------------------------------------------------------------- VkMemoryAllocateInfo allocInfo{}; allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO; allocInfo.pNext = nullptr; allocInfo.allocationSize = size; allocInfo.memoryTypeIndex = m_pDevice->GetInternalAdapterPtr()->Vulkan_GetMemoryTypeIndex(m_Source.memoryPropertyFlags); VkResult vkResult = vkAllocateMemory(m_pDevice->GetSource().device, &allocInfo, nullptr, &m_Source.deviceMemory); if (vkResult != VK_SUCCESS) { return ToV3DResult(vkResult); } m_Source.memoryMappedRange.memory = m_Source.deviceMemory; V3D_ADD_DEBUG_OBJECT(m_pDevice->GetInternalInstancePtr(), m_Source.deviceMemory, V3D_SAFE_NAME(this, pDebugName)); // ---------------------------------------------------------------------------------------------------- // ‹Lq‚ðÝ’è // ---------------------------------------------------------------------------------------------------- m_Desc.propertyFlags = propertyFlags; m_Desc.size = size; // 
---------------------------------------------------------------------------------------------------- return V3D_OK; } V3D_RESULT V3DResourceMemory::Initialize(IV3DDevice* pDevice, V3DFlags propertyFlags, uint32_t resourceCount, IV3DResource** ppResources, const wchar_t* pDebugName) { V3D_ASSERT(pDevice != nullptr); V3D_ASSERT(propertyFlags != 0); V3D_ASSERT(resourceCount != 0); V3D_ASSERT(ppResources != nullptr); m_pDevice = V3D_TO_ADD_REF(static_cast<V3DDevice*>(pDevice)); V3D_ADD_DEBUG_MEMORY_OBJECT(this, V3D_DEBUG_OBJECT_TYPE_RESOURCE_MEMORY, V3D_SAFE_NAME(this, pDebugName)); // ---------------------------------------------------------------------------------------------------- // ƒŠƒ\[ƒX‚ðƒAƒ‰ƒCƒƒ“ƒg‚̑傫‚¢‡‚Ƀ\[ƒg // ---------------------------------------------------------------------------------------------------- STLVector<IV3DResource*> resources; resources.reserve(resourceCount); for (uint32_t i = 0; i < resourceCount; i++) { #ifdef V3D_DEBUG switch (ppResources[i]->GetResourceDesc().type) { case V3D_RESOURCE_TYPE_BUFFER: if (static_cast<V3DBuffer*>(ppResources[i])->CheckBindMemory() == true) { V3D_LOG_PRINT_ERROR(Log_Error_AlreadyBindResourceMemory, V3D_SAFE_NAME(this, pDebugName), V3D_LOG_TYPE(ppResources), i, static_cast<V3DBuffer*>(ppResources[i])->GetDebugName()); return V3D_ERROR_FAIL; } break; case V3D_RESOURCE_TYPE_IMAGE: if (static_cast<IV3DImageBase*>(ppResources[i])->CheckBindMemory() == true) { V3D_LOG_PRINT_ERROR(Log_Error_AlreadyBindResourceMemory, V3D_SAFE_NAME(this, pDebugName), V3D_LOG_TYPE(ppResources), i, static_cast<IV3DImageBase*>(ppResources[i])->GetDebugName()); return V3D_ERROR_FAIL; } break; } #endif //V3D_DEBUG resources.push_back(ppResources[i]); } std::sort(resources.begin(), resources.end(), [](const IV3DResource* lh, const IV3DResource* rh) { return lh->GetResourceDesc().memoryAlignment > rh->GetResourceDesc().memoryAlignment; }); // 
---------------------------------------------------------------------------------------------------- // ƒAƒ‰ƒCƒƒ“ƒg‚ð‹C‚É‚µ‚‚Aƒƒ‚ƒŠ‚̃TƒCƒY‚ð‹‚ß‚é // ---------------------------------------------------------------------------------------------------- uint64_t vkMinAlignment = m_pDevice->GetSource().deviceProps.limits.minMemoryMapAlignment; VkDeviceSize vkAllocSize = 0; STLVector<VkDeviceSize> vkOffsets; vkOffsets.resize(resourceCount); for (uint32_t i = 0; i < resourceCount; i++) { const V3DResourceDesc& resourceDesc = ppResources[i]->GetResourceDesc(); VkDeviceSize vkAlignment = V3D_MAX(vkMinAlignment, resourceDesc.memoryAlignment); if (vkAllocSize % vkAlignment) { vkAllocSize = (vkAllocSize / vkAlignment) * vkAlignment + vkAlignment; } <|fim▁hole|> // ---------------------------------------------------------------------------------------------------- // ƒƒ‚ƒŠ‚ðì¬ // ---------------------------------------------------------------------------------------------------- m_Source.memoryPropertyFlags = ToVkMemoryPropertyFlags(propertyFlags); VkMemoryAllocateInfo allocInfo{}; allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO; allocInfo.pNext = nullptr; allocInfo.allocationSize = vkAllocSize; allocInfo.memoryTypeIndex = m_pDevice->GetInternalAdapterPtr()->Vulkan_GetMemoryTypeIndex(m_Source.memoryPropertyFlags); VkResult vkResult = vkAllocateMemory(m_pDevice->GetSource().device, &allocInfo, nullptr, &m_Source.deviceMemory); if (vkResult != VK_SUCCESS) { return ToV3DResult(vkResult); } m_Source.memoryMappedRange.memory = m_Source.deviceMemory; V3D_ADD_DEBUG_OBJECT(m_pDevice->GetInternalInstancePtr(), m_Source.deviceMemory, V3D_SAFE_NAME(this, pDebugName)); // ---------------------------------------------------------------------------------------------------- // ‹Lq‚ðÝ’è // ---------------------------------------------------------------------------------------------------- m_Desc.propertyFlags = propertyFlags; m_Desc.size = vkAllocSize; // 
---------------------------------------------------------------------------------------------------- // ƒŠƒ\[ƒX‚ðƒoƒCƒ“ƒh // ---------------------------------------------------------------------------------------------------- V3D_RESULT result = V3D_ERROR_FAIL; for (uint32_t i = 0; i < resourceCount; i++) { IV3DResource* pResource = ppResources[i]; switch (pResource->GetResourceDesc().type) { case V3D_RESOURCE_TYPE_BUFFER: result = static_cast<V3DBuffer*>(pResource)->BindMemory(this, vkOffsets[i]); if (result != V3D_OK) { return result; } break; case V3D_RESOURCE_TYPE_IMAGE: result = static_cast<V3DImage*>(pResource)->BindMemory(this, vkOffsets[i]); if (result != V3D_OK) { return result; } break; } } // ---------------------------------------------------------------------------------------------------- return V3D_OK; } const V3DResourceMemory::Source& V3DResourceMemory::GetSource() const { return m_Source; } V3D_RESULT V3DResourceMemory::Map(uint64_t offset, uint64_t size, void** ppMemory) { if (m_Desc.size < (offset + size)) { return V3D_ERROR_FAIL; } if (m_pMemory != nullptr) { *ppMemory = m_pMemory + offset; return V3D_OK; } if (m_Source.memoryMappedRange.size != 0) { return V3D_ERROR_FAIL; } VkResult vkResult = vkMapMemory(m_pDevice->GetSource().device, m_Source.deviceMemory, offset, size, 0, ppMemory); if (vkResult != VK_SUCCESS) { return ToV3DResult(vkResult); } m_Source.memoryMappedRange.offset = offset; m_Source.memoryMappedRange.size = size; if ((m_Source.memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0) { vkResult = vkInvalidateMappedMemoryRanges(m_pDevice->GetSource().device, 1, &m_Source.memoryMappedRange); if (vkResult != VK_SUCCESS) { return ToV3DResult(vkResult); } } return V3D_OK; } V3D_RESULT V3DResourceMemory::Unmap() { if (m_pMemory != nullptr) { return V3D_OK; } if (m_Source.memoryMappedRange.size == 0) { return V3D_ERROR_FAIL; } V3D_RESULT result = V3D_OK; if ((m_Source.memoryPropertyFlags & 
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0) { VkResult vkResult = vkFlushMappedMemoryRanges(m_pDevice->GetSource().device, 1, &m_Source.memoryMappedRange); if (vkResult != VK_SUCCESS) { result = ToV3DResult(vkResult); } } m_Source.memoryMappedRange.offset = 0; m_Source.memoryMappedRange.size = 0; vkUnmapMemory(m_pDevice->GetSource().device, m_Source.deviceMemory); return result; } #ifdef V3D_DEBUG bool V3DResourceMemory::Debug_CheckMemory(uint64_t offset, uint64_t size) { return (m_Desc.size >= (offset + size)); } #endif //V3D_DEBUG /****************************************/ /* public override - IV3DResourceMemory */ /****************************************/ const V3DResourceMemoryDesc& V3DResourceMemory::GetDesc() const { return m_Desc; } V3D_RESULT V3DResourceMemory::BeginMap() { if (m_Source.memoryMappedRange.size != 0) { return V3D_ERROR_FAIL; } VkResult vkResult = vkMapMemory(m_pDevice->GetSource().device, m_Source.deviceMemory, 0, m_Desc.size, 0, reinterpret_cast<void**>(&m_pMemory)); if (vkResult != VK_SUCCESS) { return ToV3DResult(vkResult); } m_Source.memoryMappedRange.offset = 0; m_Source.memoryMappedRange.size = m_Desc.size; if ((m_Source.memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0) { vkResult = vkInvalidateMappedMemoryRanges(m_pDevice->GetSource().device, 1, &m_Source.memoryMappedRange); if (vkResult != VK_SUCCESS) { return ToV3DResult(vkResult); } } return V3D_OK; } V3D_RESULT V3DResourceMemory::EndMap() { if (m_Source.memoryMappedRange.size == 0) { return V3D_ERROR_FAIL; } V3D_RESULT result = V3D_OK; if ((m_Source.memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0) { VkResult vkResult = vkFlushMappedMemoryRanges(m_pDevice->GetSource().device, 1, &m_Source.memoryMappedRange); if (vkResult != VK_SUCCESS) { result = ToV3DResult(vkResult); } } m_Source.memoryMappedRange.offset = 0; m_Source.memoryMappedRange.size = 0; vkUnmapMemory(m_pDevice->GetSource().device, m_Source.deviceMemory); m_pMemory = nullptr; return 
result; } /*************************************/ /* public override - IV3DDeviceChild */ /*************************************/ void V3DResourceMemory::GetDevice(IV3DDevice** ppDevice) { (*ppDevice) = V3D_TO_ADD_REF(m_pDevice); } /********************************/ /* public override - IV3DObject */ /********************************/ int64_t V3DResourceMemory::GetRefCount() const { return m_RefCounter; } void V3DResourceMemory::AddRef() { V3D_REF_INC(m_RefCounter); } void V3DResourceMemory::Release() { if (V3D_REF_DEC(m_RefCounter)) { V3D_REF_FENCE(); V3D_DELETE_THIS_T(this, V3DResourceMemory); } } /*******************************/ /* private - V3DResourceMemory */ /*******************************/ V3DResourceMemory::V3DResourceMemory() : m_RefCounter(1), m_pDevice(nullptr), m_Desc({}), m_Source({}), m_pMemory(nullptr) { m_Source.deviceMemory = VK_NULL_HANDLE; m_Source.memoryMappedRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; m_Source.memoryMappedRange.pNext = nullptr; } V3DResourceMemory::~V3DResourceMemory() { if (m_pDevice != nullptr) { m_pDevice->NotifyReleaseResourceMemory(); } if (m_Source.deviceMemory != VK_NULL_HANDLE) { vkFreeMemory(m_pDevice->GetSource().device, m_Source.deviceMemory, nullptr); V3D_REMOVE_DEBUG_OBJECT(m_pDevice->GetInternalInstancePtr(), m_Source.deviceMemory); } V3D_REMOVE_DEBUG_MEMORY_OBJECT(this); V3D_RELEASE(m_pDevice); }<|fim▁end|>
vkOffsets[i] = vkAllocSize; vkAllocSize += resourceDesc.memorySize; }
<|file_name|>dataflow.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A module for propagating forward dataflow information. The analysis //! assumes that the items to be propagated can be represented as bits //! and thus uses bitvectors. Your job is simply to specify the so-called //! GEN and KILL bits for each expression. pub use self::EntryOrExit::*; use middle::cfg; use middle::cfg::CFGIndex; use middle::ty; use std::io; use std::uint; use std::iter::repeat; use syntax::ast; use syntax::ast_util::IdRange; use syntax::visit; use syntax::print::{pp, pprust}; use util::nodemap::NodeMap; #[derive(Copy, Show)] pub enum EntryOrExit { Entry, Exit, } #[derive(Clone)] pub struct DataFlowContext<'a, 'tcx: 'a, O> { tcx: &'a ty::ctxt<'tcx>, /// a name for the analysis using this dataflow instance analysis_name: &'static str, /// the data flow operator oper: O, /// number of bits to propagate per id bits_per_id: uint, /// number of words we will use to store bits_per_id. /// equal to bits_per_id/uint::BITS rounded up. words_per_id: uint, // mapping from node to cfg node index // FIXME (#6298): Shouldn't this go with CFG? nodeid_to_index: NodeMap<CFGIndex>, // Bit sets per cfg node. The following three fields (`gens`, `kills`, // and `on_entry`) all have the same structure. For each id in // `id_range`, there is a range of words equal to `words_per_id`. // So, to access the bits for any given id, you take a slice of // the full vector (see the method `compute_id_range()`). /// bits generated as we exit the cfg node. 
Updated by `add_gen()`. gens: Vec<uint>, /// bits killed as we exit the cfg node. Updated by `add_kill()`. kills: Vec<uint>, /// bits that are valid on entry to the cfg node. Updated by /// `propagate()`. on_entry: Vec<uint>, } pub trait BitwiseOperator { /// Joins two predecessor bits together, typically either `|` or `&` fn join(&self, succ: uint, pred: uint) -> uint; } /// Parameterization for the precise form of data flow that is used. pub trait DataFlowOperator : BitwiseOperator { /// Specifies the initial value for each bit in the `on_entry` set fn initial_value(&self) -> bool; } struct PropagationContext<'a, 'b: 'a, 'tcx: 'b, O: 'a> { dfcx: &'a mut DataFlowContext<'b, 'tcx, O>, changed: bool } fn to_cfgidx_or_die(id: ast::NodeId, index: &NodeMap<CFGIndex>) -> CFGIndex { let opt_cfgindex = index.get(&id).map(|&i|i); opt_cfgindex.unwrap_or_else(|| { panic!("nodeid_to_index does not have entry for NodeId {}", id); }) } impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { fn has_bitset_for_nodeid(&self, n: ast::NodeId) -> bool { assert!(n != ast::DUMMY_NODE_ID); self.nodeid_to_index.contains_key(&n) } } impl<'a, 'tcx, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, 'tcx, O> { fn pre(&self, ps: &mut pprust::State, node: pprust::AnnNode) -> io::IoResult<()> { let id = match node { pprust::NodeIdent(_) | pprust::NodeName(_) => 0, pprust::NodeExpr(expr) => expr.id, pprust::NodeBlock(blk) => blk.id, pprust::NodeItem(_) => 0, pprust::NodePat(pat) => pat.id }; if self.has_bitset_for_nodeid(id) { assert!(self.bits_per_id > 0); let cfgidx = to_cfgidx_or_die(id, &self.nodeid_to_index); let (start, end) = self.compute_id_range(cfgidx); let on_entry = self.on_entry.slice(start, end); let entry_str = bits_to_string(on_entry); let gens = self.gens.slice(start, end); let gens_str = if gens.iter().any(|&u| u != 0) { format!(" gen: {}", bits_to_string(gens)) } else { "".to_string() }; let kills = self.kills.slice(start, end); let kills_str = if 
kills.iter().any(|&u| u != 0) { format!(" kill: {}", bits_to_string(kills)) } else { "".to_string() }; try!(ps.synth_comment(format!("id {}: {}{}{}", id, entry_str, gens_str, kills_str))); try!(pp::space(&mut ps.s)); } Ok(()) } } fn build_nodeid_to_index(decl: Option<&ast::FnDecl>, cfg: &cfg::CFG) -> NodeMap<CFGIndex> { let mut index = NodeMap::new(); // FIXME (#6298): Would it be better to fold formals from decl // into cfg itself? i.e. introduce a fn-based flow-graph in // addition to the current block-based flow-graph, rather than // have to put traversals like this here? match decl { None => {} Some(decl) => add_entries_from_fn_decl(&mut index, decl, cfg.entry) } cfg.graph.each_node(|node_idx, node| { if node.data.id != ast::DUMMY_NODE_ID { index.insert(node.data.id, node_idx); } true }); return index; fn add_entries_from_fn_decl(index: &mut NodeMap<CFGIndex>, decl: &ast::FnDecl, entry: CFGIndex) { //! add mappings from the ast nodes for the formal bindings to //! the entry-node in the graph. 
struct Formals<'a> { entry: CFGIndex, index: &'a mut NodeMap<CFGIndex>, } let mut formals = Formals { entry: entry, index: index }; visit::walk_fn_decl(&mut formals, decl); impl<'a, 'v> visit::Visitor<'v> for Formals<'a> { fn visit_pat(&mut self, p: &ast::Pat) { self.index.insert(p.id, self.entry); visit::walk_pat(self, p) } } } } impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { pub fn new(tcx: &'a ty::ctxt<'tcx>, analysis_name: &'static str, decl: Option<&ast::FnDecl>, cfg: &cfg::CFG, oper: O, id_range: IdRange, bits_per_id: uint) -> DataFlowContext<'a, 'tcx, O> { let words_per_id = (bits_per_id + uint::BITS - 1) / uint::BITS; let num_nodes = cfg.graph.all_nodes().len(); debug!("DataFlowContext::new(analysis_name: {}, id_range={:?}, \ bits_per_id={}, words_per_id={}) \ num_nodes: {}", analysis_name, id_range, bits_per_id, words_per_id, num_nodes); let entry = if oper.initial_value() { uint::MAX } else {0}; let gens: Vec<_> = repeat(0).take(num_nodes * words_per_id).collect(); let kills: Vec<_> = repeat(0).take(num_nodes * words_per_id).collect(); let on_entry: Vec<_> = repeat(entry).take(num_nodes * words_per_id).collect(); let nodeid_to_index = build_nodeid_to_index(decl, cfg); DataFlowContext { tcx: tcx, analysis_name: analysis_name, words_per_id: words_per_id, nodeid_to_index: nodeid_to_index, bits_per_id: bits_per_id, oper: oper, gens: gens, kills: kills, on_entry: on_entry } } pub fn add_gen(&mut self, id: ast::NodeId, bit: uint) { //! Indicates that `id` generates `bit` debug!("{} add_gen(id={}, bit={})", self.analysis_name, id, bit); assert!(self.nodeid_to_index.contains_key(&id)); assert!(self.bits_per_id > 0); let cfgidx = to_cfgidx_or_die(id, &self.nodeid_to_index); let (start, end) = self.compute_id_range(cfgidx); let gens = self.gens.slice_mut(start, end); set_bit(gens, bit); } pub fn add_kill(&mut self, id: ast::NodeId, bit: uint) { //! 
Indicates that `id` kills `bit` debug!("{} add_kill(id={}, bit={})", self.analysis_name, id, bit); assert!(self.nodeid_to_index.contains_key(&id)); assert!(self.bits_per_id > 0); let cfgidx = to_cfgidx_or_die(id, &self.nodeid_to_index); let (start, end) = self.compute_id_range(cfgidx); let kills = self.kills.slice_mut(start, end); set_bit(kills, bit); } fn apply_gen_kill(&self, cfgidx: CFGIndex, bits: &mut [uint]) { //! Applies the gen and kill sets for `cfgidx` to `bits` debug!("{} apply_gen_kill(cfgidx={:?}, bits={}) [before]", self.analysis_name, cfgidx, mut_bits_to_string(bits)); assert!(self.bits_per_id > 0); let (start, end) = self.compute_id_range(cfgidx); let gens = self.gens.slice(start, end); bitwise(bits, gens, &Union); let kills = self.kills.slice(start, end); bitwise(bits, kills, &Subtract); debug!("{} apply_gen_kill(cfgidx={:?}, bits={}) [after]", self.analysis_name, cfgidx, mut_bits_to_string(bits)); } fn compute_id_range(&self, cfgidx: CFGIndex) -> (uint, uint) { let n = cfgidx.node_id(); let start = n * self.words_per_id; let end = start + self.words_per_id; assert!(start < self.gens.len()); assert!(end <= self.gens.len()); assert!(self.gens.len() == self.kills.len()); assert!(self.gens.len() == self.on_entry.len()); (start, end) } pub fn each_bit_on_entry<F>(&self, id: ast::NodeId, f: F) -> bool where F: FnMut(uint) -> bool, { //! Iterates through each bit that is set on entry to `id`. //! Only useful after `propagate()` has been called. if !self.has_bitset_for_nodeid(id) { return true; } let cfgidx = to_cfgidx_or_die(id, &self.nodeid_to_index); self.each_bit_for_node(Entry, cfgidx, f) } pub fn each_bit_for_node<F>(&self, e: EntryOrExit, cfgidx: CFGIndex, f: F) -> bool where F: FnMut(uint) -> bool, { //! Iterates through each bit that is set on entry/exit to `cfgidx`. //! Only useful after `propagate()` has been called. if self.bits_per_id == 0 { // Skip the surprisingly common degenerate case. 
(Note // compute_id_range requires self.words_per_id > 0.) return true; } let (start, end) = self.compute_id_range(cfgidx); let on_entry = self.on_entry.slice(start, end); let temp_bits; let slice = match e { Entry => on_entry, Exit => { let mut t = on_entry.to_vec(); self.apply_gen_kill(cfgidx, t.as_mut_slice()); temp_bits = t; &temp_bits[] } }; debug!("{} each_bit_for_node({:?}, cfgidx={:?}) bits={}", self.analysis_name, e, cfgidx, bits_to_string(slice)); self.each_bit(slice, f) } pub fn each_gen_bit<F>(&self, id: ast::NodeId, f: F) -> bool where F: FnMut(uint) -> bool, { //! Iterates through each bit in the gen set for `id`. if !self.has_bitset_for_nodeid(id) { return true; } if self.bits_per_id == 0 { // Skip the surprisingly common degenerate case. (Note // compute_id_range requires self.words_per_id > 0.) return true; } let cfgidx = to_cfgidx_or_die(id, &self.nodeid_to_index); let (start, end) = self.compute_id_range(cfgidx); let gens = self.gens.slice(start, end); debug!("{} each_gen_bit(id={}, gens={})", self.analysis_name, id, bits_to_string(gens)); self.each_bit(gens, f) } fn each_bit<F>(&self, words: &[uint], mut f: F) -> bool where F: FnMut(uint) -> bool, { //! Helper for iterating over the bits in a bit set. //! Returns false on the first call to `f` that returns false; //! if all calls to `f` return true, then returns true. for (word_index, &word) in words.iter().enumerate() { if word != 0 { let base_index = word_index * uint::BITS; for offset in range(0u, uint::BITS) { let bit = 1 << offset; if (word & bit) != 0 { // NB: we round up the total number of bits // that we store in any given bit set so that // it is an even multiple of uint::BITS. This // means that there may be some stray bits at // the end that do not correspond to any // actual value. So before we callback, check // whether the bit_index is greater than the // actual value the user specified and stop // iterating if so. 
let bit_index = base_index + offset; if bit_index >= self.bits_per_id { return true; } else if !f(bit_index) { return false; } } } } } return true; } pub fn add_kills_from_flow_exits(&mut self, cfg: &cfg::CFG) { //! Whenever you have a `break` or `continue` statement, flow //! exits through any number of enclosing scopes on its way to //! the new destination. This function infers the kill bits of //! those control operators based on the kill bits associated //! with those scopes. //! //! This is usually called (if it is called at all), after //! all add_gen and add_kill calls, but before propagate. debug!("{} add_kills_from_flow_exits", self.analysis_name); if self.bits_per_id == 0 { // Skip the surprisingly common degenerate case. (Note // compute_id_range requires self.words_per_id > 0.) return; } cfg.graph.each_edge(|_edge_index, edge| { let flow_exit = edge.source(); let (start, end) = self.compute_id_range(flow_exit); let mut orig_kills = self.kills.slice(start, end).to_vec(); let mut changed = false; for &node_id in edge.data.exiting_scopes.iter() { let opt_cfg_idx = self.nodeid_to_index.get(&node_id).map(|&i|i); match opt_cfg_idx { Some(cfg_idx) => { let (start, end) = self.compute_id_range(cfg_idx); let kills = self.kills.slice(start, end); if bitwise(orig_kills.as_mut_slice(), kills, &Union) { changed = true; } } None => { debug!("{} add_kills_from_flow_exits flow_exit={:?} \ no cfg_idx for exiting_scope={}", self.analysis_name, flow_exit, node_id); } } } if changed { let bits = self.kills.slice_mut(start, end); debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [before]", self.analysis_name, flow_exit, mut_bits_to_string(bits)); bits.clone_from_slice(&orig_kills[]); debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [after]", self.analysis_name, flow_exit, mut_bits_to_string(bits)); } true }); } } impl<'a, 'tcx, O:DataFlowOperator+Clone+'static> DataFlowContext<'a, 'tcx, O> { // ^^^^^^^^^^^^^ only needed for pretty printing pub fn 
propagate(&mut self, cfg: &cfg::CFG, blk: &ast::Block) { //! Performs the data flow analysis. if self.bits_per_id == 0 { // Optimize the surprisingly common degenerate case. return; } { let words_per_id = self.words_per_id; let mut propcx = PropagationContext { dfcx: &mut *self, changed: true }; let mut temp: Vec<_> = repeat(0u).take(words_per_id).collect(); while propcx.changed { propcx.changed = false; propcx.reset(temp.as_mut_slice()); propcx.walk_cfg(cfg, temp.as_mut_slice()); } } debug!("Dataflow result for {}:", self.analysis_name); debug!("{}", { self.pretty_print_to(box io::stderr(), blk).unwrap(); "" }); } fn pretty_print_to(&self, wr: Box<io::Writer+'static>, blk: &ast::Block) -> io::IoResult<()> { let mut ps = pprust::rust_printer_annotated(wr, self); try!(ps.cbox(pprust::indent_unit)); try!(ps.ibox(0u)); try!(ps.print_block(blk)); pp::eof(&mut ps.s) } } impl<'a, 'b, 'tcx, O:DataFlowOperator> PropagationContext<'a, 'b, 'tcx, O> { fn walk_cfg(&mut self, cfg: &cfg::CFG, in_out: &mut [uint]) { debug!("DataFlowContext::walk_cfg(in_out={}) {}", bits_to_string(in_out), self.dfcx.analysis_name); assert!(self.dfcx.bits_per_id > 0); <|fim▁hole|> debug!("DataFlowContext::walk_cfg idx={:?} id={} begin in_out={}", node_index, node.data.id, bits_to_string(in_out)); let (start, end) = self.dfcx.compute_id_range(node_index); // Initialize local bitvector with state on-entry. in_out.clone_from_slice(self.dfcx.on_entry.slice(start, end)); // Compute state on-exit by applying transfer function to // state on-entry. self.dfcx.apply_gen_kill(node_index, in_out); // Propagate state on-exit from node into its successors. 
self.propagate_bits_into_graph_successors_of(in_out, cfg, node_index); true // continue to next node }); } fn reset(&mut self, bits: &mut [uint]) { let e = if self.dfcx.oper.initial_value() {uint::MAX} else {0}; for b in bits.iter_mut() { *b = e; } } fn propagate_bits_into_graph_successors_of(&mut self, pred_bits: &[uint], cfg: &cfg::CFG, cfgidx: CFGIndex) { cfg.graph.each_outgoing_edge(cfgidx, |_e_idx, edge| { self.propagate_bits_into_entry_set_for(pred_bits, edge); true }); } fn propagate_bits_into_entry_set_for(&mut self, pred_bits: &[uint], edge: &cfg::CFGEdge) { let source = edge.source(); let cfgidx = edge.target(); debug!("{} propagate_bits_into_entry_set_for(pred_bits={}, {:?} to {:?})", self.dfcx.analysis_name, bits_to_string(pred_bits), source, cfgidx); assert!(self.dfcx.bits_per_id > 0); let (start, end) = self.dfcx.compute_id_range(cfgidx); let changed = { // (scoping mutable borrow of self.dfcx.on_entry) let on_entry = self.dfcx.on_entry.slice_mut(start, end); bitwise(on_entry, pred_bits, &self.dfcx.oper) }; if changed { debug!("{} changed entry set for {:?} to {}", self.dfcx.analysis_name, cfgidx, bits_to_string(self.dfcx.on_entry.slice(start, end))); self.changed = true; } } } fn mut_bits_to_string(words: &mut [uint]) -> String { bits_to_string(words) } fn bits_to_string(words: &[uint]) -> String { let mut result = String::new(); let mut sep = '['; // Note: this is a little endian printout of bytes. 
for &word in words.iter() { let mut v = word; for _ in range(0u, uint::BYTES) { result.push(sep); result.push_str(&format!("{:02x}", v & 0xFF)[]); v >>= 8; sep = '-'; } } result.push(']'); return result } #[inline] fn bitwise<Op:BitwiseOperator>(out_vec: &mut [uint], in_vec: &[uint], op: &Op) -> bool { assert_eq!(out_vec.len(), in_vec.len()); let mut changed = false; for (out_elt, in_elt) in out_vec.iter_mut().zip(in_vec.iter()) { let old_val = *out_elt; let new_val = op.join(old_val, *in_elt); *out_elt = new_val; changed |= old_val != new_val; } changed } fn set_bit(words: &mut [uint], bit: uint) -> bool { debug!("set_bit: words={} bit={}", mut_bits_to_string(words), bit_str(bit)); let word = bit / uint::BITS; let bit_in_word = bit % uint::BITS; let bit_mask = 1 << bit_in_word; debug!("word={} bit_in_word={} bit_mask={}", word, bit_in_word, word); let oldv = words[word]; let newv = oldv | bit_mask; words[word] = newv; oldv != newv } fn bit_str(bit: uint) -> String { let byte = bit >> 8; let lobits = 1u << (bit & 0xFF); format!("[{}:{}-{:02x}]", bit, byte, lobits) } struct Union; impl BitwiseOperator for Union { fn join(&self, a: uint, b: uint) -> uint { a | b } } struct Subtract; impl BitwiseOperator for Subtract { fn join(&self, a: uint, b: uint) -> uint { a & !b } }<|fim▁end|>
cfg.graph.each_node(|node_index, node| {
<|file_name|>ts_plugin_spec.ts<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google LLC All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {LanguageService} from '../../language_service'; import {getExternalFiles} from '../../ts_plugin'; import {APP_COMPONENT, setup} from './mock_host'; describe('getExternalFiles()', () => { it('should return all typecheck files', () => { const {project, tsLS} = setup(); let externalFiles = getExternalFiles(project); // Initially there are no external files because Ivy compiler hasn't done // a global analysis expect(externalFiles).toEqual([]); // Trigger global analysis const ngLS = new LanguageService(project, tsLS, {}); ngLS.getSemanticDiagnostics(APP_COMPONENT); // Now that global analysis is run, we should have all the typecheck files<|fim▁hole|> expect(externalFiles.length).toBe(1); expect(externalFiles[0].endsWith('app.component.ngtypecheck.ts')).toBeTrue(); }); });<|fim▁end|>
externalFiles = getExternalFiles(project);
<|file_name|>app.go<|end_file_name|><|fim▁begin|>package main import ( "strings" "strconv" "time" "bufio" "regexp" "fmt" "os" ) var rules = map[string]map[string]*regexp.Regexp{ "food": map[string]*regexp.Regexp{ "ALBERT HEIJN": regexp.MustCompile(`ALBERT HEIJN`), "Jumbo": regexp.MustCompile(`Jumbo[^a-zA-Z0-9-]`), }, "hema/blokker/other": map[string]*regexp.Regexp{ "HEMA": regexp.MustCompile(`HEMA`), "BEDWORLD": regexp.MustCompile(`BEDWORLD`), }, "electronics": map[string]*regexp.Regexp{ "Apple": regexp.MustCompile(`APPLE STORE`), "Coolblue": regexp.MustCompile(`Coolblue`), "Wiggle": regexp.MustCompile(`Wiggle`), }, "clothes": map[string]*regexp.Regexp{ "Decathlon": regexp.MustCompile(`Decathlon`), "FRONT RUNNER": regexp.MustCompile(`FRONT RUNNER`), }, "medicine": map[string]*regexp.Regexp{ "APOTHEEK": regexp.MustCompile(`APOTHEEK`), }, "deposits": map[string]*regexp.Regexp{ "Wallet": regexp.MustCompile(`IBAN\/NL14ABNA0620233052`), }, "rent": map[string]*regexp.Regexp{ "Dijwater 225": regexp.MustCompile(`IBAN\/NL18ABNA0459968513`), }, "g/w/e/i": map[string]*regexp.Regexp{ "Vodafone": regexp.MustCompile(`CSID\/NL39ZZZ302317620000`), "Energiedirect": regexp.MustCompile(`CSID\/NL71ABNA0629639183`), }, "abonements": map[string]*regexp.Regexp{ "Sport natural": regexp.MustCompile(`SPORT NATURAL`), // IBAN\/NL07ABNA0584314078 }, "insurance": map[string]*regexp.Regexp{ "Dijwater 225": regexp.MustCompile(`CSID\/NL94MNZ505448100000`), }, "salary": map[string]*regexp.Regexp{ "Textkernel": regexp.MustCompile(`IBAN\/NL27INGB0673657841`), }, } var displayOrder = []string{ "food", "rent", "g/w/e/i", "insurance", "abonements", "hema/blokker/other", "medicine", "electronics", "clothes", "deposits", "other", "salary", "unknown_income", "income", "rest", } // var reClarify = regexp.MustCompile(`^\/[^\/]+/[^\/]+/[^\/]+/[^\/]+/[^\/]+/[^\/]+/[^\/]+/([^\/]+)/[^\/]+/[^\/]+/[^\/]+/[^\/]+$`) // var reClarify2 = regexp.MustCompile(`^[^ ]+\s+[^ ]+\s+\d\d\.\d\d\.\d\d\/\d\d\.\d\d\s+(.*),[A-Z0-9 
]+$`) // var reClarify3 = regexp.MustCompile(`^[^ ]+\s+\d\d-\d\d-\d\d \d\d:\d\d\s+[^ ]+\s+(.*),[A-Z0-9 ]+$`) type Payment struct { accountId int64 currency string date time.Time balanceBefore float64 balanceAfter float64 anotherDate time.Time amount float64 description string } func check(e error) { if e != nil { panic(e) } } // func clarifyName(name string) (string, error) { // matches := reClarify.FindStringSubmatch(name) // if len(matches) > 1 { // return matches[1], nil // } // // matches = reClarify2.FindStringSubmatch(name) // if len(matches) > 1 { // return matches[1], nil // } // // matches = reClarify3.FindStringSubmatch(name) // if len(matches) > 1 { // return matches[1], nil // } // // return ``, fmt.Errorf(`Not parsable: %s`, name) // } func main() { file, err := os.Open("input.TAB") check(err) defer file.Close() scanner := bufio.NewScanner(file) re := regexp.MustCompile(`^(\d+)\t([A-Za-z]+)\t(\d+)\t(\d+,\d+)\t(\d+,\d+)\t(\d+)\t([-]?\d+,\d+)\t(.*?)$`); payments := make([]*Payment, 0, 5) for scanner.Scan() { txt := scanner.Text() matched := re.FindAllStringSubmatch(txt, 1) p := &Payment{}; p.accountId, err = strconv.ParseInt(matched[0][1], 10, 64) check(err)<|fim▁hole|> p.date, err = time.Parse("20060102", matched[0][3]) check(err) p.balanceBefore, err = strconv.ParseFloat(strings.Replace(matched[0][4], ",", ".", 1), 64) check(err) p.balanceAfter, err = strconv.ParseFloat(strings.Replace(matched[0][5], ",", ".", 1), 64) check(err) p.anotherDate, err = time.Parse("20060102", matched[0][6]) check(err) p.amount, err = strconv.ParseFloat(strings.Replace(matched[0][7], ",", ".", 1), 64) check(err) p.description = matched[0][8] payments = append(payments, p) } check(scanner.Err()) results := map[string]float64{ `supermarkets`: 0, `other`: 0, `income`: 0, `unknown_income`: 0, `rest`: 0, } var firstPaymentDate time.Time var lastPaymentDate time.Time i := 0 paymentsLen := len(payments) for _, p := range payments { switch i { case 0: firstPaymentDate = p.date case 
paymentsLen - 1: lastPaymentDate = p.date } other := true for rulesName, rulesList := range rules { for _, re := range rulesList { if re.MatchString(p.description) { if _, ok := results[rulesName]; ok { results[rulesName] += p.amount } else { results[rulesName] = p.amount } other = false break } } } if other && p.amount <= 0 { results[`other`] += p.amount } if other && p.amount > 0 { results[`unknown_income`] += p.amount } if p.amount > 0 { results[`income`] += p.amount } results[`rest`] += p.amount i += 1 } fmt.Printf("Stats between: %s and %s\n", firstPaymentDate.Format(`2006-01-02`), lastPaymentDate.Format(`2006-01-02`)) fmt.Println("-----------------------------------------------") for _, rulesName := range displayOrder { fmt.Printf("%s: %.2f\n", rulesName, results[rulesName]) } }<|fim▁end|>
p.currency = matched[0][2]
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin # Register your models here. from .models import Author, Genre, Book, BookInstance, Language """ # Minimal registration of Models. admin.site.register(Book)<|fim▁hole|>admin.site.register(BookInstance) admin.site.register(Genre) admin.site.register(Language) """ admin.site.register(Genre) admin.site.register(Language) class BooksInline(admin.TabularInline): """ Defines format of inline book insertion (used in AuthorAdmin) """ model = Book @admin.register(Author) class AuthorAdmin(admin.ModelAdmin): """ Administration object for Author models. Defines: - fields to be displayed in list view (list_display) - orders fields in detail view (fields), grouping the date fields horizontally - adds inline addition of books in author view (inlines) """ list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death') fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')] inlines = [BooksInline] class BooksInstanceInline(admin.TabularInline): """ Defines format of inline book instance insertion (used in BookAdmin) """ model = BookInstance class BookAdmin(admin.ModelAdmin): """ Administration object for Book models. Defines: - fields to be displayed in list view (list_display) - adds inline addition of book instances in book view (inlines) """ list_display = ('title', 'author', 'display_genre') inlines = [BooksInstanceInline] admin.site.register(Book, BookAdmin) @admin.register(BookInstance) class BookInstanceAdmin(admin.ModelAdmin): """ Administration object for BookInstance models. 
Defines: - fields to be displayed in list view (list_display) - filters that will be displayed in sidebar (list_filter) - grouping of fields into sections (fieldsets) """ list_display = ('book', 'status', 'borrower','due_back', 'id') list_filter = ('status', 'due_back') fieldsets = ( (None, { 'fields': ('book','imprint', 'id') }), ('Availability', { 'fields': ('status', 'due_back','borrower') }), )<|fim▁end|>
admin.site.register(Author)
<|file_name|>trimEmptyImports.ts<|end_file_name|><|fim▁begin|>import { ModuleDeclarationDependency } from '../../Chunk'; export default function trimEmptyImports(dependencies: ModuleDeclarationDependency[]) { let i = dependencies.length; while (i--) { const dependency = dependencies[i]; if (dependency.exportsDefault || dependency.exportsNames) { return dependencies.slice(0, i + 1); }<|fim▁hole|> } return []; }<|fim▁end|>
<|file_name|>Matreshka.Array#renderIfPossible.jsdoc.js<|end_file_name|><|fim▁begin|>/** @member {boolean} Seemple.Array#renderIfPossible @importance 3<|fim▁hole|>@example class MyArray extends Seemple.Array { get renderIfPossible() { return false; } // ... }); */<|fim▁end|>
@summary The ``renderIfPossible`` property cancels the array rendering if it equals ``false`` @see {@link Seemple.Array#itemRenderer}
<|file_name|>multicol.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! CSS Multi-column layout http://dev.w3.org/csswg/css-multicol/ #![deny(unsafe_code)] use block::BlockFlow; use context::LayoutContext; use floats::FloatKind; use flow::{FlowClass, Flow, OpaqueFlow}; use fragment::{Fragment, FragmentBorderBoxIterator}; use euclid::{Point2D, Rect}; use std::fmt; use std::sync::Arc; use style::properties::ComputedValues; use util::geometry::Au; use util::logical_geometry::LogicalSize; pub struct MulticolFlow { pub block_flow: BlockFlow, } impl MulticolFlow { pub fn from_fragment(fragment: Fragment, float_kind: Option<FloatKind>) -> MulticolFlow { MulticolFlow { block_flow: BlockFlow::from_fragment(fragment, float_kind) } } } impl Flow for MulticolFlow { fn class(&self) -> FlowClass { FlowClass::Multicol } fn as_mut_multicol(&mut self) -> &mut MulticolFlow { self } fn as_mut_block(&mut self) -> &mut BlockFlow { &mut self.block_flow } fn as_block(&self) -> &BlockFlow { &self.block_flow } fn bubble_inline_sizes(&mut self) { // FIXME(SimonSapin) http://dev.w3.org/csswg/css-sizing/#multicol-intrinsic self.block_flow.bubble_inline_sizes(); } fn assign_inline_sizes(&mut self, ctx: &LayoutContext) { debug!("assign_inline_sizes({}): assigning inline_size for flow", "multicol"); self.block_flow.assign_inline_sizes(ctx); } fn assign_block_size<'a>(&mut self, ctx: &'a LayoutContext<'a>) { debug!("assign_block_size: assigning block_size for multicol"); self.block_flow.assign_block_size(ctx); } fn compute_absolute_position(&mut self, layout_context: &LayoutContext) { self.block_flow.compute_absolute_position(layout_context)<|fim▁hole|> fn update_late_computed_inline_position_if_necessary(&mut self, inline_position: Au) { 
self.block_flow.update_late_computed_inline_position_if_necessary(inline_position) } fn update_late_computed_block_position_if_necessary(&mut self, block_position: Au) { self.block_flow.update_late_computed_block_position_if_necessary(block_position) } fn build_display_list(&mut self, layout_context: &LayoutContext) { debug!("build_display_list_multicol: same process as block flow"); self.block_flow.build_display_list(layout_context) } fn repair_style(&mut self, new_style: &Arc<ComputedValues>) { self.block_flow.repair_style(new_style) } fn compute_overflow(&self) -> Rect<Au> { self.block_flow.compute_overflow() } fn generated_containing_block_size(&self, flow: OpaqueFlow) -> LogicalSize<Au> { self.block_flow.generated_containing_block_size(flow) } fn iterate_through_fragment_border_boxes(&self, iterator: &mut FragmentBorderBoxIterator, level: i32, stacking_context_position: &Point2D<Au>) { self.block_flow.iterate_through_fragment_border_boxes(iterator, level, stacking_context_position) } fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) { self.block_flow.mutate_fragments(mutator) } } impl fmt::Debug for MulticolFlow { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "MulticolFlow: {:?}", self.block_flow) } }<|fim▁end|>
}
<|file_name|>animation.cpp<|end_file_name|><|fim▁begin|>#include "prism/animation.h" #include <stdlib.h> #include "prism/log.h" #include "prism/datastructures.h" #include "prism/memoryhandler.h" #include "prism/system.h" #include "prism/timer.h" #include "prism/stlutil.h" using namespace std; static struct { int mIsPaused; } gPrismAnimationData; int getDurationInFrames(Duration tDuration){ return (int)(tDuration * getInverseFramerateFactor()); } int handleDurationAndCheckIfOver(Duration* tNow, Duration tDuration) { if(gPrismAnimationData.mIsPaused) return 0; (*tNow)++; return isDurationOver(*tNow, tDuration); } int isDurationOver(Duration tNow, Duration tDuration) { if (tNow >= getDurationInFrames(tDuration)) { return 1; } return 0; } int handleTickDurationAndCheckIfOver(Tick * tNow, Tick tDuration) { if (gPrismAnimationData.mIsPaused) return 0; (*tNow)++; return isTickDurationOver(*tNow, tDuration); } int isTickDurationOver(Tick tNow, Tick tDuration) { if (tNow >= tDuration) { return 1; } return 0; } AnimationResult animateWithoutLoop(Animation* tAnimation) { AnimationResult ret = ANIMATION_CONTINUING; if (handleDurationAndCheckIfOver(&tAnimation->mNow, tAnimation->mDuration)) { tAnimation->mNow = 0; tAnimation->mFrame++; if (tAnimation->mFrame >= tAnimation->mFrameAmount) { tAnimation->mFrame = tAnimation->mFrameAmount-1; tAnimation->mNow = getDurationInFrames(tAnimation->mDuration); ret = ANIMATION_OVER; } } return ret; } void animate(Animation* tAnimation) { AnimationResult ret = animateWithoutLoop(tAnimation); if(ret == ANIMATION_OVER){ resetAnimation(tAnimation); } } void resetAnimation(Animation* tAnimation) { tAnimation->mNow = 0; tAnimation->mFrame = 0; } Animation createAnimation(int tFrameAmount, Duration tDuration) { Animation ret = createEmptyAnimation(); ret.mFrameAmount = tFrameAmount; ret.mDuration = tDuration; return ret; } Animation createEmptyAnimation(){ Animation ret; ret.mFrame = 0; ret.mFrameAmount = 0; ret.mNow = 0; ret.mDuration = 
1000000000; return ret; } Animation createOneFrameAnimation(){ Animation ret = createEmptyAnimation(); ret.mFrameAmount = 1; return ret; } void pauseDurationHandling() { gPrismAnimationData.mIsPaused = 1; } void resumeDurationHandling() { gPrismAnimationData.mIsPaused = 0; } double getDurationPercentage(Duration tNow, Duration tDuration) { int duration = getDurationInFrames(tDuration); return tNow / (double)duration; } static struct{ map<int, AnimationHandlerElement> mList; int mIsLoaded; } gAnimationHandler; void setupAnimationHandler(){ if(gAnimationHandler.mIsLoaded){ logWarning("Setting up non-empty animation handler; Cleaning up."); shutdownAnimationHandler(); } gAnimationHandler.mList.clear(); gAnimationHandler.mIsLoaded = 1; } static int updateAndRemoveCB(void* tCaller, AnimationHandlerElement& tData) { (void) tCaller; AnimationHandlerElement* cur = &tData; AnimationResult res = animateWithoutLoop(&cur->mAnimation); if(res == ANIMATION_OVER) { if(cur->mCB != NULL) { cur->mCB(cur->mCaller); } if(cur->mIsLooped) { resetAnimation(&cur->mAnimation); } else { return 1; } } return 0; } void updateAnimationHandler(){ stl_int_map_remove_predicate(gAnimationHandler.mList, updateAndRemoveCB); } static Position getAnimationPositionWithAllReferencesIncluded(AnimationHandlerElement* cur) { Position p = cur->mPosition; if (cur->mScreenPositionReference != NULL) { p = vecAdd(p, vecScale(*cur->mScreenPositionReference, -1)); } if (cur->mBasePositionReference != NULL) { p = vecAdd(p, *(cur->mBasePositionReference)); } return p; } static void drawAnimationHandlerCB(void* tCaller, AnimationHandlerElement& tData) { (void) tCaller; AnimationHandlerElement* cur = &tData; if (!cur->mIsVisible) return; int frame = cur->mAnimation.mFrame; Position p = getAnimationPositionWithAllReferencesIncluded(cur); if (cur->mIsRotated) { Position rPosition = cur->mRotationEffectCenter; rPosition = vecAdd(rPosition, p); setDrawingRotationZ(cur->mRotationZ, rPosition); } if(cur->mIsScaled) { 
Position sPosition = cur->mScaleEffectCenter; sPosition = vecAdd(sPosition, p); scaleDrawing3D(cur->mScale, sPosition); } if (cur->mHasBaseColor) { setDrawingBaseColorAdvanced(cur->mBaseColor.x, cur->mBaseColor.y, cur->mBaseColor.z); } if (cur->mHasTransparency) { setDrawingTransparency(cur->mTransparency); } Rectangle texturePos = cur->mTexturePosition; if(cur->mInversionState.x) { Position center = vecAdd(cur->mCenter, p); double deltaX = center.x - p.x; double nRightX = center.x + deltaX; double nLeftX = nRightX - abs(cur->mTexturePosition.bottomRight.x - cur->mTexturePosition.topLeft.x); p.x = nLeftX; texturePos.topLeft.x = cur->mTexturePosition.bottomRight.x; texturePos.bottomRight.x = cur->mTexturePosition.topLeft.x; } if (cur->mInversionState.y) { Position center = vecAdd(cur->mCenter, p); double deltaY = center.y - p.y; double nDownY = center.y + deltaY; double nUpY = nDownY - abs(cur->mTexturePosition.bottomRight.y - cur->mTexturePosition.topLeft.y); p.y = nUpY; texturePos.topLeft.y = cur->mTexturePosition.bottomRight.y; texturePos.bottomRight.y = cur->mTexturePosition.topLeft.y; } drawSprite(cur->mTextureData[frame], p, texturePos); if(cur->mIsScaled || cur->mIsRotated || cur->mHasBaseColor || cur->mHasTransparency) { setDrawingParametersToIdentity(); } } void drawHandledAnimations() { stl_int_map_map(gAnimationHandler.mList, drawAnimationHandlerCB); } static void emptyAnimationHandler(){ gAnimationHandler.mList.clear(); } static AnimationHandlerElement* playAnimationInternal(const Position& tPosition, TextureData* tTextures, const Animation& tAnimation, const Rectangle& tTexturePosition, AnimationPlayerCB tOptionalCB, void* tCaller, int tIsLooped){ AnimationHandlerElement e; e.mCaller = tCaller; e.mCB = tOptionalCB; e.mIsLooped = tIsLooped; e.mPosition = tPosition; e.mTexturePosition = tTexturePosition; e.mTextureData = tTextures; e.mAnimation = tAnimation; e.mScreenPositionReference = NULL; e.mBasePositionReference = NULL; e.mIsScaled = 0; e.mIsRotated 
= 0; e.mHasBaseColor = 0; e.mHasTransparency = 0; e.mCenter = Vector3D(0,0,0); e.mInversionState = Vector3DI(0,0,0); e.mIsVisible = 1; int id = stl_int_map_push_back(gAnimationHandler.mList, e); auto& element = gAnimationHandler.mList[id]; element.mID = id; return &element; } AnimationHandlerElement* playAnimation(const Position& tPosition, TextureData* tTextures, const Animation& tAnimation, const Rectangle& tTexturePosition, AnimationPlayerCB tOptionalCB, void* tCaller){ return playAnimationInternal(tPosition, tTextures, tAnimation, tTexturePosition, tOptionalCB, tCaller, 0); } AnimationHandlerElement* playAnimationLoop(const Position& tPosition, TextureData* tTextures, const Animation& tAnimation, const Rectangle& tTexturePosition){ return playAnimationInternal(tPosition, tTextures, tAnimation, tTexturePosition, NULL, NULL, 1); } AnimationHandlerElement* playOneFrameAnimationLoop(const Position& tPosition, TextureData* tTextures) { Animation anim = createOneFrameAnimation(); Rectangle rect = makeRectangleFromTexture(tTextures[0]); return playAnimationLoop(tPosition, tTextures, anim, rect); } void changeAnimation(AnimationHandlerElement* e, TextureData* tTextures, const Animation& tAnimation, const Rectangle& tTexturePosition) { e->mTexturePosition = tTexturePosition; e->mTextureData = tTextures; e->mAnimation = tAnimation; } void setAnimationScreenPositionReference(AnimationHandlerElement* e, Position* tScreenPositionReference) { e->mScreenPositionReference = tScreenPositionReference; } void setAnimationBasePositionReference(AnimationHandlerElement* e, Position* tBasePositionReference) { e->mBasePositionReference = tBasePositionReference; } void setAnimationScale(AnimationHandlerElement* e, const Vector3D& tScale, const Position& tCenter) { e->mIsScaled = 1; e->mScaleEffectCenter = tCenter; e->mScale = tScale; } void setAnimationSize(AnimationHandlerElement* e, const Vector3D& tSize, const Position& tCenter) { e->mIsScaled = 1; e->mScaleEffectCenter = tCenter; 
double dx = tSize.x / e->mTextureData[0].mTextureSize.x; double dy = tSize.y / e->mTextureData[0].mTextureSize.y; e->mScale = Vector3D(dx, dy, 1); } <|fim▁hole|> e->mRotationEffectCenter = tCenter; e->mRotationZ = tAngle; } void setAnimationRotationZ(AnimationHandlerElement* e, double tAngle, const Position& tCenter) { setAnimationRotationZ_internal(e, tAngle, tCenter); } static void setAnimationColor_internal(AnimationHandlerElement* e, double r, double g, double b) { e->mHasBaseColor = 1; e->mBaseColor = Vector3D(r, g, b); } void setAnimationColor(AnimationHandlerElement* e, double r, double g, double b) { setAnimationColor_internal(e, r, g, b); } void setAnimationColorType(AnimationHandlerElement* e, Color tColor) { double r, g, b; getRGBFromColor(tColor, &r, &g, &b); setAnimationColor(e, r, g, b); } void setAnimationTransparency(AnimationHandlerElement* e, double a) { e->mHasTransparency = 1; e->mTransparency = a; } void setAnimationVisibility(AnimationHandlerElement* e, int tIsVisible) { e->mIsVisible = tIsVisible; } void setAnimationCenter(AnimationHandlerElement* e, const Position& tCenter) { e->mCenter = tCenter; } void setAnimationCB(AnimationHandlerElement* e, AnimationPlayerCB tCB, void* tCaller) { e->mCB = tCB; e->mCaller = tCaller; } void setAnimationPosition(AnimationHandlerElement* e, const Position& tPosition) { e->mPosition = tPosition; } void setAnimationTexturePosition(AnimationHandlerElement* e, const Rectangle& tTexturePosition) { e->mTexturePosition = tTexturePosition; } void setAnimationLoop(AnimationHandlerElement* e, int tIsLooping) { e->mIsLooped = tIsLooping; } void removeAnimationCB(AnimationHandlerElement* e) { setAnimationCB(e, NULL, NULL); } typedef struct { AnimationHandlerElement* mElement; Vector3D mColor; Duration mDuration; } AnimationColorIncrease; static void increaseAnimationColor(void* tCaller) { AnimationColorIncrease* e = (AnimationColorIncrease*)tCaller; e->mColor = vecAdd(e->mColor, Vector3D(1.0 / e->mDuration, 1.0 / 
e->mDuration, 1.0 / e->mDuration)); if (e->mColor.x >= 1) e->mColor = Vector3D(1, 1, 1); setAnimationColor(e->mElement, e->mColor.x, e->mColor.y, e->mColor.z); if (e->mColor.x >= 1) { freeMemory(e); } else addTimerCB(0,increaseAnimationColor, e); } void fadeInAnimation(AnimationHandlerElement* tElement, Duration tDuration) { AnimationColorIncrease* e = (AnimationColorIncrease*)allocMemory(sizeof(AnimationColorIncrease)); e->mElement = tElement; e->mColor = Vector3D(0, 0, 0); e->mDuration = tDuration; addTimerCB(0, increaseAnimationColor, e); setAnimationColor(tElement, e->mColor.x, e->mColor.y, e->mColor.z); } void inverseAnimationVertical(AnimationHandlerElement* e) { e->mInversionState.x ^= 1; } void inverseAnimationHorizontal(AnimationHandlerElement* e) { e->mInversionState.y ^= 1; } void setAnimationVerticalInversion(AnimationHandlerElement* e, int tValue) { e->mInversionState.x = tValue; } void setAnimationHorizontalInversion(AnimationHandlerElement* e, int tValue) { e->mInversionState.y = tValue; } typedef struct { double mAngle; Vector3D mCenter; } ScreenRotationZ; static void setScreenRotationZForSingleAnimation(ScreenRotationZ* tRot, AnimationHandlerElement& tData) { AnimationHandlerElement* e = &tData; const auto p = getAnimationPositionWithAllReferencesIncluded(e); const auto center = vecSub(tRot->mCenter, p); setAnimationRotationZ_internal(e, tRot->mAngle, center); } void setAnimationHandlerScreenRotationZ(double tAngle, const Vector3D& tCenter) { ScreenRotationZ rot; rot.mAngle = tAngle; rot.mCenter = tCenter; stl_int_map_map(gAnimationHandler.mList, setScreenRotationZForSingleAnimation, &rot); } typedef struct { double r; double g; double b; } AnimationHandlerScreenTint; static void setAnimationHandlerScreenTintSingle(AnimationHandlerScreenTint* tTint, AnimationHandlerElement& tData) { AnimationHandlerElement* e = &tData; setAnimationColor_internal(e, tTint->r, tTint->g, tTint->b); } void setAnimationHandlerScreenTint(double r, double g, double b) { 
AnimationHandlerScreenTint tint; tint.r = r; tint.g = g; tint.b = b; stl_int_map_map(gAnimationHandler.mList, setAnimationHandlerScreenTintSingle, &tint); } void resetAnimationHandlerScreenTint() { setAnimationHandlerScreenTint(1, 1, 1); } double* getAnimationTransparencyReference(AnimationHandlerElement* e) { return &e->mTransparency; } Position* getAnimationPositionReference(AnimationHandlerElement* e) { return &e->mPosition; } void removeHandledAnimation(AnimationHandlerElement* e) { gAnimationHandler.mList.erase(e->mID); } int isHandledAnimation(AnimationHandlerElement* e) { return stl_map_contains(gAnimationHandler.mList, e->mID); } void shutdownAnimationHandler(){ emptyAnimationHandler(); gAnimationHandler.mList.clear(); gAnimationHandler.mIsLoaded = 0; }<|fim▁end|>
static void setAnimationRotationZ_internal(AnimationHandlerElement* e, double tAngle, const Vector3D& tCenter) { e->mIsRotated = 1;
<|file_name|>RegistrationHandler.py<|end_file_name|><|fim▁begin|>from piston.handler import BaseHandler from piston.utils import rc from telemaco.models import User from django.db.utils import IntegrityError class RegistrationHandler(BaseHandler): class _UserProxy(User): pass allowed_methods = ('POST') model = _UserProxy def create(self, request): try: # Sing-up print "Create user:", request.data <|fim▁hole|> obj = User(username=request.data['username']) obj.set_password(request.data['password']) obj.save() return rc.CREATED except IntegrityError, e: print e return rc.DUPLICATE_ENTRY except Exception, e: print e return rc.BAD_REQUEST<|fim▁end|>
<|file_name|>graphite.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- """ Resource information of graphite """ def get_name(friendly=False): """Get name of this resource :return: name of this resource :rtype: str """ if friendly: # pragma: no cover return 'Graphite connection' return 'graphite' def get_doc(): # pragma: no cover """Get documentation of this resource :return: rst string :rtype: str """ return """ The ``graphite`` model contains information to provide the monitored system performance data to Carbon/Graphite. The Alignak backend will use those information to connect to a Carbon daemon and send the timeseries data. If you are using a StatsD daemon as a front-end of the Carbon daemon create a relation with a StatsD data model instance. To make the Alignak backend create some Grafana panels for the metrics sent to Graphite create a relation with a Grafana data model instance. """ def get_schema(): """Schema structure of this resource :return: schema dictionary :rtype: dict """ return { 'schema': { 'schema_version': { 'type': 'integer', 'default': 2, }, 'name': { 'schema_version': 1, 'title': 'Graphite connection name', 'comment': 'Unique Graphite connection name', 'type': 'string', 'required': True, 'empty': False, 'unique': True, }, 'carbon_address': { 'schema_version': 1, 'title': 'Carbon daemon address', 'comment': '', 'type': 'string', 'required': True, 'empty': False, }, 'carbon_port': { 'schema_version': 1, 'title': 'Carbon daemon port', 'comment': '', 'type': 'integer', 'empty': False, 'default': 2004 }, 'graphite_address': { 'schema_version': 1, 'title': 'Graphite address', 'comment': '', 'type': 'string', 'required': True, 'empty': False, }, 'graphite_port': { 'schema_version': 1, 'title': 'Graphite port', 'comment': '', 'type': 'integer', 'empty': False, 'default': 8080 }, 'prefix': { 'schema_version': 1, 'title': 'Metrics prefix', 'comment': 'Prefix that will be prepended to the metrics sent to this TS DB.', 
'type': 'string', 'default': '', }, 'realms_prefix': { 'schema_version': 2, "title": "Realms prefix", "comment": "Include the realms prefix for the metrics sent to this TS DB.", 'type': 'boolean', 'default': True }, 'grafana': { 'schema_version': 1, 'title': 'Grafana relation', 'comment': 'If set, the Alignak backend will use this Grafana relation for ' 'the metrics sent to the Influx DB. It will create/update the ' 'Grafana panels accordindgly.', 'type': 'objectid', 'data_relation': { 'resource': 'grafana', 'embeddable': True },<|fim▁hole|> 'default': None }, 'statsd': { 'schema_version': 1, 'title': 'StatsD relation', 'comment': 'If set, the Alignak backend will use this StatsD relation for ' 'the metrics sent to the Influx DB.', 'type': 'objectid', 'data_relation': { 'resource': 'statsd', 'embeddable': True }, 'nullable': True, 'default': None }, # Realm '_realm': { 'schema_version': 1, 'title': 'Realm', 'comment': 'Realm this element belongs to.', 'type': 'objectid', 'data_relation': { 'resource': 'realm', 'embeddable': True }, 'required': True, }, '_sub_realm': { 'schema_version': 1, 'title': 'Sub-realms', 'comment': 'Is this element visible in the sub-realms of its realm?', 'type': 'boolean', 'default': True }, # Users CRUD permissions '_users_read': { 'schema_version': 1, 'type': 'list', 'schema': { 'type': 'objectid', 'data_relation': { 'resource': 'user', 'embeddable': True, } }, }, '_users_update': { 'schema_version': 1, 'type': 'list', 'schema': { 'type': 'objectid', 'data_relation': { 'resource': 'user', 'embeddable': True, } }, }, '_users_delete': { 'schema_version': 1, 'type': 'list', 'schema': { 'type': 'objectid', 'data_relation': { 'resource': 'user', 'embeddable': True, } }, }, }, 'schema_deleted': {} }<|fim▁end|>
'nullable': True,
<|file_name|>PrintSurfaceTypeArrayListener.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>energymodels.PrintSurfaceTypeArrayListener<|fim▁end|>
<|file_name|>tpl-py-0001.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python #################################### # # --- TEXTPATGEN TEMPLATE --- # # Users can change the output by editing # this file directly. # #################################### import sys<|fim▁hole|> sys.stdout.write('####################################\n') sys.stdout.write('#\n') sys.stdout.write('# -- TEXTPATGEN GENERATED FILE --\n') sys.stdout.write('#\n') sys.stdout.write('# -- Created from a Python script.\n') sys.stdout.write('#\n') sys.stdout.write("####################################\n") num=0 for length in range(0, 16): for width in range(0, 15): sys.stdout.write('X-%04X ' % num) num=num+1 width=width+1 length=length+1 sys.stdout.write('X-%04X\n' % num) num=num+1 sys.stdout.write('# -- End of file.\n'); sys.stdout.flush()<|fim▁end|>
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* * Copyright (c) Facebook, Inc. and its affiliates. *<|fim▁hole|> * LICENSE file in the root directory of this source tree. */ use fixture_tests::Fixture; use relay_transforms::{ transform_connections, transform_refetchable_fragment, ConnectionInterface, }; use graphql_test_helpers::apply_transform_for_test; pub fn transform_fixture(fixture: &Fixture<'_>) -> Result<String, String> { apply_transform_for_test(fixture, |program| { let program = transform_connections(&program, &ConnectionInterface::default()); let base_fragments = Default::default(); transform_refetchable_fragment(&program, &base_fragments, false) }) }<|fim▁end|>
* This source code is licensed under the MIT license found in the
<|file_name|>gstr_1.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and Contributors // License: GNU General Public License v3. See license.txt frappe.query_reports["GSTR-1"] = { "filters": [ { "fieldname":"company", "label": __("Company"), "fieldtype": "Link", "options": "Company", "reqd": 1, "default": frappe.defaults.get_user_default("Company") }, { "fieldname":"from_date", "label": __("From Date"), "fieldtype": "Date", "reqd": 1, "default": frappe.datetime.add_months(frappe.datetime.get_today(), -3), "width": "80" },<|fim▁hole|> "label": __("To Date"), "fieldtype": "Date", "reqd": 1, "default": frappe.datetime.get_today() }, { "fieldname":"type_of_business", "label": __("Type of Business"), "fieldtype": "Select", "reqd": 1, "options": ["B2B", "B2C Large", "B2C Small"], "default": "B2B" } ] }<|fim▁end|>
{ "fieldname":"to_date",
<|file_name|>problem-048.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 Peter Beard // Distributed under the GNU GPL v2. For full terms, see the LICENSE file. // #![feature(test)] extern crate test; /// Compute n^p mod b fn modular_pow(n: u64, p: u64, b: u64) -> u64 { let mut pow = n % b; for _ in 1..p { pow = (pow * (n % b)) % b; } pow } pub fn solution() -> u64 { const BASE: u64 = 10000000000; let mut sum = 0; for n in 1..1001 { sum += modular_pow(n, n, BASE); } sum % BASE<|fim▁hole|>} fn main() { println!("The sum is {}", solution()); } #[cfg(test)] mod tests { use super::*; use test::Bencher; #[test] fn correct() { assert_eq!(9110846700, solution()); } #[bench] fn bench(b: &mut Bencher) { b.iter(|| solution()); } }<|fim▁end|>
<|file_name|>jsonSchema.ts<|end_file_name|><|fim▁begin|>/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ export type JSONSchemaType = 'string' | 'number' | 'integer' | 'boolean' | 'null' | 'array' | 'object'; export interface IJSONSchema { id?: string; $id?: string; $schema?: string; type?: JSONSchemaType | JSONSchemaType[]; title?: string; default?: any; definitions?: IJSONSchemaMap; description?: string; properties?: IJSONSchemaMap; patternProperties?: IJSONSchemaMap; additionalProperties?: boolean | IJSONSchema; minProperties?: number; maxProperties?: number; dependencies?: IJSONSchemaMap | { [prop: string]: string[] }; items?: IJSONSchema | IJSONSchema[]; minItems?: number; maxItems?: number; uniqueItems?: boolean; additionalItems?: boolean | IJSONSchema; pattern?: string; minLength?: number; maxLength?: number; minimum?: number; maximum?: number; exclusiveMinimum?: boolean | number; exclusiveMaximum?: boolean | number; multipleOf?: number; required?: string[]; $ref?: string; anyOf?: IJSONSchema[]; allOf?: IJSONSchema[]; oneOf?: IJSONSchema[]; not?: IJSONSchema; enum?: any[]; format?: string; // schema draft 06 const?: any; contains?: IJSONSchema; propertyNames?: IJSONSchema; // schema draft 07 $comment?: string; if?: IJSONSchema; then?: IJSONSchema; else?: IJSONSchema; // VSCode extensions defaultSnippets?: IJSONSchemaSnippet[]; // VSCode extension errorMessage?: string; // VSCode extension<|fim▁hole|> enumDescriptions?: string[]; // VSCode extension markdownEnumDescriptions?: string[]; // VSCode extension markdownDescription?: string; // VSCode extension doNotSuggest?: boolean; // VSCode extension allowComments?: boolean; // VSCode extension allowTrailingCommas?: 
boolean; // VSCode extension } export interface IJSONSchemaMap { [name: string]: IJSONSchema; } export interface IJSONSchemaSnippet { label?: string; description?: string; body?: any; // a object that will be JSON stringified bodyText?: string; // an already stringified JSON object that can contain new lines (\n) and tabs (\t) }<|fim▁end|>
patternErrorMessage?: string; // VSCode extension deprecationMessage?: string; // VSCode extension